+{% endblock %}
\ No newline at end of file
diff --git a/conductor/doc/source/_theme/theme.conf b/conductor/doc/source/_theme/theme.conf
new file mode 100644
index 00000000..1cc40044
--- /dev/null
+++ b/conductor/doc/source/_theme/theme.conf
@@ -0,0 +1,4 @@
+[theme]
+inherit = basic
+stylesheet = nature.css
+pygments_style = tango
diff --git a/windc/doc/source/conf.py b/conductor/doc/source/conf.py
similarity index 81%
rename from windc/doc/source/conf.py
rename to conductor/doc/source/conf.py
index cca6b7fc..e9b38f91 100644
--- a/windc/doc/source/conf.py
+++ b/conductor/doc/source/conf.py
@@ -1,5 +1,6 @@
+
# -*- coding: utf-8 -*-
-# Copyright (c) 2011 OpenStack, LLC.
+# Copyright (c) 2010 OpenStack Foundation.
#
# Licensed under the Apache License, Version 2.0 (the "License");
# you may not use this file except in compliance with the License.
@@ -15,10 +16,10 @@
# limitations under the License.
#
-# Skeleton documentation build configuration file, created by
-# sphinx-quickstart on Tue May 18 13:50:15 2010.
+# Conductor documentation build configuration file, created by
+# sphinx-quickstart on Tue February 28 13:50:15 2013.
#
-# This file is execfile()'d with the current directory set to it's containing
+# This file is execfile()'d with the current directory set to its containing
# dir.
#
# Note that not all possible configuration values are present in this
@@ -33,24 +34,20 @@ import sys
# If extensions (or modules to document with autodoc) are in another directory,
# add these directories to sys.path here. If the directory is relative to the
# documentation root, use os.path.abspath to make it absolute, like shown here.
-sys.path.append([os.path.abspath('../skeleton'),
- os.path.abspath('..'),
- os.path.abspath('../bin')
- ])
+sys.path = [os.path.abspath('../../conductor'),
+ os.path.abspath('../..'),
+ os.path.abspath('../../bin')
+ ] + sys.path
# -- General configuration ---------------------------------------------------
# Add any Sphinx extension module names here, as strings. They can be
# extensions coming with Sphinx (named 'sphinx.ext.*') or your custom ones.
-extensions = ['sphinx.ext.autodoc',
- 'sphinx.ext.coverage',
+extensions = ['sphinx.ext.coverage',
'sphinx.ext.ifconfig',
'sphinx.ext.intersphinx',
'sphinx.ext.pngmath',
- 'sphinx.ext.graphviz',
- 'sphinx.ext.todo']
-
-todo_include_todos = True
+ 'sphinx.ext.graphviz']
# Add any paths that contain templates here, relative to this directory.
templates_path = []
@@ -69,19 +66,19 @@ source_suffix = '.rst'
master_doc = 'index'
# General information about the project.
-project = u'Skeleton'
-copyright = u'2011-present, OpenStack, LLC.'
+project = u'Conductor'
+copyright = u'2013, Mirantis, Inc.'
# The version info for the project you're documenting, acts as replacement for
# |version| and |release|, also used in various other places throughout the
# built documents.
#
# The short X.Y version.
-from skeleton import version as skeleton_version
+from conductor.version import version_info as conductor_version
# The full version, including alpha/beta/rc tags.
-release = skeleton_version.version_string()
+release = conductor_version.version_string_with_vcs()
# The short X.Y version.
-version = skeleton_version.canonical_version_string()
+version = conductor_version.canonical_version_string()
# The language for content autogenerated by Sphinx. Refer to documentation
# for a list of supported languages.
@@ -98,7 +95,7 @@ version = skeleton_version.canonical_version_string()
# List of directories, relative to source directory, that shouldn't be searched
# for source files.
-exclude_trees = []
+exclude_trees = ['api']
# The reST default role (for this markup: `text`) to use for all documents.
#default_role = None
@@ -118,7 +115,7 @@ show_authors = True
pygments_style = 'sphinx'
# A list of ignored prefixes for module index sorting.
-modindex_common_prefix = ['skeleton.']
+modindex_common_prefix = ['portas.']
# -- Options for man page output --------------------------------------------
@@ -126,13 +123,9 @@ modindex_common_prefix = ['skeleton.']
# List of tuples 'sourcefile', 'target', u'title', u'Authors name', 'manual'
man_pages = [
- ('man/skeletonapi', 'skeleton-api', u'Skeleton API Server',
- [u'OpenStack'], 1),
- ('man/skeletonregistry', 'skeleton-registry', u'Skeleton Registry Server',
- [u'OpenStack'], 1),
- ('man/skeletonmanage', 'skeleton-manage', u'Skeleton Management Utility',
- [u'OpenStack'], 1)
- ]
+ ('man/conductor', 'conductor', u'Conductor Orchestrator',
+ [u'Mirantis, Inc.'], 1)
+]
# -- Options for HTML output -------------------------------------------------
@@ -174,6 +167,8 @@ html_static_path = ['_static']
# If not '', a 'Last updated on:' timestamp is inserted at every page bottom,
# using the given strftime format.
#html_last_updated_fmt = '%b %d, %Y'
+git_cmd = "git log --pretty=format:'%ad, commit %h' --date=local -n1"
+html_last_updated_fmt = os.popen(git_cmd).read()
# If true, SmartyPants will be used to convert quotes and dashes to
# typographically correct entities.
@@ -187,10 +182,10 @@ html_static_path = ['_static']
#html_additional_pages = {}
# If false, no module index is generated.
-#html_use_modindex = True
+html_use_modindex = False
# If false, no index is generated.
-#html_use_index = True
+html_use_index = False
# If true, the index is split into individual pages for each letter.
#html_split_index = False
@@ -207,7 +202,7 @@ html_static_path = ['_static']
#html_file_suffix = ''
# Output file base name for HTML help builder.
-htmlhelp_basename = 'skeletondoc'
+htmlhelp_basename = 'conductordoc'
# -- Options for LaTeX output ------------------------------------------------
@@ -222,8 +217,8 @@ htmlhelp_basename = 'skeletondoc'
# (source start file, target name, title, author,
# documentclass [howto/manual]).
latex_documents = [
- ('index', 'Skeleton.tex', u'Skeleton Documentation',
- u'Skeleton Team', 'manual'),
+ ('index', 'Conductor.tex', u'Conductor Documentation',
+ u'Keero Team', 'manual'),
]
# The name of an image file (relative to this directory) to place at the top of
@@ -244,9 +239,4 @@ latex_documents = [
#latex_use_modindex = True
# Example configuration for intersphinx: refer to the Python standard library.
-intersphinx_mapping = {'python': ('http://docs.python.org/', None),
- 'dashboard': ('http://dashboard.openstack.org', None),
- 'glance': ('http://glance.openstack.org', None),
- 'keystone': ('http://keystone.openstack.org', None),
- 'nova': ('http://nova.openstack.org', None),
- 'swift': ('http://swift.openstack.org', None)}
+intersphinx_mapping = {'python': ('http://docs.python.org/', None)}
diff --git a/windc/doc/source/index.rst b/conductor/doc/source/index.rst
similarity index 54%
rename from windc/doc/source/index.rst
rename to conductor/doc/source/index.rst
index a103a897..50ce1427 100644
--- a/windc/doc/source/index.rst
+++ b/conductor/doc/source/index.rst
@@ -1,5 +1,5 @@
..
- Copyright 2011 OpenStack, LLC.
+ Copyright 2013, Mirantis Inc.
All Rights Reserved.
Licensed under the Apache License, Version 2.0 (the "License"); you may
@@ -14,40 +14,7 @@
License for the specific language governing permissions and limitations
under the License.
-Welcome to Skeleton's documentation!
-===================================
+Welcome to Conductor's documentation!
+==================================
-Description of Skeleton project
-
-Concepts
-========
-
-.. toctree::
- :maxdepth: 1
-
-Using Skeleton
-==============
-
-.. toctree::
- :maxdepth: 1
-
- gettingstarted
- installing
-
-Developer Docs
-==============
-
-.. toctree::
- :maxdepth: 1
-
-Outstanding Documentation Tasks
-===============================
-
-.. todolist::
-
-Indices and tables
-==================
-
-* :ref:`genindex`
-* :ref:`modindex`
-* :ref:`search`
+We rule the world!
\ No newline at end of file
diff --git a/conductor/etc/app.config b/conductor/etc/app.config
deleted file mode 100644
index f69fe450..00000000
--- a/conductor/etc/app.config
+++ /dev/null
@@ -1,5 +0,0 @@
-[rabbitmq]
-host = localhost
-vhost = keero
-login = keero
-password = keero
\ No newline at end of file
diff --git a/dashboard/windcclient/common/__init__.py b/conductor/etc/conductor-paste.ini
similarity index 100%
rename from dashboard/windcclient/common/__init__.py
rename to conductor/etc/conductor-paste.ini
diff --git a/conductor/etc/conductor.conf b/conductor/etc/conductor.conf
new file mode 100644
index 00000000..60f3202f
--- /dev/null
+++ b/conductor/etc/conductor.conf
@@ -0,0 +1,14 @@
+[DEFAULT]
+log_file = logs/conductor.log
+debug=True
+verbose=True
+
+[heat]
+auth_url = http://172.18.124.101:5000/v2.0
+
+[rabbitmq]
+host = 172.18.124.101
+port = 5672
+virtual_host = keero
+login = keero
+password = keero
\ No newline at end of file
diff --git a/conductor/logs/.gitignore b/conductor/logs/.gitignore
new file mode 100644
index 00000000..44c5ea8f
--- /dev/null
+++ b/conductor/logs/.gitignore
@@ -0,0 +1,4 @@
+# Ignore everything in this directory
+*
+# Except this file
+!.gitignore
\ No newline at end of file
diff --git a/conductor/openstack-common.conf b/conductor/openstack-common.conf
new file mode 100644
index 00000000..04377376
--- /dev/null
+++ b/conductor/openstack-common.conf
@@ -0,0 +1,7 @@
+[DEFAULT]
+
+# The list of modules to copy from openstack-common
+modules=setup,wsgi,config,exception,gettextutils,importutils,jsonutils,log,xmlutils,sslutils,service,notifier,local,install_venv_common,version,timeutils,eventlet_backdoor,threadgroup,loopingcall,uuidutils
+
+# The base module to hold the copy of openstack.common
+base=conductor
\ No newline at end of file
diff --git a/conductor/run_tests.sh b/conductor/run_tests.sh
new file mode 100755
index 00000000..ba2c9e01
--- /dev/null
+++ b/conductor/run_tests.sh
@@ -0,0 +1,49 @@
+#!/bin/bash
+
+function usage {
+ echo "Usage: $0 [OPTION]..."
+ echo "Run python-portasclient's test suite(s)"
+ echo ""
+ echo " -p, --pep8 Just run pep8"
+ echo " -h, --help Print this usage message"
+ echo ""
+ echo "This script is deprecated and currently retained for compatibility."
+ echo 'You can run the full test suite for multiple environments by running "tox".'
+ echo 'You can run tests for only python 2.7 by running "tox -e py27", or run only'
+ echo 'the pep8 tests with "tox -e pep8".'
+ exit
+}
+
+command -v tox > /dev/null 2>&1
+if [ $? -ne 0 ]; then
+ echo 'This script requires "tox" to run.'
+ echo 'You can install it with "pip install tox".'
+ exit 1;
+fi
+
+just_pep8=0
+
+function process_option {
+ case "$1" in
+ -h|--help) usage;;
+ -p|--pep8) let just_pep8=1;;
+ esac
+}
+
+for arg in "$@"; do
+ process_option $arg
+done
+
+if [ $just_pep8 -eq 1 ]; then
+ tox -e pep8
+ exit
+fi
+
+tox -e py27 $toxargs 2>&1 | tee run_tests.err.log || exit
+if [ ${PIPESTATUS[0]} -ne 0 ]; then
+ exit ${PIPESTATUS[0]}
+fi
+
+if [ -z "$toxargs" ]; then
+ tox -e pep8
+fi
diff --git a/conductor/setup.cfg b/conductor/setup.cfg
new file mode 100644
index 00000000..6e6f6554
--- /dev/null
+++ b/conductor/setup.cfg
@@ -0,0 +1,33 @@
+[build_sphinx]
+all_files = 1
+build-dir = doc/build
+source-dir = doc/source
+
+[egg_info]
+tag_build =
+tag_date = 0
+tag_svn_revision = 0
+
+[compile_catalog]
+directory = conductor/locale
+domain = conductor
+
+[update_catalog]
+domain = conductor
+output_dir = conductor/locale
+input_file = conductor/locale/conductor.pot
+
+[extract_messages]
+keywords = _ gettext ngettext l_ lazy_gettext
+mapping_file = babel.cfg
+output_file = conductor/locale/conductor.pot
+
+[nosetests]
+# NOTE(jkoelker) To run the test suite under nose install the following
+# coverage http://pypi.python.org/pypi/coverage
+# tissue http://pypi.python.org/pypi/tissue (pep8 checker)
+# openstack-nose https://github.com/jkoelker/openstack-nose
+verbosity=2
+cover-package = conductor
+cover-html = true
+cover-erase = true
\ No newline at end of file
diff --git a/conductor/setup.py b/conductor/setup.py
new file mode 100644
index 00000000..fb9da8cd
--- /dev/null
+++ b/conductor/setup.py
@@ -0,0 +1,49 @@
+#!/usr/bin/python
+# Copyright (c) 2010 OpenStack, LLC.
+#
+# Licensed under the Apache License, Version 2.0 (the "License");
+# you may not use this file except in compliance with the License.
+# You may obtain a copy of the License at
+#
+# http://www.apache.org/licenses/LICENSE-2.0
+#
+# Unless required by applicable law or agreed to in writing, software
+# distributed under the License is distributed on an "AS IS" BASIS,
+# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or
+# implied.
+# See the License for the specific language governing permissions and
+# limitations under the License.
+
+import setuptools
+
+from conductor.openstack.common import setup
+
+requires = setup.parse_requirements()
+depend_links = setup.parse_dependency_links()
+project = 'conductor'
+
+setuptools.setup(
+ name=project,
+ version=setup.get_version(project, '2013.1'),
+ description='The Conductor is orchestration engine server',
+ license='Apache License (2.0)',
+ author='Mirantis, Inc.',
+ author_email='openstack@lists.launchpad.net',
+ url='http://conductor.openstack.org/',
+ packages=setuptools.find_packages(exclude=['bin']),
+ test_suite='nose.collector',
+ cmdclass=setup.get_cmdclass(),
+ include_package_data=True,
+ install_requires=requires,
+ dependency_links=depend_links,
+ classifiers=[
+ 'Development Status :: 4 - Beta',
+ 'License :: OSI Approved :: Apache Software License',
+ 'Operating System :: POSIX :: Linux',
+ 'Programming Language :: Python :: 2.7',
+ 'Environment :: No Input/Output (Daemon)',
+ 'Environment :: OpenStack',
+ ],
+ scripts=['bin/conductor'],
+ py_modules=[]
+)
diff --git a/conductor/test.json b/conductor/test.json
index 24270704..c2815d50 100644
--- a/conductor/test.json
+++ b/conductor/test.json
@@ -1,6 +1,7 @@
{
- "name": "MyDataCenter",
+ "name": "MyDataCenterx",
"id": "adc6d143f9584d10808c7ef4d07e4802",
+ "token": "MIINIQYJKoZIhvcNAQcCoIINEjCCDQ4CAQExCTAHBgUrDgMCGjCCC-oGCSqGSIb3DQEHAaCCC+sEggvneyJhY2Nlc3MiOiB7InRva2VuIjogeyJpc3N1ZWRfYXQiOiAiMjAxMy0wMy0yNlQwNjo0NTozNy4zOTI0MDAiLCAiZXhwaXJlcyI6ICIyMDEzLTAzLTI3VDA2OjQ1OjM3WiIsICJpZCI6ICJwbGFjZWhvbGRlciIsICJ0ZW5hbnQiOiB7ImRlc2NyaXB0aW9uIjogbnVsbCwgImVuYWJsZWQiOiB0cnVlLCAiaWQiOiAiMTZlYjc4Y2JiNjg4NDU5YzgzMDhkODk2NzhiY2VmNTAiLCAibmFtZSI6ICJhZG1pbiJ9fSwgInNlcnZpY2VDYXRhbG9nIjogW3siZW5kcG9pbnRzIjogW3siYWRtaW5VUkwiOiAiaHR0cDovLzE3Mi4xOC4xMjQuMTAxOjg3NzQvdjIvMTZlYjc4Y2JiNjg4NDU5YzgzMDhkODk2NzhiY2VmNTAiLCAicmVnaW9uIjogIlJlZ2lvbk9uZSIsICJpbnRlcm5hbFVSTCI6ICJodHRwOi8vMTcyLjE4LjEyNC4xMDE6ODc3NC92Mi8xNmViNzhjYmI2ODg0NTljODMwOGQ4OTY3OGJjZWY1MCIsICJpZCI6ICIwNGFlNjM2ZTdhYzc0NmJjYjExM2EwYzI5NDYzMzgzMCIsICJwdWJsaWNVUkwiOiAiaHR0cDovLzE3Mi4xOC4xMjQuMTAxOjg3NzQvdjIvMTZlYjc4Y2JiNjg4NDU5YzgzMDhkODk2NzhiY2VmNTAifV0sICJlbmRwb2ludHNfbGlua3MiOiBbXSwgInR5cGUiOiAiY29tcHV0ZSIsICJuYW1lIjogIm5vdmEifSwgeyJlbmRwb2ludHMiOiBbeyJhZG1pblVSTCI6ICJodHRwOi8vMTcyLjE4LjEyNC4xMDE6MzMzMyIsICJyZWdpb24iOiAiUmVnaW9uT25lIiwgImludGVybmFsVVJMIjogImh0dHA6Ly8xNzIuMTguMTI0LjEwMTozMzMzIiwgImlkIjogIjA5MmJkMjMyMGU5ZDRlYWY4ZDBlZjEzNDhjOGU3NTJjIiwgInB1YmxpY1VSTCI6ICJodHRwOi8vMTcyLjE4LjEyNC4xMDE6MzMzMyJ9XSwgImVuZHBvaW50c19saW5rcyI6IFtdLCAidHlwZSI6ICJzMyIsICJuYW1lIjogInMzIn0sIHsiZW5kcG9pbnRzIjogW3siYWRtaW5VUkwiOiAiaHR0cDovLzE3Mi4xOC4xMjQuMTAxOjkyOTIiLCAicmVnaW9uIjogIlJlZ2lvbk9uZSIsICJpbnRlcm5hbFVSTCI6ICJodHRwOi8vMTcyLjE4LjEyNC4xMDE6OTI5MiIsICJpZCI6ICI1ZWUzNjdjYzRhNjY0YmQzYTYyNmI2MjBkMzFhYzcwYyIsICJwdWJsaWNVUkwiOiAiaHR0cDovLzE3Mi4xOC4xMjQuMTAxOjkyOTIifV0sICJlbmRwb2ludHNfbGlua3MiOiBbXSwgInR5cGUiOiAiaW1hZ2UiLCAibmFtZSI6ICJnbGFuY2UifSwgeyJlbmRwb2ludHMiOiBbeyJhZG1pblVSTCI6ICJodHRwOi8vMTcyLjE4LjEyNC4xMDE6ODAwMC92MSIsICJyZWdpb24iOiAiUmVnaW9uT25lIiwgImludGVybmFsVVJMIjogImh0dHA6Ly8xNzIuMTguMTI0LjEwMTo4MDAwL3YxIiwgImlkIjogIjM3MzMzYmQwNDkxOTQzY2FiNWEyZGM5N2I5YWQzYjE2IiwgInB1YmxpY1VSTCI6ICJodHRwOi8vMTcyLjE4LjEyNC4xMDE6ODAwMC92MSJ9XSwgImVuZHBvaW50c19saW5rcyI6IFtdLCAidHlwZSI6ICJjbG91ZGZvcm1h
dGlvbiIsICJuYW1lIjogImhlYXQtY2ZuIn0sIHsiZW5kcG9pbnRzIjogW3siYWRtaW5VUkwiOiAiaHR0cDovLzE3Mi4xOC4xMjQuMTAxOjg3NzYvdjEvMTZlYjc4Y2JiNjg4NDU5YzgzMDhkODk2NzhiY2VmNTAiLCAicmVnaW9uIjogIlJlZ2lvbk9uZSIsICJpbnRlcm5hbFVSTCI6ICJodHRwOi8vMTcyLjE4LjEyNC4xMDE6ODc3Ni92MS8xNmViNzhjYmI2ODg0NTljODMwOGQ4OTY3OGJjZWY1MCIsICJpZCI6ICI4NTgwYjMzOTAxZWU0YTUyOWI0OGMyMzU0ZjFiMWNhZSIsICJwdWJsaWNVUkwiOiAiaHR0cDovLzE3Mi4xOC4xMjQuMTAxOjg3NzYvdjEvMTZlYjc4Y2JiNjg4NDU5YzgzMDhkODk2NzhiY2VmNTAifV0sICJlbmRwb2ludHNfbGlua3MiOiBbXSwgInR5cGUiOiAidm9sdW1lIiwgIm5hbWUiOiAiY2luZGVyIn0sIHsiZW5kcG9pbnRzIjogW3siYWRtaW5VUkwiOiAiaHR0cDovLzE3Mi4xOC4xMjQuMTAxOjg3NzMvc2VydmljZXMvQWRtaW4iLCAicmVnaW9uIjogIlJlZ2lvbk9uZSIsICJpbnRlcm5hbFVSTCI6ICJodHRwOi8vMTcyLjE4LjEyNC4xMDE6ODc3My9zZXJ2aWNlcy9DbG91ZCIsICJpZCI6ICIwYTViOTIyNTNiZjg0NTAwYTA4OWY1N2VkMmYzZDY3NSIsICJwdWJsaWNVUkwiOiAiaHR0cDovLzE3Mi4xOC4xMjQuMTAxOjg3NzMvc2VydmljZXMvQ2xvdWQifV0sICJlbmRwb2ludHNfbGlua3MiOiBbXSwgInR5cGUiOiAiZWMyIiwgIm5hbWUiOiAiZWMyIn0sIHsiZW5kcG9pbnRzIjogW3siYWRtaW5VUkwiOiAiaHR0cDovLzE3Mi4xOC4xMjQuMTAxOjgwMDQvdjEvMTZlYjc4Y2JiNjg4NDU5YzgzMDhkODk2NzhiY2VmNTAiLCAicmVnaW9uIjogIlJlZ2lvbk9uZSIsICJpbnRlcm5hbFVSTCI6ICJodHRwOi8vMTcyLjE4LjEyNC4xMDE6ODAwNC92MS8xNmViNzhjYmI2ODg0NTljODMwOGQ4OTY3OGJjZWY1MCIsICJpZCI6ICJhMjRjMGY1ZmUzMmQ0ZDU5YWEwMTk1Mzg3OGFlMDQwNyIsICJwdWJsaWNVUkwiOiAiaHR0cDovLzE3Mi4xOC4xMjQuMTAxOjgwMDQvdjEvMTZlYjc4Y2JiNjg4NDU5YzgzMDhkODk2NzhiY2VmNTAifV0sICJlbmRwb2ludHNfbGlua3MiOiBbXSwgInR5cGUiOiAib3JjaGVzdHJhdGlvbiIsICJuYW1lIjogImhlYXQifSwgeyJlbmRwb2ludHMiOiBbeyJhZG1pblVSTCI6ICJodHRwOi8vMTcyLjE4LjEyNC4xMDE6MzUzNTcvdjIuMCIsICJyZWdpb24iOiAiUmVnaW9uT25lIiwgImludGVybmFsVVJMIjogImh0dHA6Ly8xNzIuMTguMTI0LjEwMTo1MDAwL3YyLjAiLCAiaWQiOiAiNGM4M2VlYjk3MDA5NDg3M2FiNjg3NjUzNWJlZjgxZWEiLCAicHVibGljVVJMIjogImh0dHA6Ly8xNzIuMTguMTI0LjEwMTo1MDAwL3YyLjAifV0sICJlbmRwb2ludHNfbGlua3MiOiBbXSwgInR5cGUiOiAiaWRlbnRpdHkiLCAibmFtZSI6ICJrZXlzdG9uZSJ9XSwgInVzZXIiOiB7InVzZXJuYW1lIjogImFkbWluIiwgInJvbGVzX2xpbmtzIjogW10sICJpZCI6ICJmMmNkZWM4NTQ2MmQ0N2UzODQ5ZTZmMzE3NGRhMTk4NSIsICJyb2xlcyI6
IFt7Im5hbWUiOiAiYWRtaW4ifV0sICJuYW1lIjogImFkbWluIn0sICJtZXRhZGF0YSI6IHsiaXNfYWRtaW4iOiAwLCAicm9sZXMiOiBbIjc4N2JlODdjMGFkMjQ3ODJiNTQ4NWU5NjNhZjllNzllIl19fX0xgf8wgfwCAQEwXDBXMQswCQYDVQQGEwJVUzEOMAwGA1UECBMFVW5zZXQxDjAMBgNVBAcTBVVuc2V0MQ4wDAYDVQQKEwVVbnNldDEYMBYGA1UEAxMPd3d3LmV4YW1wbGUuY29tAgEBMAcGBSsOAwIaMA0GCSqGSIb3DQEBAQUABIGAURfgqd8iZ-UWZTta2pyKzXBXm9nmdzlOY-TN8526LWH4jrU1uuimAZKSjZUCwmnaSvoXHLlP6CSGvNUJWDDu6YFNmDfmatVqFrTij4EFGruExmtUxmhbQOnAyhKqIxHFg2t3VKEB2tVhLGSzoSH1dM2+j0-I0JgOLWIStVFEF5A=",
"services": {
"activeDirectories": [
{
diff --git a/dashboard/windcclient/v1/__init__.py b/conductor/tests/__init__.py
similarity index 100%
rename from dashboard/windcclient/v1/__init__.py
rename to conductor/tests/__init__.py
diff --git a/windc/openstack/common/middleware/__init__.py b/conductor/tests/conductor/__init__.py
similarity index 100%
rename from windc/openstack/common/middleware/__init__.py
rename to conductor/tests/conductor/__init__.py
diff --git a/conductor/tests/conductor/test_methods.py b/conductor/tests/conductor/test_methods.py
new file mode 100644
index 00000000..5234c1e3
--- /dev/null
+++ b/conductor/tests/conductor/test_methods.py
@@ -0,0 +1,13 @@
+import unittest
+from conductor.app import ConductorWorkflowService
+import conductor.rabbitmq as rabbitmq
+from conductor.workflow import Workflow
+import conductor.xml_code_engine as engine
+
+class TestMethodsAndClasses(unittest.TestCase):
+
+ def test_init_service_class(self):
+ con = ConductorWorkflowService()
+
+ con.start()
+ con.stop()
\ No newline at end of file
diff --git a/conductor/tests/conductor/test_with_fake_service.py b/conductor/tests/conductor/test_with_fake_service.py
new file mode 100644
index 00000000..2c138157
--- /dev/null
+++ b/conductor/tests/conductor/test_with_fake_service.py
@@ -0,0 +1,11 @@
+import unittest
+from conductor.app import ConductorWorkflowService
+from conductor.openstack.common import service
+
+class TestMethodsAndClasses(unittest.TestCase):
+
+ def test_init_service_class(self):
+ launcher = service.ServiceLauncher()
+ con = ConductorWorkflowService()
+ launcher.launch_service(con)
+
\ No newline at end of file
diff --git a/conductor/tools/install_venv_common.py b/conductor/tools/install_venv_common.py
new file mode 100644
index 00000000..41306564
--- /dev/null
+++ b/conductor/tools/install_venv_common.py
@@ -0,0 +1,220 @@
+# vim: tabstop=4 shiftwidth=4 softtabstop=4
+
+# Copyright 2013 OpenStack Foundation
+# Copyright 2013 IBM Corp.
+#
+# Licensed under the Apache License, Version 2.0 (the "License"); you may
+# not use this file except in compliance with the License. You may obtain
+# a copy of the License at
+#
+# http://www.apache.org/licenses/LICENSE-2.0
+#
+# Unless required by applicable law or agreed to in writing, software
+# distributed under the License is distributed on an "AS IS" BASIS, WITHOUT
+# WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. See the
+# License for the specific language governing permissions and limitations
+# under the License.
+
+"""Provides methods needed by installation script for OpenStack development
+virtual environments.
+
+Synced in from openstack-common
+"""
+
+import argparse
+import os
+import subprocess
+import sys
+
+
+class InstallVenv(object):
+
+ def __init__(self, root, venv, pip_requires, test_requires, py_version,
+ project):
+ self.root = root
+ self.venv = venv
+ self.pip_requires = pip_requires
+ self.test_requires = test_requires
+ self.py_version = py_version
+ self.project = project
+
+ def die(self, message, *args):
+ print >> sys.stderr, message % args
+ sys.exit(1)
+
+ def check_python_version(self):
+ if sys.version_info < (2, 6):
+ self.die("Need Python Version >= 2.6")
+
+ def run_command_with_code(self, cmd, redirect_output=True,
+ check_exit_code=True):
+ """Runs a command in an out-of-process shell.
+
+ Returns the output of that command. Working directory is self.root.
+ """
+ if redirect_output:
+ stdout = subprocess.PIPE
+ else:
+ stdout = None
+
+ proc = subprocess.Popen(cmd, cwd=self.root, stdout=stdout)
+ output = proc.communicate()[0]
+ if check_exit_code and proc.returncode != 0:
+ self.die('Command "%s" failed.\n%s', ' '.join(cmd), output)
+ return (output, proc.returncode)
+
+ def run_command(self, cmd, redirect_output=True, check_exit_code=True):
+ return self.run_command_with_code(cmd, redirect_output,
+ check_exit_code)[0]
+
+ def get_distro(self):
+ if (os.path.exists('/etc/fedora-release') or
+ os.path.exists('/etc/redhat-release')):
+ return Fedora(self.root, self.venv, self.pip_requires,
+ self.test_requires, self.py_version, self.project)
+ else:
+ return Distro(self.root, self.venv, self.pip_requires,
+ self.test_requires, self.py_version, self.project)
+
+ def check_dependencies(self):
+ self.get_distro().install_virtualenv()
+
+ def create_virtualenv(self, no_site_packages=True):
+ """Creates the virtual environment and installs PIP.
+
+ Creates the virtual environment and installs PIP only into the
+ virtual environment.
+ """
+ if not os.path.isdir(self.venv):
+ print 'Creating venv...',
+ if no_site_packages:
+ self.run_command(['virtualenv', '-q', '--no-site-packages',
+ self.venv])
+ else:
+ self.run_command(['virtualenv', '-q', self.venv])
+ print 'done.'
+ print 'Installing pip in venv...',
+ if not self.run_command(['tools/with_venv.sh', 'easy_install',
+ 'pip>1.0']).strip():
+ self.die("Failed to install pip.")
+ print 'done.'
+ else:
+ print "venv already exists..."
+ pass
+
+ def pip_install(self, *args):
+ self.run_command(['tools/with_venv.sh',
+ 'pip', 'install', '--upgrade'] + list(args),
+ redirect_output=False)
+
+ def install_dependencies(self):
+ print 'Installing dependencies with pip (this can take a while)...'
+
+ # First things first, make sure our venv has the latest pip and
+ # distribute.
+ # NOTE: we keep pip at version 1.1 since the most recent version causes
+ # the .venv creation to fail. See:
+ # https://bugs.launchpad.net/nova/+bug/1047120
+ self.pip_install('pip==1.1')
+ self.pip_install('distribute')
+
+ # Install greenlet by hand - just listing it in the requires file does
+ # not
+ # get it installed in the right order
+ self.pip_install('greenlet')
+
+ self.pip_install('-r', self.pip_requires)
+ self.pip_install('-r', self.test_requires)
+
+ def post_process(self):
+ self.get_distro().post_process()
+
+ def parse_args(self, argv):
+ """Parses command-line arguments."""
+ parser = argparse.ArgumentParser()
+ parser.add_argument('-n', '--no-site-packages',
+ action='store_true',
+ help="Do not inherit packages from global Python "
+ "install")
+ return parser.parse_args(argv[1:])
+
+
+class Distro(InstallVenv):
+
+ def check_cmd(self, cmd):
+ return bool(self.run_command(['which', cmd],
+ check_exit_code=False).strip())
+
+ def install_virtualenv(self):
+ if self.check_cmd('virtualenv'):
+ return
+
+ if self.check_cmd('easy_install'):
+ print 'Installing virtualenv via easy_install...',
+ if self.run_command(['easy_install', 'virtualenv']):
+ print 'Succeeded'
+ return
+ else:
+ print 'Failed'
+
+ self.die('ERROR: virtualenv not found.\n\n%s development'
+ ' requires virtualenv, please install it using your'
+ ' favorite package management tool' % self.project)
+
+ def post_process(self):
+ """Any distribution-specific post-processing gets done here.
+
+ In particular, this is useful for applying patches to code inside
+ the venv.
+ """
+ pass
+
+
+class Fedora(Distro):
+ """This covers all Fedora-based distributions.
+
+ Includes: Fedora, RHEL, CentOS, Scientific Linux
+ """
+
+ def check_pkg(self, pkg):
+ return self.run_command_with_code(['rpm', '-q', pkg],
+ check_exit_code=False)[1] == 0
+
+ def yum_install(self, pkg, **kwargs):
+ print "Attempting to install '%s' via yum" % pkg
+ self.run_command(['sudo', 'yum', 'install', '-y', pkg], **kwargs)
+
+ def apply_patch(self, originalfile, patchfile):
+ self.run_command(['patch', '-N', originalfile, patchfile],
+ check_exit_code=False)
+
+ def install_virtualenv(self):
+ if self.check_cmd('virtualenv'):
+ return
+
+ if not self.check_pkg('python-virtualenv'):
+ self.yum_install('python-virtualenv', check_exit_code=False)
+
+ super(Fedora, self).install_virtualenv()
+
+ def post_process(self):
+ """Workaround for a bug in eventlet.
+
+ This currently affects RHEL6.1, but the fix can safely be
+ applied to all RHEL and Fedora distributions.
+
+ This can be removed when the fix is applied upstream.
+
+ Nova: https://bugs.launchpad.net/nova/+bug/884915
+ Upstream: https://bitbucket.org/which_linden/eventlet/issue/89
+ """
+
+ # Install "patch" program if it's not there
+ if not self.check_pkg('patch'):
+ self.yum_install('patch')
+
+ # Apply the eventlet patch
+ self.apply_patch(os.path.join(self.venv, 'lib', self.py_version,
+ 'site-packages',
+ 'eventlet/green/subprocess.py'),
+ 'contrib/redhat-eventlet.patch')
diff --git a/conductor/tools/pip-requires b/conductor/tools/pip-requires
index a7bcbfe9..ac910b04 100644
--- a/conductor/tools/pip-requires
+++ b/conductor/tools/pip-requires
@@ -1,3 +1,10 @@
-pika
-tornado
-jsonpath
\ No newline at end of file
+anyjson
+eventlet>=0.9.12
+jsonpath
+puka
+Paste
+PasteDeploy
+iso8601>=0.1.4
+python-heatclient
+
+http://tarballs.openstack.org/oslo-config/oslo-config-2013.1b4.tar.gz#egg=oslo-config
diff --git a/dashboard/ReadMe.txt b/dashboard/ReadMe.txt
deleted file mode 100644
index 45d191b7..00000000
--- a/dashboard/ReadMe.txt
+++ /dev/null
@@ -1,31 +0,0 @@
-# TO DO:
-# 1. Add new functional for services and data centers
-# 2. Fix issue with list of services: services table shoudl show services for
-# specific data center
-
-This file is described how to install new tab on horizon dashboard.
-We should do the following:
- 1. Copy directory 'windc' to directory '/opt/stack/horizon/openstack_dashboard/dashboards/project'
- 2. Copy api/windc.py to directory '/opt/stack/horizon/openstack_dashboard/api'
- 3. Copy directory 'windcclient' to directory '/opt/stack/horizon/'
- 4. Edit file '/opt/stack/horizon/openstack_dashboard/dashboards/project/dashboard.py'
- Add line with windc project:
-
- ...
-class BasePanels(horizon.PanelGroup):
- slug = "compute"
- name = _("Manage Compute")
- panels = ('overview',
- 'instances',
- 'volumes',
- 'images_and_snapshots',
- 'access_and_security',
- 'networks',
- 'routers',
- 'windc')
-
- ...
-
- 5. Run the test Django server:
- cd /opt/stack/horizon
- python manage.py runserver 67.207.197.36:8080
\ No newline at end of file
diff --git a/dashboard/api/windc.py b/dashboard/api/windc.py
deleted file mode 100644
index 0739722d..00000000
--- a/dashboard/api/windc.py
+++ /dev/null
@@ -1,126 +0,0 @@
-# vim: tabstop=4 shiftwidth=4 softtabstop=4
-
-# Copyright 2012 United States Government as represented by the
-# Administrator of the National Aeronautics and Space Administration.
-# All Rights Reserved.
-#
-# Copyright 2012 Nebula, Inc.
-#
-# Licensed under the Apache License, Version 2.0 (the "License"); you may
-# not use this file except in compliance with the License. You may obtain
-# a copy of the License at
-#
-# http://www.apache.org/licenses/LICENSE-2.0
-#
-# Unless required by applicable law or agreed to in writing, software
-# distributed under the License is distributed on an "AS IS" BASIS, WITHOUT
-# WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. See the
-# License for the specific language governing permissions and limitations
-# under the License.
-
-import logging
-import urlparse
-
-from django.utils.decorators import available_attrs
-from portasclient.v1.client import Client as windc_client
-
-LOG = logging.getLogger(__name__)
-
-
-def windcclient(request):
- url = "http://127.0.0.1:8082"
- LOG.debug('windcclient connection created using token "%s" and url "%s"'
- % (request.user.token, url))
- return windc_client(endpoint=url, token=request.user.token.token['id'])
-
-
-def datacenters_create(request, parameters):
- name = parameters.get('name', '')
- return windcclient(request).environments.create(name)
-
-
-def datacenters_delete(request, datacenter_id):
- return windcclient(request).environments.delete(datacenter_id)
-
-
-def datacenters_get(request, datacenter_id):
- return windcclient(request).environments.get(datacenter_id)
-
-
-def datacenters_list(request):
- return windcclient(request).environments.list()
-
-
-def datacenters_deploy(request, datacenter_id):
- sessions = windcclient(request).sessions.list(datacenter_id)
- for session in sessions:
- if session.state == 'open':
- session_id = session.id
- if not session_id:
- return "Sorry, nothing to deploy."
- return windcclient(request).sessions.deploy(datacenter_id, session_id)
-
-
-def services_create(request, datacenter, parameters):
- session_id = windcclient(request).sessions.list(datacenter)[0].id
- if parameters['service_type'] == 'Active Directory':
- res = windcclient(request).activeDirectories.create(datacenter,
- session_id,
- parameters)
- else:
- res = windcclient(request).webServers.create(datacenter,
- session_id,
- parameters)
-
- return res
-
-
-def services_list(request, datacenter_id):
- session_id = None
- sessions = windcclient(request).sessions.list(datacenter_id)
- LOG.critical('DC ID: ' + str(datacenter_id))
-
- for s in sessions:
- if s.state in ['open', 'deploying']:
- session_id = s.id
-
- if session_id is None:
- session_id = windcclient(request).sessions.configure(datacenter_id).id
-
- services = windcclient(request).activeDirectories.list(datacenter_id,
- session_id)
- services += windcclient(request).webServers.list(datacenter_id, session_id)
-
- return services
-
-
-def services_get(request, datacenter_id, service_id):
- services = services_list(request, datacenter_id)
-
- for service in services:
- if service.id is service_id:
- return service
-
-
-def services_delete(request, datacenter_id, service_id):
- services = services_list(request, datacenter_id)
-
- session_id = None
- sessions = windcclient(request).sessions.list(datacenter_id)
- for session in sessions:
- if session.state == 'open':
- session_id = session.id
-
- if session_id is None:
- raise Exception("Sorry, you can not delete this service now.")
-
- for service in services:
- if service.id is service_id:
- if service.type is 'Active Directory':
- windcclient(request).activeDirectories.delete(datacenter_id,
- session_id,
- service_id)
- elif service.type is 'IIS':
- windcclient(request).webServers.delete(datacenter_id,
- session_id,
- service_id)
diff --git a/dashboard/windc/templates/windc/_services.html b/dashboard/windc/templates/windc/_services.html
deleted file mode 100644
index 18695084..00000000
--- a/dashboard/windc/templates/windc/_services.html
+++ /dev/null
@@ -1,3 +0,0 @@
-{% load i18n sizeformat %}
-
-
{% trans "Services" %}
\ No newline at end of file
diff --git a/dashboard/windcclient/common/base.py b/dashboard/windcclient/common/base.py
deleted file mode 100644
index 9f035044..00000000
--- a/dashboard/windcclient/common/base.py
+++ /dev/null
@@ -1,137 +0,0 @@
-# Copyright 2012 OpenStack LLC.
-# All Rights Reserved.
-#
-# Licensed under the Apache License, Version 2.0 (the "License"); you may
-# not use this file except in compliance with the License. You may obtain
-# a copy of the License at
-#
-# http://www.apache.org/licenses/LICENSE-2.0
-#
-# Unless required by applicable law or agreed to in writing, software
-# distributed under the License is distributed on an "AS IS" BASIS, WITHOUT
-# WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. See the
-# License for the specific language governing permissions and limitations
-# under the License.
-"""
-Base utilities to build API operation managers and objects on top of.
-"""
-
-
-def getid(obj):
- """
- Abstracts the common pattern of allowing both an object or an object's ID
- (UUID) as a parameter when dealing with relationships.
- """
- try:
- return obj.id
- except AttributeError:
- return obj
-
-
-class Manager(object):
- """
- Managers interact with a particular type of API and provide CRUD
- operations for them.
- """
- resource_class = None
-
- def __init__(self, api):
- self.api = api
-
- def _list(self, url, response_key, obj_class=None, body=None):
- resp, body = self.api.client.json_request('GET', url, body=body)
-
- if obj_class is None:
- obj_class = self.resource_class
-
- data = body[response_key]
- return [obj_class(self, res, loaded=True) for res in data if res]
-
- def _delete(self, url):
- self.api.client.raw_request('DELETE', url)
-
- def _update(self, url, body, response_key=None):
- resp, body = self.api.client.json_request('PUT', url, body=body)
- # PUT requests may not return a body
- if body:
- return self.resource_class(self, body[response_key])
-
- def _create(self, url, body, response_key, return_raw=False):
- resp, body = self.api.client.json_request('POST', url, body=body)
- if return_raw:
- return body[response_key]
- return self.resource_class(self, body[response_key])
-
- def _get(self, url, response_key, return_raw=False):
- resp, body = self.api.client.json_request('GET', url)
- if return_raw:
- return body[response_key]
- return self.resource_class(self, body[response_key])
-
-
-class Resource(object):
- """
- A resource represents a particular instance of an object (tenant, user,
- etc). This is pretty much just a bag for attributes.
-
- :param manager: Manager object
- :param info: dictionary representing resource attributes
- :param loaded: prevent lazy-loading if set to True
- """
- def __init__(self, manager, info, loaded=False):
- self.manager = manager
- self._info = info
- self._add_details(info)
- self._loaded = loaded
-
- def _add_details(self, info):
- for (k, v) in info.iteritems():
- setattr(self, k, v)
-
- def __getattr__(self, k):
- if k not in self.__dict__:
- #NOTE(bcwaldon): disallow lazy-loading if already loaded once
- if not self.is_loaded():
- self.get()
- return self.__getattr__(k)
-
- raise AttributeError(k)
- else:
- return self.__dict__[k]
-
- def __repr__(self):
- reprkeys = sorted(k for k in self.__dict__.keys() if k[0] != '_' and
- k != 'manager')
- info = ", ".join("%s=%s" % (k, getattr(self, k)) for k in reprkeys)
- return "<%s %s>" % (self.__class__.__name__, info)
-
- def get_info(self):
- if not self.is_loaded():
- self.get()
- if self._info:
- return self._info.copy()
- return {}
-
- def get(self):
- # set_loaded() first ... so if we have to bail, we know we tried.
- self.set_loaded(True)
- if not hasattr(self.manager, 'get'):
- return
-
- new = self.manager.get(self.id)
- if new:
- self._info = new._info
- self._add_details(new._info)
-
- def __eq__(self, other):
- if not isinstance(other, self.__class__):
- return False
- if hasattr(self, 'id') and hasattr(other, 'id'):
- return self.id == other.id
- return self._info == other._info
-
- def is_loaded(self):
- return self._loaded
-
- def set_loaded(self, val):
- self._loaded = val
diff --git a/dashboard/windcclient/common/client.py b/dashboard/windcclient/common/client.py
deleted file mode 100644
index 1ad4ed84..00000000
--- a/dashboard/windcclient/common/client.py
+++ /dev/null
@@ -1,151 +0,0 @@
-# Copyright 2012 OpenStack LLC.
-# All Rights Reserved
-#
-# Licensed under the Apache License, Version 2.0 (the "License"); you may
-# not use this file except in compliance with the License. You may obtain
-# a copy of the License at
-#
-# http://www.apache.org/licenses/LICENSE-2.0
-#
-# Unless required by applicable law or agreed to in writing, software
-# distributed under the License is distributed on an "AS IS" BASIS, WITHOUT
-# WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. See the
-# License for the specific language governing permissions and limitations
-# under the License.
-#
-# vim: tabstop=4 shiftwidth=4 softtabstop=4
-"""
-OpenStack Client interface. Handles the REST calls and responses.
-"""
-
-import httplib2
-import copy
-import logging
-import json
-
-from . import exceptions
-from . import utils
-from .service_catalog import ServiceCatalog
-
-
-logger = logging.getLogger(__name__)
-
-
-class HTTPClient(httplib2.Http):
-
- USER_AGENT = 'python-windcclient'
-
- def __init__(self, endpoint=None, token=None, username=None,
- password=None, tenant_name=None, tenant_id=None,
- region_name=None, auth_url=None, auth_tenant_id=None,
- timeout=600, insecure=False):
- super(HTTPClient, self).__init__(timeout=timeout)
- self.endpoint = endpoint
- self.auth_token = token
- self.auth_url = auth_url
- self.auth_tenant_id = auth_tenant_id
- self.username = username
- self.password = password
- self.tenant_name = tenant_name
- self.tenant_id = tenant_id
- self.region_name = region_name
- self.force_exception_to_status_code = True
- self.disable_ssl_certificate_validation = insecure
- if self.endpoint is None:
- self.authenticate()
-
- def _http_request(self, url, method, **kwargs):
- """ Send an http request with the specified characteristics.
- """
-
- kwargs['headers'] = copy.deepcopy(kwargs.get('headers', {}))
- kwargs['headers'].setdefault('User-Agent', self.USER_AGENT)
- if self.auth_token:
- kwargs['headers'].setdefault('X-Auth-Token', self.auth_token)
-
- resp, body = super(HTTPClient, self).request(url, method, **kwargs)
-
- if logger.isEnabledFor(logging.DEBUG):
- utils.http_log(logger, (url, method,), kwargs, resp, body)
-
- if resp.status in (301, 302, 305):
- return self._http_request(resp['location'], method, **kwargs)
-
- return resp, body
-
- def _json_request(self, method, url, **kwargs):
- """ Wrapper around _http_request to handle setting headers,
- JSON enconding/decoding and error handling.
- """
-
- kwargs.setdefault('headers', {})
- kwargs['headers'].setdefault('Content-Type', 'application/json')
-
- if 'body' in kwargs and kwargs['body'] is not None:
- kwargs['body'] = json.dumps(kwargs['body'])
-
- resp, body = self._http_request(url, method, **kwargs)
-
- if body:
- try:
- body = json.loads(body)
- except ValueError:
- logger.debug("Could not decode JSON from body: %s" % body)
- else:
- logger.debug("No body was returned.")
- body = None
-
- if 400 <= resp.status < 600:
- # DELETE THIS STRING
- logger.exception(url)
- raise exceptions.from_response(resp, body)
-
- return resp, body
-
- def raw_request(self, method, url, **kwargs):
- url = self.endpoint + url
-
- kwargs.setdefault('headers', {})
- kwargs['headers'].setdefault('Content-Type',
- 'application/octet-stream')
-
- resp, body = self._http_request(url, method, **kwargs)
-
- if 400 <= resp.status < 600:
- logger.exception(url)
- raise exceptions.from_response(resp, body)
-
- return resp, body
-
- def json_request(self, method, url, **kwargs):
- url = self.endpoint + url
- resp, body = self._json_request(method, url, **kwargs)
- return resp, body
-
- def authenticate(self):
- token_url = self.auth_url + "/tokens"
- body = {'auth': {'passwordCredentials': {'username': self.username,
- 'password': self.password}}}
- if self.tenant_id:
- body['auth']['tenantId'] = self.tenant_id
- elif self.tenant_name:
- body['auth']['tenantName'] = self.tenant_name
-
- tmp_follow_all_redirects = self.follow_all_redirects
- self.follow_all_redirects = True
- try:
- resp, body = self._json_request('POST', token_url, body=body)
- finally:
- self.follow_all_redirects = tmp_follow_all_redirects
-
- try:
- self.service_catalog = ServiceCatalog(body['access'])
- token = self.service_catalog.get_token()
- self.auth_token = token['id']
- self.auth_tenant_id = token['tenant_id']
- except KeyError:
- logger.exception("Parse service catalog failed.")
- raise exceptions.AuthorizationFailure()
-
- self.endpoint = self.service_catalog.url_for(attr='region',
- filter_value=self.region_name)
diff --git a/dashboard/windcclient/common/exceptions.py b/dashboard/windcclient/common/exceptions.py
deleted file mode 100644
index 4d17b8db..00000000
--- a/dashboard/windcclient/common/exceptions.py
+++ /dev/null
@@ -1,140 +0,0 @@
-# Copyright 2010 Jacob Kaplan-Moss
-"""
-Exception definitions.
-"""
-
-
-class UnsupportedVersion(Exception):
- """Indicates that the user is trying to use an unsupported
- version of the API"""
- pass
-
-
-class CommandError(Exception):
- pass
-
-
-class AuthorizationFailure(Exception):
- pass
-
-
-class NoUniqueMatch(Exception):
- pass
-
-
-class NoTokenLookupException(Exception):
- """This form of authentication does not support looking up
- endpoints from an existing token."""
- pass
-
-
-class EndpointNotFound(Exception):
- """Could not find Service or Region in Service Catalog."""
- pass
-
-
-class AmbiguousEndpoints(Exception):
- """Found more than one matching endpoint in Service Catalog."""
- def __init__(self, endpoints=None):
- self.endpoints = endpoints
-
- def __str__(self):
- return "AmbiguousEndpoints: %s" % repr(self.endpoints)
-
-
-class ClientException(Exception):
- """
- The base exception class for all exceptions this library raises.
- """
- def __init__(self, code, message=None, details=None):
- self.code = code
- self.message = message or self.__class__.message
- self.details = details
-
- def __str__(self):
- return "%s (HTTP %s)" % (self.message, self.code)
-
-
-class BadRequest(ClientException):
- """
- HTTP 400 - Bad request: you sent some malformed data.
- """
- http_status = 400
- message = "Bad request"
-
-
-class Unauthorized(ClientException):
- """
- HTTP 401 - Unauthorized: bad credentials.
- """
- http_status = 401
- message = "Unauthorized"
-
-
-class Forbidden(ClientException):
- """
- HTTP 403 - Forbidden: your credentials don't give you access to this
- resource.
- """
- http_status = 403
- message = "Forbidden"
-
-
-class NotFound(ClientException):
- """
- HTTP 404 - Not found
- """
- http_status = 404
- message = "Not found"
-
-
-class OverLimit(ClientException):
- """
- HTTP 413 - Over limit: you're over the API limits for this time period.
- """
- http_status = 413
- message = "Over limit"
-
-
-# NotImplemented is a python keyword.
-class HTTPNotImplemented(ClientException):
- """
- HTTP 501 - Not Implemented: the server does not support this operation.
- """
- http_status = 501
- message = "Not Implemented"
-
-
-# In Python 2.4 Exception is old-style and thus doesn't have a __subclasses__()
-# so we can do this:
-# _code_map = dict((c.http_status, c)
-# for c in ClientException.__subclasses__())
-#
-# Instead, we have to hardcode it:
-_code_map = dict((c.http_status, c) for c in [BadRequest, Unauthorized,
- Forbidden, NotFound, OverLimit, HTTPNotImplemented])
-
-
-def from_response(response, body):
- """
- Return an instance of an ClientException or subclass
- based on an httplib2 response.
-
- Usage::
-
- resp, body = http.request(...)
- if resp.status != 200:
- raise exception_from_response(resp, body)
- """
- cls = _code_map.get(response.status, ClientException)
- if body:
- if hasattr(body, 'keys'):
- error = body[body.keys()[0]]
- message = error.get('message', None)
- details = error.get('details', None)
- else:
- message = 'n/a'
- details = body
- return cls(code=response.status, message=message, details=details)
- else:
- return cls(code=response.status)
diff --git a/dashboard/windcclient/common/service_catalog.py b/dashboard/windcclient/common/service_catalog.py
deleted file mode 100644
index d2a91d67..00000000
--- a/dashboard/windcclient/common/service_catalog.py
+++ /dev/null
@@ -1,62 +0,0 @@
-# Copyright 2011 OpenStack LLC.
-# Copyright 2011, Piston Cloud Computing, Inc.
-# Copyright 2011 Nebula, Inc.
-#
-# All Rights Reserved.
-#
-# Licensed under the Apache License, Version 2.0 (the "License");
-# you may not use this file except in compliance with the License.
-# You may obtain a copy of the License at
-#
-# http://www.apache.org/licenses/LICENSE-2.0
-#
-# Unless required by applicable law or agreed to in writing, software
-# distributed under the License is distributed on an "AS IS" BASIS,
-# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
-# See the License for the specific language governing permissions and
-# limitations under the License.
-
-
-from . import exceptions
-
-
-class ServiceCatalog(object):
- """Helper methods for dealing with a Keystone Service Catalog."""
-
- def __init__(self, resource_dict):
- self.catalog = resource_dict
-
- def get_token(self):
- """Fetch token details fron service catalog"""
- token = {'id': self.catalog['token']['id'],
- 'expires': self.catalog['token']['expires']}
- try:
- token['user_id'] = self.catalog['user']['id']
- token['tenant_id'] = self.catalog['token']['tenant']['id']
- except:
- # just leave the tenant and user out if it doesn't exist
- pass
- return token
-
- def url_for(self, attr=None, filter_value=None,
- service_type='loadbalancer', endpoint_type='publicURL'):
- """Fetch an endpoint from the service catalog.
-
- Fetch the specified endpoint from the service catalog for
- a particular endpoint attribute. If no attribute is given, return
- the first endpoint of the specified type.
-
- See tests for a sample service catalog.
- """
- catalog = self.catalog.get('serviceCatalog', [])
-
- for service in catalog:
- if service['type'] != service_type:
- continue
-
- endpoints = service['endpoints']
- for endpoint in endpoints:
- if not filter_value or endpoint.get(attr) == filter_value:
- return endpoint[endpoint_type]
-
- raise exceptions.EndpointNotFound('Endpoint not found.')
diff --git a/dashboard/windcclient/common/utils.py b/dashboard/windcclient/common/utils.py
deleted file mode 100644
index cabcba88..00000000
--- a/dashboard/windcclient/common/utils.py
+++ /dev/null
@@ -1,291 +0,0 @@
-import os
-import re
-import sys
-import uuid
-import logging
-import prettytable
-
-from . import exceptions
-
-
-def arg(*args, **kwargs):
- """Decorator for CLI args."""
- def _decorator(func):
- add_arg(func, *args, **kwargs)
- return func
- return _decorator
-
-
-def env(*vars, **kwargs):
- """
- returns the first environment variable set
- if none are non-empty, defaults to '' or keyword arg default
- """
- for v in vars:
- value = os.environ.get(v, None)
- if value:
- return value
- return kwargs.get('default', '')
-
-
-def add_arg(f, *args, **kwargs):
- """Bind CLI arguments to a shell.py `do_foo` function."""
-
- if not hasattr(f, 'arguments'):
- f.arguments = []
-
- # NOTE(sirp): avoid dups that can occur when the module is shared across
- # tests.
- if (args, kwargs) not in f.arguments:
- # Because of the sematics of decorator composition if we just append
- # to the options list positional options will appear to be backwards.
- f.arguments.insert(0, (args, kwargs))
-
-
-def add_resource_manager_extra_kwargs_hook(f, hook):
- """Adds hook to bind CLI arguments to ResourceManager calls.
-
- The `do_foo` calls in shell.py will receive CLI args and then in turn pass
- them through to the ResourceManager. Before passing through the args, the
- hooks registered here will be called, giving us a chance to add extra
- kwargs (taken from the command-line) to what's passed to the
- ResourceManager.
- """
- if not hasattr(f, 'resource_manager_kwargs_hooks'):
- f.resource_manager_kwargs_hooks = []
-
- names = [h.__name__ for h in f.resource_manager_kwargs_hooks]
- if hook.__name__ not in names:
- f.resource_manager_kwargs_hooks.append(hook)
-
-
-def get_resource_manager_extra_kwargs(f, args, allow_conflicts=False):
- """Return extra_kwargs by calling resource manager kwargs hooks."""
- hooks = getattr(f, "resource_manager_kwargs_hooks", [])
- extra_kwargs = {}
- for hook in hooks:
- hook_name = hook.__name__
- hook_kwargs = hook(args)
-
- conflicting_keys = set(hook_kwargs.keys()) & set(extra_kwargs.keys())
- if conflicting_keys and not allow_conflicts:
- raise Exception("Hook '%(hook_name)s' is attempting to redefine"
- " attributes '%(conflicting_keys)s'" % locals())
-
- extra_kwargs.update(hook_kwargs)
-
- return extra_kwargs
-
-
-def unauthenticated(f):
- """
- Adds 'unauthenticated' attribute to decorated function.
- Usage:
- @unauthenticated
- def mymethod(f):
- ...
- """
- f.unauthenticated = True
- return f
-
-
-def isunauthenticated(f):
- """
- Checks to see if the function is marked as not requiring authentication
- with the @unauthenticated decorator. Returns True if decorator is
- set to True, False otherwise.
- """
- return getattr(f, 'unauthenticated', False)
-
-
-def service_type(stype):
- """
- Adds 'service_type' attribute to decorated function.
- Usage:
- @service_type('volume')
- def mymethod(f):
- ...
- """
- def inner(f):
- f.service_type = stype
- return f
- return inner
-
-
-def get_service_type(f):
- """
- Retrieves service type from function
- """
- return getattr(f, 'service_type', None)
-
-
-def pretty_choice_list(l):
- return ', '.join("'%s'" % i for i in l)
-
-
-def print_list(objs, fields, formatters={}, sortby_index=0):
- if sortby_index == None:
- sortby = None
- else:
- sortby = fields[sortby_index]
-
- pt = prettytable.PrettyTable([f for f in fields], caching=False)
- pt.align = 'l'
-
- for o in objs:
- row = []
- for field in fields:
- if field in formatters:
- row.append(formatters[field](o))
- else:
- field_name = field.lower().replace(' ', '_')
- data = getattr(o, field_name, '')
- row.append(data)
- pt.add_row(row)
-
- print pt.get_string(sortby=sortby)
-
-
-def print_flat_list(lst, field):
- pt = prettytable.PrettyTable(field)
- for el in lst:
- pt.add_row([el])
- print pt.get_string()
-
-
-def print_dict(d, property="Property"):
- pt = prettytable.PrettyTable([property, 'Value'], caching=False)
- pt.align = 'l'
- [pt.add_row(list(r)) for r in d.iteritems()]
- print pt.get_string(sortby=property)
-
-
-def find_resource(manager, name_or_id):
- """Helper for the _find_* methods."""
- # first try to get entity as integer id
- try:
- if isinstance(name_or_id, int) or name_or_id.isdigit():
- return manager.get(int(name_or_id))
- except exceptions.NotFound:
- pass
-
- # now try to get entity as uuid
- try:
- uuid.UUID(str(name_or_id))
- return manager.get(name_or_id)
- except (ValueError, exceptions.NotFound):
- pass
-
- try:
- try:
- return manager.find(human_id=name_or_id)
- except exceptions.NotFound:
- pass
-
- # finally try to find entity by name
- try:
- return manager.find(name=name_or_id)
- except exceptions.NotFound:
- try:
- # Volumes does not have name, but display_name
- return manager.find(display_name=name_or_id)
- except exceptions.NotFound:
- msg = "No %s with a name or ID of '%s' exists." % \
- (manager.resource_class.__name__.lower(), name_or_id)
- raise exceptions.CommandError(msg)
- except exceptions.NoUniqueMatch:
- msg = ("Multiple %s matches found for '%s', use an ID to be more"
- " specific." % (manager.resource_class.__name__.lower(),
- name_or_id))
- raise exceptions.CommandError(msg)
-
-
-def _format_servers_list_networks(server):
- output = []
- for (network, addresses) in server.networks.items():
- if len(addresses) == 0:
- continue
- addresses_csv = ', '.join(addresses)
- group = "%s=%s" % (network, addresses_csv)
- output.append(group)
-
- return '; '.join(output)
-
-
-class HookableMixin(object):
- """Mixin so classes can register and run hooks."""
- _hooks_map = {}
-
- @classmethod
- def add_hook(cls, hook_type, hook_func):
- if hook_type not in cls._hooks_map:
- cls._hooks_map[hook_type] = []
-
- cls._hooks_map[hook_type].append(hook_func)
-
- @classmethod
- def run_hooks(cls, hook_type, *args, **kwargs):
- hook_funcs = cls._hooks_map.get(hook_type) or []
- for hook_func in hook_funcs:
- hook_func(*args, **kwargs)
-
-
-def safe_issubclass(*args):
- """Like issubclass, but will just return False if not a class."""
-
- try:
- if issubclass(*args):
- return True
- except TypeError:
- pass
-
- return False
-
-
-def import_class(import_str):
- """Returns a class from a string including module and class."""
- mod_str, _sep, class_str = import_str.rpartition('.')
- __import__(mod_str)
- return getattr(sys.modules[mod_str], class_str)
-
-_slugify_strip_re = re.compile(r'[^\w\s-]')
-_slugify_hyphenate_re = re.compile(r'[-\s]+')
-
-
-# http://code.activestate.com/recipes/
-# 577257-slugify-make-a-string-usable-in-a-url-or-filename/
-def slugify(value):
- """
- Normalizes string, converts to lowercase, removes non-alpha characters,
- and converts spaces to hyphens.
-
- From Django's "django/template/defaultfilters.py".
- """
- import unicodedata
- if not isinstance(value, unicode):
- value = unicode(value)
- value = unicodedata.normalize('NFKD', value).encode('ascii', 'ignore')
- value = unicode(_slugify_strip_re.sub('', value).strip().lower())
- return _slugify_hyphenate_re.sub('-', value)
-
-
-def http_log(logger, args, kwargs, resp, body):
-# if not logger.isEnabledFor(logging.DEBUG):
-# return
-
- string_parts = ['curl -i']
- for element in args:
- if element in ('GET', 'POST'):
- string_parts.append(' -X %s' % element)
- else:
- string_parts.append(' %s' % element)
-
- for element in kwargs['headers']:
- header = ' -H "%s: %s"' % (element, kwargs['headers'][element])
- string_parts.append(header)
-
- logger.debug("REQ: %s\n" % "".join(string_parts))
- if 'body' in kwargs and kwargs['body']:
- logger.debug("REQ BODY: %s\n" % (kwargs['body']))
- logger.debug("RESP:%s\n", resp)
- logger.debug("RESP BODY:%s\n", body)
diff --git a/dashboard/windcclient/shell.py b/dashboard/windcclient/shell.py
deleted file mode 100644
index 196c7a7b..00000000
--- a/dashboard/windcclient/shell.py
+++ /dev/null
@@ -1,285 +0,0 @@
-# Copyright 2010 Jacob Kaplan-Moss
-# Copyright 2011 OpenStack LLC.
-# All Rights Reserved.
-#
-# Licensed under the Apache License, Version 2.0 (the "License"); you may
-# not use this file except in compliance with the License. You may obtain
-# a copy of the License at
-#
-# http://www.apache.org/licenses/LICENSE-2.0
-#
-# Unless required by applicable law or agreed to in writing, software
-# distributed under the License is distributed on an "AS IS" BASIS, WITHOUT
-# WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. See the
-# License for the specific language governing permissions and limitations
-# under the License.
-
-"""
-Command-line interface to the OpenStack LBaaS API.
-"""
-
-import argparse
-import httplib2
-import os
-import sys
-import logging
-
-from balancerclient.common import exceptions as exc
-from balancerclient.common import utils
-from balancerclient.v1 import shell as shell_v1
-
-
-LOG = logging.getLogger(__name__)
-
-
-class OpenStackBalancerShell(object):
-
- def get_base_parser(self):
- parser = argparse.ArgumentParser(
- prog='balancer',
- description=__doc__.strip(),
- epilog='See "balancer help COMMAND" '
- 'for help on a specific command.',
- add_help=False,
- formatter_class=OpenStackHelpFormatter,
- )
-
- # Global arguments
- parser.add_argument('-h',
- '--help',
- action='store_true',
- help=argparse.SUPPRESS)
-
- parser.add_argument('--debug',
- default=False,
- action='store_true',
- help=argparse.SUPPRESS)
-
- parser.add_argument('--os_username',
- metavar='',
- default=utils.env('OS_USERNAME'),
- help='Defaults to env[OS_USERNAME]')
-
- parser.add_argument('--os_password',
- metavar='',
- default=utils.env('OS_PASSWORD'),
- help='Defaults to env[OS_PASSWORD]')
-
- parser.add_argument('--os_tenant_name',
- metavar='',
- default=utils.env('OS_TENANT_NAME'),
- help='Defaults to env[OS_TENANT_NAME]')
-
- parser.add_argument('--os_tenant_id',
- metavar='',
- default=utils.env('OS_TENANT_ID'),
- help='Defaults to env[OS_TENANT_ID]')
-
- parser.add_argument('--os_auth_url',
- metavar='',
- default=utils.env('OS_AUTH_URL'),
- help='Defaults to env[OS_AUTH_URL]')
-
- parser.add_argument('--os_region_name',
- metavar='',
- default=utils.env('OS_REGION_NAME'),
- help='Defaults to env[OS_REGION_NAME]')
-
- parser.add_argument('--os_balancer_api_version',
- metavar='',
- default=utils.env('OS_BALANCER_API_VERSION',
- 'KEYSTONE_VERSION'),
- help='Defaults to env[OS_BALANCER_API_VERSION]'
- ' or 2.0')
-
- parser.add_argument('--token',
- metavar='',
- default=utils.env('SERVICE_TOKEN'),
- help='Defaults to env[SERVICE_TOKEN]')
-
- parser.add_argument('--endpoint',
- metavar='',
- default=utils.env('SERVICE_ENDPOINT'),
- help='Defaults to env[SERVICE_ENDPOINT]')
-
- return parser
-
- def get_subcommand_parser(self, version):
- parser = self.get_base_parser()
-
- self.subcommands = {}
- subparsers = parser.add_subparsers(metavar='')
-
- try:
- actions_module = {
- '1': shell_v1,
- }[version]
- except KeyError:
- actions_module = shell_v1
-
- self._find_actions(subparsers, actions_module)
- self._find_actions(subparsers, self)
-
- return parser
-
- def _find_actions(self, subparsers, actions_module):
- for attr in (a for a in dir(actions_module) if a.startswith('do_')):
- # I prefer to be hypen-separated instead of underscores.
- command = attr[3:].replace('_', '-')
- callback = getattr(actions_module, attr)
- desc = callback.__doc__ or ''
- help = desc.strip().split('\n')[0]
- arguments = getattr(callback, 'arguments', [])
-
- subparser = subparsers.add_parser(
- command,
- help=help,
- description=desc,
- add_help=False,
- formatter_class=OpenStackHelpFormatter)
- subparser.add_argument('-h', '--help', action='help',
- help=argparse.SUPPRESS)
- self.subcommands[command] = subparser
- for (args, kwargs) in arguments:
- subparser.add_argument(*args, **kwargs)
- subparser.set_defaults(func=callback)
-
- def main(self, argv):
- # Parse args once to find version
- parser = self.get_base_parser()
- (options, args) = parser.parse_known_args(argv)
-
- # build available subcommands based on version
- api_version = options.os_balancer_api_version
- subcommand_parser = self.get_subcommand_parser(api_version)
- self.parser = subcommand_parser
-
- # Handle top-level --help/-h before attempting to parse
- # a command off the command line
- if not argv or options.help:
- self.do_help(options)
- return 0
-
- # Parse args again and call whatever callback was selected
- args = subcommand_parser.parse_args(argv)
-
- # Deal with global arguments
- if args.debug:
- httplib2.debuglevel = 1
-
- # Short-circuit and deal with help command right away.
- if args.func == self.do_help:
- self.do_help(args)
- return 0
-
- #FIXME(usrleon): Here should be restrict for project id same as
- # for username or apikey but for compatibility it is not.
-
- if not utils.isunauthenticated(args.func):
- # if the user hasn't provided any auth data
- if not (args.token or args.endpoint or args.os_username or
- args.os_password or args.os_auth_url):
- raise exc.CommandError('Expecting authentication method via \n'
- ' either a service token, '
- '--token or env[SERVICE_TOKEN], \n'
- ' or credentials, '
- '--os_username or env[OS_USERNAME].')
-
- # if it looks like the user wants to provide a service token
- # but is missing something
- if args.token or args.endpoint and not (
- args.token and args.endpoint):
- if not args.token:
- raise exc.CommandError(
- 'Expecting a token provided via either --token or '
- 'env[SERVICE_TOKEN]')
-
- if not args.endpoint:
- raise exc.CommandError(
- 'Expecting an endpoint provided via either --endpoint '
- 'or env[SERVICE_ENDPOINT]')
-
- # if it looks like the user wants to provide a credentials
- # but is missing something
- if ((args.os_username or args.os_password or args.os_auth_url)
- and not (args.os_username and args.os_password and
- args.os_auth_url)):
- if not args.os_username:
- raise exc.CommandError(
- 'Expecting a username provided via either '
- '--os_username or env[OS_USERNAME]')
-
- if not args.os_password:
- raise exc.CommandError(
- 'Expecting a password provided via either '
- '--os_password or env[OS_PASSWORD]')
-
- if not args.os_auth_url:
- raise exc.CommandError(
- 'Expecting an auth URL via either --os_auth_url or '
- 'env[OS_AUTH_URL]')
-
- if utils.isunauthenticated(args.func):
- self.cs = shell_generic.CLIENT_CLASS(endpoint=args.os_auth_url)
- else:
- token = None
- endpoint = None
- if args.token and args.endpoint:
- token = args.token
- endpoint = args.endpoint
- api_version = options.os_balancer_api_version
- self.cs = self.get_api_class(api_version)(
- username=args.os_username,
- tenant_name=args.os_tenant_name,
- tenant_id=args.os_tenant_id,
- token=token,
- endpoint=endpoint,
- password=args.os_password,
- auth_url=args.os_auth_url,
- region_name=args.os_region_name)
-
- try:
- args.func(self.cs, args)
- except exc.Unauthorized:
- raise exc.CommandError("Invalid OpenStack LBaaS credentials.")
- except exc.AuthorizationFailure:
- raise exc.CommandError("Unable to authorize user")
-
- def get_api_class(self, version):
- try:
- return {
- "1": shell_v1.CLIENT_CLASS,
- }[version]
- except KeyError:
- return shell_v1.CLIENT_CLASS
-
- @utils.arg('command', metavar='', nargs='?',
- help='Display help for ')
- def do_help(self, args):
- """
- Display help about this program or one of its subcommands.
- """
- if getattr(args, 'command', None):
- if args.command in self.subcommands:
- self.subcommands[args.command].print_help()
- else:
- raise exc.CommandError("'%s' is not a valid subcommand" %
- args.command)
- else:
- self.parser.print_help()
-
-
-# I'm picky about my shell help.
-class OpenStackHelpFormatter(argparse.HelpFormatter):
- def start_section(self, heading):
- # Title-case the headings
- heading = '%s%s' % (heading[0].upper(), heading[1:])
- super(OpenStackHelpFormatter, self).start_section(heading)
-
-
-def main():
- try:
- return OpenStackBalancerShell().main(sys.argv[1:])
- except Exception, err:
- LOG.exception("The operation executed with an error %r." % err)
- raise
diff --git a/dashboard/windcclient/v1/client.py b/dashboard/windcclient/v1/client.py
deleted file mode 100644
index 39287737..00000000
--- a/dashboard/windcclient/v1/client.py
+++ /dev/null
@@ -1,29 +0,0 @@
-# Copyright 2012 OpenStack LLC.
-# All Rights Reserved
-#
-# Licensed under the Apache License, Version 2.0 (the "License"); you may
-# not use this file except in compliance with the License. You may obtain
-# a copy of the License at
-#
-# http://www.apache.org/licenses/LICENSE-2.0
-#
-# Unless required by applicable law or agreed to in writing, software
-# distributed under the License is distributed on an "AS IS" BASIS, WITHOUT
-# WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. See the
-# License for the specific language governing permissions and limitations
-# under the License.
-#
-# vim: tabstop=4 shiftwidth=4 softtabstop=4
-
-from windcclient.common import client
-from . import datacenters
-from . import services
-
-
-class Client(object):
- """Client for the WinDC v1 API."""
-
- def __init__(self, **kwargs):
- self.client = client.HTTPClient(**kwargs)
- self.datacenters = datacenters.DCManager(self)
- self.services = services.DCServiceManager(self)
diff --git a/dashboard/windcclient/v1/datacenters.py b/dashboard/windcclient/v1/datacenters.py
deleted file mode 100644
index 3d184f1a..00000000
--- a/dashboard/windcclient/v1/datacenters.py
+++ /dev/null
@@ -1,43 +0,0 @@
-# Copyright 2012 OpenStack LLC.
-# All Rights Reserved
-#
-# Licensed under the Apache License, Version 2.0 (the "License"); you may
-# not use this file except in compliance with the License. You may obtain
-# a copy of the License at
-#
-# http://www.apache.org/licenses/LICENSE-2.0
-#
-# Unless required by applicable law or agreed to in writing, software
-# distributed under the License is distributed on an "AS IS" BASIS, WITHOUT
-# WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. See the
-# License for the specific language governing permissions and limitations
-# under the License.
-#
-# vim: tabstop=4 shiftwidth=4 softtabstop=4
-
-from windcclient.common import base
-
-
-class DC(base.Resource):
-
- def __repr__(self):
- return "" % self._info
-
-
-class DCManager(base.Manager):
- resource_class = DC
-
- def list(self):
- return self._list('/datacenters', 'datacenters')
-
- def create(self, name, **extra):
- body = {'name': name, 'services': {}}
- body.update(extra)
- return self._create('/datacenters', body, 'datacenter')
-
- def delete(self, datacenter_id):
- return self._delete("/datacenters/%s" % datacenter_id)
-
- def get(self, datacenter_id):
- return self._get("/datacenters/%s" % datacenter_id,
- 'datacenter')
diff --git a/dashboard/windcclient/v1/services.py b/dashboard/windcclient/v1/services.py
deleted file mode 100644
index 809a30c9..00000000
--- a/dashboard/windcclient/v1/services.py
+++ /dev/null
@@ -1,48 +0,0 @@
-# Copyright 2012 OpenStack LLC.
-# All Rights Reserved
-#
-# Licensed under the Apache License, Version 2.0 (the "License"); you may
-# not use this file except in compliance with the License. You may obtain
-# a copy of the License at
-#
-# http://www.apache.org/licenses/LICENSE-2.0
-#
-# Unless required by applicable law or agreed to in writing, software
-# distributed under the License is distributed on an "AS IS" BASIS, WITHOUT
-# WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. See the
-# License for the specific language governing permissions and limitations
-# under the License.
-#
-# vim: tabstop=4 shiftwidth=4 softtabstop=4
-
-from windcclient.common import base
-
-
-class DCService(base.Resource):
-
- def __repr__(self):
- return "" % self._info
-
-
-class DCServiceManager(base.Manager):
- resource_class = DCService
-
- def list(self, datacenter):
- return self._list("/datacenters/%s/services" % base.getid(datacenter),
- 'services')
-
- def create(self, datacenter, parameters):
- body = {'dc_count': 1,}
- body.update(parameters)
- return self._create("/datacenters/%s/services" % base.getid(datacenter),
- body, 'service')
-
- def delete(self, datacenter_id, service_id):
- return self._delete("/datacenters/%s/services/%s" % \
- (datacenter_id, service_id))
-
- def get(self, datacenter, service):
- return self._get("/datacenters/%s/services/%s" % \
- (base.getid(datacenter),
- base.getid(service)),
- 'service')
diff --git a/portas/etc/portas-api.conf b/portas/etc/portas-api.conf
index be932a26..a292b82e 100644
--- a/portas/etc/portas-api.conf
+++ b/portas/etc/portas-api.conf
@@ -27,7 +27,6 @@ reports_queue = task-reports
[rabbitmq]
host = localhost
port = 5672
-use_ssl = false
-userid = keero
-password = keero
virtual_host = keero
+login = keero
+password = keero
diff --git a/portas/portas/api/middleware/context.py b/portas/portas/api/middleware/context.py
index fc81c5c3..b7ff31da 100644
--- a/portas/portas/api/middleware/context.py
+++ b/portas/portas/api/middleware/context.py
@@ -72,7 +72,8 @@ class ContextMiddleware(BaseContextMiddleware):
'user': req.headers.get('X-User-Id'),
'tenant': req.headers.get('X-Tenant-Id'),
'roles': roles,
- 'auth_tok': req.headers.get('X-Auth-Token', deprecated_token),
+ 'auth_token': req.headers.get('X-Auth-Token',
+ deprecated_token),
'service_catalog': service_catalog,
'session': req.headers.get('X-Configuration-Session')
}
diff --git a/portas/portas/api/v1/__init__.py b/portas/portas/api/v1/__init__.py
index 2c08d566..062b734d 100644
--- a/portas/portas/api/v1/__init__.py
+++ b/portas/portas/api/v1/__init__.py
@@ -28,10 +28,10 @@ def get_env_status(environment_id, session_id):
if not session_id:
variants = ['open', 'deploying']
- session = unit.query(Session).filter(Session.environment_id ==
- environment_id and
- Session.state.in_(variants)
- ).first()
+ session = unit.query(Session).filter(
+ Session.environment_id == environment_id and
+ Session.state.in_(variants)
+ ).first()
if session:
session_id = session.id
else:
@@ -79,10 +79,10 @@ def get_service_status(environment_id, session_id, service):
entities = [u['id'] for u in service['units']]
reports_count = unit.query(Status).filter(
- Status.environment_id == environment_id
- and Status.session_id == session_id
- and Status.entity_id.in_(entities)
- ).count()
+ Status.environment_id == environment_id
+ and Status.session_id == session_id
+ and Status.entity_id.in_(entities)
+ ).count()
if session_state == 'deployed':
status = 'finished'
diff --git a/portas/portas/api/v1/environments.py b/portas/portas/api/v1/environments.py
index 3c7f83c2..2473d4dd 100644
--- a/portas/portas/api/v1/environments.py
+++ b/portas/portas/api/v1/environments.py
@@ -101,7 +101,7 @@ class Controller(object):
connection = amqp.Connection('{0}:{1}'.
format(rabbitmq.host, rabbitmq.port),
virtual_host=rabbitmq.virtual_host,
- userid=rabbitmq.userid,
+ userid=rabbitmq.login,
password=rabbitmq.password,
ssl=rabbitmq.use_ssl, insist=True)
channel = connection.channel()
diff --git a/portas/portas/api/v1/sessions.py b/portas/portas/api/v1/sessions.py
index 5fb30c05..8d533c2e 100644
--- a/portas/portas/api/v1/sessions.py
+++ b/portas/portas/api/v1/sessions.py
@@ -31,17 +31,16 @@ class Controller(object):
log.debug(_('Session:Configure '.format(environment_id)))
params = {'environment_id': environment_id,
- 'user_id': request.context.user,
- 'state': 'open'}
+ 'user_id': request.context.user, 'state': 'open'}
session = Session()
session.update(params)
unit = get_session()
- if unit.query(Session).filter(Session.environment_id == environment_id
- and
- Session.state.in_(['open', 'deploing'])
- ).first():
+ if unit.query(Session).filter(
+ Session.environment_id == environment_id and
+ Session.state.in_(['open', 'deploying'])
+ ).first():
log.info('There is already open session for this environment')
raise exc.HTTPConflict
@@ -55,8 +54,8 @@ class Controller(object):
return session.to_dict()
def show(self, request, environment_id, session_id):
- log.debug(_('Session:Show '.
- format(environment_id, session_id)))
+ log.debug(_('Session:Show '.format(environment_id, session_id)))
unit = get_session()
session = unit.query(Session).get(session_id)
@@ -68,8 +67,8 @@ class Controller(object):
return session.to_dict()
def delete(self, request, environment_id, session_id):
- log.debug(_('Session:Delete '.
- format(environment_id, session_id)))
+ log.debug(_('Session:Delete '.format(environment_id, session_id)))
unit = get_session()
session = unit.query(Session).get(session_id)
@@ -85,17 +84,42 @@ class Controller(object):
return None
def reports(self, request, environment_id, session_id):
- log.debug(_('Session:Reports '.
- format(environment_id, session_id)))
+ log.debug(_('Session:Reports '.format(environment_id, session_id)))
unit = get_session()
- statuses = unit.query(Status).filter_by(session_id=session_id)
+ statuses = unit.query(Status).filter_by(session_id=session_id).all()
+ result = statuses
- return {'reports': [status.to_dict() for status in statuses]}
+ if 'service_id' in request.GET:
+ service_id = request.GET['service_id']
+
+ environment = unit.query(Session).get(session_id).description
+ services = []
+ if 'services' in environment and 'activeDirectories' in\
+ environment['services']:
+ services += environment['services']['activeDirectories']
+
+ if 'services' in environment and 'webServers' in\
+ environment['services']:
+ services += environment['services']['webServers']
+
+ service = [service for service in services
+ if service['id'] == service_id][0]
+
+ if service:
+ entities = [u['id'] for u in service['units']]
+ entities.append(service_id)
+ result = []
+ for status in statuses:
+ if status.entity_id in entities:
+ result.append(status)
+
+ return {'reports': [status.to_dict() for status in result]}
def deploy(self, request, environment_id, session_id):
- log.debug(_('Session:Deploy '.
- format(environment_id, session_id)))
+ log.debug(_('Session:Deploy '.format(environment_id, session_id)))
unit = get_session()
session = unit.query(Session).get(session_id)
@@ -115,7 +139,7 @@ class Controller(object):
connection = amqp.Connection('{0}:{1}'.
format(rabbitmq.host, rabbitmq.port),
virtual_host=rabbitmq.virtual_host,
- userid=rabbitmq.userid,
+ userid=rabbitmq.login,
password=rabbitmq.password,
ssl=rabbitmq.use_ssl, insist=True)
channel = connection.channel()
diff --git a/portas/portas/api/v1/webservers.py b/portas/portas/api/v1/webservers.py
index ddc73c35..b0e23568 100644
--- a/portas/portas/api/v1/webservers.py
+++ b/portas/portas/api/v1/webservers.py
@@ -36,7 +36,7 @@ class Controller(object):
for unit in webServer['units']:
unit_count += 1
unit['id'] = uuidutils.generate_uuid()
- unit['name'] = 'iis{0}'.format(unit_count)
+ unit['name'] = webServer['name'] + '_instance_' + str(unit_count)
draft = prepare_draft(draft)
draft['services']['webServers'].append(webServer)
diff --git a/portas/portas/common/config.py b/portas/portas/common/config.py
index 58a6378a..a04dcafe 100644
--- a/portas/portas/common/config.py
+++ b/portas/portas/common/config.py
@@ -52,7 +52,7 @@ rabbit_opts = [
cfg.StrOpt('host', default='localhost'),
cfg.IntOpt('port', default=5672),
cfg.BoolOpt('use_ssl', default=False),
- cfg.StrOpt('userid', default='guest'),
+ cfg.StrOpt('login', default='guest'),
cfg.StrOpt('password', default='guest'),
cfg.StrOpt('virtual_host', default='/'),
]
diff --git a/portas/portas/common/service.py b/portas/portas/common/service.py
index 238304ee..634f1edb 100644
--- a/portas/portas/common/service.py
+++ b/portas/portas/common/service.py
@@ -30,7 +30,7 @@ class TaskResultHandlerService(service.Service):
connection = amqp.Connection('{0}:{1}'.
format(rabbitmq.host, rabbitmq.port),
virtual_host=rabbitmq.virtual_host,
- userid=rabbitmq.userid,
+ userid=rabbitmq.login,
password=rabbitmq.password,
ssl=rabbitmq.use_ssl, insist=True)
ch = connection.channel()
@@ -80,7 +80,7 @@ def handle_result(msg):
'orchestration engine:\n{0}'.format(msg.body)))
environment_result = anyjson.deserialize(msg.body)
- if environment_result['deleted']:
+ if 'deleted' in environment_result:
log.debug(_('Result for environment {0} is dropped. '
'Environment is deleted'.format(environment_result['id'])))
diff --git a/portas/portas/context.py b/portas/portas/context.py
index c541829a..9db108dc 100644
--- a/portas/portas/context.py
+++ b/portas/portas/context.py
@@ -24,10 +24,10 @@ class RequestContext(object):
accesses the system, as well as additional request information.
"""
- def __init__(self, auth_tok=None, user=None, tenant=None,
+ def __init__(self, auth_token=None, user=None, tenant=None,
roles=None, service_catalog=None, session=None):
- self.auth_tok = auth_tok
+ self.auth_token = auth_token
self.user = user
self.tenant = tenant
self.roles = roles or []
@@ -51,7 +51,7 @@ class RequestContext(object):
'project_id': self.tenant,
'roles': self.roles,
- 'auth_token': self.auth_tok,
+ 'auth_token': self.auth_token,
'session': self.session
}
diff --git a/portas/portas/tests/api/simple_test.py b/portas/portas/tests/api/simple_test.py
index c8d86a51..836eed38 100644
--- a/portas/portas/tests/api/simple_test.py
+++ b/portas/portas/tests/api/simple_test.py
@@ -1,5 +1,5 @@
import unittest
-
+import portas.api.v1 as api
class Test(unittest.TestCase):
def test(self):
diff --git a/python-portasclient/portasclient/v1/services.py b/python-portasclient/portasclient/v1/services.py
index d195c6cc..2f87be03 100644
--- a/python-portasclient/portasclient/v1/services.py
+++ b/python-portasclient/portasclient/v1/services.py
@@ -12,6 +12,7 @@
# WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. See the
# License for the specific language governing permissions and limitations
# under the License.
+from mercurial import patch
from portasclient.common import base
@@ -49,10 +50,9 @@ class ActiveDirectoryManager(base.Manager):
def delete(self, environment_id, session_id, service_id):
headers = {'X-Configuration-Session': session_id}
path = 'environments/{id}/activeDirectories/{active_directory_id}'
+ path = path.format(id=environment_id, active_directory_id=service_id)
- return self._delete(path.format(id=environment_id,
- active_directory_id=service_id),
- headers=headers)
+ return self._delete(path, headers=headers)
class WebServer(base.Resource):
diff --git a/python-portasclient/portasclient/v1/sessions.py b/python-portasclient/portasclient/v1/sessions.py
index f5b32471..83bfd4d1 100644
--- a/python-portasclient/portasclient/v1/sessions.py
+++ b/python-portasclient/portasclient/v1/sessions.py
@@ -54,11 +54,13 @@ class SessionManager(base.Manager):
path.format(id=environment_id,
session_id=session_id))
- def reports(self, environment_id, session_id):
+ def reports(self, environment_id, session_id, service_id=None):
path = 'environments/{id}/sessions/{session_id}/reports'
- resp, body = self.api.json_request('GET',
- path.format(id=environment_id,
- session_id=session_id))
+ path = path.format(id=environment_id, session_id=session_id)
+ if service_id:
+ path += '?service_id={0}'.format(service_id)
+
+ resp, body = self.api.json_request('GET', path)
data = body.get('reports', [])
return [Status(self, res, loaded=True) for res in data if res]
diff --git a/python-portasclient/tests/portasclient/.project b/python-portasclient/tests/portasclient/.project
new file mode 100644
index 00000000..a099945c
--- /dev/null
+++ b/python-portasclient/tests/portasclient/.project
@@ -0,0 +1,17 @@
+
+
+ portasclient
+
+
+
+
+
+ org.python.pydev.PyDevBuilder
+
+
+
+
+
+ org.python.pydev.pythonNature
+
+
diff --git a/python-portasclient/tests/portasclient/.pydevproject b/python-portasclient/tests/portasclient/.pydevproject
new file mode 100644
index 00000000..5d866acd
--- /dev/null
+++ b/python-portasclient/tests/portasclient/.pydevproject
@@ -0,0 +1,8 @@
+
+
+
+/portasclient
+
+python 2.7
+Default
+
diff --git a/windc/openstack/oldcommon/middleware/__init__.py b/python-portasclient/tests/portasclient/__init__.py
similarity index 100%
rename from windc/openstack/oldcommon/middleware/__init__.py
rename to python-portasclient/tests/portasclient/__init__.py
diff --git a/python-portasclient/tests/portasclient/test_client_with_fake_http.py b/python-portasclient/tests/portasclient/test_client_with_fake_http.py
new file mode 100644
index 00000000..26cb6b22
--- /dev/null
+++ b/python-portasclient/tests/portasclient/test_client_with_fake_http.py
@@ -0,0 +1,194 @@
+import unittest
+import logging
+from httpretty import HTTPretty, httprettified
+from portasclient.client import Client
+
+
+LOG = logging.getLogger('Unit tests')
+
+
+class UnitTestsForClassesAndFunctions(unittest.TestCase):
+
+ @httprettified
+ def test_client_env_list_with_empty_list(self):
+ HTTPretty.register_uri(HTTPretty.GET,
+ "http://no-resolved-host:8001/environments",
+ body='{"environments": []}',
+ adding_headers={
+ 'Content-Type': 'application/json',})
+ endpoint = 'http://no-resolved-host:8001'
+ test_client = Client('1', endpoint=endpoint, token='1', timeout=10)
+
+ result = test_client.environments.list()
+ assert result == []
+
+ @httprettified
+ def test_client_env_list_with_elements(self):
+ body = ('{"environments":['
+ '{"id": "0ce373a477f211e187a55404a662f968",'
+ '"name": "dc1",'
+ '"created": "2010-11-30T03:23:42Z",'
+ '"updated": "2010-11-30T03:23:44Z",'
+ '"tenant-id": "0849006f7ce94961b3aab4e46d6f229a"},'
+ '{"id": "0ce373a477f211e187a55404a662f961",'
+ '"name": "dc2",'
+ '"created": "2010-11-30T03:23:42Z",'
+ '"updated": "2010-11-30T03:23:44Z",'
+ '"tenant-id": "0849006f7ce94961b3aab4e4626f229a"}'
+ ']}')
+ HTTPretty.register_uri(HTTPretty.GET,
+ "http://no-resolved-host:8001/environments",
+ body=body,
+ adding_headers={
+ 'Content-Type': 'application/json',})
+ endpoint = 'http://no-resolved-host:8001'
+ test_client = Client('1', endpoint=endpoint, token='1', timeout=10)
+
+ result = test_client.environments.list()
+ assert result[0].name == 'dc1'
+ assert result[-1].name == 'dc2'
+
+ @httprettified
+ def test_client_env_create(self):
+ body = ('{"id": "0ce373a477f211e187a55404a662f968",'
+ '"name": "test",'
+ '"created": "2010-11-30T03:23:42Z",'
+ '"updated": "2010-11-30T03:23:44Z",'
+ '"tenant-id": "0849006f7ce94961b3aab4e46d6f229a"}'
+ )
+ HTTPretty.register_uri(HTTPretty.POST,
+ "http://no-resolved-host:8001/environments",
+ body=body,
+ adding_headers={
+ 'Content-Type': 'application/json',})
+ endpoint = 'http://no-resolved-host:8001'
+ test_client = Client('1', endpoint=endpoint, token='1', timeout=10)
+
+ result = test_client.environments.create('test')
+ assert result.name == 'test'
+
+ @httprettified
+ def test_client_ad_list(self):
+ body = ('{"activeDirectories": [{'
+ '"id": "1",'
+ '"name": "dc1",'
+ '"created": "2010-11-30T03:23:42Z",'
+ '"updated": "2010-11-30T03:23:44Z",'
+ '"configuration": "standalone",'
+ '"units": [{'
+ '"id": "0ce373a477f211e187a55404a662f961",'
+ '"type": "master",'
+ '"location": "test"}]}]}')
+ url = ("http://no-resolved-host:8001/environments"
+ "/1/activeDirectories")
+ HTTPretty.register_uri(HTTPretty.GET, url,
+ body=body,
+ adding_headers={
+ 'Content-Type': 'application/json',})
+ endpoint = 'http://no-resolved-host:8001'
+ test_client = Client('1', endpoint=endpoint, token='1', timeout=10)
+
+ result = test_client.activeDirectories.list('1', 'test')
+ assert result[0].name == 'dc1'
+
+ @httprettified
+ def test_client_ad_create(self):
+ body = ('{'
+ '"id": "1",'
+ '"name": "ad1",'
+ '"created": "2010-11-30T03:23:42Z",'
+ '"updated": "2010-11-30T03:23:44Z",'
+ '"configuration": "standalone",'
+ '"units": [{'
+ '"id": "0ce373a477f211e187a55404a662f961",'
+ '"type": "master",'
+ '"location": "test"}]}')
+ url = ("http://no-resolved-host:8001/environments"
+ "/1/activeDirectories")
+ HTTPretty.register_uri(HTTPretty.POST, url,
+ body=body,
+ adding_headers={
+ 'Content-Type': 'application/json',})
+ endpoint = 'http://no-resolved-host:8001'
+ test_client = Client('1', endpoint=endpoint, token='1', timeout=10)
+
+ result = test_client.activeDirectories.create('1', 'test', 'ad1')
+ assert result.name == 'ad1'
+
+ @httprettified
+ def test_client_ad_list_without_elements(self):
+ body = ('{"activeDirectories": []}')
+ url = ("http://no-resolved-host:8001/environments"
+ "/1/activeDirectories")
+ HTTPretty.register_uri(HTTPretty.GET, url,
+ body=body,
+ adding_headers={
+ 'Content-Type': 'application/json',})
+ endpoint = 'http://no-resolved-host:8001'
+ test_client = Client('1', endpoint=endpoint, token='1', timeout=10)
+
+ result = test_client.activeDirectories.list('1', 'test')
+ assert result == []
+
+ @httprettified
+ def test_client_iis_list(self):
+ body = ('{"webServers": [{'
+ '"id": "1",'
+ '"name": "iis11",'
+ '"created": "2010-11-30T03:23:42Z",'
+ '"updated": "2010-11-30T03:23:44Z",'
+ '"domain": "acme",'
+ '"units": [{'
+ '"id": "0ce373a477f211e187a55404a662f961",'
+ '"endpoint": {"host": "1.1.1.1"},'
+ '"location": "test"}]}]}')
+ url = ("http://no-resolved-host:8001/environments"
+ "/1/webServers")
+ HTTPretty.register_uri(HTTPretty.GET, url,
+ body=body,
+ adding_headers={
+ 'Content-Type': 'application/json',})
+ endpoint = 'http://no-resolved-host:8001'
+ test_client = Client('1', endpoint=endpoint, token='1', timeout=10)
+
+ result = test_client.webServers.list('1', 'test')
+ assert result[0].name == 'iis11'
+
+ @httprettified
+ def test_client_iis_create(self):
+ body = ('{'
+ '"id": "1",'
+ '"name": "iis12",'
+ '"created": "2010-11-30T03:23:42Z",'
+ '"updated": "2010-11-30T03:23:44Z",'
+ '"domain": "acme",'
+ '"units": [{'
+ '"id": "0ce373a477f211e187a55404a662f961",'
+ '"endpoint": {"host": "1.1.1.1"},'
+ '"location": "test"}]}')
+ url = ("http://no-resolved-host:8001/environments"
+ "/1/webServers")
+ HTTPretty.register_uri(HTTPretty.POST, url,
+ body=body,
+ adding_headers={
+ 'Content-Type': 'application/json',})
+ endpoint = 'http://no-resolved-host:8001'
+ test_client = Client('1', endpoint=endpoint, token='1', timeout=10)
+
+ result = test_client.webServers.create('1', 'test', 'iis12')
+ assert result.name == 'iis12'
+
+ @httprettified
+ def test_client_iis_list_without_elements(self):
+ body = ('{"webServers": []}')
+ url = ("http://no-resolved-host:8001/environments"
+ "/1/webServers")
+ HTTPretty.register_uri(HTTPretty.GET, url,
+ body=body,
+ adding_headers={
+ 'Content-Type': 'application/json',})
+ endpoint = 'http://no-resolved-host:8001'
+ test_client = Client('1', endpoint=endpoint, token='1', timeout=10)
+
+ result = test_client.webServers.list('1', 'test')
+ assert result == []
\ No newline at end of file
diff --git a/python-portasclient/tests/portasclient/test_methods.py b/python-portasclient/tests/portasclient/test_methods.py
new file mode 100644
index 00000000..4ca048ce
--- /dev/null
+++ b/python-portasclient/tests/portasclient/test_methods.py
@@ -0,0 +1,467 @@
+import unittest
+import logging
+from mock import MagicMock
+
+from portasclient.client import Client
+import portasclient.v1.environments as environments
+import portasclient.v1.services as services
+import portasclient.v1.sessions as sessions
+
+def my_mock(*a, **b):
+ return [a, b]
+
+LOG = logging.getLogger('Unit tests')
+api = MagicMock(json_request=my_mock)
+
+
+class UnitTestsForClassesAndFunctions(unittest.TestCase):
+
+ def test_create_client_instance(self):
+
+ endpoint = 'http://no-resolved-host:8001'
+ test_client = Client('1', endpoint=endpoint, token='1', timeout=10)
+
+ assert test_client.environments is not None
+ assert test_client.sessions is not None
+ assert test_client.activeDirectories is not None
+ assert test_client.webServers is not None
+
+ def test_env_manager_list(self):
+ manager = environments.EnvironmentManager(api)
+ result = manager.list()
+ assert result == []
+
+ def test_env_manager_create(self):
+ manager = environments.EnvironmentManager(api)
+ result = manager.create('test')
+ assert result.body == {'name': 'test'}
+
+ def test_env_manager_create_with_named_parameters(self):
+ manager = environments.EnvironmentManager(api)
+ result = manager.create(name='test')
+ assert result.body == {'name': 'test'}
+
+ def test_env_manager_create_negative_without_parameters(self):
+ result = 'Exception'
+ manager = environments.EnvironmentManager(api)
+ try:
+ result = manager.create()
+ except TypeError:
+ pass
+ assert result is 'Exception'
+
+ def test_env_manager_delete(self):
+ manager = environments.EnvironmentManager(api)
+ result = manager.delete('test')
+ assert result is None
+
+ def test_env_manager_delete_with_named_parameters(self):
+ manager = environments.EnvironmentManager(api)
+ result = manager.delete(environment_id='1')
+ assert result is None
+
+ def test_env_manager_delete_negative_without_parameters(self):
+ result = 'Exception'
+ manager = environments.EnvironmentManager(api)
+ try:
+ result = manager.delete()
+ except TypeError:
+ pass
+ assert result is 'Exception'
+
+ def test_env_manager_update(self):
+ manager = environments.EnvironmentManager(api)
+ result = manager.update('1', 'test')
+ assert result.body == {'name': 'test'}
+
+ def test_env_manager_update_with_named_parameters(self):
+ manager = environments.EnvironmentManager(api)
+ result = manager.update(environment_id='1',
+ name='test')
+ assert result.body == {'name': 'test'}
+
+ def test_env_manager_update_negative_with_one_parameter(self):
+ result = 'Exception'
+ manager = environments.EnvironmentManager(api)
+ try:
+ result = manager.update('test')
+ except TypeError:
+ pass
+ assert result is 'Exception'
+
+ def test_env_manager_update_negative_without_parameters(self):
+ result = 'Exception'
+ manager = environments.EnvironmentManager(api)
+ try:
+ result = manager.update()
+ except TypeError:
+ pass
+ assert result is 'Exception'
+
+ def test_env_manager_get(self):
+ manager = environments.EnvironmentManager(api)
+ result = manager.get('test')
+ ## WTF?
+ assert result.manager is not None
+
+ def test_env(self):
+ environment = environments.Environment(api, api)
+ assert environment.data() is not None
+
+ def test_ad_manager_list_with_one_parameter(self):
+ manager = services.ActiveDirectoryManager(api)
+ result = manager.list('datacenter1')
+ assert result == []
+
+ def test_ad_manager_list_with_all_parameters(self):
+ manager = services.ActiveDirectoryManager(api)
+ result = manager.list('test', '1')
+ assert result == []
+
+ def test_ad_manager_list_with_named_parameters(self):
+ manager = services.ActiveDirectoryManager(api)
+ result = manager.list(environment_id='test', session_id='1')
+ assert result == []
+
+ def test_ad_manager_list_with_named_parameter(self):
+ manager = services.ActiveDirectoryManager(api)
+ result = manager.list(environment_id='test')
+ assert result == []
+
+ def test_ad_manager_list_negative_without_parameters(self):
+ result = 'Exception'
+ manager = services.ActiveDirectoryManager(api)
+ try:
+ result = manager.list()
+ except TypeError:
+ pass
+ assert result is 'Exception'
+
+ def test_ad_manager_create(self):
+ manager = services.ActiveDirectoryManager(api)
+ result = manager.create('datacenter1', 'session1', 'test')
+ assert result.headers == {'X-Configuration-Session': 'session1'}
+ assert result.body == 'test'
+
+ def test_ad_manager_create_with_named_parameters(self):
+ manager = services.ActiveDirectoryManager(api)
+ result = manager.create(environment_id='datacenter1',
+ session_id='session2',
+ active_directory='test2')
+ assert result.headers == {'X-Configuration-Session': 'session2'}
+ assert result.body == 'test2'
+
+ def test_ad_manager_create_negative_with_two_parameters(self):
+ result = 'Exception'
+ manager = services.ActiveDirectoryManager(api)
+ try:
+ result = manager.create('datacenter1', 'session1')
+ except TypeError:
+ pass
+ assert result is 'Exception'
+
+ def test_ad_manager_create_negative_with_one_parameter(self):
+ result = 'Exception'
+ manager = services.ActiveDirectoryManager(api)
+ try:
+ result = manager.create('datacenter1')
+ except TypeError:
+ pass
+ assert result is 'Exception'
+
+ def test_ad_manager_create_negative_without_parameters(self):
+ result = 'Exception'
+ manager = services.ActiveDirectoryManager(api)
+ try:
+ result = manager.create()
+ except TypeError:
+ pass
+ assert result is 'Exception'
+
+ def test_ad_manager_delete(self):
+ manager = services.ActiveDirectoryManager(api)
+ result = manager.delete('datacenter1', 'session1', 'test')
+ assert result is None
+
+ def test_ad_manager_delete_with_named_parameters(self):
+ manager = services.ActiveDirectoryManager(api)
+ result = manager.delete(environment_id='datacenter1',
+ session_id='session1',
+ service_id='test')
+ assert result is None
+
+ def test_ad_manager_delete_negative_with_two_parameters(self):
+ result = 'Exception'
+ manager = services.ActiveDirectoryManager(api)
+ try:
+ result = manager.delete('datacenter1', 'session1')
+ except TypeError:
+ pass
+ assert result == 'Exception'
+
+ def test_ad_manager_delete_negative_with_one_parameter(self):
+ result = 'Exception'
+ manager = services.ActiveDirectoryManager(api)
+ try:
+ result = manager.delete('datacenter1')
+ except TypeError:
+ pass
+ assert result == 'Exception'
+
+ def test_ad_manager_delete_negative_without_parameters(self):
+ result = 'Exception'
+ manager = services.ActiveDirectoryManager(api)
+ try:
+ result = manager.delete()
+ except TypeError:
+ pass
+ assert result == 'Exception'
+
+ def test_iis_manager_list_with_one_parameter(self):
+ manager = services.WebServerManager(api)
+ result = manager.list('datacenter1')
+ assert result == []
+
+ def test_iis_manager_list_with_named_parameter(self):
+ manager = services.WebServerManager(api)
+ result = manager.list(environment_id='datacenter1')
+ assert result == []
+
+ def test_iis_manager_list_with_all_parameters(self):
+ manager = services.WebServerManager(api)
+ result = manager.list('test', '1')
+ assert result == []
+
+ def test_iis_manager_list_with_named_parameters(self):
+ manager = services.WebServerManager(api)
+ result = manager.list(environment_id='test',
+ session_id='1')
+ assert result == []
+
+ def test_iis_manager_list_negative_without_parameters(self):
+ result = 'Exception'
+ manager = services.WebServerManager(api)
+ try:
+ result = manager.list()
+ except TypeError:
+ pass
+ assert result == 'Exception'
+
+ def test_iis_manager_create(self):
+ manager = services.WebServerManager(api)
+ result = manager.create('datacenter1', 'session1', 'test')
+ assert result.headers == {'X-Configuration-Session': 'session1'}
+ assert result.body == 'test'
+
+ def test_iis_manager_create_with_named_parameters(self):
+ manager = services.WebServerManager(api)
+ result = manager.create(environment_id='datacenter',
+ session_id='session',
+ web_server='test2')
+ assert result.headers == {'X-Configuration-Session': 'session'}
+ assert result.body == 'test2'
+
+ def test_iis_manager_create_negative_with_two_parameters(self):
+ result = 'Exception'
+ manager = services.WebServerManager(api)
+ try:
+ result = manager.create('datacenter1', 'session1')
+ except TypeError:
+ pass
+ assert result == 'Exception'
+
+ def test_iis_manager_create_negative_with_one_parameter(self):
+ result = 'Exception'
+ manager = services.WebServerManager(api)
+ try:
+ result = manager.create('datacenter1')
+ except TypeError:
+ pass
+ assert result == 'Exception'
+
+ def test_iis_manager_create_negative_without_parameters(self):
+ result = 'Exception'
+ manager = services.WebServerManager(api)
+ try:
+ result = manager.create()
+ except TypeError:
+ pass
+ assert result == 'Exception'
+
+ def test_iis_manager_delete(self):
+ manager = services.WebServerManager(api)
+ result = manager.delete('datacenter1', 'session1', 'test')
+ assert result is None
+
+ def test_iis_manager_delete_with_named_parameters(self):
+ manager = services.WebServerManager(api)
+ result = manager.delete(environment_id='datacenter',
+ session_id='session',
+ service_id='test')
+ assert result is None
+
+ def test_iis_manager_delete_negative_with_two_parameters(self):
+ result = 'Exception'
+ manager = services.WebServerManager(api)
+ try:
+ result = manager.delete('datacenter1', 'session1')
+ except TypeError:
+ pass
+ assert result == 'Exception'
+
+ def test_iis_manager_delete_negative_with_one_parameter(self):
+ result = 'Exception'
+ manager = services.WebServerManager(api)
+ try:
+ result = manager.delete('datacenter1')
+ except TypeError:
+ pass
+ assert result == 'Exception'
+
+ def test_iis_manager_delete_negative_without_parameters(self):
+ result = 'Exception'
+ manager = services.WebServerManager(api)
+ try:
+ result = manager.delete()
+ except TypeError:
+ pass
+ assert result == 'Exception'
+
+ def test_service_ad(self):
+ service_ad = services.ActiveDirectory(api, api)
+ assert service_ad.data() is not None
+
+ def test_service_iis(self):
+ service_iis = services.ActiveDirectory(api, api)
+ assert service_iis.data() is not None
+
+ def test_session_manager_list(self):
+ manager = sessions.SessionManager(api)
+ result = manager.list('datacenter1')
+ assert result == []
+
+ def test_session_manager_list_with_named_parameters(self):
+ manager = sessions.SessionManager(api)
+ result = manager.list(environment_id='datacenter1')
+ assert result == []
+
+ def test_session_manager_list_negative_without_parameters(self):
+ result = 'Exception'
+ manager = sessions.SessionManager(api)
+ try:
+ result = manager.list()
+ except TypeError:
+ pass
+ assert result == 'Exception'
+
+ def test_session_manager_delete(self):
+ manager = sessions.SessionManager(api)
+ result = manager.delete('datacenter1', 'session1')
+ assert result is None
+
+ def test_session_manager_delete_with_named_parameters(self):
+ manager = sessions.SessionManager(api)
+ result = manager.delete(environment_id='datacenter1',
+ session_id='session1')
+ assert result is None
+
+ def test_session_manager_delete_negative_with_one_parameter(self):
+ result = 'Exception'
+ manager = sessions.SessionManager(api)
+ try:
+ result = manager.delete('datacenter1')
+ except TypeError:
+ pass
+ assert result == 'Exception'
+
+ def test_session_manager_delete_negative_without_parameters(self):
+ result = 'Exception'
+ manager = sessions.SessionManager(api)
+ try:
+ result = manager.delete()
+ except TypeError:
+ pass
+ assert result == 'Exception'
+
+ def test_session_manager_get(self):
+ manager = sessions.SessionManager(api)
+ result = manager.get('datacenter1', 'session1')
+ # WTF?
+ assert result.manager is not None
+
+ def test_session_manager_configure(self):
+ manager = sessions.SessionManager(api)
+ result = manager.configure('datacenter1')
+ assert result is not None
+
+ def test_session_manager_configure_with_named_parameter(self):
+ manager = sessions.SessionManager(api)
+ result = manager.configure(environment_id='datacenter1')
+ assert result is not None
+
+ def test_session_manager_configure_negative_without_parameters(self):
+ result = 'Exception'
+ manager = sessions.SessionManager(api)
+ try:
+ result = manager.configure()
+ except TypeError:
+ pass
+ assert result == 'Exception'
+
+ def test_session_manager_deploy(self):
+ manager = sessions.SessionManager(api)
+ result = manager.deploy('datacenter1', '1')
+ assert result is None
+
+ def test_session_manager_deploy_with_named_parameters(self):
+ manager = sessions.SessionManager(api)
+ result = manager.deploy(environment_id='datacenter1',
+ session_id='1')
+ assert result is None
+
+ def test_session_manager_deploy_negative_with_one_parameter(self):
+ result = 'Exception'
+ manager = sessions.SessionManager(api)
+ try:
+ result = manager.deploy('datacenter1')
+ except TypeError:
+ pass
+ assert result == 'Exception'
+
+ def test_session_manager_deploy_negative_without_parameters(self):
+ result = 'Exception'
+ manager = sessions.SessionManager(api)
+ try:
+ result = manager.deploy()
+ except TypeError:
+ pass
+ assert result == 'Exception'
+
+ def test_session_manager_reports(self):
+ manager = sessions.SessionManager(api)
+ result = manager.reports('datacenter1', '1')
+ assert result == []
+
+ def test_session_manager_reports_with_named_parameters(self):
+ manager = sessions.SessionManager(api)
+ result = manager.reports(environment_id='datacenter1',
+ session_id='1')
+ assert result == []
+
+ def test_session_manager_reports_negative_with_one_parameter(self):
+ result = 'Exception'
+ manager = sessions.SessionManager(api)
+ try:
+ result = manager.reports('datacenter1')
+ except TypeError:
+ pass
+ assert result == 'Exception'
+
+ def test_session_manager_reports_negative_without_parameters(self):
+ result = 'Exception'
+ manager = sessions.SessionManager(api)
+ try:
+ result = manager.reports()
+ except TypeError:
+ pass
+ assert result == 'Exception'
diff --git a/python-portasclient/tests/test_sanity.py b/python-portasclient/tests/test_sanity.py
deleted file mode 100644
index 87da75da..00000000
--- a/python-portasclient/tests/test_sanity.py
+++ /dev/null
@@ -1,155 +0,0 @@
-import os
-import unittest
-import logging
-from mock import MagicMock
-from mock import patch
-
-from portasclient.client import Client as CommonClient
-from portasclient.v1 import Client
-import portasclient.v1.environments as environments
-import portasclient.v1.services as services
-import portasclient.v1.sessions as sessions
-
-import portasclient.shell as shell
-import portasclient.common.http as http
-
-
-LOG = logging.getLogger('Unit tests')
-
-
-def my_mock(*a, **b):
- return [a, b]
-
-
-api = MagicMock(json_request=my_mock)
-
-
-class SanityUnitTests(unittest.TestCase):
-
- def test_create_client_instance(self):
-
- endpoint = 'http://localhost:8001'
- test_client = Client(endpoint=endpoint, token='1', timeout=10)
-
- assert test_client.environments is not None
- assert test_client.sessions is not None
- assert test_client.activeDirectories is not None
- assert test_client.webServers is not None
-
- def test_common_client(self):
- endpoint = 'http://localhost:8001'
- test_client = CommonClient('1', endpoint=endpoint, token='1', timeout=10)
-
- assert test_client.environments is not None
- assert test_client.sessions is not None
- assert test_client.activeDirectories is not None
- assert test_client.webServers is not None
-
- def test_env_manager_list(self):
- manager = environments.EnvironmentManager(api)
- result = manager.list()
- assert result == []
-
- def test_env_manager_create(self):
- manager = environments.EnvironmentManager(api)
- result = manager.create('test')
- assert result.headers == {}
- assert result.body == {'name': 'test'}
-
- def test_env_manager_delete(self):
- manager = environments.EnvironmentManager(api)
- result = manager.delete('test')
- assert result is None
-
- def test_env_manager_update(self):
- manager = environments.EnvironmentManager(api)
- result = manager.update('1', 'test')
- assert result.body == {'name': 'test'}
-
- def test_env_manager_get(self):
- manager = environments.EnvironmentManager(api)
- result = manager.get('test')
- ## WTF?
- assert result.manager is not None
-
- def test_env(self):
- environment = environments.Environment(api, api)
- assert environment.data() is not None
-
- def test_ad_manager_list(self):
- manager = services.ActiveDirectoryManager(api)
- result = manager.list('datacenter1')
- assert result == []
- result = manager.list('datacenter1', '1')
- assert result == []
-
- def test_ad_manager_create(self):
- manager = services.ActiveDirectoryManager(api)
- result = manager.create('datacenter1', 'session1', 'test')
- assert result.headers == {'X-Configuration-Session': 'session1'}
- assert result.body == 'test'
-
- #@unittest.skip("https://mirantis.jira.com/browse/KEERO-218")
- def test_ad_manager_delete(self):
- manager = services.ActiveDirectoryManager(api)
- result = manager.delete('datacenter1', 'session1', 'test')
- assert result is None
-
- def test_iis_manager_list(self):
- manager = services.WebServerManager(api)
- result = manager.list('datacenter1')
- assert result == []
- result = manager.list('datacenter1', '1')
- assert result == []
-
- def test_iis_manager_create(self):
- manager = services.WebServerManager(api)
- result = manager.create('datacenter1', 'session1', 'test')
- assert result.headers == {'X-Configuration-Session': 'session1'}
- assert result.body == 'test'
-
- #@unittest.skip("https://mirantis.jira.com/browse/KEERO-218")
- def test_iis_manager_delete(self):
- manager = services.WebServerManager(api)
- result = manager.delete('datacenter1', 'session1', 'test')
- assert result is None
-
- def test_service_ad(self):
- service_ad = services.ActiveDirectory(api, api)
- assert service_ad.data() is not None
-
- def test_service_iis(self):
- service_iis = services.ActiveDirectory(api, api)
- assert service_iis.data() is not None
-
- def test_session_manager_list(self):
- manager = sessions.SessionManager(api)
- result = manager.list('datacenter1')
- assert result == []
-
- def test_session_manager_delete(self):
- manager = sessions.SessionManager(api)
- result = manager.delete('datacenter1', 'session1')
- assert result is None
-
- def test_session_manager_get(self):
- manager = sessions.SessionManager(api)
- result = manager.get('datacenter1', 'session1')
- # WTF?
- assert result.manager is not None
-
- def test_session_manager_configure(self):
- manager = sessions.SessionManager(api)
- result = manager.configure('datacenter1')
- assert result.headers == {}
-
- def test_session_manager_deploy(self):
- manager = sessions.SessionManager(api)
- result = manager.deploy('datacenter1', '1')
- assert result is None
-
- #@unittest.skip("https://mirantis.jira.com/browse/KEERO-219")
- def test_session_manager_reports(self):
- manager = sessions.SessionManager(api)
- result = manager.reports('datacenter1', '1')
- assert result == []
\ No newline at end of file
diff --git a/tabula/.gitignore b/tabula/.gitignore
new file mode 100644
index 00000000..61cac8c5
--- /dev/null
+++ b/tabula/.gitignore
@@ -0,0 +1,17 @@
+*.pyc
+*.swp
+.environment_version
+.selenium_log
+.coverage*
+.noseids
+.venv
+coverage.xml
+pep8.txt
+pylint.txt
+reports
+tabula/local/local_settings.py
+/static/
+docs/build/
+docs/source/sourcecode
+build
+dist
diff --git a/tabula/README.rst b/tabula/README.rst
new file mode 100644
index 00000000..b5eaa700
--- /dev/null
+++ b/tabula/README.rst
@@ -0,0 +1,34 @@
+====================================
+Horizon Customization Demo Dashboard
+====================================
+
+This Django project demonstrates how the `Horizon`_ app can be used to
+construct customized dashboards (for OpenStack or anything else).
+
+The ``horizon`` module is pulled down from GitHub during setup
+(see setup instructions below) and added to the virtual environment.
+
+.. _Horizon: http://github.com/openstack/horizon
+
+Setup Instructions
+==================
+
+The following should get you started::
+
+ $ git clone https://github.com/gabrielhurley/horizon_demo.git
+ $ cd horizon_demo
+ $ python tools/install_venv.py
+ $ cp demo_dashboard/local/local_settings.py.example demo_dashboard/local/local_settings.py
+
+Edit the ``local_settings.py`` file as needed.
+
+When you're ready to run the development server::
+
+ $ ./run_tests.sh --runserver
+
+Using Fake Test Data
+====================
+
+If you want a more interesting visualization demo, you can uncomment line
+24 of ``dashboards/visualizations/flocking/views.py`` to load fake instance
+data instead of using data from a real Nova endpoint.
\ No newline at end of file
diff --git a/tabula/bin/less/lessc b/tabula/bin/less/lessc
new file mode 100755
index 00000000..30ae3520
--- /dev/null
+++ b/tabula/bin/less/lessc
@@ -0,0 +1,139 @@
+#!/usr/bin/env node
+
+var path = require('path'),
+ fs = require('fs'),
+ sys = require('util'),
+ os = require('os');
+
+var less = require('../lib/less');
+var args = process.argv.slice(1);
+var options = {
+ compress: false,
+ yuicompress: false,
+ optimization: 1,
+ silent: false,
+ paths: [],
+ color: true,
+ strictImports: false
+};
+
+args = args.filter(function (arg) {
+ var match;
+
+ if (match = arg.match(/^-I(.+)$/)) {
+ options.paths.push(match[1]);
+ return false;
+ }
+
+ if (match = arg.match(/^--?([a-z][0-9a-z-]*)(?:=([^\s]+))?$/i)) { arg = match[1] }
+ else { return arg }
+
+ switch (arg) {
+ case 'v':
+ case 'version':
+ sys.puts("lessc " + less.version.join('.') + " (LESS Compiler) [JavaScript]");
+ process.exit(0);
+ case 'verbose':
+ options.verbose = true;
+ break;
+ case 's':
+ case 'silent':
+ options.silent = true;
+ break;
+ case 'strict-imports':
+ options.strictImports = true;
+ break;
+ case 'h':
+ case 'help':
+ sys.puts("usage: lessc source [destination]");
+ process.exit(0);
+ case 'x':
+ case 'compress':
+ options.compress = true;
+ break;
+ case 'yui-compress':
+ options.yuicompress = true;
+ break;
+ case 'no-color':
+ options.color = false;
+ break;
+ case 'include-path':
+ options.paths = match[2].split(os.type().match(/Windows/) ? ';' : ':')
+ .map(function(p) {
+ if (p) {
+ return path.resolve(process.cwd(), p);
+ }
+ });
+ break;
+ case 'O0': options.optimization = 0; break;
+ case 'O1': options.optimization = 1; break;
+ case 'O2': options.optimization = 2; break;
+ }
+});
+
+var input = args[1];
+if (input && input != '-') {
+ input = path.resolve(process.cwd(), input);
+}
+var output = args[2];
+if (output) {
+ output = path.resolve(process.cwd(), output);
+}
+
+var css, fd, tree;
+
+if (! input) {
+ sys.puts("lessc: no input files");
+ process.exit(1);
+}
+
+var parseLessFile = function (e, data) {
+ if (e) {
+ sys.puts("lessc: " + e.message);
+ process.exit(1);
+ }
+
+ new(less.Parser)({
+ paths: [path.dirname(input)].concat(options.paths),
+ optimization: options.optimization,
+ filename: input,
+ strictImports: options.strictImports
+ }).parse(data, function (err, tree) {
+ if (err) {
+ less.writeError(err, options);
+ process.exit(1);
+ } else {
+ try {
+ css = tree.toCSS({
+ compress: options.compress,
+ yuicompress: options.yuicompress
+ });
+ if (output) {
+ fd = fs.openSync(output, "w");
+ fs.writeSync(fd, css, 0, "utf8");
+ } else {
+ sys.print(css);
+ }
+ } catch (e) {
+ less.writeError(e, options);
+ process.exit(2);
+ }
+ }
+ });
+};
+
+if (input != '-') {
+ fs.readFile(input, 'utf-8', parseLessFile);
+} else {
+ process.stdin.resume();
+ process.stdin.setEncoding('utf8');
+
+ var buffer = '';
+ process.stdin.on('data', function(data) {
+ buffer += data;
+ });
+
+ process.stdin.on('end', function() {
+ parseLessFile(false, buffer);
+ });
+}
diff --git a/tabula/bin/lib/less/browser.js b/tabula/bin/lib/less/browser.js
new file mode 100644
index 00000000..cab913be
--- /dev/null
+++ b/tabula/bin/lib/less/browser.js
@@ -0,0 +1,380 @@
+//
+// browser.js - client-side engine
+//
+
+var isFileProtocol = (location.protocol === 'file:' ||
+ location.protocol === 'chrome:' ||
+ location.protocol === 'chrome-extension:' ||
+ location.protocol === 'resource:');
+
+less.env = less.env || (location.hostname == '127.0.0.1' ||
+ location.hostname == '0.0.0.0' ||
+ location.hostname == 'localhost' ||
+ location.port.length > 0 ||
+ isFileProtocol ? 'development'
+ : 'production');
+
+// Load styles asynchronously (default: false)
+//
+// This is set to `false` by default, so that the body
+// doesn't start loading before the stylesheets are parsed.
+// Setting this to `true` can result in flickering.
+//
+less.async = false;
+
+// Interval between watch polls
+less.poll = less.poll || (isFileProtocol ? 1000 : 1500);
+
+//
+// Watch mode
+//
+less.watch = function () { return this.watchMode = true };
+less.unwatch = function () { return this.watchMode = false };
+
+if (less.env === 'development') {
+ less.optimization = 0;
+
+ if (/!watch/.test(location.hash)) {
+ less.watch();
+ }
+ less.watchTimer = setInterval(function () {
+ if (less.watchMode) {
+ loadStyleSheets(function (e, root, _, sheet, env) {
+ if (root) {
+ createCSS(root.toCSS(), sheet, env.lastModified);
+ }
+ });
+ }
+ }, less.poll);
+} else {
+ less.optimization = 3;
+}
+
+var cache;
+
+try {
+ cache = (typeof(window.localStorage) === 'undefined') ? null : window.localStorage;
+} catch (_) {
+ cache = null;
+}
+
+//
+// Get all tags with the 'rel' attribute set to "stylesheet/less"
+//
+var links = document.getElementsByTagName('link');
+var typePattern = /^text\/(x-)?less$/;
+
+less.sheets = [];
+
+for (var i = 0; i < links.length; i++) {
+ if (links[i].rel === 'stylesheet/less' || (links[i].rel.match(/stylesheet/) &&
+ (links[i].type.match(typePattern)))) {
+ less.sheets.push(links[i]);
+ }
+}
+
+
+less.refresh = function (reload) {
+ var startTime, endTime;
+ startTime = endTime = new(Date);
+
+ loadStyleSheets(function (e, root, _, sheet, env) {
+ if (env.local) {
+ log("loading " + sheet.href + " from cache.");
+ } else {
+ log("parsed " + sheet.href + " successfully.");
+ createCSS(root.toCSS(), sheet, env.lastModified);
+ }
+ log("css for " + sheet.href + " generated in " + (new(Date) - endTime) + 'ms');
+ (env.remaining === 0) && log("css generated in " + (new(Date) - startTime) + 'ms');
+ endTime = new(Date);
+ }, reload);
+
+ loadStyles();
+};
+less.refreshStyles = loadStyles;
+
+less.refresh(less.env === 'development');
+
+function loadStyles() {
+ var styles = document.getElementsByTagName('style');
+ for (var i = 0; i < styles.length; i++) {
+ if (styles[i].type.match(typePattern)) {
+ new(less.Parser)().parse(styles[i].innerHTML || '', function (e, tree) {
+ var css = tree.toCSS();
+ var style = styles[i];
+ style.type = 'text/css';
+ if (style.styleSheet) {
+ style.styleSheet.cssText = css;
+ } else {
+ style.innerHTML = css;
+ }
+ });
+ }
+ }
+}
+
+function loadStyleSheets(callback, reload) {
+ for (var i = 0; i < less.sheets.length; i++) {
+ loadStyleSheet(less.sheets[i], callback, reload, less.sheets.length - (i + 1));
+ }
+}
+
+function loadStyleSheet(sheet, callback, reload, remaining) {
+ var url = window.location.href.replace(/[#?].*$/, '');
+ var href = sheet.href.replace(/\?.*$/, '');
+ var css = cache && cache.getItem(href);
+ var timestamp = cache && cache.getItem(href + ':timestamp');
+ var styles = { css: css, timestamp: timestamp };
+
+ // Stylesheets in IE don't always return the full path
+ if (! /^(https?|file):/.test(href)) {
+ if (href.charAt(0) == "/") {
+ href = window.location.protocol + "//" + window.location.host + href;
+ } else {
+ href = url.slice(0, url.lastIndexOf('/') + 1) + href;
+ }
+ }
+ var filename = href.match(/([^\/]+)$/)[1];
+
+ xhr(sheet.href, sheet.type, function (data, lastModified) {
+ if (!reload && styles && lastModified &&
+ (new(Date)(lastModified).valueOf() ===
+ new(Date)(styles.timestamp).valueOf())) {
+ // Use local copy
+ createCSS(styles.css, sheet);
+ callback(null, null, data, sheet, { local: true, remaining: remaining });
+ } else {
+ // Use remote copy (re-parse)
+ try {
+ new(less.Parser)({
+ optimization: less.optimization,
+ paths: [href.replace(/[\w\.-]+$/, '')],
+ mime: sheet.type,
+ filename: filename
+ }).parse(data, function (e, root) {
+ if (e) { return error(e, href) }
+ try {
+ callback(e, root, data, sheet, { local: false, lastModified: lastModified, remaining: remaining });
+ removeNode(document.getElementById('less-error-message:' + extractId(href)));
+ } catch (e) {
+ error(e, href);
+ }
+ });
+ } catch (e) {
+ error(e, href);
+ }
+ }
+ }, function (status, url) {
+ throw new(Error)("Couldn't load " + url + " (" + status + ")");
+ });
+}
+
+function extractId(href) {
+ return href.replace(/^[a-z]+:\/\/?[^\/]+/, '' ) // Remove protocol & domain
+ .replace(/^\//, '' ) // Remove root /
+ .replace(/\?.*$/, '' ) // Remove query
+ .replace(/\.[^\.\/]+$/, '' ) // Remove file extension
+ .replace(/[^\.\w-]+/g, '-') // Replace illegal characters
+ .replace(/\./g, ':'); // Replace dots with colons(for valid id)
+}
+
+function createCSS(styles, sheet, lastModified) {
+ var css;
+
+ // Strip the query-string
+ var href = sheet.href ? sheet.href.replace(/\?.*$/, '') : '';
+
+ // If there is no title set, use the filename, minus the extension
+ var id = 'less:' + (sheet.title || extractId(href));
+
+ // If the stylesheet doesn't exist, create a new node
+ if ((css = document.getElementById(id)) === null) {
+ css = document.createElement('style');
+ css.type = 'text/css';
+ css.media = sheet.media || 'screen';
+ css.id = id;
+ document.getElementsByTagName('head')[0].appendChild(css);
+ }
+
+ if (css.styleSheet) { // IE
+ try {
+ css.styleSheet.cssText = styles;
+ } catch (e) {
+ throw new(Error)("Couldn't reassign styleSheet.cssText.");
+ }
+ } else {
+ (function (node) {
+ if (css.childNodes.length > 0) {
+ if (css.firstChild.nodeValue !== node.nodeValue) {
+ css.replaceChild(node, css.firstChild);
+ }
+ } else {
+ css.appendChild(node);
+ }
+ })(document.createTextNode(styles));
+ }
+
+ // Don't update the local store if the file wasn't modified
+ if (lastModified && cache) {
+ log('saving ' + href + ' to cache.');
+ cache.setItem(href, styles);
+ cache.setItem(href + ':timestamp', lastModified);
+ }
+}
+
+function xhr(url, type, callback, errback) {
+ var xhr = getXMLHttpRequest();
+ var async = isFileProtocol ? false : less.async;
+
+ if (typeof(xhr.overrideMimeType) === 'function') {
+ xhr.overrideMimeType('text/css');
+ }
+ xhr.open('GET', url, async);
+ xhr.setRequestHeader('Accept', type || 'text/x-less, text/css; q=0.9, */*; q=0.5');
+ xhr.send(null);
+
+ if (isFileProtocol) {
+ if (xhr.status === 0 || (xhr.status >= 200 && xhr.status < 300)) {
+ callback(xhr.responseText);
+ } else {
+ errback(xhr.status, url);
+ }
+ } else if (async) {
+ xhr.onreadystatechange = function () {
+ if (xhr.readyState == 4) {
+ handleResponse(xhr, callback, errback);
+ }
+ };
+ } else {
+ handleResponse(xhr, callback, errback);
+ }
+
+ function handleResponse(xhr, callback, errback) {
+ if (xhr.status >= 200 && xhr.status < 300) {
+ callback(xhr.responseText,
+ xhr.getResponseHeader("Last-Modified"));
+ } else if (typeof(errback) === 'function') {
+ errback(xhr.status, url);
+ }
+ }
+}
+
+function getXMLHttpRequest() {
+ if (window.XMLHttpRequest) {
+ return new(XMLHttpRequest);
+ } else {
+ try {
+ return new(ActiveXObject)("MSXML2.XMLHTTP.3.0");
+ } catch (e) {
+ log("browser doesn't support AJAX.");
+ return null;
+ }
+ }
+}
+
+function removeNode(node) {
+ return node && node.parentNode.removeChild(node);
+}
+
+function log(str) {
+ if (less.env == 'development' && typeof(console) !== "undefined") { console.log('less: ' + str) }
+}
+
+function error(e, href) {
+    var id = 'less-error-message:' + extractId(href);
+
+    var template = '<li><label>{line}</label><pre class="{class}">{content}</pre></li>';
+    var elem = document.createElement('div'), timer, content, error = [];
+    var filename = e.filename || href;
+
+    elem.id        = id;
+    elem.className = "less-error-message";
+
+    content = '<h3>'  + (e.message || 'There is an error in your .less file') +
+              '</h3>' + '<p>in <a href="' + filename + '">' +
+              filename.match(/([^\/]+)$/)[1] + "</a> ";
+
+ var errorline = function (e, i, classname) {
+ if (e.extract[i]) {
+ error.push(template.replace(/\{line\}/, parseInt(e.line) + (i - 1))
+ .replace(/\{class\}/, classname)
+ .replace(/\{content\}/, e.extract[i]));
+ }
+ };
+
+ if (e.stack) {
+ content += ' ' + e.stack.split('\n').slice(1).join(' ');
+ } else if (e.extract) {
+ errorline(e, 0, '');
+ errorline(e, 1, 'line');
+ errorline(e, 2, '');
+        content += '<h3>on line ' + e.line + ', column ' + (e.column + 1) + ':</h3>' +
+                   '<ul>' + error.join('') + '</ul>';
+ }
+ elem.innerHTML = content;
+
+ // CSS for error messages
+ createCSS([
+ '.less-error-message ul, .less-error-message li {',
+ 'list-style-type: none;',
+ 'margin-right: 15px;',
+ 'padding: 4px 0;',
+ 'margin: 0;',
+ '}',
+ '.less-error-message label {',
+ 'font-size: 12px;',
+ 'margin-right: 15px;',
+ 'padding: 4px 0;',
+ 'color: #cc7777;',
+ '}',
+ '.less-error-message pre {',
+ 'color: #dd6666;',
+ 'padding: 4px 0;',
+ 'margin: 0;',
+ 'display: inline-block;',
+ '}',
+ '.less-error-message pre.line {',
+ 'color: #ff0000;',
+ '}',
+ '.less-error-message h3 {',
+ 'font-size: 20px;',
+ 'font-weight: bold;',
+ 'padding: 15px 0 5px 0;',
+ 'margin: 0;',
+ '}',
+ '.less-error-message a {',
+ 'color: #10a',
+ '}',
+ '.less-error-message .error {',
+ 'color: red;',
+ 'font-weight: bold;',
+ 'padding-bottom: 2px;',
+ 'border-bottom: 1px dashed red;',
+ '}'
+ ].join('\n'), { title: 'error-message' });
+
+ elem.style.cssText = [
+ "font-family: Arial, sans-serif",
+ "border: 1px solid #e00",
+ "background-color: #eee",
+ "border-radius: 5px",
+ "-webkit-border-radius: 5px",
+ "-moz-border-radius: 5px",
+ "color: #e00",
+ "padding: 15px",
+ "margin-bottom: 15px"
+ ].join(';');
+
+ if (less.env == 'development') {
+ timer = setInterval(function () {
+ if (document.body) {
+ if (document.getElementById(id)) {
+ document.body.replaceChild(elem, document.getElementById(id));
+ } else {
+ document.body.insertBefore(elem, document.body.firstChild);
+ }
+ clearInterval(timer);
+ }
+ }, 10);
+ }
+}
+
diff --git a/tabula/bin/lib/less/colors.js b/tabula/bin/lib/less/colors.js
new file mode 100644
index 00000000..ed4c2838
--- /dev/null
+++ b/tabula/bin/lib/less/colors.js
@@ -0,0 +1,152 @@
+(function (tree) {
+ tree.colors = {
+ 'aliceblue':'#f0f8ff',
+ 'antiquewhite':'#faebd7',
+ 'aqua':'#00ffff',
+ 'aquamarine':'#7fffd4',
+ 'azure':'#f0ffff',
+ 'beige':'#f5f5dc',
+ 'bisque':'#ffe4c4',
+ 'black':'#000000',
+ 'blanchedalmond':'#ffebcd',
+ 'blue':'#0000ff',
+ 'blueviolet':'#8a2be2',
+ 'brown':'#a52a2a',
+ 'burlywood':'#deb887',
+ 'cadetblue':'#5f9ea0',
+ 'chartreuse':'#7fff00',
+ 'chocolate':'#d2691e',
+ 'coral':'#ff7f50',
+ 'cornflowerblue':'#6495ed',
+ 'cornsilk':'#fff8dc',
+ 'crimson':'#dc143c',
+ 'cyan':'#00ffff',
+ 'darkblue':'#00008b',
+ 'darkcyan':'#008b8b',
+ 'darkgoldenrod':'#b8860b',
+ 'darkgray':'#a9a9a9',
+ 'darkgrey':'#a9a9a9',
+ 'darkgreen':'#006400',
+ 'darkkhaki':'#bdb76b',
+ 'darkmagenta':'#8b008b',
+ 'darkolivegreen':'#556b2f',
+ 'darkorange':'#ff8c00',
+ 'darkorchid':'#9932cc',
+ 'darkred':'#8b0000',
+ 'darksalmon':'#e9967a',
+ 'darkseagreen':'#8fbc8f',
+ 'darkslateblue':'#483d8b',
+ 'darkslategray':'#2f4f4f',
+ 'darkslategrey':'#2f4f4f',
+ 'darkturquoise':'#00ced1',
+ 'darkviolet':'#9400d3',
+ 'deeppink':'#ff1493',
+ 'deepskyblue':'#00bfff',
+ 'dimgray':'#696969',
+ 'dimgrey':'#696969',
+ 'dodgerblue':'#1e90ff',
+ 'firebrick':'#b22222',
+ 'floralwhite':'#fffaf0',
+ 'forestgreen':'#228b22',
+ 'fuchsia':'#ff00ff',
+ 'gainsboro':'#dcdcdc',
+ 'ghostwhite':'#f8f8ff',
+ 'gold':'#ffd700',
+ 'goldenrod':'#daa520',
+ 'gray':'#808080',
+ 'grey':'#808080',
+ 'green':'#008000',
+ 'greenyellow':'#adff2f',
+ 'honeydew':'#f0fff0',
+ 'hotpink':'#ff69b4',
+ 'indianred':'#cd5c5c',
+ 'indigo':'#4b0082',
+ 'ivory':'#fffff0',
+ 'khaki':'#f0e68c',
+ 'lavender':'#e6e6fa',
+ 'lavenderblush':'#fff0f5',
+ 'lawngreen':'#7cfc00',
+ 'lemonchiffon':'#fffacd',
+ 'lightblue':'#add8e6',
+ 'lightcoral':'#f08080',
+ 'lightcyan':'#e0ffff',
+ 'lightgoldenrodyellow':'#fafad2',
+ 'lightgray':'#d3d3d3',
+ 'lightgrey':'#d3d3d3',
+ 'lightgreen':'#90ee90',
+ 'lightpink':'#ffb6c1',
+ 'lightsalmon':'#ffa07a',
+ 'lightseagreen':'#20b2aa',
+ 'lightskyblue':'#87cefa',
+ 'lightslategray':'#778899',
+ 'lightslategrey':'#778899',
+ 'lightsteelblue':'#b0c4de',
+ 'lightyellow':'#ffffe0',
+ 'lime':'#00ff00',
+ 'limegreen':'#32cd32',
+ 'linen':'#faf0e6',
+ 'magenta':'#ff00ff',
+ 'maroon':'#800000',
+ 'mediumaquamarine':'#66cdaa',
+ 'mediumblue':'#0000cd',
+ 'mediumorchid':'#ba55d3',
+ 'mediumpurple':'#9370d8',
+ 'mediumseagreen':'#3cb371',
+ 'mediumslateblue':'#7b68ee',
+ 'mediumspringgreen':'#00fa9a',
+ 'mediumturquoise':'#48d1cc',
+ 'mediumvioletred':'#c71585',
+ 'midnightblue':'#191970',
+ 'mintcream':'#f5fffa',
+ 'mistyrose':'#ffe4e1',
+ 'moccasin':'#ffe4b5',
+ 'navajowhite':'#ffdead',
+ 'navy':'#000080',
+ 'oldlace':'#fdf5e6',
+ 'olive':'#808000',
+ 'olivedrab':'#6b8e23',
+ 'orange':'#ffa500',
+ 'orangered':'#ff4500',
+ 'orchid':'#da70d6',
+ 'palegoldenrod':'#eee8aa',
+ 'palegreen':'#98fb98',
+ 'paleturquoise':'#afeeee',
+ 'palevioletred':'#d87093',
+ 'papayawhip':'#ffefd5',
+ 'peachpuff':'#ffdab9',
+ 'peru':'#cd853f',
+ 'pink':'#ffc0cb',
+ 'plum':'#dda0dd',
+ 'powderblue':'#b0e0e6',
+ 'purple':'#800080',
+ 'red':'#ff0000',
+ 'rosybrown':'#bc8f8f',
+ 'royalblue':'#4169e1',
+ 'saddlebrown':'#8b4513',
+ 'salmon':'#fa8072',
+ 'sandybrown':'#f4a460',
+ 'seagreen':'#2e8b57',
+ 'seashell':'#fff5ee',
+ 'sienna':'#a0522d',
+ 'silver':'#c0c0c0',
+ 'skyblue':'#87ceeb',
+ 'slateblue':'#6a5acd',
+ 'slategray':'#708090',
+ 'slategrey':'#708090',
+ 'snow':'#fffafa',
+ 'springgreen':'#00ff7f',
+ 'steelblue':'#4682b4',
+ 'tan':'#d2b48c',
+ 'teal':'#008080',
+ 'thistle':'#d8bfd8',
+ 'tomato':'#ff6347',
+ 'transparent':'rgba(0,0,0,0)',
+ 'turquoise':'#40e0d0',
+ 'violet':'#ee82ee',
+ 'wheat':'#f5deb3',
+ 'white':'#ffffff',
+ 'whitesmoke':'#f5f5f5',
+ 'yellow':'#ffff00',
+ 'yellowgreen':'#9acd32'
+ };
+})(require('./tree'));
diff --git a/tabula/bin/lib/less/cssmin.js b/tabula/bin/lib/less/cssmin.js
new file mode 100644
index 00000000..427de71c
--- /dev/null
+++ b/tabula/bin/lib/less/cssmin.js
@@ -0,0 +1,355 @@
+/**
+ * cssmin.js
+ * Author: Stoyan Stefanov - http://phpied.com/
+ * This is a JavaScript port of the CSS minification tool
+ * distributed with YUICompressor, itself a port
+ * of the cssmin utility by Isaac Schlueter - http://foohack.com/
+ * Permission is hereby granted to use the JavaScript version under the same
+ * conditions as the YUICompressor (original YUICompressor note below).
+ */
+
+/*
+* YUI Compressor
+* http://developer.yahoo.com/yui/compressor/
+* Author: Julien Lecomte - http://www.julienlecomte.net/
+* Copyright (c) 2011 Yahoo! Inc. All rights reserved.
+* The copyrights embodied in the content of this file are licensed
+* by Yahoo! Inc. under the BSD (revised) open source license.
+*/
+var YAHOO = YAHOO || {};
+YAHOO.compressor = YAHOO.compressor || {};
+
+/**
+ * Utility method to replace all data urls with tokens before we start
+ * compressing, to avoid performance issues running some of the subsequent
+ * regexes against large strings chunks.
+ *
+ * @private
+ * @method _extractDataUrls
+ * @param {String} css The input css
+ * @param {Array} The global array of tokens to preserve
+ * @returns String The processed css
+ */
+YAHOO.compressor._extractDataUrls = function (css, preservedTokens) {
+
+ // Leave data urls alone to increase parse performance.
+ var maxIndex = css.length - 1,
+ appendIndex = 0,
+ startIndex,
+ endIndex,
+ terminator,
+ foundTerminator,
+ sb = [],
+ m,
+ preserver,
+ token,
+ pattern = /url\(\s*(["']?)data\:/g;
+
+ // Since we need to account for non-base64 data urls, we need to handle
+ // ' and ) being part of the data string. Hence switching to indexOf,
+ // to determine whether or not we have matching string terminators and
+ // handling sb appends directly, instead of using matcher.append* methods.
+
+ while ((m = pattern.exec(css)) !== null) {
+
+ startIndex = m.index + 4; // "url(".length()
+ terminator = m[1]; // ', " or empty (not quoted)
+
+ if (terminator.length === 0) {
+ terminator = ")";
+ }
+
+ foundTerminator = false;
+
+ endIndex = pattern.lastIndex - 1;
+
+ while(foundTerminator === false && endIndex+1 <= maxIndex) {
+ endIndex = css.indexOf(terminator, endIndex + 1);
+
+ // endIndex == 0 doesn't really apply here
+ if ((endIndex > 0) && (css.charAt(endIndex - 1) !== '\\')) {
+ foundTerminator = true;
+ if (")" != terminator) {
+ endIndex = css.indexOf(")", endIndex);
+ }
+ }
+ }
+
+ // Enough searching, start moving stuff over to the buffer
+ sb.push(css.substring(appendIndex, m.index));
+
+ if (foundTerminator) {
+ token = css.substring(startIndex, endIndex);
+ token = token.replace(/\s+/g, "");
+ preservedTokens.push(token);
+
+ preserver = "url(___YUICSSMIN_PRESERVED_TOKEN_" + (preservedTokens.length - 1) + "___)";
+ sb.push(preserver);
+
+ appendIndex = endIndex + 1;
+ } else {
+ // No end terminator found, re-add the whole match. Should we throw/warn here?
+ sb.push(css.substring(m.index, pattern.lastIndex));
+ appendIndex = pattern.lastIndex;
+ }
+ }
+
+ sb.push(css.substring(appendIndex));
+
+ return sb.join("");
+};
+
+/**
+ * Utility method to compress hex color values of the form #AABBCC to #ABC.
+ *
+ * DOES NOT compress CSS ID selectors which match the above pattern (which would break things).
+ * e.g. #AddressForm { ... }
+ *
+ * DOES NOT compress IE filters, which have hex color values (which would break things).
+ * e.g. filter: chroma(color="#FFFFFF");
+ *
+ * DOES NOT compress invalid hex values.
+ * e.g. background-color: #aabbccdd
+ *
+ * @private
+ * @method _compressHexColors
+ * @param {String} css The input css
+ * @returns String The processed css
+ */
+YAHOO.compressor._compressHexColors = function(css) {
+
+ // Look for hex colors inside { ... } (to avoid IDs) and which don't have a =, or a " in front of them (to avoid filters)
+ var pattern = /(\=\s*?["']?)?#([0-9a-f])([0-9a-f])([0-9a-f])([0-9a-f])([0-9a-f])([0-9a-f])(\}|[^0-9a-f{][^{]*?\})/gi,
+ m,
+ index = 0,
+ isFilter,
+ sb = [];
+
+ while ((m = pattern.exec(css)) !== null) {
+
+ sb.push(css.substring(index, m.index));
+
+ isFilter = m[1];
+
+ if (isFilter) {
+ // Restore, maintain case, otherwise filter will break
+ sb.push(m[1] + "#" + (m[2] + m[3] + m[4] + m[5] + m[6] + m[7]));
+ } else {
+ if (m[2].toLowerCase() == m[3].toLowerCase() &&
+ m[4].toLowerCase() == m[5].toLowerCase() &&
+ m[6].toLowerCase() == m[7].toLowerCase()) {
+
+ // Compress.
+ sb.push("#" + (m[3] + m[5] + m[7]).toLowerCase());
+ } else {
+ // Non compressible color, restore but lower case.
+ sb.push("#" + (m[2] + m[3] + m[4] + m[5] + m[6] + m[7]).toLowerCase());
+ }
+ }
+
+ index = pattern.lastIndex = pattern.lastIndex - m[8].length;
+ }
+
+ sb.push(css.substring(index));
+
+ return sb.join("");
+};
+
+YAHOO.compressor.cssmin = function (css, linebreakpos) {
+
+ var startIndex = 0,
+ endIndex = 0,
+ i = 0, max = 0,
+ preservedTokens = [],
+ comments = [],
+ token = '',
+ totallen = css.length,
+ placeholder = '';
+
+ css = this._extractDataUrls(css, preservedTokens);
+
+ // collect all comment blocks...
+ while ((startIndex = css.indexOf("/*", startIndex)) >= 0) {
+ endIndex = css.indexOf("*/", startIndex + 2);
+ if (endIndex < 0) {
+ endIndex = totallen;
+ }
+ token = css.slice(startIndex + 2, endIndex);
+ comments.push(token);
+ css = css.slice(0, startIndex + 2) + "___YUICSSMIN_PRESERVE_CANDIDATE_COMMENT_" + (comments.length - 1) + "___" + css.slice(endIndex);
+ startIndex += 2;
+ }
+
+ // preserve strings so their content doesn't get accidentally minified
+ css = css.replace(/("([^\\"]|\\.|\\)*")|('([^\\']|\\.|\\)*')/g, function (match) {
+ var i, max, quote = match.substring(0, 1);
+
+ match = match.slice(1, -1);
+
+ // maybe the string contains a comment-like substring?
+ // one, maybe more? put'em back then
+ if (match.indexOf("___YUICSSMIN_PRESERVE_CANDIDATE_COMMENT_") >= 0) {
+ for (i = 0, max = comments.length; i < max; i = i + 1) {
+ match = match.replace("___YUICSSMIN_PRESERVE_CANDIDATE_COMMENT_" + i + "___", comments[i]);
+ }
+ }
+
+ // minify alpha opacity in filter strings
+ match = match.replace(/progid:DXImageTransform\.Microsoft\.Alpha\(Opacity=/gi, "alpha(opacity=");
+
+ preservedTokens.push(match);
+ return quote + "___YUICSSMIN_PRESERVED_TOKEN_" + (preservedTokens.length - 1) + "___" + quote;
+ });
+
+ // strings are safe, now wrestle the comments
+ for (i = 0, max = comments.length; i < max; i = i + 1) {
+
+ token = comments[i];
+ placeholder = "___YUICSSMIN_PRESERVE_CANDIDATE_COMMENT_" + i + "___";
+
+ // ! in the first position of the comment means preserve
+ // so push to the preserved tokens keeping the !
+ if (token.charAt(0) === "!") {
+ preservedTokens.push(token);
+ css = css.replace(placeholder, "___YUICSSMIN_PRESERVED_TOKEN_" + (preservedTokens.length - 1) + "___");
+ continue;
+ }
+
+ // \ in the last position looks like hack for Mac/IE5
+ // shorten that to /*\*/ and the next one to /**/
+ if (token.charAt(token.length - 1) === "\\") {
+ preservedTokens.push("\\");
+ css = css.replace(placeholder, "___YUICSSMIN_PRESERVED_TOKEN_" + (preservedTokens.length - 1) + "___");
+ i = i + 1; // attn: advancing the loop
+ preservedTokens.push("");
+ css = css.replace("___YUICSSMIN_PRESERVE_CANDIDATE_COMMENT_" + i + "___", "___YUICSSMIN_PRESERVED_TOKEN_" + (preservedTokens.length - 1) + "___");
+ continue;
+ }
+
+ // keep empty comments after child selectors (IE7 hack)
+ // e.g. html >/**/ body
+ if (token.length === 0) {
+ startIndex = css.indexOf(placeholder);
+ if (startIndex > 2) {
+ if (css.charAt(startIndex - 3) === '>') {
+ preservedTokens.push("");
+ css = css.replace(placeholder, "___YUICSSMIN_PRESERVED_TOKEN_" + (preservedTokens.length - 1) + "___");
+ }
+ }
+ }
+
+ // in all other cases kill the comment
+ css = css.replace("/*" + placeholder + "*/", "");
+ }
+
+
+ // Normalize all whitespace strings to single spaces. Easier to work with that way.
+ css = css.replace(/\s+/g, " ");
+
+ // Remove the spaces before the things that should not have spaces before them.
+ // But, be careful not to turn "p :link {...}" into "p:link{...}"
+ // Swap out any pseudo-class colons with the token, and then swap back.
+ css = css.replace(/(^|\})(([^\{:])+:)+([^\{]*\{)/g, function (m) {
+ return m.replace(":", "___YUICSSMIN_PSEUDOCLASSCOLON___");
+ });
+ css = css.replace(/\s+([!{};:>+\(\)\],])/g, '$1');
+ css = css.replace(/___YUICSSMIN_PSEUDOCLASSCOLON___/g, ":");
+
+ // retain space for special IE6 cases
+ css = css.replace(/:first-(line|letter)(\{|,)/g, ":first-$1 $2");
+
+ // no space after the end of a preserved comment
+ css = css.replace(/\*\/ /g, '*/');
+
+
+ // If there is a @charset, then only allow one, and push to the top of the file.
+ css = css.replace(/^(.*)(@charset "[^"]*";)/gi, '$2$1');
+ css = css.replace(/^(\s*@charset [^;]+;\s*)+/gi, '$1');
+
+ // Put the space back in some cases, to support stuff like
+ // @media screen and (-webkit-min-device-pixel-ratio:0){
+ css = css.replace(/\band\(/gi, "and (");
+
+
+ // Remove the spaces after the things that should not have spaces after them.
+ css = css.replace(/([!{}:;>+\(\[,])\s+/g, '$1');
+
+ // remove unnecessary semicolons
+ css = css.replace(/;+\}/g, "}");
+
+ // Replace 0(px,em,%) with 0.
+ css = css.replace(/([\s:])(0)(px|em|%|in|cm|mm|pc|pt|ex)/gi, "$1$2");
+
+ // Replace 0 0 0 0; with 0.
+ css = css.replace(/:0 0 0 0(;|\})/g, ":0$1");
+ css = css.replace(/:0 0 0(;|\})/g, ":0$1");
+ css = css.replace(/:0 0(;|\})/g, ":0$1");
+
+ // Replace background-position:0; with background-position:0 0;
+ // same for transform-origin
+ css = css.replace(/(background-position|transform-origin|webkit-transform-origin|moz-transform-origin|o-transform-origin|ms-transform-origin):0(;|\})/gi, function(all, prop, tail) {
+ return prop.toLowerCase() + ":0 0" + tail;
+ });
+
+ // Replace 0.6 to .6, but only when preceded by : or a white-space
+ css = css.replace(/(:|\s)0+\.(\d+)/g, "$1.$2");
+
+ // Shorten colors from rgb(51,102,153) to #336699
+ // This makes it more likely that it'll get further compressed in the next step.
+ css = css.replace(/rgb\s*\(\s*([0-9,\s]+)\s*\)/gi, function () {
+ var i, rgbcolors = arguments[1].split(',');
+ for (i = 0; i < rgbcolors.length; i = i + 1) {
+ rgbcolors[i] = parseInt(rgbcolors[i], 10).toString(16);
+ if (rgbcolors[i].length === 1) {
+ rgbcolors[i] = '0' + rgbcolors[i];
+ }
+ }
+ return '#' + rgbcolors.join('');
+ });
+
+ // Shorten colors from #AABBCC to #ABC.
+ css = this._compressHexColors(css);
+
+ // border: none -> border:0
+ css = css.replace(/(border|border-top|border-right|border-bottom|border-right|outline|background):none(;|\})/gi, function(all, prop, tail) {
+ return prop.toLowerCase() + ":0" + tail;
+ });
+
+ // shorter opacity IE filter
+ css = css.replace(/progid:DXImageTransform\.Microsoft\.Alpha\(Opacity=/gi, "alpha(opacity=");
+
+ // Remove empty rules.
+ css = css.replace(/[^\};\{\/]+\{\}/g, "");
+
+ if (linebreakpos >= 0) {
+ // Some source control tools don't like it when files containing lines longer
+ // than, say 8000 characters, are checked in. The linebreak option is used in
+ // that case to split long lines after a specific column.
+ startIndex = 0;
+ i = 0;
+ while (i < css.length) {
+ i = i + 1;
+ if (css[i - 1] === '}' && i - startIndex > linebreakpos) {
+ css = css.slice(0, i) + '\n' + css.slice(i);
+ startIndex = i;
+ }
+ }
+ }
+
+ // Replace multiple semi-colons in a row by a single one
+ // See SF bug #1980989
+ css = css.replace(/;;+/g, ";");
+
+ // restore preserved comments and strings
+ for (i = 0, max = preservedTokens.length; i < max; i = i + 1) {
+ css = css.replace("___YUICSSMIN_PRESERVED_TOKEN_" + i + "___", preservedTokens[i]);
+ }
+
+ // Trim the final string (for any leading or trailing white spaces)
+ css = css.replace(/^\s+|\s+$/g, "");
+
+ return css;
+
+};
+
+exports.compressor = YAHOO.compressor;
diff --git a/tabula/bin/lib/less/functions.js b/tabula/bin/lib/less/functions.js
new file mode 100644
index 00000000..6eb34bac
--- /dev/null
+++ b/tabula/bin/lib/less/functions.js
@@ -0,0 +1,228 @@
+(function (tree) { // Built-in LESS functions: color constructors, channel accessors, color operations, string/math helpers and type tests.
+
+tree.functions = {
+ rgb: function (r, g, b) { // rgb(r, g, b) -> fully opaque rgba()
+ return this.rgba(r, g, b, 1.0);
+ },
+ rgba: function (r, g, b, a) { // build a Color node; channels and alpha coerced via number() below
+ var rgb = [r, g, b].map(function (c) { return number(c) }),
+ a = number(a);
+ return new(tree.Color)(rgb, a);
+ },
+ hsl: function (h, s, l) { // hsl(h, s, l) -> fully opaque hsla()
+ return this.hsla(h, s, l, 1.0);
+ },
+ hsla: function (h, s, l, a) { // HSL -> RGB conversion using the CSS3 hue helper below
+ h = (number(h) % 360) / 360; // normalize hue degrees into [0, 1)
+ s = number(s); l = number(l); a = number(a);
+
+ var m2 = l <= 0.5 ? l * (s + 1) : l + s - l * s;
+ var m1 = l * 2 - m2;
+
+ return this.rgba(hue(h + 1/3) * 255,
+ hue(h) * 255,
+ hue(h - 1/3) * 255,
+ a);
+
+ function hue(h) { // per-channel hue -> intensity in [0, 1]
+ h = h < 0 ? h + 1 : (h > 1 ? h - 1 : h); // wrap around the hue circle
+ if (h * 6 < 1) return m1 + (m2 - m1) * h * 6;
+ else if (h * 2 < 1) return m2;
+ else if (h * 3 < 2) return m1 + (m2 - m1) * (2/3 - h) * 6;
+ else return m1;
+ }
+ },
+ hue: function (color) { // hue channel, rounded degrees
+ return new(tree.Dimension)(Math.round(color.toHSL().h));
+ },
+ saturation: function (color) { // saturation channel as a percentage
+ return new(tree.Dimension)(Math.round(color.toHSL().s * 100), '%');
+ },
+ lightness: function (color) { // lightness channel as a percentage
+ return new(tree.Dimension)(Math.round(color.toHSL().l * 100), '%');
+ },
+ alpha: function (color) { // alpha channel as a unitless number
+ return new(tree.Dimension)(color.toHSL().a);
+ },
+ saturate: function (color, amount) { // raise saturation by amount% (clamped to [0, 1])
+ var hsl = color.toHSL();
+
+ hsl.s += amount.value / 100;
+ hsl.s = clamp(hsl.s);
+ return hsla(hsl);
+ },
+ desaturate: function (color, amount) { // lower saturation by amount% (clamped)
+ var hsl = color.toHSL();
+
+ hsl.s -= amount.value / 100;
+ hsl.s = clamp(hsl.s);
+ return hsla(hsl);
+ },
+ lighten: function (color, amount) { // raise lightness by amount% (clamped)
+ var hsl = color.toHSL();
+
+ hsl.l += amount.value / 100;
+ hsl.l = clamp(hsl.l);
+ return hsla(hsl);
+ },
+ darken: function (color, amount) { // lower lightness by amount% (clamped)
+ var hsl = color.toHSL();
+
+ hsl.l -= amount.value / 100;
+ hsl.l = clamp(hsl.l);
+ return hsla(hsl);
+ },
+ fadein: function (color, amount) { // raise opacity by amount% (clamped)
+ var hsl = color.toHSL();
+
+ hsl.a += amount.value / 100;
+ hsl.a = clamp(hsl.a);
+ return hsla(hsl);
+ },
+ fadeout: function (color, amount) { // lower opacity by amount% (clamped)
+ var hsl = color.toHSL();
+
+ hsl.a -= amount.value / 100;
+ hsl.a = clamp(hsl.a);
+ return hsla(hsl);
+ },
+ fade: function (color, amount) { // set opacity to exactly amount% (clamped)
+ var hsl = color.toHSL();
+
+ hsl.a = amount.value / 100;
+ hsl.a = clamp(hsl.a);
+ return hsla(hsl);
+ },
+ spin: function (color, amount) { // rotate hue by amount degrees, wrapping at 360
+ var hsl = color.toHSL();
+ var hue = (hsl.h + amount.value) % 360;
+
+ hsl.h = hue < 0 ? 360 + hue : hue;
+
+ return hsla(hsl);
+ },
+ //
+ // Copyright (c) 2006-2009 Hampton Catlin, Nathan Weizenbaum, and Chris Eppstein
+ // http://sass-lang.com
+ //
+ mix: function (color1, color2, weight) { // blend two colors; weight% of color1 (algorithm adapted from Sass)
+ var p = weight.value / 100.0;
+ var w = p * 2 - 1;
+ var a = color1.toHSL().a - color2.toHSL().a;
+
+ var w1 = (((w * a == -1) ? w : (w + a) / (1 + w * a)) + 1) / 2.0; // guard: w * a == -1 would make the divisor zero
+ var w2 = 1 - w1;
+
+ var rgb = [color1.rgb[0] * w1 + color2.rgb[0] * w2,
+ color1.rgb[1] * w1 + color2.rgb[1] * w2,
+ color1.rgb[2] * w1 + color2.rgb[2] * w2];
+
+ var alpha = color1.alpha * p + color2.alpha * (1 - p); // alpha blends linearly by weight
+
+ return new(tree.Color)(rgb, alpha);
+ },
+ greyscale: function (color) { // shortcut for desaturate(color, 100%)
+ return this.desaturate(color, new(tree.Dimension)(100));
+ },
+ e: function (str) { // e(str): emit string content unquoted (JavaScript values use their evaluated form)
+ return new(tree.Anonymous)(str instanceof tree.JavaScript ? str.evaluated : str);
+ },
+ escape: function (str) { // encodeURI plus the characters encodeURI leaves alone (= : # ; ( ))
+ return new(tree.Anonymous)(encodeURI(str.value).replace(/=/g, "%3D").replace(/:/g, "%3A").replace(/#/g, "%23").replace(/;/g, "%3B").replace(/\(/g, "%28").replace(/\)/g, "%29"));
+ },
+ '%': function (quoted /* arg, arg, ...*/) { // printf-style formatting: %s uses .value, %d/%a use toCSS(); uppercase token => URL-encoded
+ var args = Array.prototype.slice.call(arguments, 1),
+ str = quoted.value;
+
+ for (var i = 0; i < args.length; i++) {
+ str = str.replace(/%[sda]/i, function(token) { // each arg consumes the next placeholder, left to right
+ var value = token.match(/s/i) ? args[i].value : args[i].toCSS();
+ return token.match(/[A-Z]$/) ? encodeURIComponent(value) : value;
+ });
+ }
+ str = str.replace(/%%/g, '%'); // literal percent escape
+ return new(tree.Quoted)('"' + str + '"', str);
+ },
+ round: function (n) {
+ return this._math('round', n);
+ },
+ ceil: function (n) {
+ return this._math('ceil', n);
+ },
+ floor: function (n) {
+ return this._math('floor', n);
+ },
+ _math: function (fn, n) { // apply Math[fn], preserving the Dimension's unit
+ if (n instanceof tree.Dimension) {
+ return new(tree.Dimension)(Math[fn](number(n)), n.unit);
+ } else if (typeof(n) === 'number') {
+ return Math[fn](n);
+ } else {
+ throw { type: "Argument", message: "argument must be a number" };
+ }
+ },
+ argb: function (color) { // #AARRGGBB form (e.g. for IE filters)
+ return new(tree.Anonymous)(color.toARGB());
+
+ },
+ percentage: function (n) { // 0.5 -> 50%
+ return new(tree.Dimension)(n.value * 100, '%');
+ },
+ color: function (n) { // parse a quoted string (without its opening quote) as a color literal
+ if (n instanceof tree.Quoted) {
+ return new(tree.Color)(n.value.slice(1));
+ } else {
+ throw { type: "Argument", message: "argument must be a string" };
+ }
+ },
+ iscolor: function (n) { // type predicates below all return tree.True / tree.False keywords
+ return this._isa(n, tree.Color);
+ },
+ isnumber: function (n) {
+ return this._isa(n, tree.Dimension);
+ },
+ isstring: function (n) {
+ return this._isa(n, tree.Quoted);
+ },
+ iskeyword: function (n) {
+ return this._isa(n, tree.Keyword);
+ },
+ isurl: function (n) {
+ return this._isa(n, tree.URL);
+ },
+ ispixel: function (n) { // unit-specific checks: Dimension with the given unit
+ return (n instanceof tree.Dimension) && n.unit === 'px' ? tree.True : tree.False;
+ },
+ ispercentage: function (n) {
+ return (n instanceof tree.Dimension) && n.unit === '%' ? tree.True : tree.False;
+ },
+ isem: function (n) {
+ return (n instanceof tree.Dimension) && n.unit === 'em' ? tree.True : tree.False;
+ },
+ _isa: function (n, Type) {
+ return (n instanceof Type) ? tree.True : tree.False;
+ }
+};
+
+function hsla(hsla) { // rebuild a Color node from an {h, s, l, a} record
+ return tree.functions.hsla(hsla.h, hsla.s, hsla.l, hsla.a);
+}
+
+function number(n) { // coerce a Dimension or plain number to a float; '%' values are divided by 100
+ if (n instanceof tree.Dimension) {
+ return parseFloat(n.unit == '%' ? n.value / 100 : n.value);
+ } else if (typeof(n) === 'number') {
+ return n;
+ } else {
+ throw {
+ error: "RuntimeError",
+ message: "color functions take numbers as parameters"
+ };
+ }
+}
+
+function clamp(val) { // constrain to [0, 1]
+ return Math.min(1, Math.max(0, val));
+}
+
+})(require('./tree'));
diff --git a/tabula/bin/lib/less/index.js b/tabula/bin/lib/less/index.js
new file mode 100644
index 00000000..a11fa998
--- /dev/null
+++ b/tabula/bin/lib/less/index.js
@@ -0,0 +1,148 @@
+var path = require('path'),
+ sys = require('util'),
+ fs = require('fs');
+
+var less = { // Public module facade: version, parser hooks, render() and error printing.
+ version: [1, 3, 0],
+ Parser: require('./parser').Parser,
+ importer: require('./parser').importer,
+ tree: require('./tree'),
+ render: function (input, options, callback) { // compile LESS to CSS; callback(e, css) style, or an EventEmitter ('success'/'error') when no callback is given
+ options = options || {};
+
+ if (typeof(options) === 'function') { // render(input, callback) form
+ callback = options, options = {};
+ }
+
+ var parser = new(less.Parser)(options),
+ ee;
+
+ if (callback) {
+ parser.parse(input, function (e, root) {
+ callback(e, root && root.toCSS && root.toCSS(options)); // guard: root may be missing on parse error
+ });
+ } else {
+ ee = new(require('events').EventEmitter);
+
+ process.nextTick(function () { // defer so callers can attach listeners first
+ parser.parse(input, function (e, root) {
+ if (e) { ee.emit('error', e) }
+ else { ee.emit('success', root.toCSS(options)) }
+ });
+ });
+ return ee;
+ }
+ },
+ writeError: function (ctx, options) { // print a LessError to stderr with a three-line source extract
+ options = options || {};
+
+ var message = "";
+ var extract = ctx.extract;
+ var error = [];
+ var stylize = options.color ? less.stylize : function (str) { return str }; // ANSI coloring is opt-in
+
+ if (options.silent) { return }
+
+ if (ctx.stack) { return sys.error(stylize(ctx.stack, 'red')) } // native JS errors: print the stack and stop
+
+ if (!ctx.hasOwnProperty('index')) { // not a positioned parse error; no extract available
+ return sys.error(ctx.stack || ctx.message);
+ }
+
+ if (typeof(extract[0]) === 'string') { // line before the error
+ error.push(stylize((ctx.line - 1) + ' ' + extract[0], 'grey'));
+ }
+
+ if (extract[1]) { // error line, with the offending column highlighted
+ error.push(ctx.line + ' ' + extract[1].slice(0, ctx.column)
+ + stylize(stylize(stylize(extract[1][ctx.column], 'bold')
+ + extract[1].slice(ctx.column + 1), 'red'), 'inverse'));
+ }
+
+ if (typeof(extract[2]) === 'string') { // line after the error
+ error.push(stylize((ctx.line + 1) + ' ' + extract[2], 'grey'));
+ }
+ error = error.join('\n') + '\033[0m\n'; // trailing ANSI reset
+
+ message += stylize(ctx.type + 'Error: ' + ctx.message, 'red');
+ ctx.filename && (message += stylize(' in ', 'red') + ctx.filename +
+ stylize(':' + ctx.line + ':' + ctx.column, 'grey'));
+
+ sys.error(message, error);
+
+ if (ctx.callLine) { // mixin call site, when the error came from inside a mixin
+ sys.error(stylize('from ', 'red') + (ctx.filename || ''));
+ sys.error(stylize(ctx.callLine, 'grey') + ' ' + ctx.callExtract);
+ }
+ }
+};
+
+['color', 'directive', 'operation', 'dimension', // require each tree node module; their side effects populate less.tree
+ 'keyword', 'variable', 'ruleset', 'element',
+ 'selector', 'quoted', 'expression', 'rule',
+ 'call', 'url', 'alpha', 'import',
+ 'mixin', 'comment', 'anonymous', 'value',
+ 'javascript', 'assignment', 'condition', 'paren',
+ 'media'
+].forEach(function (n) {
+ require('./tree/' + n);
+});
+
+less.Parser.importer = function (file, paths, callback, env) { // Node-side @import loader: locate file on the search paths, parse it, hand the subtree to callback(e, root, source).
+ var pathname;
+
+ // TODO: Undo this at some point,
+ // or use different approach.
+ paths.unshift('.');
+
+ for (var i = 0; i < paths.length; i++) { // first path where the file exists wins
+ try {
+ pathname = path.join(paths[i], file);
+ fs.statSync(pathname); // existence check; throws when the file is missing
+ break;
+ } catch (e) {
+ pathname = null;
+ }
+ }
+
+ if (pathname) {
+ fs.readFile(pathname, 'utf-8', function(e, data) {
+ if (e) return callback(e);
+
+ new(less.Parser)({
+ paths: [path.dirname(pathname)].concat(paths), // imported file's own directory gets highest priority
+ filename: pathname
+ }).parse(data, function (e, root) {
+ callback(e, root, data);
+ });
+ });
+ } else {
+ if (typeof(env.errback) === "function") { // host-provided fallback loader
+ env.errback(file, paths, callback);
+ } else {
+ callback({ type: 'File', message: "'" + file + "' wasn't found.\n" });
+ }
+ }
+}
+
+require('./functions'); // side effect: installs tree.functions
+require('./colors'); // side effect: installs tree.colors
+
+for (var k in less) { exports[k] = less[k] } // re-export the whole facade
+
+// Stylize a string
+function stylize(str, style) { // wrap str in the ANSI escape codes for the named terminal style
+ var styles = { // [set-code, reset-code] pairs
+ 'bold' : [1, 22],
+ 'inverse' : [7, 27],
+ 'underline' : [4, 24],
+ 'yellow' : [33, 39],
+ 'green' : [32, 39],
+ 'red' : [31, 39],
+ 'grey' : [90, 39]
+ };
+ return '\033[' + styles[style][0] + 'm' + str +
+ '\033[' + styles[style][1] + 'm';
+}
+less.stylize = stylize;
+
diff --git a/tabula/bin/lib/less/parser.js b/tabula/bin/lib/less/parser.js
new file mode 100644
index 00000000..d732e1b1
--- /dev/null
+++ b/tabula/bin/lib/less/parser.js
@@ -0,0 +1,1334 @@
+var less, tree; // Bind the less/tree namespaces to the right globals for Rhino, Node or the browser.
+
+if (typeof environment === "object" && ({}).toString.call(environment) === "[object Environment]") {
+ // Rhino
+ // Details on how to detect Rhino: https://github.com/ringo/ringojs/issues/88
+ if (typeof(window) === 'undefined') { less = {} }
+ else { less = window.less = {} }
+ tree = less.tree = {};
+ less.mode = 'rhino';
+} else if (typeof(window) === 'undefined') {
+ // Node.js
+ less = exports,
+ tree = require('./tree'); // shared AST node modules
+ less.mode = 'node';
+} else {
+ // Browser
+ if (typeof(window.less) === 'undefined') { window.less = {} }
+ less = window.less,
+ tree = window.less.tree = {};
+ less.mode = 'browser';
+}
+//
+// less.js - parser
+//
+// A relatively straight-forward predictive parser.
+// There is no tokenization/lexing stage, the input is parsed
+// in one sweep.
+//
+// To make the parser fast enough to run in the browser, several
+// optimization had to be made:
+//
+// - Matching and slicing on a huge input is often cause of slowdowns.
+// The solution is to chunkify the input into smaller strings.
+// The chunks are stored in the `chunks` var,
+// `j` holds the current chunk index, and `current` holds
+// the index of the current chunk in relation to `input`.
+// This gives us an almost 4x speed-up.
+//
+// - In many cases, we don't need to match individual tokens;
+// for example, if a value doesn't hold any variables, operations
+// or dynamic references, the parser can effectively 'skip' it,
+// treating it as a literal.
+// An example would be '1px solid #000' - which evaluates to itself,
+// we don't need to know what the individual components are.
+// The drawback, of course is that you don't get the benefits of
+// syntax-checking on the CSS. This gives us a 50% speed-up in the parser,
+// and a smaller speed-up in the code-gen.
+//
+//
+// Token matching is done with the `$` function, which either takes
+// a terminal string or regexp, or a non-terminal function to call.
+// It also takes care of moving all the indices forwards.
+//
+//
+less.Parser = function Parser(env) {
+ var input, // LeSS input string
+ i, // current index in `input`
+ j, // current chunk
+ temp, // temporarily holds a chunk's state, for backtracking
+ memo, // temporarily holds `i`, when backtracking
+ furthest, // furthest index the parser has gone to
+ chunks, // chunkified input
+ current, // index of current chunk, in `input`
+ parser;
+
+ var that = this;
+
+ // This function is called after all files
+ // have been imported through `@import`.
+ var finish = function () {};
+
+ var imports = this.imports = {
+ paths: env && env.paths || [], // Search paths, when importing
+ queue: [], // Files which haven't been imported yet
+ files: {}, // Holds the imported parse trees
+ contents: {}, // Holds the imported file contents
+ mime: env && env.mime, // MIME type of .less files
+ error: null, // Error in parsing/evaluating an import
+ push: function (path, callback) {
+ var that = this;
+ this.queue.push(path);
+
+ //
+ // Import a file asynchronously
+ //
+ less.Parser.importer(path, this.paths, function (e, root, contents) {
+ that.queue.splice(that.queue.indexOf(path), 1); // Remove the path from the queue
+
+ var imported = path in that.files;
+
+ that.files[path] = root; // Store the root
+ that.contents[path] = contents;
+
+ if (e && !that.error) { that.error = e }
+
+ callback(e, root, imported);
+
+ if (that.queue.length === 0) { finish() } // Call `finish` if we're done importing
+ }, env);
+ }
+ };
+
+ function save() { temp = chunks[j], memo = i, current = i }
+ function restore() { chunks[j] = temp, i = memo, current = i }
+
+ function sync() {
+ if (i > current) {
+ chunks[j] = chunks[j].slice(i - current);
+ current = i;
+ }
+ }
+ //
+ // Parse from a token, regexp or string, and move forward if match
+ //
+ function $(tok) {
+ var match, args, length, c, index, endIndex, k, mem;
+
+ //
+ // Non-terminal
+ //
+ if (tok instanceof Function) {
+ return tok.call(parser.parsers);
+ //
+ // Terminal
+ //
+ // Either match a single character in the input,
+ // or match a regexp in the current chunk (chunk[j]).
+ //
+ } else if (typeof(tok) === 'string') {
+ match = input.charAt(i) === tok ? tok : null;
+ length = 1;
+ sync ();
+ } else {
+ sync ();
+
+ if (match = tok.exec(chunks[j])) {
+ length = match[0].length;
+ } else {
+ return null;
+ }
+ }
+
+ // The match is confirmed, add the match length to `i`,
+ // and consume any extra white-space characters (' ' || '\n')
+ // which come after that. The reason for this is that LeSS's
+ // grammar is mostly white-space insensitive.
+ //
+ if (match) {
+ mem = i += length;
+ endIndex = i + chunks[j].length - length;
+
+ while (i < endIndex) {
+ c = input.charCodeAt(i);
+ if (! (c === 32 || c === 10 || c === 9)) { break }
+ i++;
+ }
+ chunks[j] = chunks[j].slice(length + (i - mem));
+ current = i;
+
+ if (chunks[j].length === 0 && j < chunks.length - 1) { j++ }
+
+ if(typeof(match) === 'string') {
+ return match;
+ } else {
+ return match.length === 1 ? match[0] : match;
+ }
+ }
+ }
+
+ function expect(arg, msg) {
+ var result = $(arg);
+ if (! result) {
+ error(msg || (typeof(arg) === 'string' ? "expected '" + arg + "' got '" + input.charAt(i) + "'"
+ : "unexpected token"));
+ } else {
+ return result;
+ }
+ }
+
+ function error(msg, type) {
+ throw { index: i, type: type || 'Syntax', message: msg };
+ }
+
+ // Same as $(), but don't change the state of the parser,
+ // just return the match.
+ function peek(tok) {
+ if (typeof(tok) === 'string') {
+ return input.charAt(i) === tok;
+ } else {
+ if (tok.test(chunks[j])) {
+ return true;
+ } else {
+ return false;
+ }
+ }
+ }
+
+ function basename(pathname) {
+ if (less.mode === 'node') {
+ return require('path').basename(pathname);
+ } else {
+ return pathname.match(/[^\/]+$/)[0];
+ }
+ }
+
+ function getInput(e, env) {
+ if (e.filename && env.filename && (e.filename !== env.filename)) {
+ return parser.imports.contents[basename(e.filename)];
+ } else {
+ return input;
+ }
+ }
+
+ function getLocation(index, input) {
+ for (var n = index, column = -1;
+ n >= 0 && input.charAt(n) !== '\n';
+ n--) { column++ }
+
+ return { line: typeof(index) === 'number' ? (input.slice(0, index).match(/\n/g) || "").length : null,
+ column: column };
+ }
+
+ function LessError(e, env) {
+ var input = getInput(e, env),
+ loc = getLocation(e.index, input),
+ line = loc.line,
+ col = loc.column,
+ lines = input.split('\n');
+
+ this.type = e.type || 'Syntax';
+ this.message = e.message;
+ this.filename = e.filename || env.filename;
+ this.index = e.index;
+ this.line = typeof(line) === 'number' ? line + 1 : null;
+ this.callLine = e.call && (getLocation(e.call, input).line + 1);
+ this.callExtract = lines[getLocation(e.call, input).line];
+ this.stack = e.stack;
+ this.column = col;
+ this.extract = [
+ lines[line - 1],
+ lines[line],
+ lines[line + 1]
+ ];
+ }
+
+ this.env = env = env || {};
+
+ // The optimization level dictates the thoroughness of the parser,
+ // the lower the number, the less nodes it will create in the tree.
+ // This could matter for debugging, or if you want to access
+ // the individual nodes in the tree.
+ this.optimization = ('optimization' in this.env) ? this.env.optimization : 1;
+
+ this.env.filename = this.env.filename || null;
+
+ //
+ // The Parser
+ //
+ return parser = {
+
+ imports: imports,
+ //
+ // Parse an input string into an abstract syntax tree,
+ // call `callback` when done.
+ //
+ parse: function (str, callback) {
+ var root, start, end, zone, line, lines, buff = [], c, error = null;
+
+ i = j = current = furthest = 0;
+ input = str.replace(/\r\n/g, '\n');
+
+ // Split the input into chunks.
+ chunks = (function (chunks) {
+ var j = 0,
+ skip = /[^"'`\{\}\/\(\)\\]+/g,
+ comment = /\/\*(?:[^*]|\*+[^\/*])*\*+\/|\/\/.*/g,
+ string = /"((?:[^"\\\r\n]|\\.)*)"|'((?:[^'\\\r\n]|\\.)*)'|`((?:[^`\\\r\n]|\\.)*)`/g,
+ level = 0,
+ match,
+ chunk = chunks[0],
+ inParam;
+
+ for (var i = 0, c, cc; i < input.length; i++) {
+ skip.lastIndex = i;
+ if (match = skip.exec(input)) {
+ if (match.index === i) {
+ i += match[0].length;
+ chunk.push(match[0]);
+ }
+ }
+ c = input.charAt(i);
+ comment.lastIndex = string.lastIndex = i;
+
+ if (match = string.exec(input)) {
+ if (match.index === i) {
+ i += match[0].length;
+ chunk.push(match[0]);
+ c = input.charAt(i);
+ }
+ }
+
+ if (!inParam && c === '/') {
+ cc = input.charAt(i + 1);
+ if (cc === '/' || cc === '*') {
+ if (match = comment.exec(input)) {
+ if (match.index === i) {
+ i += match[0].length;
+ chunk.push(match[0]);
+ c = input.charAt(i);
+ }
+ }
+ }
+ }
+
+ switch (c) {
+ case '{': if (! inParam) { level ++; chunk.push(c); break }
+ case '}': if (! inParam) { level --; chunk.push(c); chunks[++j] = chunk = []; break }
+ case '(': if (! inParam) { inParam = true; chunk.push(c); break }
+ case ')': if ( inParam) { inParam = false; chunk.push(c); break }
+ default: chunk.push(c);
+ }
+ }
+ if (level > 0) {
+ error = new(LessError)({
+ index: i,
+ type: 'Parse',
+ message: "missing closing `}`",
+ filename: env.filename
+ }, env);
+ }
+
+ return chunks.map(function (c) { return c.join('') });;
+ })([[]]);
+
+ if (error) {
+ return callback(error);
+ }
+
+ // Start with the primary rule.
+ // The whole syntax tree is held under a Ruleset node,
+ // with the `root` property set to true, so no `{}` are
+ // output. The callback is called when the input is parsed.
+ try {
+ root = new(tree.Ruleset)([], $(this.parsers.primary));
+ root.root = true;
+ } catch (e) {
+ return callback(new(LessError)(e, env));
+ }
+
+ root.toCSS = (function (evaluate) {
+ var line, lines, column;
+
+ return function (options, variables) {
+ var frames = [], importError;
+
+ options = options || {};
+ //
+ // Allows setting variables with a hash, so:
+ //
+ // `{ color: new(tree.Color)('#f01') }` will become:
+ //
+ // new(tree.Rule)('@color',
+ // new(tree.Value)([
+ // new(tree.Expression)([
+ // new(tree.Color)('#f01')
+ // ])
+ // ])
+ // )
+ //
+ if (typeof(variables) === 'object' && !Array.isArray(variables)) {
+ variables = Object.keys(variables).map(function (k) {
+ var value = variables[k];
+
+ if (! (value instanceof tree.Value)) {
+ if (! (value instanceof tree.Expression)) {
+ value = new(tree.Expression)([value]);
+ }
+ value = new(tree.Value)([value]);
+ }
+ return new(tree.Rule)('@' + k, value, false, 0);
+ });
+ frames = [new(tree.Ruleset)(null, variables)];
+ }
+
+ try {
+ var css = evaluate.call(this, { frames: frames })
+ .toCSS([], { compress: options.compress || false });
+ } catch (e) {
+ throw new(LessError)(e, env);
+ }
+
+ if ((importError = parser.imports.error)) { // Check if there was an error during importing
+ if (importError instanceof LessError) throw importError;
+ else throw new(LessError)(importError, env);
+ }
+
+ if (options.yuicompress && less.mode === 'node') {
+ return require('./cssmin').compressor.cssmin(css);
+ } else if (options.compress) {
+ return css.replace(/(\s)+/g, "$1");
+ } else {
+ return css;
+ }
+ };
+ })(root.eval);
+
+ // If `i` is smaller than the `input.length - 1`,
+ // it means the parser wasn't able to parse the whole
+ // string, so we've got a parsing error.
+ //
+ // We try to extract a \n delimited string,
+ // showing the line where the parse error occured.
+ // We split it up into two parts (the part which parsed,
+ // and the part which didn't), so we can color them differently.
+ if (i < input.length - 1) {
+ i = furthest;
+ lines = input.split('\n');
+ line = (input.slice(0, i).match(/\n/g) || "").length + 1;
+
+ for (var n = i, column = -1; n >= 0 && input.charAt(n) !== '\n'; n--) { column++ }
+
+ error = {
+ type: "Parse",
+ message: "Syntax Error on line " + line,
+ index: i,
+ filename: env.filename,
+ line: line,
+ column: column,
+ extract: [
+ lines[line - 2],
+ lines[line - 1],
+ lines[line]
+ ]
+ };
+ }
+
+ if (this.imports.queue.length > 0) {
+ finish = function () { callback(error, root) };
+ } else {
+ callback(error, root);
+ }
+ },
+
+ //
+ // Here in, the parsing rules/functions
+ //
+ // The basic structure of the syntax tree generated is as follows:
+ //
+ // Ruleset -> Rule -> Value -> Expression -> Entity
+ //
+ // Here's some LESS code:
+ //
+ // .class {
+ // color: #fff;
+ // border: 1px solid #000;
+ // width: @w + 4px;
+ // > .child {...}
+ // }
+ //
+ // And here's what the parse tree might look like:
+ //
+ // Ruleset (Selector '.class', [
+ // Rule ("color", Value ([Expression [Color #fff]]))
+ // Rule ("border", Value ([Expression [Dimension 1px][Keyword "solid"][Color #000]]))
+ // Rule ("width", Value ([Expression [Operation "+" [Variable "@w"][Dimension 4px]]]))
+ // Ruleset (Selector [Element '>', '.child'], [...])
+ // ])
+ //
+ // In general, most rules will try to parse a token with the `$()` function, and if the return
+ // value is truly, will return a new node, of the relevant type. Sometimes, we need to check
+ // first, before parsing, that's when we use `peek()`.
+ //
+ parsers: {
+ //
+ // The `primary` rule is the *entry* and *exit* point of the parser.
+ // The rules here can appear at any level of the parse tree.
+ //
+ // The recursive nature of the grammar is an interplay between the `block`
+ // rule, which represents `{ ... }`, the `ruleset` rule, and this `primary` rule,
+ // as represented by this simplified grammar:
+ //
+ // primary → (ruleset | rule)+
+ // ruleset → selector+ block
+ // block → '{' primary '}'
+ //
+ // Only at one point is the primary rule not called from the
+ // block rule: at the root level.
+ //
+ primary: function () {
+ var node, root = [];
+
+ while ((node = $(this.mixin.definition) || $(this.rule) || $(this.ruleset) ||
+ $(this.mixin.call) || $(this.comment) || $(this.directive))
+ || $(/^[\s\n]+/)) {
+ node && root.push(node);
+ }
+ return root;
+ },
+
+ // We create a Comment node for CSS comments `/* */`,
+ // but keep the LeSS comments `//` silent, by just skipping
+ // over them.
+ comment: function () {
+ var comment;
+
+ if (input.charAt(i) !== '/') return;
+
+ if (input.charAt(i + 1) === '/') {
+ return new(tree.Comment)($(/^\/\/.*/), true);
+ } else if (comment = $(/^\/\*(?:[^*]|\*+[^\/*])*\*+\/\n?/)) {
+ return new(tree.Comment)(comment);
+ }
+ },
+
+ //
+ // Entities are tokens which can be found inside an Expression
+ //
+ entities: {
+ //
+ // A string, which supports escaping " and '
+ //
+ // "milky way" 'he\'s the one!'
+ //
+ quoted: function () {
+ var str, j = i, e;
+
+ if (input.charAt(j) === '~') { j++, e = true } // Escaped strings
+ if (input.charAt(j) !== '"' && input.charAt(j) !== "'") return;
+
+ e && $('~');
+
+ if (str = $(/^"((?:[^"\\\r\n]|\\.)*)"|'((?:[^'\\\r\n]|\\.)*)'/)) {
+ return new(tree.Quoted)(str[0], str[1] || str[2], e);
+ }
+ },
+
+ //
+ // A catch-all word, such as:
+ //
+ // black border-collapse
+ //
+ keyword: function () {
+ var k;
+
+ if (k = $(/^[_A-Za-z-][_A-Za-z0-9-]*/)) {
+ if (tree.colors.hasOwnProperty(k)) {
+ // detect named color
+ return new(tree.Color)(tree.colors[k].slice(1));
+ } else {
+ return new(tree.Keyword)(k);
+ }
+ }
+ },
+
+ //
+ // A function call
+ //
+ // rgb(255, 0, 255)
+ //
+ // We also try to catch IE's `alpha()`, but let the `alpha` parser
+ // deal with the details.
+ //
+ // The arguments are parsed with the `entities.arguments` parser.
+ //
+ call: function () {
+ var name, args, index = i;
+
+ if (! (name = /^([\w-]+|%|progid:[\w\.]+)\(/.exec(chunks[j]))) return;
+
+ name = name[1].toLowerCase();
+
+ if (name === 'url') { return null }
+ else { i += name.length }
+
+ if (name === 'alpha') { return $(this.alpha) }
+
+ $('('); // Parse the '(' and consume whitespace.
+
+ args = $(this.entities.arguments);
+
+ if (! $(')')) return;
+
+ if (name) { return new(tree.Call)(name, args, index, env.filename) }
+ },
+ arguments: function () {
+ var args = [], arg;
+
+ while (arg = $(this.entities.assignment) || $(this.expression)) {
+ args.push(arg);
+ if (! $(',')) { break }
+ }
+ return args;
+ },
+ literal: function () {
+ return $(this.entities.dimension) ||
+ $(this.entities.color) ||
+ $(this.entities.quoted);
+ },
+
+ // Assignments are argument entities for calls.
+ // They are present in ie filter properties as shown below.
+ //
+ // filter: progid:DXImageTransform.Microsoft.Alpha( *opacity=50* )
+ //
+
+ assignment: function () {
+ var key, value;
+ if ((key = $(/^\w+(?=\s?=)/i)) && $('=') && (value = $(this.entity))) {
+ return new(tree.Assignment)(key, value);
+ }
+ },
+
+ //
+ // Parse url() tokens
+ //
+ // We use a specific rule for urls, because they don't really behave like
+ // standard function calls. The difference is that the argument doesn't have
+ // to be enclosed within a string, so it can't be parsed as an Expression.
+ //
+ url: function () {
+ var value;
+
+ if (input.charAt(i) !== 'u' || !$(/^url\(/)) return;
+ value = $(this.entities.quoted) || $(this.entities.variable) ||
+ $(this.entities.dataURI) || $(/^[-\w%@$\/.&=:;#+?~]+/) || "";
+
+ expect(')');
+
+ return new(tree.URL)((value.value || value.data || value instanceof tree.Variable)
+ ? value : new(tree.Anonymous)(value), imports.paths);
+ },
+
+ dataURI: function () {
+ var obj;
+
+ if ($(/^data:/)) {
+ obj = {};
+ obj.mime = $(/^[^\/]+\/[^,;)]+/) || '';
+ obj.charset = $(/^;\s*charset=[^,;)]+/) || '';
+ obj.base64 = $(/^;\s*base64/) || '';
+ obj.data = $(/^,\s*[^)]+/);
+
+ if (obj.data) { return obj }
+ }
+ },
+
+ //
+ // A Variable entity, such as `@fink`, in
+ //
+ // width: @fink + 2px
+ //
+ // We use a different parser for variable definitions,
+ // see `parsers.variable`.
+ //
+ variable: function () {
+ var name, index = i;
+
+ if (input.charAt(i) === '@' && (name = $(/^@@?[\w-]+/))) {
+ return new(tree.Variable)(name, index, env.filename);
+ }
+ },
+
+ //
+ // A Hexadecimal color
+ //
+ // #4F3C2F
+ //
+ // `rgb` and `hsl` colors are parsed through the `entities.call` parser.
+ //
+ color: function () {
+ var rgb;
+
+ if (input.charAt(i) === '#' && (rgb = $(/^#([a-fA-F0-9]{6}|[a-fA-F0-9]{3})/))) {
+ return new(tree.Color)(rgb[1]);
+ }
+ },
+
+ //
+ // A Dimension, that is, a number and a unit
+ //
+ // 0.5em 95%
+ //
+ dimension: function () {
+ var value, c = input.charCodeAt(i);
+ if ((c > 57 || c < 45) || c === 47) return;
+
+ if (value = $(/^(-?\d*\.?\d+)(px|%|em|rem|pc|ex|in|deg|s|ms|pt|cm|mm|rad|grad|turn|dpi)?/)) {
+ return new(tree.Dimension)(value[1], value[2]);
+ }
+ },
+
+ //
+ // JavaScript code to be evaluated
+ //
+ // `window.location.href`
+ //
+ javascript: function () {
+ var str, j = i, e;
+
+ if (input.charAt(j) === '~') { j++, e = true } // Escaped strings
+ if (input.charAt(j) !== '`') { return }
+
+ e && $('~');
+
+ if (str = $(/^`([^`]*)`/)) {
+ return new(tree.JavaScript)(str[1], i, e);
+ }
+ }
+ },
+
+ //
+ // The variable part of a variable definition. Used in the `rule` parser
+ //
+ // @fink:
+ //
+ variable: function () {
+ var name;
+
+ if (input.charAt(i) === '@' && (name = $(/^(@[\w-]+)\s*:/))) { return name[1] }
+ },
+
+ //
+ // A font size/line-height shorthand
+ //
+ // small/12px
+ //
+ // We need to peek first, or we'll match on keywords and dimensions
+ //
+ shorthand: function () {
+ var a, b;
+
+ if (! peek(/^[@\w.%-]+\/[@\w.-]+/)) return;
+
+ if ((a = $(this.entity)) && $('/') && (b = $(this.entity))) {
+ return new(tree.Shorthand)(a, b);
+ }
+ },
+
+ //
+ // Mixins
+ //
+ mixin: {
+ //
+ // A Mixin call, with an optional argument list
+ //
+ // #mixins > .square(#fff);
+ // .rounded(4px, black);
+ // .button;
+ //
+ // The `while` loop is there because mixins can be
+ // namespaced, but we only support the child and descendant
+ // selector for now.
+ //
+ call: function () {
+ var elements = [], e, c, args = [], arg, index = i, s = input.charAt(i), name, value, important = false;
+
+ if (s !== '.' && s !== '#') { return }
+
+ while (e = $(/^[#.](?:[\w-]|\\(?:[a-fA-F0-9]{1,6} ?|[^a-fA-F0-9]))+/)) {
+ elements.push(new(tree.Element)(c, e, i));
+ c = $('>');
+ }
+ if ($('(')) {
+ while (arg = $(this.expression)) {
+ value = arg;
+ name = null;
+
+ // Variable
+ if (arg.value.length == 1) {
+ var val = arg.value[0];
+ if (val instanceof tree.Variable) {
+ if ($(':')) {
+ if (value = $(this.expression)) {
+ name = val.name;
+ } else {
+ throw new(Error)("Expected value");
+ }
+ }
+ }
+ }
+
+ args.push({ name: name, value: value });
+
+ if (! $(',')) { break }
+ }
+ if (! $(')')) throw new(Error)("Expected )");
+ }
+
+ if ($(this.important)) {
+ important = true;
+ }
+
+ if (elements.length > 0 && ($(';') || peek('}'))) {
+ return new(tree.mixin.Call)(elements, args, index, env.filename, important);
+ }
+ },
+
+ //
+ // A Mixin definition, with a list of parameters
+ //
+ // .rounded (@radius: 2px, @color) {
+ // ...
+ // }
+ //
+ // Until we have a finer grained state-machine, we have to
+ // do a look-ahead, to make sure we don't have a mixin call.
+ // See the `rule` function for more information.
+ //
+ // We start by matching `.rounded (`, and then proceed on to
+ // the argument list, which has optional default values.
+ // We store the parameters in `params`, with a `value` key,
+ // if there is a value, such as in the case of `@radius`.
+ //
+ // Once we've got our params list, and a closing `)`, we parse
+ // the `{...}` block.
+ //
+ definition: function () {
+ var name, params = [], match, ruleset, param, value, cond, variadic = false;
+ if ((input.charAt(i) !== '.' && input.charAt(i) !== '#') ||
+ peek(/^[^{]*(;|})/)) return;
+
+ save();
+
+ if (match = $(/^([#.](?:[\w-]|\\(?:[a-fA-F0-9]{1,6} ?|[^a-fA-F0-9]))+)\s*\(/)) {
+ name = match[1];
+
+ do {
+ if (input.charAt(i) === '.' && $(/^\.{3}/)) {
+ variadic = true;
+ break;
+ } else if (param = $(this.entities.variable) || $(this.entities.literal)
+ || $(this.entities.keyword)) {
+ // Variable
+ if (param instanceof tree.Variable) {
+ if ($(':')) {
+ value = expect(this.expression, 'expected expression');
+ params.push({ name: param.name, value: value });
+ } else if ($(/^\.{3}/)) {
+ params.push({ name: param.name, variadic: true });
+ variadic = true;
+ break;
+ } else {
+ params.push({ name: param.name });
+ }
+ } else {
+ params.push({ value: param });
+ }
+ } else {
+ break;
+ }
+ } while ($(','))
+
+ expect(')');
+
+ if ($(/^when/)) { // Guard
+ cond = expect(this.conditions, 'expected condition');
+ }
+
+ ruleset = $(this.block);
+
+ if (ruleset) {
+ return new(tree.mixin.Definition)(name, params, ruleset, cond, variadic);
+ } else {
+ restore();
+ }
+ }
+ }
+ },
+
+ //
+ // Entities are the smallest recognized token,
+ // and can be found inside a rule's value.
+ //
+ entity: function () {
+ return $(this.entities.literal) || $(this.entities.variable) || $(this.entities.url) ||
+ $(this.entities.call) || $(this.entities.keyword) || $(this.entities.javascript) ||
+ $(this.comment);
+ },
+
+ //
+ // A Rule terminator. Note that we use `peek()` to check for '}',
+ // because the `block` rule will be expecting it, but we still need to make sure
+    // it's there, if ';' was omitted.
+ //
+ end: function () {
+ return $(';') || peek('}');
+ },
+
+ //
+ // IE's alpha function
+ //
+ // alpha(opacity=88)
+ //
+ alpha: function () {
+ var value;
+
+ if (! $(/^\(opacity=/i)) return;
+ if (value = $(/^\d+/) || $(this.entities.variable)) {
+ expect(')');
+ return new(tree.Alpha)(value);
+ }
+ },
+
+ //
+ // A Selector Element
+ //
+ // div
+ // + h1
+ // #socks
+ // input[type="text"]
+ //
+ // Elements are the building blocks for Selectors,
+ // they are made out of a `Combinator` (see combinator rule),
+    // and an element name, such as a tag, a class, or `*`.
+ //
+ element: function () {
+ var e, t, c, v;
+
+ c = $(this.combinator);
+ e = $(/^(?:\d+\.\d+|\d+)%/) || $(/^(?:[.#]?|:*)(?:[\w-]|\\(?:[a-fA-F0-9]{1,6} ?|[^a-fA-F0-9]))+/) ||
+ $('*') || $(this.attribute) || $(/^\([^)@]+\)/);
+
+ if (! e) {
+ $('(') && (v = $(this.entities.variable)) && $(')') && (e = new(tree.Paren)(v));
+ }
+
+ if (e) { return new(tree.Element)(c, e, i) }
+
+ if (c.value && c.value.charAt(0) === '&') {
+ return new(tree.Element)(c, null, i);
+ }
+ },
+
+ //
+ // Combinators combine elements together, in a Selector.
+ //
+ // Because our parser isn't white-space sensitive, special care
+ // has to be taken, when parsing the descendant combinator, ` `,
+ // as it's an empty space. We have to check the previous character
+ // in the input, to see if it's a ` ` character. More info on how
+ // we deal with this in *combinator.js*.
+ //
+ combinator: function () {
+ var match, c = input.charAt(i);
+
+ if (c === '>' || c === '+' || c === '~') {
+ i++;
+ while (input.charAt(i) === ' ') { i++ }
+ return new(tree.Combinator)(c);
+ } else if (c === '&') {
+ match = '&';
+ i++;
+ if(input.charAt(i) === ' ') {
+ match = '& ';
+ }
+ while (input.charAt(i) === ' ') { i++ }
+ return new(tree.Combinator)(match);
+ } else if (input.charAt(i - 1) === ' ') {
+ return new(tree.Combinator)(" ");
+ } else {
+ return new(tree.Combinator)(null);
+ }
+ },
+
+ //
+ // A CSS Selector
+ //
+ // .class > div + h1
+ // li a:hover
+ //
+ // Selectors are made out of one or more Elements, see above.
+ //
+ selector: function () {
+ var sel, e, elements = [], c, match;
+
+ if ($('(')) {
+ sel = $(this.entity);
+ expect(')');
+ return new(tree.Selector)([new(tree.Element)('', sel, i)]);
+ }
+
+ while (e = $(this.element)) {
+ c = input.charAt(i);
+ elements.push(e)
+ if (c === '{' || c === '}' || c === ';' || c === ',') { break }
+ }
+
+ if (elements.length > 0) { return new(tree.Selector)(elements) }
+ },
+ tag: function () {
+ return $(/^[a-zA-Z][a-zA-Z-]*[0-9]?/) || $('*');
+ },
+ attribute: function () {
+ var attr = '', key, val, op;
+
+ if (! $('[')) return;
+
+ if (key = $(/^[a-zA-Z-]+/) || $(this.entities.quoted)) {
+ if ((op = $(/^[|~*$^]?=/)) &&
+ (val = $(this.entities.quoted) || $(/^[\w-]+/))) {
+ attr = [key, op, val.toCSS ? val.toCSS() : val].join('');
+ } else { attr = key }
+ }
+
+ if (! $(']')) return;
+
+ if (attr) { return "[" + attr + "]" }
+ },
+
+ //
+ // The `block` rule is used by `ruleset` and `mixin.definition`.
+ // It's a wrapper around the `primary` rule, with added `{}`.
+ //
+ block: function () {
+ var content;
+
+ if ($('{') && (content = $(this.primary)) && $('}')) {
+ return content;
+ }
+ },
+
+ //
+ // div, .class, body > p {...}
+ //
+ ruleset: function () {
+ var selectors = [], s, rules, match;
+ save();
+
+ while (s = $(this.selector)) {
+ selectors.push(s);
+ $(this.comment);
+ if (! $(',')) { break }
+ $(this.comment);
+ }
+
+ if (selectors.length > 0 && (rules = $(this.block))) {
+ return new(tree.Ruleset)(selectors, rules, env.strictImports);
+ } else {
+ // Backtrack
+ furthest = i;
+ restore();
+ }
+ },
+ rule: function () {
+ var name, value, c = input.charAt(i), important, match;
+ save();
+
+ if (c === '.' || c === '#' || c === '&') { return }
+
+ if (name = $(this.variable) || $(this.property)) {
+ if ((name.charAt(0) != '@') && (match = /^([^@+\/'"*`(;{}-]*);/.exec(chunks[j]))) {
+ i += match[0].length - 1;
+ value = new(tree.Anonymous)(match[1]);
+ } else if (name === "font") {
+ value = $(this.font);
+ } else {
+ value = $(this.value);
+ }
+ important = $(this.important);
+
+ if (value && $(this.end)) {
+ return new(tree.Rule)(name, value, important, memo);
+ } else {
+ furthest = i;
+ restore();
+ }
+ }
+ },
+
+ //
+ // An @import directive
+ //
+ // @import "lib";
+ //
+    // Depending on our environment, importing is done differently:
+ // In the browser, it's an XHR request, in Node, it would be a
+ // file-system operation. The function used for importing is
+ // stored in `import`, which we pass to the Import constructor.
+ //
+ "import": function () {
+ var path, features, index = i;
+ var dir = $(/^@import(?:-(once))?\s+/);
+
+ if (dir && (path = $(this.entities.quoted) || $(this.entities.url))) {
+ features = $(this.mediaFeatures);
+ if ($(';')) {
+ return new(tree.Import)(path, imports, features, (dir[1] === 'once'), index);
+ }
+ }
+ },
+
+ mediaFeature: function () {
+ var e, p, nodes = [];
+
+ do {
+ if (e = $(this.entities.keyword)) {
+ nodes.push(e);
+ } else if ($('(')) {
+ p = $(this.property);
+ e = $(this.entity);
+ if ($(')')) {
+ if (p && e) {
+ nodes.push(new(tree.Paren)(new(tree.Rule)(p, e, null, i, true)));
+ } else if (e) {
+ nodes.push(new(tree.Paren)(e));
+ } else {
+ return null;
+ }
+ } else { return null }
+ }
+ } while (e);
+
+ if (nodes.length > 0) {
+ return new(tree.Expression)(nodes);
+ }
+ },
+
+ mediaFeatures: function () {
+ var e, features = [];
+
+ do {
+ if (e = $(this.mediaFeature)) {
+ features.push(e);
+ if (! $(',')) { break }
+ } else if (e = $(this.entities.variable)) {
+ features.push(e);
+ if (! $(',')) { break }
+ }
+ } while (e);
+
+ return features.length > 0 ? features : null;
+ },
+
+ media: function () {
+ var features, rules;
+
+ if ($(/^@media/)) {
+ features = $(this.mediaFeatures);
+
+ if (rules = $(this.block)) {
+ return new(tree.Media)(rules, features);
+ }
+ }
+ },
+
+ //
+ // A CSS Directive
+ //
+ // @charset "utf-8";
+ //
+ directive: function () {
+ var name, value, rules, types, e, nodes;
+
+ if (input.charAt(i) !== '@') return;
+
+ if (value = $(this['import']) || $(this.media)) {
+ return value;
+ } else if (name = $(/^@page|@keyframes/) || $(/^@(?:-webkit-|-moz-|-o-|-ms-)[a-z0-9-]+/)) {
+ types = ($(/^[^{]+/) || '').trim();
+ if (rules = $(this.block)) {
+ return new(tree.Directive)(name + " " + types, rules);
+ }
+ } else if (name = $(/^@[-a-z]+/)) {
+ if (name === '@font-face') {
+ if (rules = $(this.block)) {
+ return new(tree.Directive)(name, rules);
+ }
+ } else if ((value = $(this.entity)) && $(';')) {
+ return new(tree.Directive)(name, value);
+ }
+ }
+ },
+ font: function () {
+ var value = [], expression = [], weight, shorthand, font, e;
+
+ while (e = $(this.shorthand) || $(this.entity)) {
+ expression.push(e);
+ }
+ value.push(new(tree.Expression)(expression));
+
+ if ($(',')) {
+ while (e = $(this.expression)) {
+ value.push(e);
+ if (! $(',')) { break }
+ }
+ }
+ return new(tree.Value)(value);
+ },
+
+ //
+ // A Value is a comma-delimited list of Expressions
+ //
+ // font-family: Baskerville, Georgia, serif;
+ //
+ // In a Rule, a Value represents everything after the `:`,
+ // and before the `;`.
+ //
+ value: function () {
+ var e, expressions = [], important;
+
+ while (e = $(this.expression)) {
+ expressions.push(e);
+ if (! $(',')) { break }
+ }
+
+ if (expressions.length > 0) {
+ return new(tree.Value)(expressions);
+ }
+ },
+ important: function () {
+ if (input.charAt(i) === '!') {
+ return $(/^! *important/);
+ }
+ },
+ sub: function () {
+ var e;
+
+ if ($('(') && (e = $(this.expression)) && $(')')) {
+ return e;
+ }
+ },
+ multiplication: function () {
+ var m, a, op, operation;
+ if (m = $(this.operand)) {
+ while (!peek(/^\/\*/) && (op = ($('/') || $('*'))) && (a = $(this.operand))) {
+ operation = new(tree.Operation)(op, [operation || m, a]);
+ }
+ return operation || m;
+ }
+ },
+ addition: function () {
+ var m, a, op, operation;
+ if (m = $(this.multiplication)) {
+ while ((op = $(/^[-+]\s+/) || (input.charAt(i - 1) != ' ' && ($('+') || $('-')))) &&
+ (a = $(this.multiplication))) {
+ operation = new(tree.Operation)(op, [operation || m, a]);
+ }
+ return operation || m;
+ }
+ },
+ conditions: function () {
+ var a, b, index = i, condition;
+
+ if (a = $(this.condition)) {
+ while ($(',') && (b = $(this.condition))) {
+ condition = new(tree.Condition)('or', condition || a, b, index);
+ }
+ return condition || a;
+ }
+ },
+ condition: function () {
+ var a, b, c, op, index = i, negate = false;
+
+ if ($(/^not/)) { negate = true }
+ expect('(');
+ if (a = $(this.addition) || $(this.entities.keyword) || $(this.entities.quoted)) {
+ if (op = $(/^(?:>=|=<|[<=>])/)) {
+ if (b = $(this.addition) || $(this.entities.keyword) || $(this.entities.quoted)) {
+ c = new(tree.Condition)(op, a, b, index, negate);
+ } else {
+ error('expected expression');
+ }
+ } else {
+ c = new(tree.Condition)('=', a, new(tree.Keyword)('true'), index, negate);
+ }
+ expect(')');
+ return $(/^and/) ? new(tree.Condition)('and', c, $(this.condition)) : c;
+ }
+ },
+
+ //
+ // An operand is anything that can be part of an operation,
+ // such as a Color, or a Variable
+ //
+ operand: function () {
+ var negate, p = input.charAt(i + 1);
+
+ if (input.charAt(i) === '-' && (p === '@' || p === '(')) { negate = $('-') }
+ var o = $(this.sub) || $(this.entities.dimension) ||
+ $(this.entities.color) || $(this.entities.variable) ||
+ $(this.entities.call);
+ return negate ? new(tree.Operation)('*', [new(tree.Dimension)(-1), o])
+ : o;
+ },
+
+ //
+ // Expressions either represent mathematical operations,
+ // or white-space delimited Entities.
+ //
+ // 1px solid black
+ // @var * 2
+ //
+ expression: function () {
+ var e, delim, entities = [], d;
+
+ while (e = $(this.addition) || $(this.entity)) {
+ entities.push(e);
+ }
+ if (entities.length > 0) {
+ return new(tree.Expression)(entities);
+ }
+ },
+ property: function () {
+ var name;
+
+ if (name = $(/^(\*?-?[-a-z_0-9]+)\s*:/)) {
+ return name[1];
+ }
+ }
+ }
+ };
+};
+
+if (less.mode === 'browser' || less.mode === 'rhino') {
+ //
+ // Used by `@import` directives
+ //
+ less.Parser.importer = function (path, paths, callback, env) {
+ if (!/^([a-z]+:)?\//.test(path) && paths.length > 0) {
+ path = paths[0] + path;
+ }
+ // We pass `true` as 3rd argument, to force the reload of the import.
+ // This is so we can get the syntax tree as opposed to just the CSS output,
+ // as we need this to evaluate the current stylesheet.
+ loadStyleSheet({ href: path, title: path, type: env.mime }, function (e) {
+ if (e && typeof(env.errback) === "function") {
+ env.errback.call(null, path, paths, callback, env);
+ } else {
+ callback.apply(null, arguments);
+ }
+ }, true);
+ };
+}
+
diff --git a/tabula/bin/lib/less/rhino.js b/tabula/bin/lib/less/rhino.js
new file mode 100644
index 00000000..a2c5662f
--- /dev/null
+++ b/tabula/bin/lib/less/rhino.js
@@ -0,0 +1,62 @@
+var name;
+
+function loadStyleSheet(sheet, callback, reload, remaining) {
+ var sheetName = name.slice(0, name.lastIndexOf('/') + 1) + sheet.href;
+ var input = readFile(sheetName);
+ var parser = new less.Parser({
+ paths: [sheet.href.replace(/[\w\.-]+$/, '')]
+ });
+ parser.parse(input, function (e, root) {
+ if (e) {
+ print("Error: " + e);
+ quit(1);
+ }
+ callback(root, sheet, { local: false, lastModified: 0, remaining: remaining });
+ });
+
+ // callback({}, sheet, { local: true, remaining: remaining });
+}
+
+function writeFile(filename, content) {
+ var fstream = new java.io.FileWriter(filename);
+ var out = new java.io.BufferedWriter(fstream);
+ out.write(content);
+ out.close();
+}
+
+// Command line integration via Rhino
+(function (args) {
+ name = args[0];
+ var output = args[1];
+
+ if (!name) {
+ print('No files present in the fileset; Check your pattern match in build.xml');
+ quit(1);
+ }
+ path = name.split("/");path.pop();path=path.join("/")
+
+ var input = readFile(name);
+
+ if (!input) {
+ print('lesscss: couldn\'t open file ' + name);
+ quit(1);
+ }
+
+ var result;
+ var parser = new less.Parser();
+ parser.parse(input, function (e, root) {
+ if (e) {
+ quit(1);
+ } else {
+ result = root.toCSS();
+ if (output) {
+ writeFile(output, result);
+ print("Written to " + output);
+ } else {
+ print(result);
+ }
+ quit(0);
+ }
+ });
+ print("done");
+}(arguments));
diff --git a/tabula/bin/lib/less/tree.js b/tabula/bin/lib/less/tree.js
new file mode 100644
index 00000000..24ecd712
--- /dev/null
+++ b/tabula/bin/lib/less/tree.js
@@ -0,0 +1,17 @@
+(function (tree) {
+
+tree.find = function (obj, fun) {
+ for (var i = 0, r; i < obj.length; i++) {
+ if (r = fun.call(obj, obj[i])) { return r }
+ }
+ return null;
+};
+tree.jsify = function (obj) {
+ if (Array.isArray(obj.value) && (obj.value.length > 1)) {
+ return '[' + obj.value.map(function (v) { return v.toCSS(false) }).join(', ') + ']';
+ } else {
+ return obj.toCSS(false);
+ }
+};
+
+})(require('./tree'));
diff --git a/tabula/bin/lib/less/tree/alpha.js b/tabula/bin/lib/less/tree/alpha.js
new file mode 100644
index 00000000..139ae920
--- /dev/null
+++ b/tabula/bin/lib/less/tree/alpha.js
@@ -0,0 +1,17 @@
+(function (tree) {
+
+tree.Alpha = function (val) {
+ this.value = val;
+};
+tree.Alpha.prototype = {
+ toCSS: function () {
+ return "alpha(opacity=" +
+ (this.value.toCSS ? this.value.toCSS() : this.value) + ")";
+ },
+ eval: function (env) {
+ if (this.value.eval) { this.value = this.value.eval(env) }
+ return this;
+ }
+};
+
+})(require('../tree'));
diff --git a/tabula/bin/lib/less/tree/anonymous.js b/tabula/bin/lib/less/tree/anonymous.js
new file mode 100644
index 00000000..460c9ec7
--- /dev/null
+++ b/tabula/bin/lib/less/tree/anonymous.js
@@ -0,0 +1,13 @@
+(function (tree) {
+
+tree.Anonymous = function (string) {
+ this.value = string.value || string;
+};
+tree.Anonymous.prototype = {
+ toCSS: function () {
+ return this.value;
+ },
+ eval: function () { return this }
+};
+
+})(require('../tree'));
diff --git a/tabula/bin/lib/less/tree/assignment.js b/tabula/bin/lib/less/tree/assignment.js
new file mode 100644
index 00000000..70ce6e2f
--- /dev/null
+++ b/tabula/bin/lib/less/tree/assignment.js
@@ -0,0 +1,17 @@
+(function (tree) {
+
+tree.Assignment = function (key, val) {
+ this.key = key;
+ this.value = val;
+};
+tree.Assignment.prototype = {
+ toCSS: function () {
+ return this.key + '=' + (this.value.toCSS ? this.value.toCSS() : this.value);
+ },
+ eval: function (env) {
+ if (this.value.eval) { this.value = this.value.eval(env) }
+ return this;
+ }
+};
+
+})(require('../tree'));
\ No newline at end of file
diff --git a/tabula/bin/lib/less/tree/call.js b/tabula/bin/lib/less/tree/call.js
new file mode 100644
index 00000000..c1465dd4
--- /dev/null
+++ b/tabula/bin/lib/less/tree/call.js
@@ -0,0 +1,48 @@
+(function (tree) {
+
+//
+// A function call node.
+//
+tree.Call = function (name, args, index, filename) {
+ this.name = name;
+ this.args = args;
+ this.index = index;
+ this.filename = filename;
+};
+tree.Call.prototype = {
+ //
+ // When evaluating a function call,
+ // we either find the function in `tree.functions` [1],
+ // in which case we call it, passing the evaluated arguments,
+ // or we simply print it out as it appeared originally [2].
+ //
+ // The *functions.js* file contains the built-in functions.
+ //
+ // The reason why we evaluate the arguments, is in the case where
+ // we try to pass a variable to a function, like: `saturate(@color)`.
+ // The function should receive the value, not the variable.
+ //
+ eval: function (env) {
+ var args = this.args.map(function (a) { return a.eval(env) });
+
+ if (this.name in tree.functions) { // 1.
+ try {
+ return tree.functions[this.name].apply(tree.functions, args);
+ } catch (e) {
+ throw { type: e.type || "Runtime",
+ message: "error evaluating function `" + this.name + "`" +
+ (e.message ? ': ' + e.message : ''),
+ index: this.index, filename: this.filename };
+ }
+ } else { // 2.
+ return new(tree.Anonymous)(this.name +
+ "(" + args.map(function (a) { return a.toCSS() }).join(', ') + ")");
+ }
+ },
+
+ toCSS: function (env) {
+ return this.eval(env).toCSS();
+ }
+};
+
+})(require('../tree'));
diff --git a/tabula/bin/lib/less/tree/color.js b/tabula/bin/lib/less/tree/color.js
new file mode 100644
index 00000000..37ce1781
--- /dev/null
+++ b/tabula/bin/lib/less/tree/color.js
@@ -0,0 +1,101 @@
+(function (tree) {
+//
+// RGB Colors - #ff0014, #eee
+//
+tree.Color = function (rgb, a) {
+ //
+ // The end goal here, is to parse the arguments
+ // into an integer triplet, such as `128, 255, 0`
+ //
+ // This facilitates operations and conversions.
+ //
+ if (Array.isArray(rgb)) {
+ this.rgb = rgb;
+ } else if (rgb.length == 6) {
+ this.rgb = rgb.match(/.{2}/g).map(function (c) {
+ return parseInt(c, 16);
+ });
+ } else {
+ this.rgb = rgb.split('').map(function (c) {
+ return parseInt(c + c, 16);
+ });
+ }
+ this.alpha = typeof(a) === 'number' ? a : 1;
+};
+tree.Color.prototype = {
+ eval: function () { return this },
+
+ //
+ // If we have some transparency, the only way to represent it
+ // is via `rgba`. Otherwise, we use the hex representation,
+ // which has better compatibility with older browsers.
+ // Values are capped between `0` and `255`, rounded and zero-padded.
+ //
+ toCSS: function () {
+ if (this.alpha < 1.0) {
+ return "rgba(" + this.rgb.map(function (c) {
+ return Math.round(c);
+ }).concat(this.alpha).join(', ') + ")";
+ } else {
+ return '#' + this.rgb.map(function (i) {
+ i = Math.round(i);
+ i = (i > 255 ? 255 : (i < 0 ? 0 : i)).toString(16);
+ return i.length === 1 ? '0' + i : i;
+ }).join('');
+ }
+ },
+
+ //
+ // Operations have to be done per-channel, if not,
+ // channels will spill onto each other. Once we have
+ // our result, in the form of an integer triplet,
+ // we create a new Color node to hold the result.
+ //
+ operate: function (op, other) {
+ var result = [];
+
+ if (! (other instanceof tree.Color)) {
+ other = other.toColor();
+ }
+
+ for (var c = 0; c < 3; c++) {
+ result[c] = tree.operate(op, this.rgb[c], other.rgb[c]);
+ }
+ return new(tree.Color)(result, this.alpha + other.alpha);
+ },
+
+ toHSL: function () {
+ var r = this.rgb[0] / 255,
+ g = this.rgb[1] / 255,
+ b = this.rgb[2] / 255,
+ a = this.alpha;
+
+ var max = Math.max(r, g, b), min = Math.min(r, g, b);
+ var h, s, l = (max + min) / 2, d = max - min;
+
+ if (max === min) {
+ h = s = 0;
+ } else {
+ s = l > 0.5 ? d / (2 - max - min) : d / (max + min);
+
+ switch (max) {
+ case r: h = (g - b) / d + (g < b ? 6 : 0); break;
+ case g: h = (b - r) / d + 2; break;
+ case b: h = (r - g) / d + 4; break;
+ }
+ h /= 6;
+ }
+ return { h: h * 360, s: s, l: l, a: a };
+ },
+ toARGB: function () {
+ var argb = [Math.round(this.alpha * 255)].concat(this.rgb);
+ return '#' + argb.map(function (i) {
+ i = Math.round(i);
+ i = (i > 255 ? 255 : (i < 0 ? 0 : i)).toString(16);
+ return i.length === 1 ? '0' + i : i;
+ }).join('');
+ }
+};
+
+
+})(require('../tree'));
diff --git a/tabula/bin/lib/less/tree/comment.js b/tabula/bin/lib/less/tree/comment.js
new file mode 100644
index 00000000..f4a33840
--- /dev/null
+++ b/tabula/bin/lib/less/tree/comment.js
@@ -0,0 +1,14 @@
+(function (tree) {
+
+tree.Comment = function (value, silent) {
+ this.value = value;
+ this.silent = !!silent;
+};
+tree.Comment.prototype = {
+ toCSS: function (env) {
+ return env.compress ? '' : this.value;
+ },
+ eval: function () { return this }
+};
+
+})(require('../tree'));
diff --git a/tabula/bin/lib/less/tree/condition.js b/tabula/bin/lib/less/tree/condition.js
new file mode 100644
index 00000000..6b79dc96
--- /dev/null
+++ b/tabula/bin/lib/less/tree/condition.js
@@ -0,0 +1,42 @@
+(function (tree) {
+
+tree.Condition = function (op, l, r, i, negate) {
+ this.op = op.trim();
+ this.lvalue = l;
+ this.rvalue = r;
+ this.index = i;
+ this.negate = negate;
+};
+tree.Condition.prototype.eval = function (env) {
+ var a = this.lvalue.eval(env),
+ b = this.rvalue.eval(env);
+
+ var i = this.index, result;
+
+ var result = (function (op) {
+ switch (op) {
+ case 'and':
+ return a && b;
+ case 'or':
+ return a || b;
+ default:
+ if (a.compare) {
+ result = a.compare(b);
+ } else if (b.compare) {
+ result = b.compare(a);
+ } else {
+ throw { type: "Type",
+ message: "Unable to perform comparison",
+ index: i };
+ }
+ switch (result) {
+ case -1: return op === '<' || op === '=<';
+ case 0: return op === '=' || op === '>=' || op === '=<';
+ case 1: return op === '>' || op === '>=';
+ }
+ }
+ })(this.op);
+ return this.negate ? !result : result;
+};
+
+})(require('../tree'));
diff --git a/tabula/bin/lib/less/tree/dimension.js b/tabula/bin/lib/less/tree/dimension.js
new file mode 100644
index 00000000..9a6fce3d
--- /dev/null
+++ b/tabula/bin/lib/less/tree/dimension.js
@@ -0,0 +1,49 @@
+(function (tree) {
+
+//
+// A number with a unit
+//
+tree.Dimension = function (value, unit) {
+ this.value = parseFloat(value);
+ this.unit = unit || null;
+};
+
+tree.Dimension.prototype = {
+ eval: function () { return this },
+ toColor: function () {
+ return new(tree.Color)([this.value, this.value, this.value]);
+ },
+ toCSS: function () {
+ var css = this.value + this.unit;
+ return css;
+ },
+
+ // In an operation between two Dimensions,
+ // we default to the first Dimension's unit,
+ // so `1px + 2em` will yield `3px`.
+ // In the future, we could implement some unit
+ // conversions such that `100cm + 10mm` would yield
+ // `101cm`.
+ operate: function (op, other) {
+ return new(tree.Dimension)
+ (tree.operate(op, this.value, other.value),
+ this.unit || other.unit);
+ },
+
+ // TODO: Perform unit conversion before comparing
+ compare: function (other) {
+ if (other instanceof tree.Dimension) {
+ if (other.value > this.value) {
+ return -1;
+ } else if (other.value < this.value) {
+ return 1;
+ } else {
+ return 0;
+ }
+ } else {
+ return -1;
+ }
+ }
+};
+
+})(require('../tree'));
diff --git a/tabula/bin/lib/less/tree/directive.js b/tabula/bin/lib/less/tree/directive.js
new file mode 100644
index 00000000..27538332
--- /dev/null
+++ b/tabula/bin/lib/less/tree/directive.js
@@ -0,0 +1,35 @@
+(function (tree) {
+
+tree.Directive = function (name, value, features) {
+ this.name = name;
+
+ if (Array.isArray(value)) {
+ this.ruleset = new(tree.Ruleset)([], value);
+ this.ruleset.allowImports = true;
+ } else {
+ this.value = value;
+ }
+};
+tree.Directive.prototype = {
+ toCSS: function (ctx, env) {
+ if (this.ruleset) {
+ this.ruleset.root = true;
+ return this.name + (env.compress ? '{' : ' {\n ') +
+ this.ruleset.toCSS(ctx, env).trim().replace(/\n/g, '\n ') +
+ (env.compress ? '}': '\n}\n');
+ } else {
+ return this.name + ' ' + this.value.toCSS() + ';\n';
+ }
+ },
+ eval: function (env) {
+ env.frames.unshift(this);
+ this.ruleset = this.ruleset && this.ruleset.eval(env);
+ env.frames.shift();
+ return this;
+ },
+ variable: function (name) { return tree.Ruleset.prototype.variable.call(this.ruleset, name) },
+ find: function () { return tree.Ruleset.prototype.find.apply(this.ruleset, arguments) },
+ rulesets: function () { return tree.Ruleset.prototype.rulesets.apply(this.ruleset) }
+};
+
+})(require('../tree'));
diff --git a/tabula/bin/lib/less/tree/element.js b/tabula/bin/lib/less/tree/element.js
new file mode 100644
index 00000000..14b08d2e
--- /dev/null
+++ b/tabula/bin/lib/less/tree/element.js
@@ -0,0 +1,52 @@
+(function (tree) {
+
+tree.Element = function (combinator, value, index) {
+ this.combinator = combinator instanceof tree.Combinator ?
+ combinator : new(tree.Combinator)(combinator);
+
+ if (typeof(value) === 'string') {
+ this.value = value.trim();
+ } else if (value) {
+ this.value = value;
+ } else {
+ this.value = "";
+ }
+ this.index = index;
+};
+tree.Element.prototype.eval = function (env) {
+ return new(tree.Element)(this.combinator,
+ this.value.eval ? this.value.eval(env) : this.value,
+ this.index);
+};
+tree.Element.prototype.toCSS = function (env) {
+ var value = (this.value.toCSS ? this.value.toCSS(env) : this.value);
+ if (value == '' && this.combinator.value.charAt(0) == '&') {
+ return '';
+ } else {
+ return this.combinator.toCSS(env || {}) + value;
+ }
+};
+
+tree.Combinator = function (value) {
+ if (value === ' ') {
+ this.value = ' ';
+ } else if (value === '& ') {
+ this.value = '& ';
+ } else {
+ this.value = value ? value.trim() : "";
+ }
+};
+tree.Combinator.prototype.toCSS = function (env) {
+ return {
+ '' : '',
+ ' ' : ' ',
+ '&' : '',
+ '& ' : ' ',
+ ':' : ' :',
+ '+' : env.compress ? '+' : ' + ',
+ '~' : env.compress ? '~' : ' ~ ',
+ '>' : env.compress ? '>' : ' > '
+ }[this.value];
+};
+
+})(require('../tree'));
diff --git a/tabula/bin/lib/less/tree/expression.js b/tabula/bin/lib/less/tree/expression.js
new file mode 100644
index 00000000..fbfa9c5b
--- /dev/null
+++ b/tabula/bin/lib/less/tree/expression.js
@@ -0,0 +1,23 @@
+(function (tree) {
+
+tree.Expression = function (value) { this.value = value };
+tree.Expression.prototype = {
+ eval: function (env) {
+ if (this.value.length > 1) {
+ return new(tree.Expression)(this.value.map(function (e) {
+ return e.eval(env);
+ }));
+ } else if (this.value.length === 1) {
+ return this.value[0].eval(env);
+ } else {
+ return this;
+ }
+ },
+ toCSS: function (env) {
+ return this.value.map(function (e) {
+ return e.toCSS ? e.toCSS(env) : '';
+ }).join(' ');
+ }
+};
+
+})(require('../tree'));
diff --git a/tabula/bin/lib/less/tree/import.js b/tabula/bin/lib/less/tree/import.js
new file mode 100644
index 00000000..7a977def
--- /dev/null
+++ b/tabula/bin/lib/less/tree/import.js
@@ -0,0 +1,83 @@
+(function (tree) {
+//
+// CSS @import node
+//
+// The general strategy here is that we don't want to wait
+// for the parsing to be completed, before we start importing
+// the file. That's because in the context of a browser,
+// most of the time will be spent waiting for the server to respond.
+//
+// On creation, we push the import path to our import queue, through
+// `imports.push`; we also pass it a callback, which it'll call once
+// the file has been fetched, and parsed.
+//
+tree.Import = function (path, imports, features, once, index) {
+ var that = this;
+
+ this.once = once;
+ this.index = index;
+ this._path = path;
+ this.features = features && new(tree.Value)(features);
+
+ // The '.less' extension is optional
+ if (path instanceof tree.Quoted) {
+ this.path = /\.(le?|c)ss(\?.*)?$/.test(path.value) ? path.value : path.value + '.less';
+ } else {
+ this.path = path.value.value || path.value;
+ }
+
+ this.css = /css(\?.*)?$/.test(this.path);
+
+ // Only pre-compile .less files
+ if (! this.css) {
+ imports.push(this.path, function (e, root, imported) {
+ if (e) { e.index = index }
+ if (imported && that.once) that.skip = imported;
+ that.root = root || new(tree.Ruleset)([], []);
+ });
+ }
+};
+
+//
+// The actual import node doesn't return anything, when converted to CSS.
+// The reason is that it's used at the evaluation stage, so that the rules
+// it imports can be treated like any other rules.
+//
+// In `eval`, we make sure all Import nodes get evaluated, recursively, so
+// we end up with a flat structure, which can easily be imported in the parent
+// ruleset.
+//
+tree.Import.prototype = {
+ toCSS: function (env) {
+ var features = this.features ? ' ' + this.features.toCSS(env) : '';
+
+ if (this.css) {
+ return "@import " + this._path.toCSS() + features + ';\n';
+ } else {
+ return "";
+ }
+ },
+ eval: function (env) {
+ var ruleset, features = this.features && this.features.eval(env);
+
+ if (this.skip) return [];
+
+ if (this.css) {
+ return this;
+ } else {
+ ruleset = new(tree.Ruleset)([], this.root.rules.slice(0));
+
+ for (var i = 0; i < ruleset.rules.length; i++) {
+ if (ruleset.rules[i] instanceof tree.Import) {
+ Array.prototype
+ .splice
+ .apply(ruleset.rules,
+ [i, 1].concat(ruleset.rules[i].eval(env)));
+ }
+ }
+ return this.features ? new(tree.Media)(ruleset.rules, this.features.value) : ruleset.rules;
+ }
+ }
+};
+
+})(require('../tree'));
diff --git a/tabula/bin/lib/less/tree/javascript.js b/tabula/bin/lib/less/tree/javascript.js
new file mode 100644
index 00000000..772a31dd
--- /dev/null
+++ b/tabula/bin/lib/less/tree/javascript.js
@@ -0,0 +1,51 @@
+(function (tree) {
+
+tree.JavaScript = function (string, index, escaped) {
+ this.escaped = escaped;
+ this.expression = string;
+ this.index = index;
+};
+tree.JavaScript.prototype = {
+ eval: function (env) {
+ var result,
+ that = this,
+ context = {};
+
+ var expression = this.expression.replace(/@\{([\w-]+)\}/g, function (_, name) {
+ return tree.jsify(new(tree.Variable)('@' + name, that.index).eval(env));
+ });
+
+ try {
+ expression = new(Function)('return (' + expression + ')');
+ } catch (e) {
+ throw { message: "JavaScript evaluation error: `" + expression + "`" ,
+ index: this.index };
+ }
+
+ for (var k in env.frames[0].variables()) {
+ context[k.slice(1)] = {
+ value: env.frames[0].variables()[k].value,
+ toJS: function () {
+ return this.value.eval(env).toCSS();
+ }
+ };
+ }
+
+ try {
+ result = expression.call(context);
+ } catch (e) {
+ throw { message: "JavaScript evaluation error: '" + e.name + ': ' + e.message + "'" ,
+ index: this.index };
+ }
+ if (typeof(result) === 'string') {
+ return new(tree.Quoted)('"' + result + '"', result, this.escaped, this.index);
+ } else if (Array.isArray(result)) {
+ return new(tree.Anonymous)(result.join(', '));
+ } else {
+ return new(tree.Anonymous)(result);
+ }
+ }
+};
+
+})(require('../tree'));
+
diff --git a/tabula/bin/lib/less/tree/keyword.js b/tabula/bin/lib/less/tree/keyword.js
new file mode 100644
index 00000000..701b79e5
--- /dev/null
+++ b/tabula/bin/lib/less/tree/keyword.js
@@ -0,0 +1,19 @@
+(function (tree) {
+
+tree.Keyword = function (value) { this.value = value };
+tree.Keyword.prototype = {
+ eval: function () { return this },
+ toCSS: function () { return this.value },
+ compare: function (other) {
+ if (other instanceof tree.Keyword) {
+ return other.value === this.value ? 0 : 1;
+ } else {
+ return -1;
+ }
+ }
+};
+
+tree.True = new(tree.Keyword)('true');
+tree.False = new(tree.Keyword)('false');
+
+})(require('../tree'));
diff --git a/tabula/bin/lib/less/tree/media.js b/tabula/bin/lib/less/tree/media.js
new file mode 100644
index 00000000..2b7b26e5
--- /dev/null
+++ b/tabula/bin/lib/less/tree/media.js
@@ -0,0 +1,114 @@
+(function (tree) {
+
+tree.Media = function (value, features) {
+ var el = new(tree.Element)('&', null, 0),
+ selectors = [new(tree.Selector)([el])];
+
+ this.features = new(tree.Value)(features);
+ this.ruleset = new(tree.Ruleset)(selectors, value);
+ this.ruleset.allowImports = true;
+};
+tree.Media.prototype = {
+ toCSS: function (ctx, env) {
+ var features = this.features.toCSS(env);
+
+ this.ruleset.root = (ctx.length === 0 || ctx[0].multiMedia);
+ return '@media ' + features + (env.compress ? '{' : ' {\n ') +
+ this.ruleset.toCSS(ctx, env).trim().replace(/\n/g, '\n ') +
+ (env.compress ? '}': '\n}\n');
+ },
+ eval: function (env) {
+ if (!env.mediaBlocks) {
+ env.mediaBlocks = [];
+ env.mediaPath = [];
+ }
+
+ var blockIndex = env.mediaBlocks.length;
+ env.mediaPath.push(this);
+ env.mediaBlocks.push(this);
+
+ var media = new(tree.Media)([], []);
+ media.features = this.features.eval(env);
+
+ env.frames.unshift(this.ruleset);
+ media.ruleset = this.ruleset.eval(env);
+ env.frames.shift();
+
+ env.mediaBlocks[blockIndex] = media;
+ env.mediaPath.pop();
+
+ return env.mediaPath.length === 0 ? media.evalTop(env) :
+ media.evalNested(env)
+ },
+ variable: function (name) { return tree.Ruleset.prototype.variable.call(this.ruleset, name) },
+ find: function () { return tree.Ruleset.prototype.find.apply(this.ruleset, arguments) },
+ rulesets: function () { return tree.Ruleset.prototype.rulesets.apply(this.ruleset) },
+
+ evalTop: function (env) {
+ var result = this;
+
+ // Render all dependent Media blocks.
+ if (env.mediaBlocks.length > 1) {
+ var el = new(tree.Element)('&', null, 0);
+ var selectors = [new(tree.Selector)([el])];
+ result = new(tree.Ruleset)(selectors, env.mediaBlocks);
+ result.multiMedia = true;
+ }
+
+ delete env.mediaBlocks;
+ delete env.mediaPath;
+
+ return result;
+ },
+ evalNested: function (env) {
+ var i, value,
+ path = env.mediaPath.concat([this]);
+
+ // Extract the media-query conditions separated with `,` (OR).
+ for (i = 0; i < path.length; i++) {
+ value = path[i].features instanceof tree.Value ?
+ path[i].features.value : path[i].features;
+ path[i] = Array.isArray(value) ? value : [value];
+ }
+
+ // Trace all permutations to generate the resulting media-query.
+ //
+ // (a, b and c) with nested (d, e) ->
+ // a and d
+ // a and e
+ // b and c and d
+ // b and c and e
+ this.features = new(tree.Value)(this.permute(path).map(function (path) {
+ path = path.map(function (fragment) {
+ return fragment.toCSS ? fragment : new(tree.Anonymous)(fragment);
+ });
+
+ for(i = path.length - 1; i > 0; i--) {
+ path.splice(i, 0, new(tree.Anonymous)("and"));
+ }
+
+ return new(tree.Expression)(path);
+ }));
+
+ // Fake a tree-node that doesn't output anything.
+ return new(tree.Ruleset)([], []);
+ },
+ permute: function (arr) {
+ if (arr.length === 0) {
+ return [];
+ } else if (arr.length === 1) {
+ return arr[0];
+ } else {
+ var result = [];
+ var rest = this.permute(arr.slice(1));
+ for (var i = 0; i < rest.length; i++) {
+ for (var j = 0; j < arr[0].length; j++) {
+ result.push([arr[0][j]].concat(rest[i]));
+ }
+ }
+ return result;
+ }
+ }
+};
+
+})(require('../tree'));
diff --git a/tabula/bin/lib/less/tree/mixin.js b/tabula/bin/lib/less/tree/mixin.js
new file mode 100644
index 00000000..b441bf3b
--- /dev/null
+++ b/tabula/bin/lib/less/tree/mixin.js
@@ -0,0 +1,146 @@
+(function (tree) {
+
+tree.mixin = {};
+tree.mixin.Call = function (elements, args, index, filename, important) {
+ this.selector = new(tree.Selector)(elements);
+ this.arguments = args;
+ this.index = index;
+ this.filename = filename;
+ this.important = important;
+};
+tree.mixin.Call.prototype = {
+ eval: function (env) {
+ var mixins, args, rules = [], match = false;
+
+ for (var i = 0; i < env.frames.length; i++) {
+ if ((mixins = env.frames[i].find(this.selector)).length > 0) {
+ args = this.arguments && this.arguments.map(function (a) {
+ return { name: a.name, value: a.value.eval(env) };
+ });
+ for (var m = 0; m < mixins.length; m++) {
+ if (mixins[m].match(args, env)) {
+ try {
+ Array.prototype.push.apply(
+ rules, mixins[m].eval(env, this.arguments, this.important).rules);
+ match = true;
+ } catch (e) {
+ throw { message: e.message, index: this.index, filename: this.filename, stack: e.stack };
+ }
+ }
+ }
+ if (match) {
+ return rules;
+ } else {
+ throw { type: 'Runtime',
+ message: 'No matching definition was found for `' +
+ this.selector.toCSS().trim() + '(' +
+ this.arguments.map(function (a) {
+ return a.toCSS();
+ }).join(', ') + ")`",
+ index: this.index, filename: this.filename };
+ }
+ }
+ }
+ throw { type: 'Name',
+ message: this.selector.toCSS().trim() + " is undefined",
+ index: this.index, filename: this.filename };
+ }
+};
+
+tree.mixin.Definition = function (name, params, rules, condition, variadic) {
+ this.name = name;
+ this.selectors = [new(tree.Selector)([new(tree.Element)(null, name)])];
+ this.params = params;
+ this.condition = condition;
+ this.variadic = variadic;
+ this.arity = params.length;
+ this.rules = rules;
+ this._lookups = {};
+ this.required = params.reduce(function (count, p) {
+ if (!p.name || (p.name && !p.value)) { return count + 1 }
+ else { return count }
+ }, 0);
+ this.parent = tree.Ruleset.prototype;
+ this.frames = [];
+};
+tree.mixin.Definition.prototype = {
+ toCSS: function () { return "" },
+ variable: function (name) { return this.parent.variable.call(this, name) },
+ variables: function () { return this.parent.variables.call(this) },
+ find: function () { return this.parent.find.apply(this, arguments) },
+ rulesets: function () { return this.parent.rulesets.apply(this) },
+
+ evalParams: function (env, args) {
+ var frame = new(tree.Ruleset)(null, []), varargs, arg;
+
+ for (var i = 0, val, name; i < this.params.length; i++) {
+ arg = args && args[i]
+
+ if (arg && arg.name) {
+ frame.rules.unshift(new(tree.Rule)(arg.name, arg.value.eval(env)));
+ args.splice(i, 1);
+ i--;
+ continue;
+ }
+
+ if (name = this.params[i].name) {
+ if (this.params[i].variadic && args) {
+ varargs = [];
+ for (var j = i; j < args.length; j++) {
+ varargs.push(args[j].value.eval(env));
+ }
+ frame.rules.unshift(new(tree.Rule)(name, new(tree.Expression)(varargs).eval(env)));
+ } else if (val = (arg && arg.value) || this.params[i].value) {
+ frame.rules.unshift(new(tree.Rule)(name, val.eval(env)));
+ } else {
+ throw { type: 'Runtime', message: "wrong number of arguments for " + this.name +
+ ' (' + args.length + ' for ' + this.arity + ')' };
+ }
+ }
+ }
+ return frame;
+ },
+ eval: function (env, args, important) {
+ var frame = this.evalParams(env, args), context, _arguments = [], rules, start;
+
+ for (var i = 0; i < Math.max(this.params.length, args && args.length); i++) {
+ _arguments.push((args[i] && args[i].value) || this.params[i].value);
+ }
+ frame.rules.unshift(new(tree.Rule)('@arguments', new(tree.Expression)(_arguments).eval(env)));
+
+ rules = important ?
+ this.rules.map(function (r) {
+ return new(tree.Rule)(r.name, r.value, '!important', r.index);
+ }) : this.rules.slice(0);
+
+ return new(tree.Ruleset)(null, rules).eval({
+ frames: [this, frame].concat(this.frames, env.frames)
+ });
+ },
+ match: function (args, env) {
+ var argsLength = (args && args.length) || 0, len, frame;
+
+ if (! this.variadic) {
+ if (argsLength < this.required) { return false }
+ if (argsLength > this.params.length) { return false }
+ if ((this.required > 0) && (argsLength > this.params.length)) { return false }
+ }
+
+ if (this.condition && !this.condition.eval({
+ frames: [this.evalParams(env, args)].concat(env.frames)
+ })) { return false }
+
+ len = Math.min(argsLength, this.arity);
+
+ for (var i = 0; i < len; i++) {
+ if (!this.params[i].name) {
+ if (args[i].value.eval(env).toCSS() != this.params[i].value.eval(env).toCSS()) {
+ return false;
+ }
+ }
+ }
+ return true;
+ }
+};
+
+})(require('../tree'));
diff --git a/tabula/bin/lib/less/tree/operation.js b/tabula/bin/lib/less/tree/operation.js
new file mode 100644
index 00000000..1ce22fb0
--- /dev/null
+++ b/tabula/bin/lib/less/tree/operation.js
@@ -0,0 +1,32 @@
+(function (tree) {
+
+tree.Operation = function (op, operands) {
+ this.op = op.trim();
+ this.operands = operands;
+};
+tree.Operation.prototype.eval = function (env) {
+ var a = this.operands[0].eval(env),
+ b = this.operands[1].eval(env),
+ temp;
+
+ if (a instanceof tree.Dimension && b instanceof tree.Color) {
+ if (this.op === '*' || this.op === '+') {
+ temp = b, b = a, a = temp;
+ } else {
+ throw { name: "OperationError",
+ message: "Can't substract or divide a color from a number" };
+ }
+ }
+ return a.operate(this.op, b);
+};
+
+tree.operate = function (op, a, b) {
+ switch (op) {
+ case '+': return a + b;
+ case '-': return a - b;
+ case '*': return a * b;
+ case '/': return a / b;
+ }
+};
+
+})(require('../tree'));
diff --git a/tabula/bin/lib/less/tree/paren.js b/tabula/bin/lib/less/tree/paren.js
new file mode 100644
index 00000000..384a43c7
--- /dev/null
+++ b/tabula/bin/lib/less/tree/paren.js
@@ -0,0 +1,16 @@
+
+(function (tree) {
+
+tree.Paren = function (node) {
+ this.value = node;
+};
+tree.Paren.prototype = {
+ toCSS: function (env) {
+ return '(' + this.value.toCSS(env) + ')';
+ },
+ eval: function (env) {
+ return new(tree.Paren)(this.value.eval(env));
+ }
+};
+
+})(require('../tree'));
diff --git a/tabula/bin/lib/less/tree/quoted.js b/tabula/bin/lib/less/tree/quoted.js
new file mode 100644
index 00000000..794bf4ce
--- /dev/null
+++ b/tabula/bin/lib/less/tree/quoted.js
@@ -0,0 +1,29 @@
+(function (tree) {
+
+tree.Quoted = function (str, content, escaped, i) {
+ this.escaped = escaped;
+ this.value = content || '';
+ this.quote = str.charAt(0);
+ this.index = i;
+};
+tree.Quoted.prototype = {
+ toCSS: function () {
+ if (this.escaped) {
+ return this.value;
+ } else {
+ return this.quote + this.value + this.quote;
+ }
+ },
+ eval: function (env) {
+ var that = this;
+ var value = this.value.replace(/`([^`]+)`/g, function (_, exp) {
+ return new(tree.JavaScript)(exp, that.index, true).eval(env).value;
+ }).replace(/@\{([\w-]+)\}/g, function (_, name) {
+ var v = new(tree.Variable)('@' + name, that.index).eval(env);
+ return ('value' in v) ? v.value : v.toCSS();
+ });
+ return new(tree.Quoted)(this.quote + value + this.quote, value, this.escaped, this.index);
+ }
+};
+
+})(require('../tree'));
diff --git a/tabula/bin/lib/less/tree/rule.js b/tabula/bin/lib/less/tree/rule.js
new file mode 100644
index 00000000..9e4e54a3
--- /dev/null
+++ b/tabula/bin/lib/less/tree/rule.js
@@ -0,0 +1,42 @@
+(function (tree) {
+
+tree.Rule = function (name, value, important, index, inline) {
+ this.name = name;
+ this.value = (value instanceof tree.Value) ? value : new(tree.Value)([value]);
+ this.important = important ? ' ' + important.trim() : '';
+ this.index = index;
+ this.inline = inline || false;
+
+ if (name.charAt(0) === '@') {
+ this.variable = true;
+ } else { this.variable = false }
+};
+tree.Rule.prototype.toCSS = function (env) {
+ if (this.variable) { return "" }
+ else {
+ return this.name + (env.compress ? ':' : ': ') +
+ this.value.toCSS(env) +
+ this.important + (this.inline ? "" : ";");
+ }
+};
+
+tree.Rule.prototype.eval = function (context) {
+ return new(tree.Rule)(this.name,
+ this.value.eval(context),
+ this.important,
+ this.index, this.inline);
+};
+
+tree.Shorthand = function (a, b) {
+ this.a = a;
+ this.b = b;
+};
+
+tree.Shorthand.prototype = {
+ toCSS: function (env) {
+ return this.a.toCSS(env) + "/" + this.b.toCSS(env);
+ },
+ eval: function () { return this }
+};
+
+})(require('../tree'));
diff --git a/tabula/bin/lib/less/tree/ruleset.js b/tabula/bin/lib/less/tree/ruleset.js
new file mode 100644
index 00000000..3100cc35
--- /dev/null
+++ b/tabula/bin/lib/less/tree/ruleset.js
@@ -0,0 +1,225 @@
+(function (tree) {
+
+tree.Ruleset = function (selectors, rules, strictImports) {
+ this.selectors = selectors;
+ this.rules = rules;
+ this._lookups = {};
+ this.strictImports = strictImports;
+};
+tree.Ruleset.prototype = {
+ eval: function (env) {
+ var selectors = this.selectors && this.selectors.map(function (s) { return s.eval(env) });
+ var ruleset = new(tree.Ruleset)(selectors, this.rules.slice(0), this.strictImports);
+
+ ruleset.root = this.root;
+ ruleset.allowImports = this.allowImports;
+
+ // push the current ruleset to the frames stack
+ env.frames.unshift(ruleset);
+
+ // Evaluate imports
+ if (ruleset.root || ruleset.allowImports || !ruleset.strictImports) {
+ for (var i = 0; i < ruleset.rules.length; i++) {
+ if (ruleset.rules[i] instanceof tree.Import) {
+ Array.prototype.splice
+ .apply(ruleset.rules, [i, 1].concat(ruleset.rules[i].eval(env)));
+ }
+ }
+ }
+
+ // Store the frames around mixin definitions,
+ // so they can be evaluated like closures when the time comes.
+ for (var i = 0; i < ruleset.rules.length; i++) {
+ if (ruleset.rules[i] instanceof tree.mixin.Definition) {
+ ruleset.rules[i].frames = env.frames.slice(0);
+ }
+ }
+
+ // Evaluate mixin calls.
+ for (var i = 0; i < ruleset.rules.length; i++) {
+ if (ruleset.rules[i] instanceof tree.mixin.Call) {
+ Array.prototype.splice
+ .apply(ruleset.rules, [i, 1].concat(ruleset.rules[i].eval(env)));
+ }
+ }
+
+ // Evaluate everything else
+ for (var i = 0, rule; i < ruleset.rules.length; i++) {
+ rule = ruleset.rules[i];
+
+ if (! (rule instanceof tree.mixin.Definition)) {
+ ruleset.rules[i] = rule.eval ? rule.eval(env) : rule;
+ }
+ }
+
+ // Pop the stack
+ env.frames.shift();
+
+ return ruleset;
+ },
+ match: function (args) {
+ return !args || args.length === 0;
+ },
+ variables: function () {
+ if (this._variables) { return this._variables }
+ else {
+ return this._variables = this.rules.reduce(function (hash, r) {
+ if (r instanceof tree.Rule && r.variable === true) {
+ hash[r.name] = r;
+ }
+ return hash;
+ }, {});
+ }
+ },
+ variable: function (name) {
+ return this.variables()[name];
+ },
+ rulesets: function () {
+ if (this._rulesets) { return this._rulesets }
+ else {
+ return this._rulesets = this.rules.filter(function (r) {
+ return (r instanceof tree.Ruleset) || (r instanceof tree.mixin.Definition);
+ });
+ }
+ },
+ find: function (selector, self) {
+ self = self || this;
+ var rules = [], rule, match,
+ key = selector.toCSS();
+
+ if (key in this._lookups) { return this._lookups[key] }
+
+ this.rulesets().forEach(function (rule) {
+ if (rule !== self) {
+ for (var j = 0; j < rule.selectors.length; j++) {
+ if (match = selector.match(rule.selectors[j])) {
+ if (selector.elements.length > rule.selectors[j].elements.length) {
+ Array.prototype.push.apply(rules, rule.find(
+ new(tree.Selector)(selector.elements.slice(1)), self));
+ } else {
+ rules.push(rule);
+ }
+ break;
+ }
+ }
+ }
+ });
+ return this._lookups[key] = rules;
+ },
+ //
+ // Entry point for code generation
+ //
+ // `context` holds an array of arrays.
+ //
+ toCSS: function (context, env) {
+ var css = [], // The CSS output
+ rules = [], // node.Rule instances
+ _rules = [], //
+ rulesets = [], // node.Ruleset instances
+ paths = [], // Current selectors
+ selector, // The fully rendered selector
+ rule;
+
+ if (! this.root) {
+ if (context.length === 0) {
+ paths = this.selectors.map(function (s) { return [s] });
+ } else {
+ this.joinSelectors(paths, context, this.selectors);
+ }
+ }
+
+ // Compile rules and rulesets
+ for (var i = 0; i < this.rules.length; i++) {
+ rule = this.rules[i];
+
+ if (rule.rules || (rule instanceof tree.Directive) || (rule instanceof tree.Media)) {
+ rulesets.push(rule.toCSS(paths, env));
+ } else if (rule instanceof tree.Comment) {
+ if (!rule.silent) {
+ if (this.root) {
+ rulesets.push(rule.toCSS(env));
+ } else {
+ rules.push(rule.toCSS(env));
+ }
+ }
+ } else {
+ if (rule.toCSS && !rule.variable) {
+ rules.push(rule.toCSS(env));
+ } else if (rule.value && !rule.variable) {
+ rules.push(rule.value.toString());
+ }
+ }
+ }
+
+ rulesets = rulesets.join('');
+
+ // If this is the root node, we don't render
+ // a selector, or {}.
+ // Otherwise, only output if this ruleset has rules.
+ if (this.root) {
+ css.push(rules.join(env.compress ? '' : '\n'));
+ } else {
+ if (rules.length > 0) {
+ selector = paths.map(function (p) {
+ return p.map(function (s) {
+ return s.toCSS(env);
+ }).join('').trim();
+ }).join(env.compress ? ',' : ',\n');
+
+ // Remove duplicates
+ for (var i = rules.length - 1; i >= 0; i--) {
+ if (_rules.indexOf(rules[i]) === -1) {
+ _rules.unshift(rules[i]);
+ }
+ }
+ rules = _rules;
+
+ css.push(selector,
+ (env.compress ? '{' : ' {\n ') +
+ rules.join(env.compress ? '' : '\n ') +
+ (env.compress ? '}' : '\n}\n'));
+ }
+ }
+ css.push(rulesets);
+
+ return css.join('') + (env.compress ? '\n' : '');
+ },
+
+ joinSelectors: function (paths, context, selectors) {
+ for (var s = 0; s < selectors.length; s++) {
+ this.joinSelector(paths, context, selectors[s]);
+ }
+ },
+
+ joinSelector: function (paths, context, selector) {
+ var before = [], after = [], beforeElements = [],
+ afterElements = [], hasParentSelector = false, el;
+
+ for (var i = 0; i < selector.elements.length; i++) {
+ el = selector.elements[i];
+ if (el.combinator.value.charAt(0) === '&') {
+ hasParentSelector = true;
+ }
+ if (hasParentSelector) afterElements.push(el);
+ else beforeElements.push(el);
+ }
+
+ if (! hasParentSelector) {
+ afterElements = beforeElements;
+ beforeElements = [];
+ }
+
+ if (beforeElements.length > 0) {
+ before.push(new(tree.Selector)(beforeElements));
+ }
+
+ if (afterElements.length > 0) {
+ after.push(new(tree.Selector)(afterElements));
+ }
+
+ for (var c = 0; c < context.length; c++) {
+ paths.push(before.concat(context[c]).concat(after));
+ }
+ }
+};
+})(require('../tree'));
diff --git a/tabula/bin/lib/less/tree/selector.js b/tabula/bin/lib/less/tree/selector.js
new file mode 100644
index 00000000..65abbb69
--- /dev/null
+++ b/tabula/bin/lib/less/tree/selector.js
@@ -0,0 +1,42 @@
+(function (tree) {
+
+tree.Selector = function (elements) {
+ this.elements = elements;
+ if (this.elements[0].combinator.value === "") {
+ this.elements[0].combinator.value = ' ';
+ }
+};
+tree.Selector.prototype.match = function (other) {
+ var len = this.elements.length,
+ olen = other.elements.length,
+ max = Math.min(len, olen);
+
+ if (len < olen) {
+ return false;
+ } else {
+ for (var i = 0; i < max; i++) {
+ if (this.elements[i].value !== other.elements[i].value) {
+ return false;
+ }
+ }
+ }
+ return true;
+};
+tree.Selector.prototype.eval = function (env) {
+ return new(tree.Selector)(this.elements.map(function (e) {
+ return e.eval(env);
+ }));
+};
+tree.Selector.prototype.toCSS = function (env) {
+ if (this._css) { return this._css }
+
+ return this._css = this.elements.map(function (e) {
+ if (typeof(e) === 'string') {
+ return ' ' + e.trim();
+ } else {
+ return e.toCSS(env);
+ }
+ }).join('');
+};
+
+})(require('../tree'));
diff --git a/tabula/bin/lib/less/tree/url.js b/tabula/bin/lib/less/tree/url.js
new file mode 100644
index 00000000..0caec345
--- /dev/null
+++ b/tabula/bin/lib/less/tree/url.js
@@ -0,0 +1,25 @@
+(function (tree) {
+
+tree.URL = function (val, paths) {
+ if (val.data) {
+ this.attrs = val;
+ } else {
+ // Add the base path if the URL is relative and we are in the browser
+ if (typeof(window) !== 'undefined' && !/^(?:https?:\/\/|file:\/\/|data:|\/)/.test(val.value) && paths.length > 0) {
+ val.value = paths[0] + (val.value.charAt(0) === '/' ? val.value.slice(1) : val.value);
+ }
+ this.value = val;
+ this.paths = paths;
+ }
+};
+tree.URL.prototype = {
+ toCSS: function () {
+ return "url(" + (this.attrs ? 'data:' + this.attrs.mime + this.attrs.charset + this.attrs.base64 + this.attrs.data
+ : this.value.toCSS()) + ")";
+ },
+ eval: function (ctx) {
+ return this.attrs ? this : new(tree.URL)(this.value.eval(ctx), this.paths);
+ }
+};
+
+})(require('../tree'));
diff --git a/tabula/bin/lib/less/tree/value.js b/tabula/bin/lib/less/tree/value.js
new file mode 100644
index 00000000..3c1eb29a
--- /dev/null
+++ b/tabula/bin/lib/less/tree/value.js
@@ -0,0 +1,24 @@
+(function (tree) {
+
+tree.Value = function (value) {
+ this.value = value;
+ this.is = 'value';
+};
+tree.Value.prototype = {
+ eval: function (env) {
+ if (this.value.length === 1) {
+ return this.value[0].eval(env);
+ } else {
+ return new(tree.Value)(this.value.map(function (v) {
+ return v.eval(env);
+ }));
+ }
+ },
+ toCSS: function (env) {
+ return this.value.map(function (e) {
+ return e.toCSS(env);
+ }).join(env.compress ? ',' : ', ');
+ }
+};
+
+})(require('../tree'));
diff --git a/tabula/bin/lib/less/tree/variable.js b/tabula/bin/lib/less/tree/variable.js
new file mode 100644
index 00000000..ee557e1d
--- /dev/null
+++ b/tabula/bin/lib/less/tree/variable.js
@@ -0,0 +1,26 @@
+(function (tree) {
+
+tree.Variable = function (name, index, file) { this.name = name, this.index = index, this.file = file };
+tree.Variable.prototype = {
+ eval: function (env) {
+ var variable, v, name = this.name;
+
+ if (name.indexOf('@@') == 0) {
+ name = '@' + new(tree.Variable)(name.slice(1)).eval(env).value;
+ }
+
+ if (variable = tree.find(env.frames, function (frame) {
+ if (v = frame.variable(name)) {
+ return v.value.eval(env);
+ }
+ })) { return variable }
+ else {
+ throw { type: 'Name',
+ message: "variable " + name + " is undefined",
+ filename: this.file,
+ index: this.index };
+ }
+ }
+};
+
+})(require('../tree'));
diff --git a/tabula/manage.py b/tabula/manage.py
new file mode 100755
index 00000000..d674cef8
--- /dev/null
+++ b/tabula/manage.py
@@ -0,0 +1,10 @@
+#!/usr/bin/env python
+
+import sys
+import os
+
+
+if __name__ == "__main__":
+ os.environ.setdefault("DJANGO_SETTINGS_MODULE", "tabula.settings")
+ from django.core.management import execute_from_command_line
+ execute_from_command_line(sys.argv)
diff --git a/tabula/openstack-common.conf b/tabula/openstack-common.conf
new file mode 100644
index 00000000..507dc200
--- /dev/null
+++ b/tabula/openstack-common.conf
@@ -0,0 +1,7 @@
+[DEFAULT]
+
+# The list of modules to copy from openstack-common
+modules=setup,importutils,version
+
+# The base module to hold the copy of openstack.common
+base=tabula
diff --git a/tabula/packages/python-portasclient-2013.1.a345.ga70b44e.tar.gz b/tabula/packages/python-portasclient-2013.1.a345.ga70b44e.tar.gz
new file mode 100644
index 00000000..92ff304b
Binary files /dev/null and b/tabula/packages/python-portasclient-2013.1.a345.ga70b44e.tar.gz differ
diff --git a/tabula/run_tests.sh b/tabula/run_tests.sh
new file mode 100755
index 00000000..6dac0d35
--- /dev/null
+++ b/tabula/run_tests.sh
@@ -0,0 +1,442 @@
+#!/bin/bash
+
+set -o errexit
+
+# ---------------UPDATE ME-------------------------------#
+# Increment me any time the environment should be rebuilt.
+# This includes dependency changes, directory renames, etc.
+# Simple integer sequence: 1, 2, 3...
+environment_version=31
+#--------------------------------------------------------#
+
+function usage {
+ echo "Usage: $0 [OPTION]..."
+ echo "Run Horizon's test suite(s)"
+ echo ""
+ echo " -V, --virtual-env Always use virtualenv. Install automatically"
+ echo " if not present"
+ echo " -N, --no-virtual-env Don't use virtualenv. Run tests in local"
+ echo " environment"
+ echo " -c, --coverage Generate reports using Coverage"
+ echo " -f, --force Force a clean re-build of the virtual"
+ echo " environment. Useful when dependencies have"
+ echo " been added."
+ echo " -m, --manage Run a Django management command."
+ echo " --makemessages Update all translation files."
+ echo " --compilemessages Compile all translation files."
+ echo " -p, --pep8 Just run pep8"
+ echo " -t, --tabs Check for tab characters in files."
+ echo " -y, --pylint Just run pylint"
+ echo " -q, --quiet Run non-interactively. (Relatively) quiet."
+ echo " Implies -V if -N is not set."
+ echo " --only-selenium Run only the Selenium unit tests"
+ echo " --with-selenium Run unit tests including Selenium tests"
+ echo " --runserver Run the Django development server for"
+ echo " openstack_dashboard in the virtual"
+ echo " environment."
+ echo " --docs Just build the documentation"
+ echo " --backup-environment Make a backup of the environment on exit"
+ echo " --restore-environment Restore the environment before running"
+ echo " --destroy-environment DEstroy the environment and exit"
+ echo " -h, --help Print this usage message"
+ echo ""
+ echo "Note: with no options specified, the script will try to run the tests in"
+ echo " a virtual environment, If no virtualenv is found, the script will ask"
+ echo " if you would like to create one. If you prefer to run tests NOT in a"
+ echo " virtual environment, simply pass the -N option."
+ exit
+}
+
+# DEFAULTS FOR RUN_TESTS.SH
+#
+root=`pwd`
+venv=$root/.venv
+with_venv=tools/with_venv.sh
+included_dirs="openstack_dashboard horizon"
+
+always_venv=0
+backup_env=0
+command_wrapper=""
+destroy=0
+force=0
+just_pep8=0
+just_pylint=0
+just_docs=0
+just_tabs=0
+never_venv=0
+quiet=0
+restore_env=0
+runserver=0
+only_selenium=0
+with_selenium=0
+testopts=""
+testargs=""
+with_coverage=0
+makemessages=0
+compilemessages=0
+manage=0
+
+# Jenkins sets a "JOB_NAME" variable, if it's not set, we'll make it "default"
+[ "$JOB_NAME" ] || JOB_NAME="default"
+
+function process_option {
+ case "$1" in
+ -h|--help) usage;;
+ -V|--virtual-env) always_venv=1; never_venv=0;;
+ -N|--no-virtual-env) always_venv=0; never_venv=1;;
+ -p|--pep8) just_pep8=1;;
+ -y|--pylint) just_pylint=1;;
+ -f|--force) force=1;;
+ -t|--tabs) just_tabs=1;;
+ -q|--quiet) quiet=1;;
+ -c|--coverage) with_coverage=1;;
+ -m|--manage) manage=1;;
+ --makemessages) makemessages=1;;
+ --compilemessages) compilemessages=1;;
+ --only-selenium) only_selenium=1;;
+ --with-selenium) with_selenium=1;;
+ --docs) just_docs=1;;
+ --runserver) runserver=1;;
+ --backup-environment) backup_env=1;;
+ --restore-environment) restore_env=1;;
+ --destroy-environment) destroy=1;;
+ -*) testopts="$testopts $1";;
+ *) testargs="$testargs $1"
+ esac
+}
+
+function run_management_command {
+ ${command_wrapper} python $root/manage.py $testopts $testargs
+}
+
+function run_server {
+ echo "Starting Django development server..."
+ ${command_wrapper} python $root/manage.py runserver $testopts $testargs
+ echo "Server stopped."
+}
+
+function run_pylint {
+ echo "Running pylint ..."
+ PYTHONPATH=$root ${command_wrapper} pylint --rcfile=.pylintrc -f parseable $included_dirs > pylint.txt || true
+ CODE=$?
+ grep Global -A2 pylint.txt
+ if [ $CODE -lt 32 ]; then
+ echo "Completed successfully."
+ exit 0
+ else
+ echo "Completed with problems."
+ exit $CODE
+ fi
+}
+
+function run_pep8 {
+ echo "Running pep8 ..."
+ ${command_wrapper} pep8 $included_dirs
+}
+
+function run_sphinx {
+ echo "Building sphinx..."
+ export DJANGO_SETTINGS_MODULE=openstack_dashboard.settings
+ ${command_wrapper} sphinx-build -b html doc/source doc/build/html
+ echo "Build complete."
+}
+
+function tab_check {
+ TAB_VIOLATIONS=`find $included_dirs -type f -regex ".*\.\(css\|js\|py\|html\)" -print0 | xargs -0 awk '/\t/' | wc -l`
+ if [ $TAB_VIOLATIONS -gt 0 ]; then
+ echo "TABS! $TAB_VIOLATIONS of them! Oh no!"
+ HORIZON_FILES=`find $included_dirs -type f -regex ".*\.\(css\|js\|py|\html\)"`
+ for TABBED_FILE in $HORIZON_FILES
+ do
+ TAB_COUNT=`awk '/\t/' $TABBED_FILE | wc -l`
+ if [ $TAB_COUNT -gt 0 ]; then
+ echo "$TABBED_FILE: $TAB_COUNT"
+ fi
+ done
+ fi
+ return $TAB_VIOLATIONS;
+}
+
+function destroy_venv {
+ echo "Cleaning environment..."
+ echo "Removing virtualenv..."
+ rm -rf $venv
+ echo "Virtualenv removed."
+ rm -f .environment_version
+ echo "Environment cleaned."
+}
+
+function environment_check {
+ echo "Checking environment."
+ if [ -f .environment_version ]; then
+ ENV_VERS=`cat .environment_version`
+ if [ $ENV_VERS -eq $environment_version ]; then
+ if [ -e ${venv} ]; then
+ # If the environment exists and is up-to-date then set our variables
+ command_wrapper="${root}/${with_venv}"
+ echo "Environment is up to date."
+ return 0
+ fi
+ fi
+ fi
+
+ if [ $always_venv -eq 1 ]; then
+ install_venv
+ else
+ if [ ! -e ${venv} ]; then
+ echo -e "Environment not found. Install? (Y/n) \c"
+ else
+ echo -e "Your environment appears to be out of date. Update? (Y/n) \c"
+ fi
+ read update_env
+ if [ "x$update_env" = "xY" -o "x$update_env" = "x" -o "x$update_env" = "xy" ]; then
+ install_venv
+ else
+ # Set our command wrapper anyway.
+ command_wrapper="${root}/${with_venv}"
+ fi
+ fi
+}
+
+function sanity_check {
+ # Anything that should be determined prior to running the tests, server, etc.
+    # Don't sanity-check anything environment-related if the -N flag is set
+ if [ $never_venv -eq 0 ]; then
+ if [ ! -e ${venv} ]; then
+ echo "Virtualenv not found at $venv. Did install_venv.py succeed?"
+ exit 1
+ fi
+ fi
+ # Remove .pyc files. This is sanity checking because they can linger
+ # after old files are deleted.
+ find . -name "*.pyc" -exec rm -rf {} \;
+}
+
+function backup_environment {
+ if [ $backup_env -eq 1 ]; then
+ echo "Backing up environment \"$JOB_NAME\"..."
+ if [ ! -e ${venv} ]; then
+ echo "Environment not installed. Cannot back up."
+ return 0
+ fi
+ if [ -d /tmp/.horizon_environment/$JOB_NAME ]; then
+ mv /tmp/.horizon_environment/$JOB_NAME /tmp/.horizon_environment/$JOB_NAME.old
+ rm -rf /tmp/.horizon_environment/$JOB_NAME
+ fi
+ mkdir -p /tmp/.horizon_environment/$JOB_NAME
+ cp -r $venv /tmp/.horizon_environment/$JOB_NAME/
+ cp .environment_version /tmp/.horizon_environment/$JOB_NAME/
+ # Remove the backup now that we've completed successfully
+ rm -rf /tmp/.horizon_environment/$JOB_NAME.old
+ echo "Backup completed"
+ fi
+}
+
+function restore_environment {
+ if [ $restore_env -eq 1 ]; then
+ echo "Restoring environment from backup..."
+ if [ ! -d /tmp/.horizon_environment/$JOB_NAME ]; then
+ echo "No backup to restore from."
+ return 0
+ fi
+
+ cp -r /tmp/.horizon_environment/$JOB_NAME/.venv ./ || true
+ cp -r /tmp/.horizon_environment/$JOB_NAME/.environment_version ./ || true
+
+ echo "Environment restored successfully."
+ fi
+}
+
+function install_venv {
+ # Install with install_venv.py
+ export PIP_DOWNLOAD_CACHE=${PIP_DOWNLOAD_CACHE-/tmp/.pip_download_cache}
+ export PIP_USE_MIRRORS=true
+ if [ $quiet -eq 1 ]; then
+ export PIP_NO_INPUT=true
+ fi
+ echo "Fetching new src packages..."
+ rm -rf $venv/src
+ python tools/install_venv.py
+ command_wrapper="$root/${with_venv}"
+ # Make sure it worked and record the environment version
+ sanity_check
+ chmod -R 754 $venv
+ echo $environment_version > .environment_version
+}
+
+function run_tests {
+ sanity_check
+
+ if [ $with_selenium -eq 1 ]; then
+ export WITH_SELENIUM=1
+ elif [ $only_selenium -eq 1 ]; then
+ export WITH_SELENIUM=1
+ export SKIP_UNITTESTS=1
+ fi
+
+ if [ -z "$testargs" ]; then
+ run_tests_all
+ else
+ run_tests_subset
+ fi
+}
+
+function run_tests_subset {
+ project=`echo $testargs | awk -F. '{print $1}'`
+ ${command_wrapper} python $root/manage.py test --settings=$project.test.settings $testopts $testargs
+}
+
+function run_tests_all {
+ echo "Running Horizon application tests"
+ export NOSE_XUNIT_FILE=horizon/nosetests.xml
+ if [ "$NOSE_WITH_HTML_OUTPUT" = '1' ]; then
+ export NOSE_HTML_OUT_FILE='horizon_nose_results.html'
+ fi
+ ${command_wrapper} coverage erase
+ ${command_wrapper} coverage run -p $root/manage.py test horizon --settings=horizon.test.settings $testopts
+ # get results of the Horizon tests
+ HORIZON_RESULT=$?
+
+ echo "Running openstack_dashboard tests"
+ export NOSE_XUNIT_FILE=openstack_dashboard/nosetests.xml
+ if [ "$NOSE_WITH_HTML_OUTPUT" = '1' ]; then
+ export NOSE_HTML_OUT_FILE='dashboard_nose_results.html'
+ fi
+ ${command_wrapper} coverage run -p $root/manage.py test openstack_dashboard --settings=openstack_dashboard.test.settings $testopts
+ # get results of the openstack_dashboard tests
+ DASHBOARD_RESULT=$?
+
+ if [ $with_coverage -eq 1 ]; then
+ echo "Generating coverage reports"
+ ${command_wrapper} coverage combine
+ ${command_wrapper} coverage xml -i --omit='/usr*,setup.py,*egg*,.venv/*'
+ ${command_wrapper} coverage html -i --omit='/usr*,setup.py,*egg*,.venv/*' -d reports
+ fi
+ # Remove the leftover coverage files from the -p flag earlier.
+ rm -f .coverage.*
+
+ if [ $(($HORIZON_RESULT || $DASHBOARD_RESULT)) -eq 0 ]; then
+ echo "Tests completed successfully."
+ else
+ echo "Tests failed."
+ fi
+ exit $(($HORIZON_RESULT || $DASHBOARD_RESULT))
+}
+
+function run_makemessages {
+ cd horizon
+ ${command_wrapper} $root/manage.py makemessages --all --no-obsolete
+ HORIZON_PY_RESULT=$?
+ ${command_wrapper} $root/manage.py makemessages -d djangojs --all --no-obsolete
+ HORIZON_JS_RESULT=$?
+ cd ../openstack_dashboard
+ ${command_wrapper} $root/manage.py makemessages --all --no-obsolete
+ DASHBOARD_RESULT=$?
+ cd ..
+ exit $(($HORIZON_PY_RESULT || $HORIZON_JS_RESULT || $DASHBOARD_RESULT))
+}
+
+function run_compilemessages {
+ cd horizon
+ ${command_wrapper} $root/manage.py compilemessages
+ HORIZON_PY_RESULT=$?
+ cd ../openstack_dashboard
+ ${command_wrapper} $root/manage.py compilemessages
+ DASHBOARD_RESULT=$?
+ cd ..
+ exit $(($HORIZON_PY_RESULT || $DASHBOARD_RESULT))
+}
+
+
+# ---------PREPARE THE ENVIRONMENT------------ #
+
+# PROCESS ARGUMENTS, OVERRIDE DEFAULTS
+for arg in "$@"; do
+ process_option $arg
+done
+
+if [ $quiet -eq 1 ] && [ $never_venv -eq 0 ] && [ $always_venv -eq 0 ]
+then
+ always_venv=1
+fi
+
+# If destroy is set, just blow it away and exit.
+if [ $destroy -eq 1 ]; then
+ destroy_venv
+ exit 0
+fi
+
+# Ignore all of this if the -N flag was set
+if [ $never_venv -eq 0 ]; then
+
+ # Restore previous environment if desired
+ if [ $restore_env -eq 1 ]; then
+ restore_environment
+ fi
+
+ # Remove the virtual environment if --force used
+ if [ $force -eq 1 ]; then
+ destroy_venv
+ fi
+
+ # Then check if it's up-to-date
+ environment_check
+
+ # Create a backup of the up-to-date environment if desired
+ if [ $backup_env -eq 1 ]; then
+ backup_environment
+ fi
+fi
+
+# ---------EXERCISE THE CODE------------ #
+
+# Run management commands
+if [ $manage -eq 1 ]; then
+ run_management_command
+ exit $?
+fi
+
+# Build the docs
+if [ $just_docs -eq 1 ]; then
+ run_sphinx
+ exit $?
+fi
+
+# Update translation files
+if [ $makemessages -eq 1 ]; then
+ run_makemessages
+ exit $?
+fi
+
+# Compile translation files
+if [ $compilemessages -eq 1 ]; then
+ run_compilemessages
+ exit $?
+fi
+
+# PEP8
+if [ $just_pep8 -eq 1 ]; then
+ run_pep8
+ exit $?
+fi
+
+# Pylint
+if [ $just_pylint -eq 1 ]; then
+ run_pylint
+ exit $?
+fi
+
+# Tab checker
+if [ $just_tabs -eq 1 ]; then
+ tab_check
+ exit $?
+fi
+
+# Django development server
+if [ $runserver -eq 1 ]; then
+ run_server
+ exit $?
+fi
+
+# Full test suite
+run_tests || exit
diff --git a/windc/setup.cfg b/tabula/setup.cfg
similarity index 56%
rename from windc/setup.cfg
rename to tabula/setup.cfg
index d53addcb..79034b3c 100644
--- a/windc/setup.cfg
+++ b/tabula/setup.cfg
@@ -3,7 +3,7 @@ all_files = 1
build-dir = doc/build
source-dir = doc/source
-[egg_info]
-tag_build =
-tag_date = 0
-tag_svn_revision = 0
+[nosetests]
+verbosity=2
+detailed-errors=1
+
diff --git a/tabula/setup.py b/tabula/setup.py
new file mode 100755
index 00000000..16865fbb
--- /dev/null
+++ b/tabula/setup.py
@@ -0,0 +1,51 @@
+#!/usr/bin/env python
+# vim: tabstop=4 shiftwidth=4 softtabstop=4
+
+# Copyright 2012 United States Government as represented by the
+# Administrator of the National Aeronautics and Space Administration.
+# All Rights Reserved.
+#
+# Copyright 2012 Nebula, Inc.
+#
+# Licensed under the Apache License, Version 2.0 (the "License"); you may
+# not use this file except in compliance with the License. You may obtain
+# a copy of the License at
+#
+# http://www.apache.org/licenses/LICENSE-2.0
+#
+# Unless required by applicable law or agreed to in writing, software
+# distributed under the License is distributed on an "AS IS" BASIS, WITHOUT
+# WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. See the
+# License for the specific language governing permissions and limitations
+# under the License.
+
+import setuptools
+
+from tabula.openstack.common import setup
+
+requires = setup.parse_requirements()
+depend_links = setup.parse_dependency_links()
+project = 'tabula'
+
+setuptools.setup(
+ name=project,
+ version=setup.get_version(project, '2013.1'),
+ description="The OpenStack Dashboard.",
+ license='Apache 2.0',
+ author='OpenStack',
+ author_email='horizon@lists.launchpad.net',
+ url='https://github.com/openstack/horizon/',
+ packages=setuptools.find_packages(exclude=['bin']),
+ cmdclass=setup.get_cmdclass(),
+ include_package_data=True,
+ install_requires=requires,
+ dependency_links=depend_links,
+ classifiers=['Development Status :: 5 - Production/Stable',
+ 'Framework :: Django',
+ 'Intended Audience :: Developers',
+ 'License :: OSI Approved :: Apache Software License',
+ 'Operating System :: OS Independent',
+ 'Programming Language :: Python',
+ 'Topic :: Internet :: WWW/HTTP',
+ 'Environment :: OpenStack']
+)
diff --git a/windc/tests/__init__.py b/tabula/tabula/__init__.py
similarity index 100%
rename from windc/tests/__init__.py
rename to tabula/tabula/__init__.py
diff --git a/windc/tests/functional/__init__.py b/tabula/tabula/local/__init__.py
similarity index 100%
rename from windc/tests/functional/__init__.py
rename to tabula/tabula/local/__init__.py
diff --git a/tabula/tabula/local/local_settings.py.example b/tabula/tabula/local/local_settings.py.example
new file mode 100644
index 00000000..d841e6a2
--- /dev/null
+++ b/tabula/tabula/local/local_settings.py.example
@@ -0,0 +1,147 @@
+import os
+
+from django.utils.translation import ugettext_lazy as _
+
+DEBUG = True
+TEMPLATE_DEBUG = DEBUG
+
+# Set SSL proxy settings:
+# For Django 1.4+ pass this header from the proxy after terminating the SSL,
+# and don't forget to strip it from the client's request.
+# For more information see:
+# https://docs.djangoproject.com/en/1.4/ref/settings/#secure-proxy-ssl-header
+# SECURE_PROXY_SSL_HEADER = ('HTTP_X_FORWARDED_PROTOCOL', 'https')
+
+# Specify a regular expression to validate user passwords.
+# HORIZON_CONFIG = {
+# "password_validator": {
+# "regex": '.*',
+# "help_text": _("Your password does not meet the requirements.")
+# },
+# 'help_url': "http://docs.openstack.org"
+# }
+
+LOCAL_PATH = os.path.dirname(os.path.abspath(__file__))
+
+# Set custom secret key:
+# You can either set it to a specific value or you can let horizon generate a
+# default secret key that is unique on this machine, i.e. regardless of the
+# amount of Python WSGI workers (if used behind Apache+mod_wsgi). However, there
+# may be situations where you would want to set this explicitly, e.g. when
+# multiple dashboard instances are distributed on different machines (usually
+# behind a load-balancer). Either you have to make sure that a session gets all
+# requests routed to the same dashboard instance or you set the same SECRET_KEY
+# for all of them.
+# from horizon.utils import secret_key
+# SECRET_KEY = secret_key.generate_or_read_from_file(os.path.join(LOCAL_PATH, '.secret_key_store'))
+
+# We recommend you use memcached for development; otherwise after every reload
+# of the django development server, you will have to login again. To use
+# memcached set CACHE_BACKEND to something like 'memcached://127.0.0.1:11211/'
+CACHE_BACKEND = 'locmem://'
+
+# Send email to the console by default
+EMAIL_BACKEND = 'django.core.mail.backends.console.EmailBackend'
+# Or send them to /dev/null
+#EMAIL_BACKEND = 'django.core.mail.backends.dummy.EmailBackend'
+
+# Configure these for your outgoing email host
+# EMAIL_HOST = 'smtp.my-company.com'
+# EMAIL_PORT = 25
+# EMAIL_HOST_USER = 'djangomail'
+# EMAIL_HOST_PASSWORD = 'top-secret!'
+
+# For multiple regions uncomment this configuration, and add (endpoint, title).
+# AVAILABLE_REGIONS = [
+# ('http://cluster1.example.com:5000/v2.0', 'cluster1'),
+# ('http://cluster2.example.com:5000/v2.0', 'cluster2'),
+# ]
+
+OPENSTACK_HOST = "127.0.0.1"
+OPENSTACK_KEYSTONE_URL = "http://%s:5000/v2.0" % OPENSTACK_HOST
+OPENSTACK_KEYSTONE_DEFAULT_ROLE = "Member"
+
+# Disable SSL certificate checks (useful for self-signed certificates):
+# OPENSTACK_SSL_NO_VERIFY = True
+
+# The OPENSTACK_KEYSTONE_BACKEND settings can be used to identify the
+# capabilities of the auth backend for Keystone.
+# If Keystone has been configured to use LDAP as the auth backend then set
+# can_edit_user to False and name to 'ldap'.
+#
+# TODO(tres): Remove these once Keystone has an API to identify auth backend.
+OPENSTACK_KEYSTONE_BACKEND = {
+ 'name': 'native',
+ 'can_edit_user': True
+}
+
+OPENSTACK_HYPERVISOR_FEATURES = {
+ 'can_set_mount_point': True
+}
+
+# OPENSTACK_ENDPOINT_TYPE specifies the endpoint type to use for the endpoints
+# in the Keystone service catalog. Use this setting when Horizon is running
+# external to the OpenStack environment. The default is 'internalURL'.
+#OPENSTACK_ENDPOINT_TYPE = "publicURL"
+
+# The number of objects (Swift containers/objects or images) to display
+# on a single page before providing a paging element (a "more" link)
+# to paginate results.
+API_RESULT_LIMIT = 1000
+API_RESULT_PAGE_SIZE = 20
+
+# The timezone of the server. This should correspond with the timezone
+# of your entire OpenStack installation, and hopefully be in UTC.
+TIME_ZONE = "UTC"
+
+LOGGING = {
+ 'version': 1,
+ # When set to True this will disable all logging except
+ # for loggers specified in this configuration dictionary. Note that
+ # if nothing is specified here and disable_existing_loggers is True,
+ # django.db.backends will still log unless it is disabled explicitly.
+ 'disable_existing_loggers': False,
+ 'handlers': {
+ 'null': {
+ 'level': 'DEBUG',
+ 'class': 'django.utils.log.NullHandler',
+ },
+ 'console': {
+ # Set the level to "DEBUG" for verbose output logging.
+ 'level': 'INFO',
+ 'class': 'logging.StreamHandler',
+ },
+ },
+ 'loggers': {
+ # Logging from django.db.backends is VERY verbose, send to null
+ # by default.
+ 'django.db.backends': {
+ 'handlers': ['null'],
+ 'propagate': False,
+ },
+ 'horizon': {
+ 'handlers': ['console'],
+ 'propagate': False,
+ },
+ 'openstack_dashboard': {
+ 'handlers': ['console'],
+ 'propagate': False,
+ },
+ 'novaclient': {
+ 'handlers': ['console'],
+ 'propagate': False,
+ },
+ 'keystoneclient': {
+ 'handlers': ['console'],
+ 'propagate': False,
+ },
+ 'glanceclient': {
+ 'handlers': ['console'],
+ 'propagate': False,
+ },
+ 'nose.plugins.manager': {
+ 'handlers': ['console'],
+ 'propagate': False,
+ }
+ }
+}
diff --git a/tabula/tabula/models.py b/tabula/tabula/models.py
new file mode 100644
index 00000000..1b3d5f9e
--- /dev/null
+++ b/tabula/tabula/models.py
@@ -0,0 +1,3 @@
+"""
+Stub file to work around django bug: https://code.djangoproject.com/ticket/7198
+"""
diff --git a/windc/tests/unit/__init__.py b/tabula/tabula/openstack/__init__.py
similarity index 100%
rename from windc/tests/unit/__init__.py
rename to tabula/tabula/openstack/__init__.py
diff --git a/windc/windc/__init__.py b/tabula/tabula/openstack/common/__init__.py
similarity index 100%
rename from windc/windc/__init__.py
rename to tabula/tabula/openstack/common/__init__.py
diff --git a/tabula/tabula/openstack/common/importutils.py b/tabula/tabula/openstack/common/importutils.py
new file mode 100644
index 00000000..3bd277f4
--- /dev/null
+++ b/tabula/tabula/openstack/common/importutils.py
@@ -0,0 +1,67 @@
+# vim: tabstop=4 shiftwidth=4 softtabstop=4
+
+# Copyright 2011 OpenStack Foundation.
+# All Rights Reserved.
+#
+# Licensed under the Apache License, Version 2.0 (the "License"); you may
+# not use this file except in compliance with the License. You may obtain
+# a copy of the License at
+#
+# http://www.apache.org/licenses/LICENSE-2.0
+#
+# Unless required by applicable law or agreed to in writing, software
+# distributed under the License is distributed on an "AS IS" BASIS, WITHOUT
+# WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. See the
+# License for the specific language governing permissions and limitations
+# under the License.
+
+"""
+Import related utilities and helper functions.
+"""
+
+import sys
+import traceback
+
+
+def import_class(import_str):
+ """Returns a class from a string including module and class"""
+ mod_str, _sep, class_str = import_str.rpartition('.')
+ try:
+ __import__(mod_str)
+ return getattr(sys.modules[mod_str], class_str)
+ except (ValueError, AttributeError):
+ raise ImportError('Class %s cannot be found (%s)' %
+ (class_str,
+ traceback.format_exception(*sys.exc_info())))
+
+
+def import_object(import_str, *args, **kwargs):
+ """Import a class and return an instance of it."""
+ return import_class(import_str)(*args, **kwargs)
+
+
+def import_object_ns(name_space, import_str, *args, **kwargs):
+ """
+ Import a class and return an instance of it, first by trying
+    to find the class in a default namespace, then falling back to
+ a full path if not found in the default namespace.
+ """
+ import_value = "%s.%s" % (name_space, import_str)
+ try:
+ return import_class(import_value)(*args, **kwargs)
+ except ImportError:
+ return import_class(import_str)(*args, **kwargs)
+
+
+def import_module(import_str):
+ """Import a module."""
+ __import__(import_str)
+ return sys.modules[import_str]
+
+
+def try_import(import_str, default=None):
+ """Try to import a module and if it fails return default."""
+ try:
+ return import_module(import_str)
+ except ImportError:
+ return default
diff --git a/tabula/tabula/openstack/common/setup.py b/tabula/tabula/openstack/common/setup.py
new file mode 100644
index 00000000..dec74fd0
--- /dev/null
+++ b/tabula/tabula/openstack/common/setup.py
@@ -0,0 +1,367 @@
+# vim: tabstop=4 shiftwidth=4 softtabstop=4
+
+# Copyright 2011 OpenStack Foundation.
+# Copyright 2012-2013 Hewlett-Packard Development Company, L.P.
+# All Rights Reserved.
+#
+# Licensed under the Apache License, Version 2.0 (the "License"); you may
+# not use this file except in compliance with the License. You may obtain
+# a copy of the License at
+#
+# http://www.apache.org/licenses/LICENSE-2.0
+#
+# Unless required by applicable law or agreed to in writing, software
+# distributed under the License is distributed on an "AS IS" BASIS, WITHOUT
+# WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. See the
+# License for the specific language governing permissions and limitations
+# under the License.
+
+"""
+Utilities with minimum-depends for use in setup.py
+"""
+
+import email
+import os
+import re
+import subprocess
+import sys
+
+from setuptools.command import sdist
+
+
+def parse_mailmap(mailmap='.mailmap'):
+ mapping = {}
+ if os.path.exists(mailmap):
+ with open(mailmap, 'r') as fp:
+ for l in fp:
+ try:
+ canonical_email, alias = re.match(
+ r'[^#]*?(<.+>).*(<.+>).*', l).groups()
+ except AttributeError:
+ continue
+ mapping[alias] = canonical_email
+ return mapping
+
+
+def _parse_git_mailmap(git_dir, mailmap='.mailmap'):
+ mailmap = os.path.join(os.path.dirname(git_dir), mailmap)
+ return parse_mailmap(mailmap)
+
+
+def canonicalize_emails(changelog, mapping):
+ """Takes in a string and an email alias mapping and replaces all
+ instances of the aliases in the string with their real email.
+ """
+ for alias, email_address in mapping.iteritems():
+ changelog = changelog.replace(alias, email_address)
+ return changelog
+
+
+# Get requirements from the first file that exists
+def get_reqs_from_files(requirements_files):
+ for requirements_file in requirements_files:
+ if os.path.exists(requirements_file):
+ with open(requirements_file, 'r') as fil:
+ return fil.read().split('\n')
+ return []
+
+
+def parse_requirements(requirements_files=['requirements.txt',
+ 'tools/pip-requires']):
+ requirements = []
+ for line in get_reqs_from_files(requirements_files):
+ # For the requirements list, we need to inject only the portion
+ # after egg= so that distutils knows the package it's looking for
+ # such as:
+ # -e git://github.com/openstack/nova/master#egg=nova
+ if re.match(r'\s*-e\s+', line):
+ requirements.append(re.sub(r'\s*-e\s+.*#egg=(.*)$', r'\1',
+ line))
+ # such as:
+ # http://github.com/openstack/nova/zipball/master#egg=nova
+ elif re.match(r'\s*https?:', line):
+ requirements.append(re.sub(r'\s*https?:.*#egg=(.*)$', r'\1',
+ line))
+ # -f lines are for index locations, and don't get used here
+ elif re.match(r'\s*-f\s+', line):
+ pass
+ # argparse is part of the standard library starting with 2.7
+ # adding it to the requirements list screws distro installs
+ elif line == 'argparse' and sys.version_info >= (2, 7):
+ pass
+ else:
+ requirements.append(line)
+
+ return requirements
+
+
+def parse_dependency_links(requirements_files=['requirements.txt',
+ 'tools/pip-requires']):
+ dependency_links = []
+ # dependency_links inject alternate locations to find packages listed
+ # in requirements
+ for line in get_reqs_from_files(requirements_files):
+ # skip comments and blank lines
+ if re.match(r'(\s*#)|(\s*$)', line):
+ continue
+ # lines with -e or -f need the whole line, minus the flag
+ if re.match(r'\s*-[ef]\s+', line):
+ dependency_links.append(re.sub(r'\s*-[ef]\s+', '', line))
+ # lines that are only urls can go in unmolested
+ elif re.match(r'\s*https?:', line):
+ dependency_links.append(line)
+ return dependency_links
+
+
+def _run_shell_command(cmd, throw_on_error=False):
+ if os.name == 'nt':
+ output = subprocess.Popen(["cmd.exe", "/C", cmd],
+ stdout=subprocess.PIPE,
+ stderr=subprocess.PIPE)
+ else:
+ output = subprocess.Popen(["/bin/sh", "-c", cmd],
+ stdout=subprocess.PIPE,
+ stderr=subprocess.PIPE)
+ out = output.communicate()
+ if output.returncode and throw_on_error:
+        raise Exception("%s returned %d" % (cmd, output.returncode))
+ if len(out) == 0:
+ return None
+ if len(out[0].strip()) == 0:
+ return None
+ return out[0].strip()
+
+
+def _get_git_directory():
+ parent_dir = os.path.dirname(__file__)
+ while True:
+ git_dir = os.path.join(parent_dir, '.git')
+ if os.path.exists(git_dir):
+ return git_dir
+ parent_dir, child = os.path.split(parent_dir)
+ if not child: # reached to root dir
+ return None
+
+
+def write_git_changelog():
+ """Write a changelog based on the git changelog."""
+ new_changelog = 'ChangeLog'
+ git_dir = _get_git_directory()
+ if not os.getenv('SKIP_WRITE_GIT_CHANGELOG'):
+ if git_dir:
+ git_log_cmd = 'git --git-dir=%s log' % git_dir
+ changelog = _run_shell_command(git_log_cmd)
+ mailmap = _parse_git_mailmap(git_dir)
+ with open(new_changelog, "w") as changelog_file:
+ changelog_file.write(canonicalize_emails(changelog, mailmap))
+ else:
+ open(new_changelog, 'w').close()
+
+
+def generate_authors():
+ """Create AUTHORS file using git commits."""
+ jenkins_email = 'jenkins@review.(openstack|stackforge).org'
+ old_authors = 'AUTHORS.in'
+ new_authors = 'AUTHORS'
+ git_dir = _get_git_directory()
+ if not os.getenv('SKIP_GENERATE_AUTHORS'):
+ if git_dir:
+ # don't include jenkins email address in AUTHORS file
+ git_log_cmd = ("git --git-dir=" + git_dir +
+ " log --format='%aN <%aE>' | sort -u | "
+ "egrep -v '" + jenkins_email + "'")
+ changelog = _run_shell_command(git_log_cmd)
+ signed_cmd = ("git log --git-dir=" + git_dir +
+ " | grep -i Co-authored-by: | sort -u")
+ signed_entries = _run_shell_command(signed_cmd)
+ if signed_entries:
+ new_entries = "\n".join(
+ [signed.split(":", 1)[1].strip()
+ for signed in signed_entries.split("\n") if signed])
+ changelog = "\n".join((changelog, new_entries))
+ mailmap = _parse_git_mailmap(git_dir)
+ with open(new_authors, 'w') as new_authors_fh:
+ new_authors_fh.write(canonicalize_emails(changelog, mailmap))
+ if os.path.exists(old_authors):
+ with open(old_authors, "r") as old_authors_fh:
+ new_authors_fh.write('\n' + old_authors_fh.read())
+ else:
+ open(new_authors, 'w').close()
+
+
+_rst_template = """%(heading)s
+%(underline)s
+
+.. automodule:: %(module)s
+ :members:
+ :undoc-members:
+ :show-inheritance:
+"""
+
+
+def get_cmdclass():
+ """Return dict of commands to run from setup.py."""
+
+ cmdclass = dict()
+
+ def _find_modules(arg, dirname, files):
+ for filename in files:
+ if filename.endswith('.py') and filename != '__init__.py':
+ arg["%s.%s" % (dirname.replace('/', '.'),
+ filename[:-3])] = True
+
+ class LocalSDist(sdist.sdist):
+ """Builds the ChangeLog and Authors files from VC first."""
+
+ def run(self):
+ write_git_changelog()
+ generate_authors()
+ # sdist.sdist is an old style class, can't use super()
+ sdist.sdist.run(self)
+
+ cmdclass['sdist'] = LocalSDist
+
+ # If Sphinx is installed on the box running setup.py,
+ # enable setup.py to build the documentation, otherwise,
+ # just ignore it
+ try:
+ from sphinx.setup_command import BuildDoc
+
+ class LocalBuildDoc(BuildDoc):
+
+ builders = ['html', 'man']
+
+ def generate_autoindex(self):
+ print "**Autodocumenting from %s" % os.path.abspath(os.curdir)
+ modules = {}
+ option_dict = self.distribution.get_option_dict('build_sphinx')
+ source_dir = os.path.join(option_dict['source_dir'][1], 'api')
+ if not os.path.exists(source_dir):
+ os.makedirs(source_dir)
+ for pkg in self.distribution.packages:
+ if '.' not in pkg:
+ os.path.walk(pkg, _find_modules, modules)
+ module_list = modules.keys()
+ module_list.sort()
+ autoindex_filename = os.path.join(source_dir, 'autoindex.rst')
+ with open(autoindex_filename, 'w') as autoindex:
+ autoindex.write(""".. toctree::
+ :maxdepth: 1
+
+""")
+ for module in module_list:
+ output_filename = os.path.join(source_dir,
+ "%s.rst" % module)
+ heading = "The :mod:`%s` Module" % module
+ underline = "=" * len(heading)
+ values = dict(module=module, heading=heading,
+ underline=underline)
+
+ print "Generating %s" % output_filename
+ with open(output_filename, 'w') as output_file:
+ output_file.write(_rst_template % values)
+ autoindex.write(" %s.rst\n" % module)
+
+ def run(self):
+ if not os.getenv('SPHINX_DEBUG'):
+ self.generate_autoindex()
+
+ for builder in self.builders:
+ self.builder = builder
+ self.finalize_options()
+ self.project = self.distribution.get_name()
+ self.version = self.distribution.get_version()
+ self.release = self.distribution.get_version()
+ BuildDoc.run(self)
+
+ class LocalBuildLatex(LocalBuildDoc):
+ builders = ['latex']
+
+ cmdclass['build_sphinx'] = LocalBuildDoc
+ cmdclass['build_sphinx_latex'] = LocalBuildLatex
+ except ImportError:
+ pass
+
+ return cmdclass
+
+
+def _get_revno(git_dir):
+ """Return the number of commits since the most recent tag.
+
+ We use git-describe to find this out, but if there are no
+ tags then we fall back to counting commits since the beginning
+ of time.
+ """
+ describe = _run_shell_command(
+ "git --git-dir=%s describe --always" % git_dir)
+ if "-" in describe:
+ return describe.rsplit("-", 2)[-2]
+
+ # no tags found
+ revlist = _run_shell_command(
+ "git --git-dir=%s rev-list --abbrev-commit HEAD" % git_dir)
+ return len(revlist.splitlines())
+
+
+def _get_version_from_git(pre_version):
+ """Return a version which is equal to the tag that's on the current
+ revision if there is one, or tag plus number of additional revisions
+ if the current revision has no tag."""
+
+ git_dir = _get_git_directory()
+ if git_dir:
+ if pre_version:
+ try:
+ return _run_shell_command(
+ "git --git-dir=" + git_dir + " describe --exact-match",
+ throw_on_error=True).replace('-', '.')
+ except Exception:
+ sha = _run_shell_command(
+ "git --git-dir=" + git_dir + " log -n1 --pretty=format:%h")
+ return "%s.a%s.g%s" % (pre_version, _get_revno(git_dir), sha)
+ else:
+ return _run_shell_command(
+ "git --git-dir=" + git_dir + " describe --always").replace(
+ '-', '.')
+ return None
+
+
+def _get_version_from_pkg_info(package_name):
+ """Get the version from PKG-INFO file if we can."""
+ try:
+ pkg_info_file = open('PKG-INFO', 'r')
+ except (IOError, OSError):
+ return None
+ try:
+ pkg_info = email.message_from_file(pkg_info_file)
+ except email.MessageError:
+ return None
+ # Check to make sure we're in our own dir
+ if pkg_info.get('Name', None) != package_name:
+ return None
+ return pkg_info.get('Version', None)
+
+
+def get_version(package_name, pre_version=None):
+ """Get the version of the project. First, try getting it from PKG-INFO, if
+ it exists. If it does, that means we're in a distribution tarball or that
+ install has happened. Otherwise, if there is no PKG-INFO file, pull the
+ version from git.
+
+ We do not support setup.py version sanity in git archive tarballs, nor do
+ we support packagers directly sucking our git repo into theirs. We expect
+ that a source tarball be made from our git repo - or that if someone wants
+ to make a source tarball from a fork of our repo with additional tags in it
+ that they understand and desire the results of doing that.
+ """
+ version = os.environ.get("OSLO_PACKAGE_VERSION", None)
+ if version:
+ return version
+ version = _get_version_from_pkg_info(package_name)
+ if version:
+ return version
+ version = _get_version_from_git(pre_version)
+ if version:
+ return version
+ raise Exception("Versioning for this project requires either an sdist"
+ " tarball, or access to an upstream git repository.")
diff --git a/tabula/tabula/openstack/common/version.py b/tabula/tabula/openstack/common/version.py
new file mode 100644
index 00000000..b2d4a79b
--- /dev/null
+++ b/tabula/tabula/openstack/common/version.py
@@ -0,0 +1,94 @@
+
+# Copyright 2012 OpenStack Foundation
+# Copyright 2012-2013 Hewlett-Packard Development Company, L.P.
+#
+# Licensed under the Apache License, Version 2.0 (the "License"); you may
+# not use this file except in compliance with the License. You may obtain
+# a copy of the License at
+#
+# http://www.apache.org/licenses/LICENSE-2.0
+#
+# Unless required by applicable law or agreed to in writing, software
+# distributed under the License is distributed on an "AS IS" BASIS, WITHOUT
+# WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. See the
+# License for the specific language governing permissions and limitations
+# under the License.
+
+"""
+Utilities for consuming the version from pkg_resources.
+"""
+
+import pkg_resources
+
+
+class VersionInfo(object):
+
+ def __init__(self, package):
+ """Object that understands versioning for a package
+ :param package: name of the python package, such as glance, or
+ python-glanceclient
+ """
+ self.package = package
+ self.release = None
+ self.version = None
+ self._cached_version = None
+
+ def __str__(self):
+ """Make the VersionInfo object behave like a string."""
+ return self.version_string()
+
+ def __repr__(self):
+ """Include the name."""
+ return "VersionInfo(%s:%s)" % (self.package, self.version_string())
+
+ def _get_version_from_pkg_resources(self):
+ """Get the version of the package from the pkg_resources record
+ associated with the package."""
+ try:
+ requirement = pkg_resources.Requirement.parse(self.package)
+ provider = pkg_resources.get_provider(requirement)
+ return provider.version
+ except pkg_resources.DistributionNotFound:
+ # The most likely cause for this is running tests in a tree
+ # produced from a tarball where the package itself has not been
+ # installed into anything. Revert to setup-time logic.
+ from tabula.openstack.common import setup
+ return setup.get_version(self.package)
+
+ def release_string(self):
+ """Return the full version of the package including suffixes indicating
+ VCS status.
+ """
+ if self.release is None:
+ self.release = self._get_version_from_pkg_resources()
+
+ return self.release
+
+ def version_string(self):
+ """Return the short version minus any alpha/beta tags."""
+ if self.version is None:
+ parts = []
+ for part in self.release_string().split('.'):
+ if part[0].isdigit():
+ parts.append(part)
+ else:
+ break
+ self.version = ".".join(parts)
+
+ return self.version
+
+ # Compatibility functions
+ canonical_version_string = version_string
+ version_string_with_vcs = release_string
+
+ def cached_version_string(self, prefix=""):
+ """Generate an object which will expand in a string context to
+        the results of version_string(). We do this so we don't
+ call into pkg_resources every time we start up a program when
+ passing version information into the CONF constructor, but
+ rather only do the calculation when and if a version is requested
+ """
+ if not self._cached_version:
+ self._cached_version = "%s%s" % (prefix,
+ self.version_string())
+ return self._cached_version
diff --git a/tabula/tabula/settings.py b/tabula/tabula/settings.py
new file mode 100644
index 00000000..f2d475ba
--- /dev/null
+++ b/tabula/tabula/settings.py
@@ -0,0 +1,148 @@
+import logging
+import os
+import sys
+
+from openstack_dashboard import exceptions
+
+ROOT_PATH = os.path.dirname(os.path.abspath(__file__))
+BIN_DIR = os.path.abspath(os.path.join(ROOT_PATH, '..', 'bin'))
+
+if ROOT_PATH not in sys.path:
+ sys.path.append(ROOT_PATH)
+
+DEBUG = False
+TEMPLATE_DEBUG = DEBUG
+
+SITE_BRANDING = 'OpenStack Dashboard'
+
+LOGIN_URL = '/auth/login/'
+LOGOUT_URL = '/auth/logout/'
+# LOGIN_REDIRECT_URL can be used as an alternative for
+# HORIZON_CONFIG.user_home, if user_home is not set.
+# Do not set it to '/home/', as this will cause circular redirect loop
+LOGIN_REDIRECT_URL = '/'
+
+MEDIA_ROOT = os.path.abspath(os.path.join(ROOT_PATH, '..', 'media'))
+MEDIA_URL = '/media/'
+STATIC_ROOT = os.path.abspath(os.path.join(ROOT_PATH, '..', 'static'))
+STATIC_URL = '/static/'
+ADMIN_MEDIA_PREFIX = '/static/admin/'
+
+ROOT_URLCONF = 'openstack_dashboard.urls'
+
+HORIZON_CONFIG = {
+ 'dashboards': ('project', 'admin', 'settings',),
+ 'default_dashboard': 'project',
+ 'user_home': 'openstack_dashboard.views.get_user_home',
+ 'ajax_queue_limit': 10,
+ 'help_url': "http://docs.openstack.org",
+ 'exceptions': {'recoverable': exceptions.RECOVERABLE,
+ 'not_found': exceptions.NOT_FOUND,
+ 'unauthorized': exceptions.UNAUTHORIZED},
+ 'customization_module': 'windc.overrides'
+}
+
+
+MIDDLEWARE_CLASSES = (
+ 'django.middleware.common.CommonMiddleware',
+ 'django.middleware.csrf.CsrfViewMiddleware',
+ 'django.contrib.sessions.middleware.SessionMiddleware',
+ 'django.contrib.auth.middleware.AuthenticationMiddleware',
+ 'django.contrib.messages.middleware.MessageMiddleware',
+ 'horizon.middleware.HorizonMiddleware',
+ 'django.middleware.doc.XViewMiddleware',
+ 'django.middleware.locale.LocaleMiddleware',
+)
+
+TEMPLATE_CONTEXT_PROCESSORS = (
+ 'django.core.context_processors.debug',
+ 'django.core.context_processors.i18n',
+ 'django.core.context_processors.request',
+ 'django.core.context_processors.media',
+ 'django.core.context_processors.static',
+ 'django.contrib.messages.context_processors.messages',
+ 'horizon.context_processors.horizon',
+)
+
+TEMPLATE_LOADERS = (
+ 'django.template.loaders.filesystem.Loader',
+ 'django.template.loaders.app_directories.Loader',
+ 'horizon.loaders.TemplateLoader'
+)
+
+TEMPLATE_DIRS = (
+ os.path.join(ROOT_PATH, 'templates'),
+)
+
+STATICFILES_FINDERS = (
+ 'compressor.finders.CompressorFinder',
+ 'django.contrib.staticfiles.finders.AppDirectoriesFinder',
+)
+
+less_binary = os.path.join(BIN_DIR, 'less', 'lessc')
+COMPRESS_PRECOMPILERS = (
+ ('text/less', (less_binary + ' {infile} {outfile}')),
+)
+
+COMPRESS_CSS_FILTERS = (
+ 'compressor.filters.css_default.CssAbsoluteFilter',
+)
+
+COMPRESS_ENABLED = True
+COMPRESS_OUTPUT_DIR = 'windc'
+COMPRESS_CSS_HASHING_METHOD = 'hash'
+COMPRESS_PARSER = 'compressor.parser.HtmlParser'
+
+INSTALLED_APPS = (
+ 'openstack_dashboard',
+ 'django.contrib.contenttypes',
+ 'django.contrib.auth',
+ 'django.contrib.sessions',
+ 'django.contrib.messages',
+ 'django.contrib.staticfiles',
+ 'django.contrib.humanize',
+ 'compressor',
+ 'horizon',
+ 'openstack_dashboard.dashboards.project',
+ 'openstack_dashboard.dashboards.admin',
+ 'openstack_dashboard.dashboards.settings',
+ 'openstack_auth'
+)
+
+TEST_RUNNER = 'django_nose.NoseTestSuiteRunner'
+AUTHENTICATION_BACKENDS = ('openstack_auth.backend.KeystoneBackend',)
+MESSAGE_STORAGE = 'django.contrib.messages.storage.cookie.CookieStorage'
+
+SESSION_ENGINE = 'django.contrib.sessions.backends.signed_cookies'
+SESSION_COOKIE_HTTPONLY = True
+SESSION_EXPIRE_AT_BROWSER_CLOSE = True
+SESSION_COOKIE_SECURE = False
+
+gettext_noop = lambda s: s
+LANGUAGES = (
+ ('en', gettext_noop('English')),
+ ('it', gettext_noop('Italiano')),
+ ('es', gettext_noop('Spanish')),
+ ('fr', gettext_noop('French')),
+ ('ja', gettext_noop('Japanese')),
+ ('pt', gettext_noop('Portuguese')),
+ ('pl', gettext_noop('Polish')),
+ ('zh-cn', gettext_noop('Simplified Chinese')),
+ ('zh-tw', gettext_noop('Traditional Chinese')),
+)
+LANGUAGE_CODE = 'en'
+USE_I18N = True
+USE_L10N = True
+USE_TZ = True
+
+OPENSTACK_KEYSTONE_DEFAULT_ROLE = 'Member'
+
+DEFAULT_EXCEPTION_REPORTER_FILTER = 'horizon.exceptions.HorizonReporterFilter'
+
+try:
+ from local.local_settings import *
+except ImportError:
+ logging.warning("No local_settings file found.")
+
+if DEBUG:
+ logging.basicConfig(level=logging.DEBUG)
diff --git a/dashboard/windc/templates/windc/_data_center_help.html b/tabula/tabula/templates/_data_center_help.html
similarity index 100%
rename from dashboard/windc/templates/windc/_data_center_help.html
rename to tabula/tabula/templates/_data_center_help.html
diff --git a/dashboard/windc/templates/windc/_dc_help.html b/tabula/tabula/templates/_dc_help.html
similarity index 100%
rename from dashboard/windc/templates/windc/_dc_help.html
rename to tabula/tabula/templates/_dc_help.html
diff --git a/dashboard/windc/templates/windc/_iis_help.html b/tabula/tabula/templates/_iis_help.html
similarity index 100%
rename from dashboard/windc/templates/windc/_iis_help.html
rename to tabula/tabula/templates/_iis_help.html
diff --git a/tabula/tabula/templates/_service_logs.html b/tabula/tabula/templates/_service_logs.html
new file mode 100644
index 00000000..dda752a0
--- /dev/null
+++ b/tabula/tabula/templates/_service_logs.html
@@ -0,0 +1,7 @@
+{% load i18n %}
+
+
{% trans "Service Logs" %}
+
+
+ {{ reports }}
+
\ No newline at end of file
diff --git a/tabula/tabula/templates/_services.html b/tabula/tabula/templates/_services.html
new file mode 100644
index 00000000..ee5006f6
--- /dev/null
+++ b/tabula/tabula/templates/_services.html
@@ -0,0 +1,16 @@
+{% load i18n %}
+
+
+
{% trans "Service Details" %}
+
+
+
{% trans "Name" %}
+
{{ service_name }}
+
{% trans "Type" %}
+
{{ service_type }}
+
{% trans "Domain" %}
+
{{ service_domain }}
+
{% trans "Status" %}
+
{{ service_status }}
+
+
\ No newline at end of file
diff --git a/dashboard/windc/templates/windc/_services_tabs.html b/tabula/tabula/templates/_services_tabs.html
similarity index 100%
rename from dashboard/windc/templates/windc/_services_tabs.html
rename to tabula/tabula/templates/_services_tabs.html
diff --git a/dashboard/windc/templates/windc/create.html b/tabula/tabula/templates/create.html
similarity index 100%
rename from dashboard/windc/templates/windc/create.html
rename to tabula/tabula/templates/create.html
diff --git a/dashboard/windc/templates/windc/create_dc.html b/tabula/tabula/templates/create_dc.html
similarity index 100%
rename from dashboard/windc/templates/windc/create_dc.html
rename to tabula/tabula/templates/create_dc.html
diff --git a/dashboard/windc/templates/windc/index.html b/tabula/tabula/templates/index.html
similarity index 100%
rename from dashboard/windc/templates/windc/index.html
rename to tabula/tabula/templates/index.html
diff --git a/tabula/tabula/templates/service_details.html b/tabula/tabula/templates/service_details.html
new file mode 100644
index 00000000..bde27525
--- /dev/null
+++ b/tabula/tabula/templates/service_details.html
@@ -0,0 +1,15 @@
+{% extends 'base.html' %}
+{% load i18n sizeformat %}
+{% block title %}{% trans "Service Detail" %}{% endblock %}
+
+{% block page_header %}
+ {% include "horizon/common/_page_header.html" with title="Service Detail: "|add:service.name %}
+{% endblock page_header %}
+
+{% block main %}
+
+
+ {{ tab_group.render }}
+
+
+{% endblock %}
diff --git a/dashboard/windc/templates/windc/services.html b/tabula/tabula/templates/services.html
similarity index 100%
rename from dashboard/windc/templates/windc/services.html
rename to tabula/tabula/templates/services.html
diff --git a/dashboard/windc/templates/windc/update.html b/tabula/tabula/templates/update.html
similarity index 100%
rename from dashboard/windc/templates/windc/update.html
rename to tabula/tabula/templates/update.html
diff --git a/windc/windc/api/__init__.py b/tabula/tabula/test/__init__.py
similarity index 100%
rename from windc/windc/api/__init__.py
rename to tabula/tabula/test/__init__.py
diff --git a/tabula/tabula/test/settings.py b/tabula/tabula/test/settings.py
new file mode 100644
index 00000000..501bc7f1
--- /dev/null
+++ b/tabula/tabula/test/settings.py
@@ -0,0 +1,70 @@
+import socket
+
+from dashboard.settings import *
+
+socket.setdefaulttimeout(1)
+
+DEBUG = False
+TEMPLATE_DEBUG = DEBUG
+
+SECRET_KEY = 'HELLA_SECRET!'
+
+DATABASES = {'default': {'ENGINE': 'django.db.backends.sqlite3'}}
+
+TESTSERVER = 'http://testserver'
+
+INSTALLED_APPS += ('django_nose',)
+
+MESSAGE_STORAGE = 'django.contrib.messages.storage.cookie.CookieStorage'
+
+TEST_RUNNER = 'django_nose.NoseTestSuiteRunner'
+NOSE_ARGS = ['--nocapture',
+ '--nologcapture',
+ '--cover-package=windc']
+
+EMAIL_BACKEND = 'django.core.mail.backends.locmem.EmailBackend'
+SESSION_ENGINE = 'django.contrib.sessions.backends.cache'
+
+OPENSTACK_ADDRESS = "localhost"
+OPENSTACK_ADMIN_TOKEN = "openstack"
+OPENSTACK_KEYSTONE_URL = "http://%s:5000/v2.0" % OPENSTACK_ADDRESS
+OPENSTACK_KEYSTONE_ADMIN_URL = "http://%s:35357/v2.0" % OPENSTACK_ADDRESS
+OPENSTACK_KEYSTONE_DEFAULT_ROLE = "Member"
+
+# Silence logging output during tests.
+LOGGING = {
+ 'version': 1,
+ 'disable_existing_loggers': False,
+ 'handlers': {
+ 'null': {
+ 'level': 'DEBUG',
+ 'class': 'django.utils.log.NullHandler',
+ },
+ },
+ 'loggers': {
+ 'django.db.backends': {
+ 'handlers': ['null'],
+ 'propagate': False,
+ },
+ 'horizon': {
+ 'handlers': ['null'],
+ 'propagate': False,
+ },
+ 'novaclient': {
+ 'handlers': ['null'],
+ 'propagate': False,
+ },
+ 'keystoneclient': {
+ 'handlers': ['null'],
+ 'propagate': False,
+ },
+ 'quantum': {
+ 'handlers': ['null'],
+ 'propagate': False,
+ },
+ 'nose.plugins.manager': {
+ 'handlers': ['null'],
+ 'propagate': False,
+ }
+ }
+}
diff --git a/windc/windc/core/__init__.py b/tabula/tabula/version.py
similarity index 79%
rename from windc/windc/core/__init__.py
rename to tabula/tabula/version.py
index 1d3eb572..ac241ae7 100644
--- a/windc/windc/core/__init__.py
+++ b/tabula/tabula/version.py
@@ -1,7 +1,6 @@
# vim: tabstop=4 shiftwidth=4 softtabstop=4
-# Copyright 2011 OpenStack LLC.
-# All Rights Reserved.
+# Copyright 2012 OpenStack Foundation
#
# Licensed under the Apache License, Version 2.0 (the "License"); you may
# not use this file except in compliance with the License. You may obtain
@@ -15,7 +14,7 @@
# License for the specific language governing permissions and limitations
# under the License.
-import builder_set
-builder_set.builders = builder_set.BuilderSet()
-#builder_set.builders.load()
\ No newline at end of file
+from tabula.openstack.common import version as common_version
+
+version_info = common_version.VersionInfo('tabula')
diff --git a/windc/windc/api/middleware/__init__.py b/tabula/tabula/windc/__init__.py
similarity index 100%
rename from windc/windc/api/middleware/__init__.py
rename to tabula/tabula/windc/__init__.py
diff --git a/tabula/tabula/windc/api.py b/tabula/tabula/windc/api.py
new file mode 100644
index 00000000..86a56ccd
--- /dev/null
+++ b/tabula/tabula/windc/api.py
@@ -0,0 +1,215 @@
+# vim: tabstop=4 shiftwidth=4 softtabstop=4
+
+# Copyright 2012 United States Government as represented by the
+# Administrator of the National Aeronautics and Space Administration.
+# All Rights Reserved.
+#
+# Copyright 2012 Nebula, Inc.
+#
+# Licensed under the Apache License, Version 2.0 (the "License"); you may
+# not use this file except in compliance with the License. You may obtain
+# a copy of the License at
+#
+# http://www.apache.org/licenses/LICENSE-2.0
+#
+# Unless required by applicable law or agreed to in writing, software
+# distributed under the License is distributed on an "AS IS" BASIS, WITHOUT
+# WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. See the
+# License for the specific language governing permissions and limitations
+# under the License.
+
+import logging
+
+from portasclient.v1.client import Client as windc_client
+
+log = logging.getLogger(__name__)
+
+
+def windcclient(request):
+ url = "http://127.0.0.1:8082"
+ log.debug('windcclient connection created using token "%s" and url "%s"'
+ % (request.user.token, url))
+ return windc_client(endpoint=url, token=request.user.token.token['id'])
+
+
+def datacenters_create(request, parameters):
+ env = windcclient(request).environments.create(parameters.get('name', ''))
+ log.debug('Environment::Create {0}'.format(env))
+ return env
+
+
+def datacenters_delete(request, datacenter_id):
+ result = windcclient(request).environments.delete(datacenter_id)
+ log.debug('Environment::Delete Id:{0}'.format(datacenter_id))
+ return result
+
+
+def datacenters_get(request, datacenter_id):
+ env = windcclient(request).environments.get(datacenter_id)
+ log.debug('Environment::Get {0}'.format(env))
+ return env
+
+
+def datacenters_list(request):
+ log.debug('Environment::List')
+ return windcclient(request).environments.list()
+
+
+def datacenters_deploy(request, datacenter_id):
+    session_id = None
+    for session in windcclient(request).sessions.list(datacenter_id):
+        if session.state == 'open':
+            session_id = session.id
+    if not session_id:
+        return "Sorry, nothing to deploy."
+ log.debug('Obtained session with Id: {0}'.format(session_id))
+ result = windcclient(request).sessions.deploy(datacenter_id, session_id)
+ log.debug('Environment with Id: {0} deployed in session '
+ 'with Id: {1}'.format(datacenter_id, session_id))
+ return result
+
+
+def services_create(request, environment_id, parameters):
+ session_id = None
+ sessions = windcclient(request).sessions.list(environment_id)
+
+ for s in sessions:
+ if s.state == 'open':
+ session_id = s.id
+ else:
+ windcclient(request).sessions.delete(environment_id, s.id)
+
+ if session_id is None:
+ session_id = windcclient(request).sessions.configure(environment_id).id
+
+ if parameters['service_type'] == 'Active Directory':
+ service = windcclient(request)\
+ .activeDirectories\
+ .create(environment_id, session_id, parameters)
+ else:
+ service = windcclient(request)\
+ .webServers.create(environment_id, session_id, parameters)
+
+ log.debug('Service::Create {0}'.format(service))
+ return service
+
+
+def get_time(obj):
+ return obj.updated
+
+
+def services_list(request, datacenter_id):
+ services = []
+ session_id = None
+ sessions = windcclient(request).sessions.list(datacenter_id)
+ for s in sessions:
+ session_id = s.id
+
+ if session_id:
+ services = windcclient(request).activeDirectories.list(datacenter_id,
+ session_id)
+ services += windcclient(request).webServers.list(datacenter_id,
+ session_id)
+ for i in range(len(services)):
+ reports = windcclient(request).sessions. \
+ reports(datacenter_id, session_id,
+ services[i].id)
+
+ for report in reports:
+ services[i].operation = report.text
+
+ log.debug('Service::List')
+ return services
+
+
+def get_active_directories(request, datacenter_id):
+ services = []
+ session_id = None
+ sessions = windcclient(request).sessions.list(datacenter_id)
+
+ for s in sessions:
+ session_id = s.id
+
+ if session_id:
+ services = windcclient(request)\
+ .activeDirectories\
+ .list(datacenter_id, session_id)
+
+ log.debug('Service::Active Directories::List')
+ return services
+
+
+def services_get(request, datacenter_id, service_id):
+ services = services_list(request, datacenter_id)
+
+ for service in services:
+ if service.id == service_id:
+ log.debug('Service::Get {0}'.format(service))
+ return service
+
+
+def get_data_center_id_for_service(request, service_id):
+ datacenters = datacenters_list(request)
+
+ for dc in datacenters:
+ services = services_list(request, dc.id)
+ for service in services:
+ if service.id == service_id:
+ return dc.id
+
+
+def get_service_datails(request, service_id):
+ datacenters = datacenters_list(request)
+ services = []
+ for dc in datacenters:
+ services += services_list(request, dc.id)
+
+ for service in services:
+ if service.id == service_id:
+ return service
+
+
+def get_status_message_for_service(request, service_id):
+ environment_id = get_data_center_id_for_service(request, service_id)
+ session_id = None
+ sessions = windcclient(request).sessions.list(environment_id)
+
+ for s in sessions:
+ session_id = s.id
+
+ if session_id:
+ reports = windcclient(request).sessions.\
+ reports(environment_id, session_id, service_id)
+
+ result = 'Initialization.... \n'
+ for report in reports:
+ result += ' ' + str(report.text) + '\n'
+
+ return result
+
+
+def services_delete(request, datacenter_id, service_id):
+ log.debug('Service::Remove EnvId: {0} '
+ 'SrvId: {1}'.format(datacenter_id, service_id))
+
+ services = services_list(request, datacenter_id)
+
+ session_id = None
+ sessions = windcclient(request).sessions.list(datacenter_id)
+ for session in sessions:
+ if session.state == 'open':
+ session_id = session.id
+
+ if session_id is None:
+ raise Exception("Sorry, you can not delete this service now.")
+
+ for service in services:
+        if service.id == service_id:
+            if service.type == 'Active Directory':
+                windcclient(request).activeDirectories.delete(datacenter_id,
+                                                              session_id,
+                                                              service_id)
+            elif service.type == 'IIS':
+                windcclient(request).webServers.delete(datacenter_id,
+                                                       session_id,
+                                                       service_id)
diff --git a/dashboard/windc/forms.py b/tabula/tabula/windc/forms.py
similarity index 72%
rename from dashboard/windc/forms.py
rename to tabula/tabula/windc/forms.py
index b44cd160..ba8e6a73 100644
--- a/dashboard/windc/forms.py
+++ b/tabula/tabula/windc/forms.py
@@ -22,26 +22,21 @@ import logging
import string
from django import forms
-from django.core.urlresolvers import reverse
from django.utils.translation import ugettext_lazy as _
+import re
+from tabula.windc import api
-from openstack_dashboard import api
-
-from horizon import forms
-from horizon import exceptions
-from horizon import messages
-
-LOG = logging.getLogger(__name__)
+log = logging.getLogger(__name__)
class PasswordField(forms.CharField):
-
# Setup the Field
def __init__(self, label, *args, **kwargs):
super(PasswordField, self).__init__(min_length=7, required=True,
- label=label,
- widget=forms.PasswordInput(render_value=False),
- *args, **kwargs)
+ label=label,
+ widget=forms.PasswordInput(
+ render_value=False),
+ *args, **kwargs)
def clean(self, value):
@@ -80,8 +75,8 @@ class PasswordField(forms.CharField):
class WizardFormServiceType(forms.Form):
service = forms.ChoiceField(label=_('Service Type'),
choices=[
- ('Active Directory', 'Active Directory'),
- ('IIS', 'Internet Information Services')
+ ('Active Directory', 'Active Directory'),
+ ('IIS', 'Internet Information Services')
])
@@ -104,6 +99,9 @@ class WizardFormADConfiguration(forms.Form):
recovery_password = PasswordField(_('Recovery password'))
+ def __init__(self, request, *args, **kwargs):
+ super(WizardFormADConfiguration, self).__init__(*args, **kwargs)
+
class WizardFormIISConfiguration(forms.Form):
iis_name = forms.CharField(label=_('IIS Server Name'),
@@ -111,10 +109,17 @@ class WizardFormIISConfiguration(forms.Form):
adm_password = PasswordField(_('Administrator password'))
- iis_domain = forms.CharField(label=_('Member of the Domain'),
- required=True)
+ iis_domain = forms.ChoiceField(label=_('Member of the Domain'),
+ required=False)
- domain_user_name = forms.CharField(label=_('Domain User Name'),
- required=True)
+ def __init__(self, request, *args, **kwargs):
+ super(WizardFormIISConfiguration, self).__init__(*args, **kwargs)
- domain_user_password = PasswordField(_('Domain User Password'))
+ link = request.__dict__['META']['HTTP_REFERER']
+ datacenter_id = re.search('windc/(\S+)', link).group(0)[6:-1]
+
+ domains = api.get_active_directories(request, datacenter_id)
+
+ self.fields['iis_domain'].choices = [("", "")] + \
+ [(domain.name, domain.name)
+ for domain in domains]
diff --git a/tabula/tabula/windc/overrides.py b/tabula/tabula/windc/overrides.py
new file mode 100644
index 00000000..10b27047
--- /dev/null
+++ b/tabula/tabula/windc/overrides.py
@@ -0,0 +1,6 @@
+import horizon
+
+from panel import WinDC
+
+project = horizon.get_dashboard('project')
+project.register(WinDC)
diff --git a/dashboard/windc/panel.py b/tabula/tabula/windc/panel.py
similarity index 100%
rename from dashboard/windc/panel.py
rename to tabula/tabula/windc/panel.py
diff --git a/dashboard/windc/tables.py b/tabula/tabula/windc/tables.py
similarity index 77%
rename from dashboard/windc/tables.py
rename to tabula/tabula/windc/tables.py
index c8c6646d..f1379622 100644
--- a/dashboard/windc/tables.py
+++ b/tabula/tabula/windc/tables.py
@@ -17,26 +17,13 @@
# TO DO: clear extra modules
-import re
import logging
-from django import shortcuts
-from django import template
-from django.core import urlresolvers
-from django.template.defaultfilters import title
-from django.utils.http import urlencode
-from django.utils.translation import string_concat, ugettext_lazy as _
-
-from horizon.conf import HORIZON_CONFIG
-from horizon import exceptions
+import re
+from django.utils.translation import ugettext_lazy as _
from horizon import messages
from horizon import tables
-from horizon.templatetags import sizeformat
-from horizon.utils.filters import replace_underscores
-
-from openstack_dashboard import api
-from openstack_dashboard.dashboards.project.access_and_security \
- .floating_ips.workflows import IPAssociationWorkflow
+from tabula.windc import api
LOG = logging.getLogger(__name__)
@@ -52,7 +39,7 @@ class CreateService(tables.LinkAction):
return True
def action(self, request, service):
- api.windc.services_create(request, service)
+ api.services_create(request, service)
class CreateDataCenter(tables.LinkAction):
@@ -65,7 +52,7 @@ class CreateDataCenter(tables.LinkAction):
return True
def action(self, request, datacenter):
- api.windc.datacenters_create(request, datacenter)
+ api.datacenters_create(request, datacenter)
class DeleteDataCenter(tables.BatchAction):
@@ -80,7 +67,7 @@ class DeleteDataCenter(tables.BatchAction):
return True
def action(self, request, datacenter_id):
- api.windc.datacenters_delete(request, datacenter_id)
+ api.datacenters_delete(request, datacenter_id)
class DeleteService(tables.BatchAction):
@@ -99,10 +86,10 @@ class DeleteService(tables.BatchAction):
datacenter_id = re.search('windc/(\S+)', link).group(0)[6:-1]
try:
- api.windc.services_delete(request, datacenter_id, service_id)
+ api.services_delete(request, datacenter_id, service_id)
except:
- messages.error(request,
- _('Sorry, you can not delete this service right now.'))
+ messages.error(request, _('Sorry, you can not delete this '
+ 'service right now.'))
class DeployDataCenter(tables.BatchAction):
@@ -111,13 +98,13 @@ class DeployDataCenter(tables.BatchAction):
action_past = _('Deploy')
data_type_singular = _('Data Center')
data_type_plural = _('Data Center')
- classes = ('btn-launch')
+ classes = 'btn-launch'
def allowed(self, request, datum):
return True
def action(self, request, datacenter_id):
- return api.windc.datacenters_deploy(request, datacenter_id)
+ return api.datacenters_deploy(request, datacenter_id)
class ShowDataCenterServices(tables.LinkAction):
@@ -133,25 +120,21 @@ class UpdateDCRow(tables.Row):
ajax = True
def get_data(self, request, datacenter_id):
- return api.windc.datacenters_get(request, datacenter_id)
-
-
+ return api.datacenters_get(request, datacenter_id)
+
+
class UpdateServiceRow(tables.Row):
ajax = True
def get_data(self, request, service_id):
+
link = request.__dict__['META']['HTTP_REFERER']
datacenter_id = re.search('windc/(\S+)', link).group(0)[6:-1]
-
- return api.windc.services_get(request, datacenter_id, service_id)
+ service = api.services_get(request, datacenter_id, service_id)
+
+ return service
-STATUS_DISPLAY_CHOICES = (
- ('draft', 'Ready to deploy'),
- ('pending', 'Wait for configuration'),
- ('inprogress', 'Deploy in progress'),
- ('finished', 'Active')
-)
STATUS_DISPLAY_CHOICES = (
('draft', 'Ready to deploy'),
@@ -162,14 +145,12 @@ STATUS_DISPLAY_CHOICES = (
class WinDCTable(tables.DataTable):
-
STATUS_CHOICES = (
(None, True),
('Ready to deploy', True),
('Active', True)
)
-
name = tables.Column('name',
link=('horizon:project:windc:services'),
verbose_name=_('Name'))
@@ -190,7 +171,6 @@ class WinDCTable(tables.DataTable):
class WinServicesTable(tables.DataTable):
-
STATUS_CHOICES = (
(None, True),
('Ready to deploy', True),
@@ -198,7 +178,7 @@ class WinServicesTable(tables.DataTable):
)
name = tables.Column('name', verbose_name=_('Name'),
- link=('horizon:project:windc:service_details'),)
+ link=('horizon:project:windc:service_details'))
_type = tables.Column('service_type', verbose_name=_('Type'))
@@ -207,10 +187,11 @@ class WinServicesTable(tables.DataTable):
status_choices=STATUS_CHOICES,
display_choices=STATUS_DISPLAY_CHOICES)
+ operation = tables.Column('operation', verbose_name=_('Operation'))
+
class Meta:
name = 'services'
verbose_name = _('Services')
row_class = UpdateServiceRow
status_columns = ['status']
table_actions = (CreateService,)
- row_actions = (DeleteService,)
diff --git a/dashboard/windc/tabs.py b/tabula/tabula/windc/tabs.py
similarity index 53%
rename from dashboard/windc/tabs.py
rename to tabula/tabula/windc/tabs.py
index 95b7217c..433a8ae4 100644
--- a/dashboard/windc/tabs.py
+++ b/tabula/tabula/windc/tabs.py
@@ -18,21 +18,42 @@ from django.utils.translation import ugettext_lazy as _
from horizon import exceptions
from horizon import tabs
+import logging
-from openstack_dashboard import api
+from tabula.windc import api
+
+
+LOG = logging.getLogger(__name__)
class OverviewTab(tabs.Tab):
- name = _("Services")
- slug = "_services"
- template_name = ("project/windc/_services.html")
+ name = _("Service")
+ slug = "_service"
+ template_name = '_services.html'
def get_context_data(self, request):
- dc = self.tab_group.kwargs['domain_controller']
- return {"domain_controller": dc}
+ data = self.tab_group.kwargs['service']
+
+ return {"service_name": data.name,
+ "service_status": data.status,
+ "service_type": data.service_type,
+ "service_domain": data.domain}
-class WinServicesTab(tabs.TabGroup):
+class LogsTab(tabs.Tab):
+ name = _("Logs")
+ slug = "_logs"
+ template_name = '_service_logs.html'
+
+ def get_context_data(self, request):
+ data = self.tab_group.kwargs['service']
+
+ reports = api.get_status_message_for_service(request, data.id)
+
+ return {"reports": reports}
+
+
+class WinServicesTabs(tabs.TabGroup):
slug = "services_details"
- tabs = (OverviewTab,)
+ tabs = (OverviewTab, LogsTab)
sticky = True
diff --git a/dashboard/windc/urls.py b/tabula/tabula/windc/urls.py
similarity index 57%
rename from dashboard/windc/urls.py
rename to tabula/tabula/windc/urls.py
index 7b5f987c..df7ee2ed 100644
--- a/dashboard/windc/urls.py
+++ b/tabula/tabula/windc/urls.py
@@ -20,20 +20,22 @@
from django.conf.urls.defaults import patterns, url
-from .views import IndexView, WinServices, CreateWinDCView
+from .views import IndexView, WinServices, CreateWinDCView, DetailServiceView
from .views import Wizard
from .forms import WizardFormServiceType, WizardFormConfiguration
VIEW_MOD = 'openstack_dashboard.dashboards.project.windc.views'
urlpatterns = patterns(VIEW_MOD,
- url(r'^$', IndexView.as_view(), name='index'),
- url(r'^create$',
- Wizard.as_view([WizardFormServiceType, WizardFormConfiguration]),
- name='create'),
- url(r'^create_dc$', CreateWinDCView.as_view(), name='create_dc'),
- url(r'^(?P[^/]+)/$', WinServices.as_view(),
- name='services'),
- url(r'^(?P[^/]+)/$', WinServices.as_view(),
- name='service_details')
-)
+ url(r'^$', IndexView.as_view(), name='index'),
+ url(r'^create$',
+ Wizard.as_view([WizardFormServiceType,
+ WizardFormConfiguration]),
+ name='create'),
+ url(r'^create_dc$', CreateWinDCView.as_view(),
+ name='create_dc'),
+ url(r'^(?P[^/]+)/$',
+ WinServices.as_view(), name='services'),
+ url(r'^(?P[^/]+)/details$',
+ DetailServiceView.as_view(),
+ name='service_details'))
diff --git a/dashboard/windc/views.py b/tabula/tabula/windc/views.py
similarity index 63%
rename from dashboard/windc/views.py
rename to tabula/tabula/windc/views.py
index 48bfb0dd..08857a1f 100644
--- a/dashboard/windc/views.py
+++ b/tabula/tabula/windc/views.py
@@ -24,64 +24,67 @@ Views for managing instances.
import logging
import re
-from django import http
-from django import shortcuts
from django.views import generic
-from django.core.urlresolvers import reverse, reverse_lazy
-from django.utils.datastructures import SortedDict
+from django.core.urlresolvers import reverse
from django.utils.translation import ugettext_lazy as _
from django.contrib.formtools.wizard.views import SessionWizardView
from horizon import exceptions
-from horizon import forms
from horizon import tabs
from horizon import tables
from horizon import workflows
from horizon.forms.views import ModalFormMixin
-from openstack_dashboard import api
+from tabula.windc import api
+
from .tables import WinDCTable, WinServicesTable
from .workflows import CreateWinDC
-from .forms import (WizardFormServiceType, WizardFormConfiguration,
- WizardFormADConfiguration, WizardFormIISConfiguration)
+from .tabs import WinServicesTabs
+from .forms import (WizardFormADConfiguration, WizardFormIISConfiguration)
from horizon import messages
from django.http import HttpResponseRedirect
+
LOG = logging.getLogger(__name__)
class Wizard(ModalFormMixin, SessionWizardView, generic.FormView):
- template_name = 'project/windc/services_tabs.html'
+ template_name = 'services_tabs.html'
def done(self, form_list, **kwargs):
link = self.request.__dict__['META']['HTTP_REFERER']
datacenter_id = re.search('windc/(\S+)', link).group(0)[6:-1]
+
url = "/project/windc/%s/" % datacenter_id
service_type = form_list[0].data.get('0-service', '')
parameters = {'service_type': service_type}
-
+ data = form_list[1].data
if service_type == 'Active Directory':
parameters['configuration'] = 'standalone'
- parameters['name'] = str(form_list[1].data.get('1-dc_name',
- 'noname'))
- parameters['domain'] = parameters['name'] # Fix Me in orchestrator
- parameters['adminPassword'] = \
- str(form_list[1].data.get('1-adm_password', ''))
- dc_count = int(form_list[1].data.get('1-dc_count', 1))
- recovery_password = \
- str(form_list[1].data.get('1-recovery_password', ''))
+ parameters['name'] = str(data.get('1-dc_name', 'noname'))
+ parameters['domain'] = parameters['name'] # Fix Me in orchestrator
+ parameters['adminPassword'] = str(data.get('1-adm_password', ''))
+ dc_count = int(data.get('1-dc_count', 1))
+ recovery_password = str(data.get('1-recovery_password', ''))
parameters['units'] = []
parameters['units'].append({'isMaster': True,
'recoveryPassword': recovery_password,
'location': 'west-dc'})
for dc in range(dc_count - 1):
- parameters['units'].append({'isMaster': False,
- 'recoveryPassword': recovery_password,
- 'location': 'west-dc'})
+ parameters['units'].append({
+ 'isMaster': False,
+ 'recoveryPassword': recovery_password,
+ 'location': 'west-dc'
+ })
elif service_type == 'IIS':
+ password = data.get('1-adm_password', '')
+ parameters['name'] = str(data.get('1-iis_name', 'noname'))
+ parameters['credentials'] = {'username': 'Administrator',
+ 'password': password}
+ parameters['domain'] = str(data.get('1-iis_domain', ''))
password = form_list[1].data.get('1-adm_password', '')
domain = form_list[1].data.get('1-iis_domain', '')
dc_user = form_list[1].data.get('1-domain_user_name', '')
@@ -92,8 +95,8 @@ class Wizard(ModalFormMixin, SessionWizardView, generic.FormView):
parameters['credentials'] = {'username': 'Administrator',
'password': password}
parameters['domain'] = str(domain)
- # 'username': str(dc_user),
- # 'password': str(dc_pass)}
+ # 'username': str(dc_user),
+ # 'password': str(dc_pass)}
parameters['location'] = 'west-dc'
parameters['units'] = []
@@ -101,26 +104,27 @@ class Wizard(ModalFormMixin, SessionWizardView, generic.FormView):
'endpoint': [{'host': '10.0.0.1'}],
'location': 'west-dc'})
- service = api.windc.services_create(self.request,
- datacenter_id,
- parameters)
+ service = api.services_create(self.request, datacenter_id, parameters)
message = "The %s service successfully created." % service_type
messages.success(self.request, message)
return HttpResponseRedirect(url)
def get_form(self, step=None, data=None, files=None):
+
form = super(Wizard, self).get_form(step, data, files)
if data:
- service_type = data.get('0-service', '')
- self.service_type = service_type
- if service_type == 'Active Directory':
+ self.service_type = data.get('0-service', '')
+ if self.service_type == 'Active Directory':
self.form_list['1'] = WizardFormADConfiguration
- elif service_type == 'IIS':
+ elif self.service_type == 'IIS':
self.form_list['1'] = WizardFormIISConfiguration
return form
+ def get_form_kwargs(self, step=None):
+ return {'request': self.request} if step == u'1' else {}
+
def get_form_step_data(self, form):
LOG.debug(form.data)
return form.data
@@ -134,11 +138,11 @@ class Wizard(ModalFormMixin, SessionWizardView, generic.FormView):
class IndexView(tables.DataTableView):
table_class = WinDCTable
- template_name = 'project/windc/index.html'
+ template_name = 'index.html'
def get_data(self):
try:
- data_centers = api.windc.datacenters_list(self.request)
+ data_centers = api.datacenters_list(self.request)
except:
data_centers = []
exceptions.handle(self.request,
@@ -148,7 +152,7 @@ class IndexView(tables.DataTableView):
class WinServices(tables.DataTableView):
table_class = WinServicesTable
- template_name = 'project/windc/services.html'
+ template_name = 'services.html'
def get_context_data(self, **kwargs):
context = super(WinServices, self).get_context_data(**kwargs)
@@ -159,20 +163,50 @@ class WinServices(tables.DataTableView):
try:
dc_id = self.kwargs['data_center_id']
self.datacenter_id = dc_id
- datacenter = api.windc.datacenters_get(self.request, dc_id)
+ datacenter = api.datacenters_get(self.request, dc_id)
self.dc_name = datacenter.name
- services = api.windc.services_list(self.request, dc_id)
+ services = api.services_list(self.request, dc_id)
except:
services = []
exceptions.handle(self.request,
_('Unable to retrieve list of services for '
'data center "%s".') % self.dc_name)
- return services
+ self._services = services
+ return self._services
+
+
+class DetailServiceView(tabs.TabView):
+ tab_group_class = WinServicesTabs
+ template_name = 'service_details.html'
+
+ def get_context_data(self, **kwargs):
+ context = super(DetailServiceView, self).get_context_data(**kwargs)
+ context["service"] = self.get_data()
+ context["service_name"] = self.get_data().name
+ return context
+
+ def get_data(self):
+ if not hasattr(self, "_service"):
+ try:
+ service_id = self.kwargs['service_id']
+ service = api.get_service_datails(self.request, service_id)
+ except:
+ redirect = reverse('horizon:project:windc:index')
+ exceptions.handle(self.request,
+ _('Unable to retrieve details for '
+ 'service "%s".') % service_id,
+ redirect=redirect)
+ self._service = service
+ return self._service
+
+ def get_tabs(self, request, *args, **kwargs):
+ service = self.get_data()
+ return self.tab_group_class(request, service=service, **kwargs)
class CreateWinDCView(workflows.WorkflowView):
workflow_class = CreateWinDC
- template_name = 'project/windc/create_dc.html'
+ template_name = 'create_dc.html'
def get_initial(self):
initial = super(CreateWinDCView, self).get_initial()
diff --git a/dashboard/windc/workflows.py b/tabula/tabula/windc/workflows.py
similarity index 89%
rename from dashboard/windc/workflows.py
rename to tabula/tabula/windc/workflows.py
index 6910f66f..b88a3e76 100644
--- a/dashboard/windc/workflows.py
+++ b/tabula/tabula/windc/workflows.py
@@ -29,7 +29,7 @@ from horizon import exceptions
from horizon import forms
from horizon import workflows
-from openstack_dashboard import api
+from tabula.windc import api
LOG = logging.getLogger(__name__)
@@ -62,12 +62,11 @@ class SelectProjectUser(workflows.Step):
class ConfigureDCAction(workflows.Action):
- name = forms.CharField(label=_("Data Center Name"),
- required=True)
+ name = forms.CharField(label=_("Data Center Name"), required=True)
class Meta:
name = _("Data Center")
- help_text_template = ("project/windc/_data_center_help.html")
+ help_text_template = "_data_center_help.html"
class ConfigureDC(workflows.Step):
@@ -87,8 +86,7 @@ class CreateWinDC(workflows.Workflow):
success_message = _('Created data center "%s".')
failure_message = _('Unable to create data center "%s".')
success_url = "horizon:project:windc:index"
- default_steps = (SelectProjectUser,
- ConfigureDC)
+ default_steps = (SelectProjectUser, ConfigureDC)
def format_status_message(self, message):
name = self.context.get('name', 'noname')
@@ -96,7 +94,7 @@ class CreateWinDC(workflows.Workflow):
def handle(self, request, context):
try:
- datacenter = api.windc.datacenters_create(request, context)
+ datacenter = api.datacenters_create(request, context)
return True
except:
exceptions.handle(request)
diff --git a/tabula/tools/install_venv.py b/tabula/tools/install_venv.py
new file mode 100644
index 00000000..2aa61068
--- /dev/null
+++ b/tabula/tools/install_venv.py
@@ -0,0 +1,69 @@
+# vim: tabstop=4 shiftwidth=4 softtabstop=4
+
+# Copyright 2010 United States Government as represented by the
+# Administrator of the National Aeronautics and Space Administration.
+# All Rights Reserved.
+#
+# Copyright 2010 OpenStack LLC.
+# Copyright 2013 IBM Corp.
+#
+# Licensed under the Apache License, Version 2.0 (the "License"); you may
+# not use this file except in compliance with the License. You may obtain
+# a copy of the License at
+#
+# http://www.apache.org/licenses/LICENSE-2.0
+#
+# Unless required by applicable law or agreed to in writing, software
+# distributed under the License is distributed on an "AS IS" BASIS, WITHOUT
+# WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. See the
+# License for the specific language governing permissions and limitations
+# under the License.
+
+import os
+import subprocess
+import sys
+
+import install_venv_common as install_venv
+
+
+def print_help():
+ help = """
+ Tabula development environment setup is complete.
+
+ Tabula development uses virtualenv to track and manage Python dependencies
+ while in development and testing.
+
+ To activate the Tabula virtualenv for the extent of your current shell session
+ you can run:
+
+ $ source .venv/bin/activate
+
+ Or, if you prefer, you can run commands in the virtualenv on a case by case
+ basis by running:
+
+ $ tools/with_venv.sh
+
+ Also, make test will automatically use the virtualenv.
+ """
+ print help
+
+
+def main(argv):
+ root = os.path.dirname(os.path.dirname(os.path.realpath(__file__)))
+ venv = os.path.join(root, '.venv')
+ pip_requires = os.path.join(root, 'tools', 'pip-requires')
+ test_requires = os.path.join(root, 'tools', 'test-requires')
+ py_version = "python%s.%s" % (sys.version_info[0], sys.version_info[1])
+ project = 'tabula'
+ install = install_venv.InstallVenv(root, venv, pip_requires, test_requires,
+ py_version, project)
+ options = install.parse_args(argv)
+ install.check_python_version()
+ install.check_dependencies()
+ install.create_virtualenv(no_site_packages=options.no_site_packages)
+ install.install_dependencies()
+ install.post_process()
+ print_help()
+
+if __name__ == '__main__':
+ main(sys.argv)
diff --git a/tabula/tools/install_venv_common.py b/tabula/tools/install_venv_common.py
new file mode 100644
index 00000000..1ea3b2df
--- /dev/null
+++ b/tabula/tools/install_venv_common.py
@@ -0,0 +1,219 @@
+# vim: tabstop=4 shiftwidth=4 softtabstop=4
+
+# Copyright 2013 OpenStack, LLC
+# Copyright 2013 IBM Corp.
+#
+# Licensed under the Apache License, Version 2.0 (the "License"); you may
+# not use this file except in compliance with the License. You may obtain
+# a copy of the License at
+#
+# http://www.apache.org/licenses/LICENSE-2.0
+#
+# Unless required by applicable law or agreed to in writing, software
+# distributed under the License is distributed on an "AS IS" BASIS, WITHOUT
+# WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. See the
+# License for the specific language governing permissions and limitations
+# under the License.
+
+"""Provides methods needed by installation script for OpenStack development
+virtual environments.
+
+Synced in from openstack-common
+"""
+
+import argparse
+import os
+import subprocess
+import sys
+
+
+class InstallVenv(object):
+
+ def __init__(self, root, venv, pip_requires, test_requires, py_version,
+ project):
+ self.root = root
+ self.venv = venv
+ self.pip_requires = pip_requires
+ self.test_requires = test_requires
+ self.py_version = py_version
+ self.project = project
+
+ def die(self, message, *args):
+ print >> sys.stderr, message % args
+ sys.exit(1)
+
+ def check_python_version(self):
+ if sys.version_info < (2, 6):
+ self.die("Need Python Version >= 2.6")
+
+ def run_command_with_code(self, cmd, redirect_output=True,
+ check_exit_code=True):
+ """Runs a command in an out-of-process shell.
+
+ Returns the output of that command. Working directory is self.root.
+ """
+ if redirect_output:
+ stdout = subprocess.PIPE
+ else:
+ stdout = None
+
+ proc = subprocess.Popen(cmd, cwd=self.root, stdout=stdout)
+ output = proc.communicate()[0]
+ if check_exit_code and proc.returncode != 0:
+ self.die('Command "%s" failed.\n%s', ' '.join(cmd), output)
+ return (output, proc.returncode)
+
+ def run_command(self, cmd, redirect_output=True, check_exit_code=True):
+ return self.run_command_with_code(cmd, redirect_output,
+ check_exit_code)[0]
+
+ def get_distro(self):
+ if (os.path.exists('/etc/fedora-release') or
+ os.path.exists('/etc/redhat-release')):
+ return Fedora(self.root, self.venv, self.pip_requires,
+ self.test_requires, self.py_version, self.project)
+ else:
+ return Distro(self.root, self.venv, self.pip_requires,
+ self.test_requires, self.py_version, self.project)
+
+ def check_dependencies(self):
+ self.get_distro().install_virtualenv()
+
+ def create_virtualenv(self, no_site_packages=True):
+ """Creates the virtual environment and installs PIP.
+
+ Creates the virtual environment and installs PIP only into the
+ virtual environment.
+ """
+ if not os.path.isdir(self.venv):
+ print 'Creating venv...',
+ if no_site_packages:
+ self.run_command(['virtualenv', '-q', '--no-site-packages',
+ self.venv])
+ else:
+ self.run_command(['virtualenv', '-q', self.venv])
+ print 'done.'
+ print 'Installing pip in venv...',
+ if not self.run_command(['tools/with_venv.sh', 'easy_install',
+ 'pip>1.0']).strip():
+ self.die("Failed to install pip.")
+ print 'done.'
+ else:
+ print "venv already exists..."
+ pass
+
+ def pip_install(self, *args):
+ self.run_command(['tools/with_venv.sh',
+ 'pip', 'install', '--upgrade'] + list(args),
+ redirect_output=False)
+
+ def install_dependencies(self):
+ print 'Installing dependencies with pip (this can take a while)...'
+
+ # First things first, make sure our venv has the latest pip and
+ # distribute.
+ # NOTE: we keep pip at version 1.1 since the most recent version causes
+ # the .venv creation to fail. See:
+ # https://bugs.launchpad.net/nova/+bug/1047120
+ self.pip_install('pip==1.1')
+ self.pip_install('distribute')
+
+ # Install greenlet by hand - just listing it in the requires file does
+ # not
+ # get it installed in the right order
+ self.pip_install('greenlet')
+
+ self.pip_install('-r', self.test_requires)
+ self.pip_install('-r', self.pip_requires)
+
+ def post_process(self):
+ self.get_distro().post_process()
+
+ def parse_args(self, argv):
+ """Parses command-line arguments."""
+ parser = argparse.ArgumentParser()
+ parser.add_argument('-n', '--no-site-packages',
+ action='store_true',
+ help="Do not inherit packages from global Python "
+ "install")
+ return parser.parse_args(argv[1:])
+
+
+class Distro(InstallVenv):
+
+ def check_cmd(self, cmd):
+ return bool(self.run_command(['which', cmd],
+ check_exit_code=False).strip())
+
+ def install_virtualenv(self):
+ if self.check_cmd('virtualenv'):
+ return
+
+ if self.check_cmd('easy_install'):
+ print 'Installing virtualenv via easy_install...',
+ if self.run_command(['easy_install', 'virtualenv']):
+ print 'Succeeded'
+ return
+ else:
+ print 'Failed'
+
+ self.die('ERROR: virtualenv not found.\n\n%s development'
+ ' requires virtualenv, please install it using your'
+ ' favorite package management tool' % self.project)
+
+ def post_process(self):
+ """Any distribution-specific post-processing gets done here.
+
+ In particular, this is useful for applying patches to code inside
+ the venv.
+ """
+ pass
+
+
+class Fedora(Distro):
+ """This covers all Fedora-based distributions.
+
+ Includes: Fedora, RHEL, CentOS, Scientific Linux
+ """
+
+ def check_pkg(self, pkg):
+ return self.run_command_with_code(['rpm', '-q', pkg],
+ check_exit_code=False)[1] == 0
+
+ def yum_install(self, pkg, **kwargs):
+ print "Attempting to install '%s' via yum" % pkg
+ self.run_command(['sudo', 'yum', 'install', '-y', pkg], **kwargs)
+
+ def apply_patch(self, originalfile, patchfile):
+ self.run_command(['patch', originalfile, patchfile])
+
+ def install_virtualenv(self):
+ if self.check_cmd('virtualenv'):
+ return
+
+ if not self.check_pkg('python-virtualenv'):
+ self.yum_install('python-virtualenv', check_exit_code=False)
+
+ super(Fedora, self).install_virtualenv()
+
+ def post_process(self):
+ """Workaround for a bug in eventlet.
+
+ This currently affects RHEL6.1, but the fix can safely be
+ applied to all RHEL and Fedora distributions.
+
+ This can be removed when the fix is applied upstream.
+
+ Nova: https://bugs.launchpad.net/nova/+bug/884915
+ Upstream: https://bitbucket.org/which_linden/eventlet/issue/89
+ """
+
+ # Install "patch" program if it's not there
+ if not self.check_pkg('patch'):
+ self.yum_install('patch')
+
+ # Apply the eventlet patch
+ self.apply_patch(os.path.join(self.venv, 'lib', self.py_version,
+ 'site-packages',
+ 'eventlet/green/subprocess.py'),
+ 'contrib/redhat-eventlet.patch')
diff --git a/tabula/tools/pip-requires b/tabula/tools/pip-requires
new file mode 100644
index 00000000..b8f9550a
--- /dev/null
+++ b/tabula/tools/pip-requires
@@ -0,0 +1,14 @@
+# Horizon
+-e git+https://github.com/openstack/horizon.git#egg=horizon
+
+# Core Requirements
+Django>=1.4,<1.5
+
+#API
+./packages/python-portasclient-2013.1.a345.ga70b44e.tar.gz
+
+anyjson
+
+#Fix for bug https://bugs.launchpad.net/python-keystoneclient/+bug/1116740
+backports.ssl_match_hostname
+requests==0.14.2
diff --git a/tabula/tools/rfc.sh b/tabula/tools/rfc.sh
new file mode 100755
index 00000000..15781e52
--- /dev/null
+++ b/tabula/tools/rfc.sh
@@ -0,0 +1,145 @@
+#!/bin/sh -e
+# Copyright (c) 2010-2011 Gluster, Inc.
+# This initial version of this file was taken from the source tree
+# of GlusterFS. It was not directly attributed, but is assumed to be
+# Copyright (c) 2010-2011 Gluster, Inc and release GPLv3
+# Subsequent modifications are Copyright (c) 2012 OpenStack, LLC.
+#
+# GlusterFS is free software; you can redistribute it and/or modify
+# it under the terms of the GNU General Public License as published
+# by the Free Software Foundation; either version 3 of the License,
+# or (at your option) any later version.
+#
+# GlusterFS is distributed in the hope that it will be useful, but
+# WITHOUT ANY WARRANTY; without even the implied warranty of
+# MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE. See the GNU
+# General Public License for more details.
+#
+# You should have received a copy of the GNU General Public License
+# along with this program. If not, see
+# .
+
+
+branch="master";
+
+set_hooks_commit_msg()
+{
+ top_dir=`git rev-parse --show-toplevel`
+ f="${top_dir}/.git/hooks/commit-msg";
+ u="https://review.openstack.org/tools/hooks/commit-msg";
+
+ if [ -x "$f" ]; then
+ return;
+ fi
+
+ curl -o $f $u || wget -O $f $u;
+
+ chmod +x $f;
+
+ GIT_EDITOR=true git commit --amend
+}
+
+add_remote()
+{
+ username=$1
+ project=$2
+
+ echo "No remote set, testing ssh://$username@review.openstack.org:29418"
+ if project_list=`ssh -p29418 -o StrictHostKeyChecking=no $username@review.openstack.org gerrit ls-projects 2>/dev/null`
+ then
+ echo "$username@review.openstack.org:29418 worked."
+ if echo $project_list | grep $project >/dev/null
+ then
+ echo "Creating a git remote called gerrit that maps to:"
+ echo " ssh://$username@review.openstack.org:29418/$project"
+ git remote add gerrit ssh://$username@review.openstack.org:29418/$project
+ else
+ echo "The current project name, $project, is not a known project."
+ echo "Please either reclone from github/gerrit or create a"
+ echo "remote named gerrit that points to the intended project."
+ return 1
+ fi
+
+ return 0
+ fi
+ return 1
+}
+
+check_remote()
+{
+ if ! git remote | grep gerrit >/dev/null 2>&1
+ then
+ origin_project=`git remote show origin | grep 'Fetch URL' | perl -nle '@fields = split(m|[:/]|); $len = $#fields; print $fields[$len-1], "/", $fields[$len];'`
+ if add_remote $USERNAME $origin_project
+ then
+ return 0
+ else
+ echo "Your local name doesn't work on Gerrit."
+ echo -n "Enter Gerrit username (same as launchpad): "
+ read gerrit_user
+ if add_remote $gerrit_user $origin_project
+ then
+ return 0
+ else
+ echo "Can't infer where gerrit is - please set a remote named"
+ echo "gerrit manually and then try again."
+ echo
+ echo "For more information, please see:"
+ echo "\thttp://wiki.openstack.org/GerritWorkflow"
+ exit 1
+ fi
+ fi
+ fi
+}
+
+rebase_changes()
+{
+ git fetch;
+
+ GIT_EDITOR=true git rebase -i origin/$branch || exit $?;
+}
+
+
+assert_diverge()
+{
+ if ! git diff origin/$branch..HEAD | grep -q .
+ then
+ echo "No changes between the current branch and origin/$branch."
+ exit 1
+ fi
+}
+
+
+main()
+{
+ set_hooks_commit_msg;
+
+ check_remote;
+
+ rebase_changes;
+
+ assert_diverge;
+
+ bug=$(git show --format='%s %b' | perl -nle 'if (/\b([Bb]ug|[Ll][Pp])\s*[#:]?\s*(\d+)/) {print "$2"; exit}')
+
+ bp=$(git show --format='%s %b' | perl -nle 'if (/\b([Bb]lue[Pp]rint|[Bb][Pp])\s*[#:]?\s*([0-9a-zA-Z-_]+)/) {print "$2"; exit}')
+
+ if [ "$DRY_RUN" = 1 ]; then
+ drier='echo -e Please use the following command to send your commits to review:\n\n'
+ else
+ drier=
+ fi
+
+ local_branch=`git branch | grep -Ei "\* (.*)" | cut -f2 -d' '`
+ if [ -z "$bug" ]; then
+ if [ -z "$bp" ]; then
+ $drier git push gerrit HEAD:refs/for/$branch/$local_branch;
+ else
+ $drier git push gerrit HEAD:refs/for/$branch/bp/$bp;
+ fi
+ else
+ $drier git push gerrit HEAD:refs/for/$branch/bug/$bug;
+ fi
+}
+
+main "$@"
diff --git a/tabula/tools/test-requires b/tabula/tools/test-requires
new file mode 100644
index 00000000..c1da7cae
--- /dev/null
+++ b/tabula/tools/test-requires
@@ -0,0 +1,18 @@
+distribute>=0.6.24
+
+# Testing Requirements
+coverage
+django-nose
+mox
+nose
+nose-exclude
+nosexcover
+openstack.nose_plugin
+nosehtmloutput
+pep8>=1.3
+pylint
+selenium
+
+# Docs Requirements
+sphinx
+docutils==0.9.1 # for bug 1091333, remove after sphinx >1.1.3 is released.
diff --git a/windc/tools/with_venv.sh b/tabula/tools/with_venv.sh
similarity index 84%
rename from windc/tools/with_venv.sh
rename to tabula/tools/with_venv.sh
index ae91bbcb..c8d2940f 100755
--- a/windc/tools/with_venv.sh
+++ b/tabula/tools/with_venv.sh
@@ -1,4 +1,4 @@
-#!/bin/bash
+#!/bin/bash
TOOLS=`dirname $0`
VENV=$TOOLS/../.venv
source $VENV/bin/activate && $@
diff --git a/windc/.gitignore b/windc/.gitignore
deleted file mode 100644
index b9489855..00000000
--- a/windc/.gitignore
+++ /dev/null
@@ -1,2 +0,0 @@
-*.swp
-*.pyc
diff --git a/windc/README b/windc/README
deleted file mode 100644
index 1def0120..00000000
--- a/windc/README
+++ /dev/null
@@ -1,8 +0,0 @@
-This is the Windows DataCenter project. It serves two main purposes:
-
-* Proof the proposed architecture for windows dc service
-* Provide a Demo for the Windows Environment Management features
-
-This is not a final project. It is a POC for the Demo and architecture verification purposes.
-
-
diff --git a/windc/bin/windc-api b/windc/bin/windc-api
deleted file mode 100755
index 6c8adc03..00000000
--- a/windc/bin/windc-api
+++ /dev/null
@@ -1,94 +0,0 @@
-#!/usr/bin/env python
-# vim: tabstop=4 shiftwidth=4 softtabstop=4
-
-# Copyright 2011 OpenStack LLC.
-# All Rights Reserved.
-#
-# Licensed under the Apache License, Version 2.0 (the "License"); you may
-# not use this file except in compliance with the License. You may obtain
-# a copy of the License at
-#
-# http://www.apache.org/licenses/LICENSE-2.0
-#
-# Unless required by applicable law or agreed to in writing, software
-# distributed under the License is distributed on an "AS IS" BASIS, WITHOUT
-# WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. See the
-# License for the specific language governing permissions and limitations
-# under the License.
-
-"""
-Windows DataCenter API Server
-"""
-
-import optparse
-import os
-import sys
-import gettext
-# If ../windc/__init__.py exists, add ../ to Python search path, so that
-# it will override what happens to be installed in /usr/(local/)lib/python...
-possible_topdir = os.path.normpath(os.path.join(os.path.abspath(sys.argv[0]),
- os.pardir,
- os.pardir))
-if os.path.exists(os.path.join(possible_topdir, 'windc', '__init__.py')):
- sys.path.insert(0, possible_topdir)
- sys.path.insert(0, '.')
-
-
-from windc.common import cfg
-from windc.common import config
-from windc.common import wsgi
-from windc.db import session
-from windc.core import builder_set
-
-gettext.install('balancer', unicode=1)
-
-dbsync_opt = cfg.BoolOpt('dbsync', default=False,
- help='Perform database schema synchronization')
-
-if __name__ == '__main__':
- try:
- conf = config.WindcConfigOpts()
- conf.register_cli_opt(dbsync_opt)
- conf()
-
- config.setup_logging(conf)
- if conf.dbsync:
- session.sync(conf)
- else:
- builder_set.builders.load(conf)
- app = config.load_paste_app(conf)
- server = wsgi.Server()
- server.start(app, conf, default_port=8181)
- server.wait()
- except RuntimeError, e:
- sys.exit("ERROR: %s" % e)
-
-
-
-
-
-# def create_options(parser):
-# """
-# Sets up the CLI and config-file options that may be
-# parsed and program commands.
-#
-# :param parser: The option parser
-# """
-# config.add_common_options(parser)
-# config.add_log_options(parser)
-#
-#
-# if __name__ == '__main__':
-# oparser = optparse.OptionParser(version='%%prog %s'
-# % version.version_string())
-# create_options(oparser)
-# (options, args) = config.parse_options(oparser)
-#
-# # try:
-# conf, app = config.load_paste_app('windc', options, args)
-#
-# server = wsgi.Server()
-# server.start(app, int(conf['bind_port']), conf['bind_host'])
-# server.wait()
-# # except RuntimeError, e:
-# # sys.exit("ERROR: %s" % e)
diff --git a/windc/data/CreatePrimaryDC.json b/windc/data/CreatePrimaryDC.json
deleted file mode 100644
index fb74fbbc..00000000
--- a/windc/data/CreatePrimaryDC.json
+++ /dev/null
@@ -1,29 +0,0 @@
-{
- "Scripts": [
- "RnVuY3Rpb24gU2V0LUxvY2FsVXNlclBhc3N3b3JkIHsNCiAgICBwYXJhbSAoDQogICAgICAgIFtTdHJpbmddICRVc2VyTmFtZSwNCiAgICAgICAgW1N0cmluZ10gJFBhc3N3b3JkLA0KICAgICAgICBbU3dpdGNoXSAkRm9yY2UNCiAgICApDQogICAgDQogICAgdHJhcCB7IFN0b3AtRXhlY3V0aW9uICRfIH0NCiAgICANCiAgICBpZiAoKEdldC1XbWlPYmplY3QgV2luMzJfVXNlckFjY291bnQgLUZpbHRlciAiTG9jYWxBY2NvdW50ID0gJ1RydWUnIEFORCBOYW1lPSckVXNlck5hbWUnIikgLWVxICRudWxsKSB7DQogICAgICAgIHRocm93ICJVbmFibGUgdG8gZmluZCBsb2NhbCB1c2VyIGFjY291bnQgJyRVc2VyTmFtZSciDQogICAgfQ0KICAgIA0KICAgIGlmICgkRm9yY2UpIHsNCiAgICAgICAgV3JpdGUtTG9nICJDaGFuZ2luZyBwYXNzd29yZCBmb3IgdXNlciAnJFVzZXJOYW1lJyB0byAnKioqKionIiAjIDopDQogICAgICAgIChbQURTSV0gIldpbk5UOi8vLi8kVXNlck5hbWUiKS5TZXRQYXNzd29yZCgkUGFzc3dvcmQpDQogICAgfQ0KICAgIGVsc2Ugew0KICAgICAgICBXcml0ZS1Mb2dXYXJuaW5nICJZb3UgYXJlIHRyeWluZyB0byBjaGFuZ2UgcGFzc3dvcmQgZm9yIHVzZXIgJyRVc2VyTmFtZScuIFRvIGRvIHRoaXMgcGxlYXNlIHJ1biB0aGUgY29tbWFuZCBhZ2FpbiB3aXRoIC1Gb3JjZSBwYXJhbWV0ZXIuIg0KICAgICAgICAkVXNlckFjY291bnQNCiAgICB9DQp9DQoNCg0KDQpGdW5jdGlvbiBJbnN0YWxsLVJvbGVQcmltYXJ5RG9tYWluQ29udHJvbGxlcg0Kew0KPCMNCi5TWU5PUFNJUw0KQ29uZmlndXJlIG5vZGUncyBuZXR3b3JrIGFkYXB0ZXJzLg0KQ3JlYXRlIGZpcnN0IGRvbWFpbiBjb250cm9sbGVyIGluIHRoZSBmb3Jlc3QuDQoNCi5FWEFNUExFDQpQUz4gSW5zdGFsbC1Sb2xlUHJpbWFyeURvbWFpbkNvbnRyb2xsZXIgLURvbWFpbk5hbWUgYWNtZS5sb2NhbCAtU2FmZU1vZGVQYXNzd29yZCAiUEBzc3cwcmQiDQoNCkluc3RhbGwgRE5TIGFuZCBBRERTLCBjcmVhdGUgZm9yZXN0IGFuZCBkb21haW4gJ2FjbWUubG9jYWwnLg0KU2V0IERDIHJlY292ZXJ5IG1vZGUgcGFzc3dvcmQgdG8gJ1BAc3N3MHJkJy4NCiM+DQoJDQoJcGFyYW0NCgkoDQoJCVtTdHJpbmddDQoJCSMgTmV3IGRvbWFpbiBuYW1lLg0KCQkkRG9tYWluTmFtZSwNCgkJDQoJCVtTdHJpbmddDQoJCSMgRG9tYWluIGNvbnRyb2xsZXIgcmVjb3ZlcnkgbW9kZSBwYXNzd29yZC4NCgkJJFNhZmVNb2RlUGFzc3dvcmQNCgkpDQoNCgl0cmFwIHsgU3RvcC1FeGVjdXRpb24gJF8gfQ0KDQogICAgICAgICMgQWRkIHJlcXVpcmVkIHdpbmRvd3MgZmVhdHVyZXMNCglBZGQtV2luZG93c0ZlYXR1cmVXcmFwcGVyIGANCgkJLU5hbWUgIkROUyIsIkFELURvbWFpbi1TZXJ2aWNlcyIsIlJTQVQtREZTLU1nbXQtQ29uIiBgDQoJCS1JbmNsdWRlTWFuYWdlbWVudFRvb2xzIGANCiAgICAgICAgLU5vdGlmeVJlc3RhcnQNCg0KDQoJV3JpdGUtTG9nICJDcmVhdGluZyBmaXJzdCBkb21ha
W4gY29udHJvbGxlciAuLi4iDQoJCQ0KCSRTTUFQID0gQ29udmVydFRvLVNlY3VyZVN0cmluZyAtU3RyaW5nICRTYWZlTW9kZVBhc3N3b3JkIC1Bc1BsYWluVGV4dCAtRm9yY2UNCgkJDQoJSW5zdGFsbC1BRERTRm9yZXN0IGANCgkJLURvbWFpbk5hbWUgJERvbWFpbk5hbWUgYA0KCQktU2FmZU1vZGVBZG1pbmlzdHJhdG9yUGFzc3dvcmQgJFNNQVAgYA0KCQktRG9tYWluTW9kZSBEZWZhdWx0IGANCgkJLUZvcmVzdE1vZGUgRGVmYXVsdCBgDQoJCS1Ob1JlYm9vdE9uQ29tcGxldGlvbiBgDQoJCS1Gb3JjZSBgDQoJCS1FcnJvckFjdGlvbiBTdG9wIHwgT3V0LU51bGwNCg0KCVdyaXRlLUhvc3QgIldhaXRpbmcgZm9yIHJlYm9vdCAuLi4iCQkNCiMJU3RvcC1FeGVjdXRpb24gLUV4aXRDb2RlIDMwMTAgLUV4aXRTdHJpbmcgIkNvbXB1dGVyIG11c3QgYmUgcmVzdGFydGVkIHRvIGZpbmlzaCBkb21haW4gY29udHJvbGxlciBwcm9tb3Rpb24uIg0KIwlXcml0ZS1Mb2cgIlJlc3RhcmluZyBjb21wdXRlciAuLi4iDQojCVJlc3RhcnQtQ29tcHV0ZXIgLUZvcmNlDQp9DQo="
- ],
- "Commands": [
- {
- "Name": "Import-Module",
- "Arguments": {
- "Name": "CoreFunctions"
- }
- },
- {
- "Name": "Set-LocalUserPassword",
- "Arguments": {
- "UserName": "Administrator",
- "Password": "@adm_password",
- "Force": true
- }
- },
- {
- "Name": "Install-RolePrimaryDomainController",
- "Arguments": {
- "DomainName": "@dc_name",
- "SafeModePassword": "@recovery_password"
- }
- }
- ],
- "RebootOnCompletion": 1
-}
\ No newline at end of file
diff --git a/windc/data/Windows.template b/windc/data/Windows.template
deleted file mode 100644
index 66d650d6..00000000
--- a/windc/data/Windows.template
+++ /dev/null
@@ -1,61 +0,0 @@
-{
- "AWSTemplateFormatVersion" : "2010-09-09",
-
- "Description" : "",
-
- "Parameters" : {
- "KeyName" : {
- "Description" : "Name of an existing Amazon EC2 key pair for RDP access",
- "Type" : "String",
- "Default" : "keero_key"
- },
- "InstanceType" : {
- "Description" : "Amazon EC2 instance type",
- "Type" : "String",
- "Default" : "m1.medium",
- "AllowedValues" : [ "m1.small", "m1.medium", "m1.large" ]
- },
- "ImageName" : {
- "Description" : "Image name",
- "Type" : "String",
- "Default" : "ws-2012-full-agent",
- "AllowedValues" : [ "ws-2012-full", "ws-2012-core", "ws-2012-full-agent" ]
- }
- },
-
- "Resources" : {
- "IAMUser" : {
- "Type" : "AWS::IAM::User",
- "Properties" : {
- "Path": "/",
- "Policies": [{
- "PolicyName": "root",
- "PolicyDocument": { "Statement":[{
- "Effect": "Allow",
- "Action": "CloudFormation:DescribeStackResource",
- "Resource": "*"
- }]}
- }]
- }
- },
-
- "IAMUserAccessKey" : {
- "Type" : "AWS::IAM::AccessKey",
- "Properties" : {
- "UserName" : {"Ref": "IAMUser"}
- }
- },
-
- "InstanceTemplate": {
- "Type" : "AWS::EC2::Instance",
- "Properties": {
- "InstanceType" : { "Ref" : "InstanceType" },
- "ImageId" : { "Ref" : "ImageName" },
- "KeyName" : { "Ref" : "KeyName" }
- }
- }
- },
-
- "Outputs" : {
- }
-}
\ No newline at end of file
diff --git a/windc/doc/Makefile b/windc/doc/Makefile
deleted file mode 100644
index 251a008e..00000000
--- a/windc/doc/Makefile
+++ /dev/null
@@ -1,97 +0,0 @@
-# Makefile for Sphinx documentation
-#
-
-# You can set these variables from the command line.
-SPHINXOPTS =
-SPHINXBUILD = sphinx-build
-SPHINXSOURCE = source
-PAPER =
-BUILDDIR = build
-
-# Internal variables.
-PAPEROPT_a4 = -D latex_paper_size=a4
-PAPEROPT_letter = -D latex_paper_size=letter
-ALLSPHINXOPTS = -d $(BUILDDIR)/doctrees $(PAPEROPT_$(PAPER)) $(SPHINXOPTS) $(SPHINXSOURCE)
-
-.PHONY: help clean html dirhtml pickle json htmlhelp qthelp latex changes linkcheck doctest
-
-.DEFAULT_GOAL = html
-
-help:
- @echo "Please use \`make ' where is one of"
- @echo " html to make standalone HTML files"
- @echo " dirhtml to make HTML files named index.html in directories"
- @echo " pickle to make pickle files"
- @echo " json to make JSON files"
- @echo " htmlhelp to make HTML files and a HTML help project"
- @echo " qthelp to make HTML files and a qthelp project"
- @echo " latex to make LaTeX files, you can set PAPER=a4 or PAPER=letter"
- @echo " changes to make an overview of all changed/added/deprecated items"
- @echo " linkcheck to check all external links for integrity"
- @echo " doctest to run all doctests embedded in the documentation (if enabled)"
-
-clean:
- -rm -rf $(BUILDDIR)/*
- -rm -rf nova.sqlite
- if [ -f .autogenerated ] ; then \
- cat .autogenerated | xargs rm ; \
- rm .autogenerated ; \
- fi
-
-html:
- $(SPHINXBUILD) -b html $(ALLSPHINXOPTS) $(BUILDDIR)/html
- @echo
- @echo "Build finished. The HTML pages are in $(BUILDDIR)/html."
-
-dirhtml:
- $(SPHINXBUILD) -b dirhtml $(ALLSPHINXOPTS) $(BUILDDIR)/dirhtml
- @echo
- @echo "Build finished. The HTML pages are in $(BUILDDIR)/dirhtml."
-
-pickle:
- $(SPHINXBUILD) -b pickle $(ALLSPHINXOPTS) $(BUILDDIR)/pickle
- @echo
- @echo "Build finished; now you can process the pickle files."
-
-json:
- $(SPHINXBUILD) -b json $(ALLSPHINXOPTS) $(BUILDDIR)/json
- @echo
- @echo "Build finished; now you can process the JSON files."
-
-htmlhelp:
- $(SPHINXBUILD) -b htmlhelp $(ALLSPHINXOPTS) $(BUILDDIR)/htmlhelp
- @echo
- @echo "Build finished; now you can run HTML Help Workshop with the" \
- ".hhp project file in $(BUILDDIR)/htmlhelp."
-
-qthelp:
- $(SPHINXBUILD) -b qthelp $(ALLSPHINXOPTS) $(BUILDDIR)/qthelp
- @echo
- @echo "Build finished; now you can run "qcollectiongenerator" with the" \
- ".qhcp project file in $(BUILDDIR)/qthelp, like this:"
- @echo "# qcollectiongenerator $(BUILDDIR)/qthelp/nova.qhcp"
- @echo "To view the help file:"
- @echo "# assistant -collectionFile $(BUILDDIR)/qthelp/nova.qhc"
-
-latex:
- $(SPHINXBUILD) -b latex $(ALLSPHINXOPTS) $(BUILDDIR)/latex
- @echo
- @echo "Build finished; the LaTeX files are in $(BUILDDIR)/latex."
- @echo "Run \`make all-pdf' or \`make all-ps' in that directory to" \
- "run these through (pdf)latex."
-
-changes:
- $(SPHINXBUILD) -b changes $(ALLSPHINXOPTS) $(BUILDDIR)/changes
- @echo
- @echo "The overview file is in $(BUILDDIR)/changes."
-
-linkcheck:
- $(SPHINXBUILD) -b linkcheck $(ALLSPHINXOPTS) $(BUILDDIR)/linkcheck
- @echo
- @echo "Link check complete; look for any errors in the above output " \
- "or in $(BUILDDIR)/linkcheck/output.txt."
-
-doctest:
- $(SPHINXBUILD) -b doctest $(ALLSPHINXOPTS) $(BUILDDIR)/doctest
- @echo "Testing of doctests in the sources finished, look at the " \
- "results in $(BUILDDIR)/doctest/output.txt."
diff --git a/windc/doc/source/_static/tweaks.css b/windc/doc/source/_static/tweaks.css
deleted file mode 100644
index 16cd6e76..00000000
--- a/windc/doc/source/_static/tweaks.css
+++ /dev/null
@@ -1,65 +0,0 @@
-ul.todo_list {
- list-style-type: none;
- margin: 0;
- padding: 0;
-}
-
-ul.todo_list li {
- display: block;
- margin: 0;
- padding: 7px 0;
- border-top: 1px solid #eee;
-}
-
-ul.todo_list li p {
- display: inline;
-}
-
-ul.todo_list li p.link {
- font-weight: bold;
-}
-
-ul.todo_list li p.details {
- font-style: italic;
-}
-
-ul.todo_list li {
-}
-
-div.admonition {
- border: 1px solid #8F1000;
-}
-
-div.admonition p.admonition-title {
- background-color: #8F1000;
- border-bottom: 1px solid #8E8E8E;
-}
-
-a {
- color: #CF2F19;
-}
-
-div.related ul li a {
- color: #CF2F19;
-}
-
-div.sphinxsidebar h4 {
- background-color:#8E8E8E;
- border:1px solid #255E6E;
- color:white;
- font-size:1em;
- margin:1em 0 0.5em;
- padding:0.1em 0 0.1em 0.5em;
-}
-
-em {
- font-style: normal;
-}
-
-table.docutils {
- font-size: 11px;
-}
-
-a tt {
- color:#CF2F19;
-}
\ No newline at end of file
diff --git a/windc/doc/source/_theme/theme.conf b/windc/doc/source/_theme/theme.conf
deleted file mode 100644
index e039fe01..00000000
--- a/windc/doc/source/_theme/theme.conf
+++ /dev/null
@@ -1,5 +0,0 @@
-[theme]
-inherit = sphinxdoc
-stylesheet = sphinxdoc.css
-pygments_style = friendly
-
diff --git a/windc/etc/windc-api-paste.ini b/windc/etc/windc-api-paste.ini
deleted file mode 100644
index 8dbdc8c2..00000000
--- a/windc/etc/windc-api-paste.ini
+++ /dev/null
@@ -1,57 +0,0 @@
-[DEFAULT]
-# Show more verbose log output (sets INFO log level output)
-verbose = True
-# Show debugging output in logs (sets DEBUG log level output)
-debug = True
-# Address to bind the server to
-bind_host = 0.0.0.0
-# Port the bind the server to
-bind_port = 8082
-# Log to this file. Make sure the user running skeleton-api has
-# permissions to write to this file!
-log_file = /tmp/api.log
-# Orchestration Adapter Section
-#
-#provider - Cloud provider to use (openstack, amazon, dummy)
-provider = openstack
-
-# Heat specific parameters
-#heat_url - url for the heat service
-# [auto] - find in the keystone
-heat_url = auto
-
-#heat_api_version - version of the API to use
-#
-heat_api_version = 1
-
-
-[pipeline:windc-api]
-pipeline = apiv1app
-# NOTE: use the following pipeline for keystone
-#pipeline = authtoken context apiv1app
-
-[app:apiv1app]
-paste.app_factory = windc.common.wsgi:app_factory
-windc.app_factory = windc.api.v1.router:API
-
-[filter:context]
-paste.filter_factory = windc.common.wsgi:filter_factory
-windc.filter_factory = windc.common.context:ContextMiddleware
-
-[filter:authtoken]
-paste.filter_factory = keystone.middleware.auth_token:filter_factory
-auth_host = 172.18.67.57
-auth_port = 35357
-auth_protocol = http
-auth_uri = http://172.18.67.57:5000/v2.0/
-admin_tenant_name = service
-admin_user = windc
-admin_password = 000
-
-[filter:auth-context]
-paste.filter_factory = windc.common.wsgi:filter_factory
-windc.filter_factory = keystone.middleware.balancer_auth_token:KeystoneContextMiddleware
-
-[rabbitmq]
-host = 10.0.0.1
-vhost = keero
\ No newline at end of file
diff --git a/windc/etc/windc-api.conf b/windc/etc/windc-api.conf
deleted file mode 100644
index 3f1381b5..00000000
--- a/windc/etc/windc-api.conf
+++ /dev/null
@@ -1,34 +0,0 @@
-[DEFAULT]
-# Show more verbose log output (sets INFO log level output)
-verbose = True
-
-# Show debugging output in logs (sets DEBUG log level output)
-debug = True
-
-# Address to bind the server to
-bind_host = 0.0.0.0
-
-# Port the bind the server to
-bind_port = 8082
-
-# Log to this file. Make sure the user running skeleton-api has
-# permissions to write to this file!
-log_file = /tmp/api.log
-
-[pipeline:windc-api]
-pipeline = versionnegotiation context apiv1app
-
-[pipeline:versions]
-pipeline = versionsapp
-
-[app:versionsapp]
-paste.app_factory = windc.api.versions:app_factory
-
-[app:apiv1app]
-paste.app_factory = windc.api.v1:app_factory
-
-[filter:versionnegotiation]
-paste.filter_factory = windc.api.middleware.version_negotiation:filter_factory
-
-[filter:context]
-paste.filter_factory = openstack.common.middleware.context:filter_factory
diff --git a/windc/heat_run b/windc/heat_run
deleted file mode 100755
index e2498a84..00000000
--- a/windc/heat_run
+++ /dev/null
@@ -1,8 +0,0 @@
-#!/bin/bash
-
-
-#export OS_USERNAME=admin
-#source ../../devstack/openrc
-#nova keypair-add keero-linux-keys > heat_key.priv
-heat "$@"
-
diff --git a/windc/openstack/common/config.py b/windc/openstack/common/config.py
deleted file mode 100644
index 74301a8c..00000000
--- a/windc/openstack/common/config.py
+++ /dev/null
@@ -1,334 +0,0 @@
-# vim: tabstop=4 shiftwidth=4 softtabstop=4
-
-# Copyright 2011 OpenStack LLC.
-# All Rights Reserved.
-#
-# Licensed under the Apache License, Version 2.0 (the "License"); you may
-# not use this file except in compliance with the License. You may obtain
-# a copy of the License at
-#
-# http://www.apache.org/licenses/LICENSE-2.0
-#
-# Unless required by applicable law or agreed to in writing, software
-# distributed under the License is distributed on an "AS IS" BASIS, WITHOUT
-# WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. See the
-# License for the specific language governing permissions and limitations
-# under the License.
-
-"""
-Routines for configuring Openstack Projects
-"""
-
-import ConfigParser
-import logging
-import logging.config
-import logging.handlers
-import optparse
-import os
-import re
-import sys
-
-from paste import deploy
-
-DEFAULT_LOG_FORMAT = "%(asctime)s %(levelname)8s [%(name)s] %(message)s"
-DEFAULT_LOG_DATE_FORMAT = "%Y-%m-%d %H:%M:%S"
-
-
-def parse_options(parser, cli_args=None):
- """
- Returns the parsed CLI options, command to run and its arguments, merged
- with any same-named options found in a configuration file.
-
- The function returns a tuple of (options, args), where options is a
- mapping of option key/str(value) pairs, and args is the set of arguments
- (not options) supplied on the command-line.
-
- The reason that the option values are returned as strings only is that
- ConfigParser and paste.deploy only accept string values...
-
- :param parser: The option parser
- :param cli_args: (Optional) Set of arguments to process. If not present,
- sys.argv[1:] is used.
- :retval tuple of (options, args)
- """
-
- (options, args) = parser.parse_args(cli_args)
-
- return (vars(options), args)
-
-
-def add_common_options(parser):
- """
- Given a supplied optparse.OptionParser, adds an OptionGroup that
- represents all common configuration options.
-
- :param parser: optparse.OptionParser
- """
- help_text = "The following configuration options are common to "\
- "this app's programs."
-
- group = optparse.OptionGroup(parser, "Common Options", help_text)
- group.add_option('-v', '--verbose', default=False, dest="verbose",
- action="store_true",
- help="Print more verbose output")
- group.add_option('-d', '--debug', default=False, dest="debug",
- action="store_true",
- help="Print debugging output")
- group.add_option('--config-file', default=None, metavar="PATH",
- help="Path to the config file to use. When not specified "
- "(the default), we generally look at the first "
- "argument specified to be a config file, and if "
- "that is also missing, we search standard "
- "directories for a config file.")
- parser.add_option_group(group)
-
-
-def add_log_options(parser):
- """
- Given a supplied optparse.OptionParser, adds an OptionGroup that
- represents all the configuration options around logging.
-
- :param parser: optparse.OptionParser
- """
- help_text = "The following configuration options are specific to logging "\
- "functionality for this program."
-
- group = optparse.OptionGroup(parser, "Logging Options", help_text)
- group.add_option('--log-config', default=None, metavar="PATH",
- help="If this option is specified, the logging "
- "configuration file specified is used and overrides "
- "any other logging options specified. Please see "
- "the Python logging module documentation for "
- "details on logging configuration files.")
- group.add_option('--log-date-format', metavar="FORMAT",
- default=DEFAULT_LOG_DATE_FORMAT,
- help="Format string for %(asctime)s in log records. "
- "Default: %default")
- group.add_option('--log-file', default=None, metavar="PATH",
- help="(Optional) Name of log file to output to. "
- "If not set, logging will go to stdout.")
- group.add_option("--log-dir", default=None,
- help="(Optional) The directory to keep log files in "
- "(will be prepended to --logfile)")
- group.add_option('--use-syslog', default=False, dest="use_syslog",
- action="store_true",
- help="Use syslog for logging.")
- parser.add_option_group(group)
-
-
-def setup_logging(options, conf):
- """
- Sets up the logging options for a log with supplied name
-
- :param options: Mapping of typed option key/values
- :param conf: Mapping of untyped key/values from config file
- """
-
- if options.get('log_config', None):
- # Use a logging configuration file for all settings...
- if os.path.exists(options['log_config']):
- logging.config.fileConfig(options['log_config'])
- return
- else:
- raise RuntimeError("Unable to locate specified logging "
- "config file: %s" % options['log_config'])
-
- # If either the CLI option or the conf value
- # is True, we set to True
- debug = options.get('debug') or \
- get_option(conf, 'debug', type='bool', default=False)
- verbose = options.get('verbose') or \
- get_option(conf, 'verbose', type='bool', default=False)
- root_logger = logging.root
- if debug:
- root_logger.setLevel(logging.DEBUG)
- elif verbose:
- root_logger.setLevel(logging.INFO)
- else:
- root_logger.setLevel(logging.WARNING)
-
- # Set log configuration from options...
- # Note that we use a hard-coded log format in the options
- # because of Paste.Deploy bug #379
- # http://trac.pythonpaste.org/pythonpaste/ticket/379
- log_format = options.get('log_format', DEFAULT_LOG_FORMAT)
- log_date_format = options.get('log_date_format', DEFAULT_LOG_DATE_FORMAT)
- formatter = logging.Formatter(log_format, log_date_format)
-
- logfile = options.get('log_file')
- if not logfile:
- logfile = conf.get('log_file')
-
- use_syslog = options.get('use_syslog') or \
- get_option(conf, 'use_syslog', type='bool', default=False)
-
- if use_syslog:
- handler = logging.handlers.SysLogHandler(address='/dev/log')
- elif logfile:
- logdir = options.get('log_dir')
- if not logdir:
- logdir = conf.get('log_dir')
- if logdir:
- logfile = os.path.join(logdir, logfile)
- handler = logging.FileHandler(logfile)
- else:
- handler = logging.StreamHandler(sys.stdout)
-
- handler.setFormatter(formatter)
- root_logger.addHandler(handler)
-
-
-def find_config_file(app_name, options, args, config_dir=None):
- """
- Return the first config file found for an application.
-
- We search for the paste config file in the following order:
- * If --config-file option is used, use that
- * If args[0] is a file, use that
- * Search for $app.conf in standard directories:
- * .
- * ~.config_dir/
- * ~
- * /etc/config_dir
- * /etc
-
- :retval Full path to config file, or None if no config file found
- """
- config_dir = config_dir or app_name
-
- fix_path = lambda p: os.path.abspath(os.path.expanduser(p))
- if options.get('config_file'):
- if os.path.exists(options['config_file']):
- return fix_path(options['config_file'])
- elif args:
- if os.path.exists(args[0]):
- return fix_path(args[0])
-
- # Handle standard directory search for $app_name.conf
- config_file_dirs = [fix_path(os.getcwd()),
- fix_path(os.path.join('~', '.' + config_dir)),
- fix_path('~'),
- os.path.join('/etc', config_dir),
- '/etc']
-
- for cfg_dir in config_file_dirs:
- cfg_file = os.path.join(cfg_dir, '%s.conf' % app_name)
- if os.path.exists(cfg_file):
- return cfg_file
-
-
-def load_paste_config(app_name, options, args, config_dir=None):
- """
- Looks for a config file to use for an app and returns the
- config file path and a configuration mapping from a paste config file.
-
- We search for the paste config file in the following order:
- * If --config-file option is used, use that
- * If args[0] is a file, use that
- * Search for $app_name.conf in standard directories:
- * .
- * ~.config_dir/
- * ~
- * /etc/config_dir
- * /etc
-
- :param app_name: Name of the application to load config for, or None.
- None signifies to only load the [DEFAULT] section of
- the config file.
- :param options: Set of typed options returned from parse_options()
- :param args: Command line arguments from argv[1:]
- :retval Tuple of (conf_file, conf)
-
- :raises RuntimeError when config file cannot be located or there was a
- problem loading the configuration file.
- """
- conf_file = find_config_file(app_name, options, args, config_dir)
- if not conf_file:
- raise RuntimeError("Unable to locate any configuration file. "
- "Cannot load application %s" % app_name)
- try:
- app = wsgi.paste_deploy_app(conf_file, app_name, conf)
- conf = deploy.appconfig("config:%s" % conf_file, name=app_name)
- return conf_file, conf
- except Exception, e:
- raise RuntimeError("Error trying to load config %s: %s"
- % (conf_file, e))
-
-
-def load_paste_app(app_name, options, args, config_dir=None):
- """
- Builds and returns a WSGI app from a paste config file.
-
- We search for the paste config file in the following order:
- * If --config-file option is used, use that
- * If args[0] is a file, use that
- * Search for $app_name.conf in standard directories:
- * .
- * ~.config_dir/
- * ~
- * /etc/config_dir
- * /etc
-
- :param app_name: Name of the application to load
- :param options: Set of typed options returned from parse_options()
- :param args: Command line arguments from argv[1:]
-
- :raises RuntimeError when config file cannot be located or application
- cannot be loaded from config file
- """
- conf_file, conf = load_paste_config(app_name, options,
- args, config_dir)
-
- try:
- # Setup logging early, supplying both the CLI options and the
- # configuration mapping from the config file
- setup_logging(options, conf)
-
- # We only update the conf dict for the verbose and debug
- # flags. Everything else must be set up in the conf file...
- debug = options.get('debug') or \
- get_option(conf, 'debug', type='bool', default=False)
- verbose = options.get('verbose') or \
- get_option(conf, 'verbose', type='bool', default=False)
- conf['debug'] = debug
- conf['verbose'] = verbose
-
- # Log the options used when starting if we're in debug mode...
- if debug:
- logger = logging.getLogger(app_name)
- logger.debug("*" * 80)
- logger.debug("Configuration options gathered from config file:")
- logger.debug(conf_file)
- logger.debug("================================================")
- items = dict([(k, v) for k, v in conf.items()
- if k not in ('__file__', 'here')])
- for key, value in sorted(items.items()):
- logger.debug("%(key)-30s %(value)s" % locals())
- logger.debug("*" * 80)
- app = deploy.loadapp("config:%s" % conf_file, name=app_name)
- except (LookupError, ImportError), e:
- raise RuntimeError("Unable to load %(app_name)s from "
- "configuration file %(conf_file)s."
- "\nGot: %(e)r" % locals())
- return conf, app
-
-
-def get_option(options, option, **kwargs):
- if option in options:
- value = options[option]
- type_ = kwargs.get('type', 'str')
- if type_ == 'bool':
- if hasattr(value, 'lower'):
- return value.lower() == 'true'
- else:
- return value
- elif type_ == 'int':
- return int(value)
- elif type_ == 'float':
- return float(value)
- else:
- return value
- elif 'default' in kwargs:
- return kwargs['default']
- else:
- raise KeyError("option '%s' not found" % option)
diff --git a/windc/openstack/common/context.py b/windc/openstack/common/context.py
deleted file mode 100644
index a9a16f8e..00000000
--- a/windc/openstack/common/context.py
+++ /dev/null
@@ -1,40 +0,0 @@
-# vim: tabstop=4 shiftwidth=4 softtabstop=4
-
-# Copyright 2011 OpenStack LLC.
-# All Rights Reserved.
-#
-# Licensed under the Apache License, Version 2.0 (the "License"); you may
-# not use this file except in compliance with the License. You may obtain
-# a copy of the License at
-#
-# http://www.apache.org/licenses/LICENSE-2.0
-#
-# Unless required by applicable law or agreed to in writing, software
-# distributed under the License is distributed on an "AS IS" BASIS, WITHOUT
-# WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. See the
-# License for the specific language governing permissions and limitations
-# under the License.
-
-"""
-Simple class that stores security context information in the web request.
-
-Projects should subclass this class if they wish to enhance the request
-context or provide additional information in their specific WSGI pipeline.
-"""
-
-
-class RequestContext(object):
-
- """
- Stores information about the security context under which the user
- accesses the system, as well as additional request information.
- """
-
- def __init__(self, auth_tok=None, user=None, tenant=None, is_admin=False,
- read_only=False, show_deleted=False):
- self.auth_tok = auth_tok
- self.user = user
- self.tenant = tenant
- self.is_admin = is_admin
- self.read_only = read_only
- self.show_deleted = show_deleted
diff --git a/windc/openstack/common/exception.py b/windc/openstack/common/exception.py
deleted file mode 100644
index a81355ef..00000000
--- a/windc/openstack/common/exception.py
+++ /dev/null
@@ -1,143 +0,0 @@
-# vim: tabstop=4 shiftwidth=4 softtabstop=4
-
-# Copyright 2011 OpenStack LLC.
-# All Rights Reserved.
-#
-# Licensed under the Apache License, Version 2.0 (the "License"); you may
-# not use this file except in compliance with the License. You may obtain
-# a copy of the License at
-#
-# http://www.apache.org/licenses/LICENSE-2.0
-#
-# Unless required by applicable law or agreed to in writing, software
-# distributed under the License is distributed on an "AS IS" BASIS, WITHOUT
-# WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. See the
-# License for the specific language governing permissions and limitations
-# under the License.
-
-"""
-Exceptions common to OpenStack projects
-"""
-
-import logging
-
-
-class ProcessExecutionError(IOError):
- def __init__(self, stdout=None, stderr=None, exit_code=None, cmd=None,
- description=None):
- if description is None:
- description = "Unexpected error while running command."
- if exit_code is None:
- exit_code = '-'
- message = "%s\nCommand: %s\nExit code: %s\nStdout: %r\nStderr: %r" % (
- description, cmd, exit_code, stdout, stderr)
- IOError.__init__(self, message)
-
-
-class Error(Exception):
- def __init__(self, message=None):
- super(Error, self).__init__(message)
-
-
-class ApiError(Error):
- def __init__(self, message='Unknown', code='Unknown'):
- self.message = message
- self.code = code
- super(ApiError, self).__init__('%s: %s' % (code, message))
-
-
-class NotFound(Error):
- pass
-
-
-class UnknownScheme(Error):
-
- msg = "Unknown scheme '%s' found in URI"
-
- def __init__(self, scheme):
- msg = self.__class__.msg % scheme
- super(UnknownScheme, self).__init__(msg)
-
-
-class BadStoreUri(Error):
-
- msg = "The Store URI %s was malformed. Reason: %s"
-
- def __init__(self, uri, reason):
- msg = self.__class__.msg % (uri, reason)
- super(BadStoreUri, self).__init__(msg)
-
-
-class Duplicate(Error):
- pass
-
-
-class NotAuthorized(Error):
- pass
-
-
-class NotEmpty(Error):
- pass
-
-
-class Invalid(Error):
- pass
-
-
-class BadInputError(Exception):
- """Error resulting from a client sending bad input to a server"""
- pass
-
-
-class MissingArgumentError(Error):
- pass
-
-
-class DatabaseMigrationError(Error):
- pass
-
-
-class ClientConnectionError(Exception):
- """Error resulting from a client connecting to a server"""
- pass
-
-
-def wrap_exception(f):
- def _wrap(*args, **kw):
- try:
- return f(*args, **kw)
- except Exception, e:
- if not isinstance(e, Error):
- #exc_type, exc_value, exc_traceback = sys.exc_info()
- logging.exception('Uncaught exception')
- #logging.error(traceback.extract_stack(exc_traceback))
- raise Error(str(e))
- raise
- _wrap.func_name = f.func_name
- return _wrap
-
-
-class OpenstackException(Exception):
- """
- Base Exception
-
- To correctly use this class, inherit from it and define
- a 'message' property. That message will get printf'd
- with the keyword arguments provided to the constructor.
- """
- message = "An unknown exception occurred"
-
- def __init__(self, **kwargs):
- try:
- self._error_string = self.message % kwargs
-
- except Exception:
- # at least get the core message out if something happened
- self._error_string = self.message
-
- def __str__(self):
- return self._error_string
-
-
-class InvalidContentType(OpenstackException):
- message = "Invalid content type %(content_type)s"
diff --git a/windc/openstack/common/middleware/context.py b/windc/openstack/common/middleware/context.py
deleted file mode 100644
index be7dafee..00000000
--- a/windc/openstack/common/middleware/context.py
+++ /dev/null
@@ -1,64 +0,0 @@
-# vim: tabstop=4 shiftwidth=4 softtabstop=4
-
-# Copyright 2011 OpenStack LLC.
-# All Rights Reserved.
-#
-# Licensed under the Apache License, Version 2.0 (the "License"); you may
-# not use this file except in compliance with the License. You may obtain
-# a copy of the License at
-#
-# http://www.apache.org/licenses/LICENSE-2.0
-#
-# Unless required by applicable law or agreed to in writing, software
-# distributed under the License is distributed on an "AS IS" BASIS, WITHOUT
-# WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. See the
-# License for the specific language governing permissions and limitations
-# under the License.
-
-"""
-Middleware that attaches a context to the WSGI request
-"""
-
-from openstack.common import utils
-from openstack.common import wsgi
-from openstack.common import context
-
-
-class ContextMiddleware(wsgi.Middleware):
- def __init__(self, app, options):
- self.options = options
- super(ContextMiddleware, self).__init__(app)
-
- def make_context(self, *args, **kwargs):
- """
- Create a context with the given arguments.
- """
-
- # Determine the context class to use
- ctxcls = context.RequestContext
- if 'context_class' in self.options:
- ctxcls = utils.import_class(self.options['context_class'])
-
- return ctxcls(*args, **kwargs)
-
- def process_request(self, req):
- """
- Extract any authentication information in the request and
- construct an appropriate context from it.
- """
- # Use the default empty context, with admin turned on for
- # backwards compatibility
- req.context = self.make_context(is_admin=True)
-
-
-def filter_factory(global_conf, **local_conf):
- """
- Factory method for paste.deploy
- """
- conf = global_conf.copy()
- conf.update(local_conf)
-
- def filter(app):
- return ContextMiddleware(app, conf)
-
- return filter
diff --git a/windc/openstack/common/utils.py b/windc/openstack/common/utils.py
deleted file mode 100644
index 0d2f89e0..00000000
--- a/windc/openstack/common/utils.py
+++ /dev/null
@@ -1,89 +0,0 @@
-# vim: tabstop=4 shiftwidth=4 softtabstop=4
-
-# Copyright 2011 OpenStack LLC.
-# All Rights Reserved.
-#
-# Licensed under the Apache License, Version 2.0 (the "License"); you may
-# not use this file except in compliance with the License. You may obtain
-# a copy of the License at
-#
-# http://www.apache.org/licenses/LICENSE-2.0
-#
-# Unless required by applicable law or agreed to in writing, software
-# distributed under the License is distributed on an "AS IS" BASIS, WITHOUT
-# WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. See the
-# License for the specific language governing permissions and limitations
-# under the License.
-
-"""
-System-level utilities and helper functions.
-"""
-
-import datetime
-import sys
-
-from openstack.common import exception
-
-
-TIME_FORMAT = "%Y-%m-%dT%H:%M:%SZ"
-
-
-def int_from_bool_as_string(subject):
- """
- Interpret a string as a boolean and return either 1 or 0.
-
- Any string value in:
- ('True', 'true', 'On', 'on', '1')
- is interpreted as a boolean True.
-
- Useful for JSON-decoded stuff and config file parsing
- """
- return bool_from_string(subject) and 1 or 0
-
-
-def bool_from_string(subject):
- """
- Interpret a string as a boolean.
-
- Any string value in:
- ('True', 'true', 'On', 'on', '1')
- is interpreted as a boolean True.
-
- Useful for JSON-decoded stuff and config file parsing
- """
- if type(subject) == type(bool):
- return subject
- if hasattr(subject, 'startswith'): # str or unicode...
- if subject.strip().lower() in ('true', 'on', '1'):
- return True
- return False
-
-
-def import_class(import_str):
- """Returns a class from a string including module and class"""
- mod_str, _sep, class_str = import_str.rpartition('.')
- try:
- __import__(mod_str)
- return getattr(sys.modules[mod_str], class_str)
- except (ImportError, ValueError, AttributeError):
- raise exception.NotFound('Class %s cannot be found' % class_str)
-
-
-def import_object(import_str):
- """Returns an object including a module or module and class"""
- try:
- __import__(import_str)
- return sys.modules[import_str]
- except ImportError:
- cls = import_class(import_str)
- return cls()
-
-
-def isotime(at=None):
- if not at:
- at = datetime.datetime.utcnow()
- return at.strftime(TIME_FORMAT)
-
-
-def parse_isotime(timestr):
- return datetime.datetime.strptime(timestr, TIME_FORMAT)
diff --git a/windc/openstack/common/wsgi.py b/windc/openstack/common/wsgi.py
deleted file mode 100644
index e6d7b2ef..00000000
--- a/windc/openstack/common/wsgi.py
+++ /dev/null
@@ -1,395 +0,0 @@
-# vim: tabstop=4 shiftwidth=4 softtabstop=4
-
-# Copyright 2011 OpenStack LLC.
-# All Rights Reserved.
-#
-# Licensed under the Apache License, Version 2.0 (the "License"); you may
-# not use this file except in compliance with the License. You may obtain
-# a copy of the License at
-#
-# http://www.apache.org/licenses/LICENSE-2.0
-#
-# Unless required by applicable law or agreed to in writing, software
-# distributed under the License is distributed on an "AS IS" BASIS, WITHOUT
-# WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. See the
-# License for the specific language governing permissions and limitations
-# under the License.
-
-"""
-Utility methods for working with WSGI servers
-"""
-
-import json
-import logging
-import sys
-import datetime
-import urllib2
-
-import eventlet
-import eventlet.wsgi
-eventlet.patcher.monkey_patch(all=False, socket=True)
-import routes
-import routes.middleware
-import webob.dec
-import webob.exc
-
-from openstack.common import exception
-
-logger = logging.getLogger('openstack.common.wsgi')
-
-
-class WritableLogger(object):
- """A thin wrapper that responds to `write` and logs."""
-
- def __init__(self, logger, level=logging.DEBUG):
- self.logger = logger
- self.level = level
-
- def write(self, msg):
- self.logger.log(self.level, msg.strip("\n"))
-
-
-def run_server(application, port):
- """Run a WSGI server with the given application."""
- sock = eventlet.listen(('0.0.0.0', port))
- eventlet.wsgi.server(sock, application)
-
-
-class Server(object):
- """Server class to manage multiple WSGI sockets and applications."""
-
- def __init__(self, threads=1000):
- self.pool = eventlet.GreenPool(threads)
-
- def start(self, application, port, host='0.0.0.0', backlog=128):
- """Run a WSGI server with the given application."""
- socket = eventlet.listen((host, port), backlog=backlog)
- self.pool.spawn_n(self._run, application, socket)
-
- def wait(self):
- """Wait until all servers have completed running."""
- try:
- self.pool.waitall()
- except KeyboardInterrupt:
- pass
-
- def _run(self, application, socket):
- """Start a WSGI server in a new green thread."""
- logger = logging.getLogger('eventlet.wsgi.server')
- eventlet.wsgi.server(socket, application, custom_pool=self.pool,
- log=WritableLogger(logger))
-
-
-class Middleware(object):
- """
- Base WSGI middleware wrapper. These classes require an application to be
- initialized that will be called next. By default the middleware will
- simply call its wrapped app, or you can override __call__ to customize its
- behavior.
- """
-
- def __init__(self, application):
- self.application = application
-
- def process_request(self, req):
- """
- Called on each request.
-
- If this returns None, the next application down the stack will be
- executed. If it returns a response then that response will be returned
- and execution will stop here.
- """
- return None
-
- def process_response(self, response):
- """Do whatever you'd like to the response."""
- return response
-
- @webob.dec.wsgify
- def __call__(self, req):
- response = self.process_request(req)
- if response:
- return response
- response = req.get_response(self.application)
- return self.process_response(response)
-
-
-class Debug(Middleware):
- """
- Helper class that can be inserted into any WSGI application chain
- to get information about the request and response.
- """
-
- @webob.dec.wsgify
- def __call__(self, req):
- print ("*" * 40) + " REQUEST ENVIRON"
- for key, value in req.environ.items():
- print key, "=", value
- print
- resp = req.get_response(self.application)
-
- print ("*" * 40) + " RESPONSE HEADERS"
- for (key, value) in resp.headers.iteritems():
- print key, "=", value
- print
-
- resp.app_iter = self.print_generator(resp.app_iter)
-
- return resp
-
- @staticmethod
- def print_generator(app_iter):
- """
- Iterator that prints the contents of a wrapper string iterator
- when iterated.
- """
- print ("*" * 40) + " BODY"
- for part in app_iter:
- sys.stdout.write(part)
- sys.stdout.flush()
- yield part
- print
-
-
-class Router(object):
-
- """
- WSGI middleware that maps incoming requests to WSGI apps.
- """
-
- def __init__(self, mapper):
- """
- Create a router for the given routes.Mapper.
-
- Each route in `mapper` must specify a 'controller', which is a
- WSGI app to call. You'll probably want to specify an 'action' as
- well and have your controller be a wsgi.Controller, who will route
- the request to the action method.
-
- Examples:
- mapper = routes.Mapper()
- sc = ServerController()
-
- # Explicit mapping of one route to a controller+action
- mapper.connect(None, "/svrlist", controller=sc, action="list")
-
- # Actions are all implicitly defined
- mapper.resource("server", "servers", controller=sc)
-
- # Pointing to an arbitrary WSGI app. You can specify the
- # {path_info:.*} parameter so the target app can be handed just that
- # section of the URL.
- mapper.connect(None, "/v1.0/{path_info:.*}", controller=BlogApp())
- """
- self.map = mapper
- self._router = routes.middleware.RoutesMiddleware(self._dispatch,
- self.map)
-
- @webob.dec.wsgify
- def __call__(self, req):
- """
- Route the incoming request to a controller based on self.map.
- If no match, return a 404.
- """
- return self._router
-
- @staticmethod
- @webob.dec.wsgify
- def _dispatch(req):
- """
- Called by self._router after matching the incoming request to a route
- and putting the information into req.environ. Either returns 404
- or the routed WSGI app's response.
- """
- match = req.environ['wsgiorg.routing_args'][1]
- if not match:
- return webob.exc.HTTPNotFound()
- app = match['controller']
- return app
-
-
-class Request(webob.Request):
-
- """Add some Openstack API-specific logic to the base webob.Request."""
-
- def best_match_content_type(self):
- """Determine the requested response content-type."""
- supported = ('application/json',)
- bm = self.accept.best_match(supported)
- return bm or 'application/json'
-
- def get_content_type(self, allowed_content_types):
- """Determine content type of the request body."""
- if not "Content-Type" in self.headers:
- raise exception.InvalidContentType(content_type=None)
-
- content_type = self.content_type
-
- if content_type not in allowed_content_types:
- raise exception.InvalidContentType(content_type=content_type)
- else:
- return content_type
-
-
-class JSONRequestDeserializer(object):
- def has_body(self, request):
- """
- Returns whether a Webob.Request object will possess an entity body.
-
- :param request: Webob.Request object
- """
- if 'transfer-encoding' in request.headers:
- return True
- elif request.content_length > 0:
- return True
-
- return False
-
- def from_json(self, datastring):
- return json.loads(datastring)
-
- def default(self, request):
- msg = "Request deserialization: %s" % request
- logger.debug(msg)
- if self.has_body(request):
- logger.debug("Deserialization: request has body")
- if request.headers['Content-Type'] == 'application/x-www-form-urlencoded':
- body = urllib2.unquote(request.body)
- else:
- body = request.body
- msg = "Request body: %s" % body
- logger.debug(msg)
- return {'body': self.from_json(body)}
- else:
- logger.debug("Deserialization: request has NOT body")
- return {}
-
-
-class JSONResponseSerializer(object):
-
- def to_json(self, data):
- def sanitizer(obj):
- if isinstance(obj, datetime.datetime):
- return obj.isoformat()
- return obj
-
- return json.dumps(data, default=sanitizer)
-
- def default(self, response, result):
- logger.debug("JSONSerializer default method called.")
- response.headers['Content-Type'] = 'application/json'
- response.body = self.to_json(result)
-
-
-class Resource(object):
- """
- WSGI app that handles (de)serialization and controller dispatch.
-
- Reads routing information supplied by RoutesMiddleware and calls
- the requested action method upon its deserializer, controller,
- and serializer. Those three objects may implement any of the basic
- controller action methods (create, update, show, index, delete)
- along with any that may be specified in the api router. A 'default'
- method may also be implemented to be used in place of any
- non-implemented actions. Deserializer methods must accept a request
- argument and return a dictionary. Controller methods must accept a
- request argument. Additionally, they must also accept keyword
- arguments that represent the keys returned by the Deserializer. They
- may raise a webob.exc exception or return a dict, which will be
- serialized by requested content type.
- """
- def __init__(self, controller, deserializer, serializer):
- """
- :param controller: object that implement methods created by routes lib
- :param deserializer: object that supports webob request deserialization
- through controller-like actions
- :param serializer: object that supports webob response serialization
- through controller-like actions
- """
- self.controller = controller
- self.serializer = serializer
- self.deserializer = deserializer
-
- # NOTE(yorik-sar): ugly fix for Routes misbehaviour
- def __add__(self, other):
- return other
-
- @webob.dec.wsgify(RequestClass=Request)
- def __call__(self, request):
- """WSGI method that controls (de)serialization and method dispatch."""
- logger.debug("Resource __call__ is invoked")
- action_args = self.get_action_args(request.environ)
- action = action_args.pop('action', None)
-
- deserialized_params = self.deserialize_request(action, request)
- action_args.update(deserialized_params)
- action_result = self.execute_action(action, request, **action_args)
-
- try:
- return self.serialize_response(action, action_result, request)
-
- # return unserializable result (typically a webob exc)
- except Exception:
- return action_result
-
- def deserialize_request(self, action, request):
- return self.dispatch(self.deserializer, action, request)
-
- def serialize_response(self, action, action_result, request):
- msg = "Called serialize response Action:%s Result:%s Request:%s" % (action, action_result, request)
- logger.debug(msg)
-
- try:
- if not self.controller:
- meth = getattr(self, action)
- else:
- meth = getattr(self.controller, action)
- except AttributeError:
- raise
-
- code = 200
- if hasattr(meth, 'wsgi_code'):
- code = meth.wsgi_code
-
- response = webob.Response()
- response.status = code
- logger.debug("serializer: dispatching call")
- #TODO check why it fails with original openstack code
- #self.dispatch(self.serializer, action, response,
- # action_result, request)
- if action_result is not None:
- self.serializer.default(response, action_result)
- msg = "Response: %s" % response
- logger.debug(msg)
- return response
-
- def execute_action(self, action, request, **action_args):
- return self.dispatch(self.controller, action, request, **action_args)
-
- def dispatch(self, obj, action, *args, **kwargs):
- """Find action-specific method on self and call it."""
- try:
- method = getattr(obj, action)
- except AttributeError:
- method = getattr(obj, 'default')
-
- return method(*args, **kwargs)
-
- def get_action_args(self, request_environment):
- """Parse dictionary created by routes library."""
- try:
- args = request_environment['wsgiorg.routing_args'][1].copy()
- except Exception:
- return {}
-
- try:
- del args['controller']
- except KeyError:
- pass
-
- try:
- del args['format']
- except KeyError:
- pass
-
- return args
diff --git a/windc/openstack/oldcommon/config.py b/windc/openstack/oldcommon/config.py
deleted file mode 100644
index a5fc3f09..00000000
--- a/windc/openstack/oldcommon/config.py
+++ /dev/null
@@ -1,337 +0,0 @@
-# vim: tabstop=4 shiftwidth=4 softtabstop=4
-
-# Copyright 2011 OpenStack LLC.
-# All Rights Reserved.
-#
-# Licensed under the Apache License, Version 2.0 (the "License"); you may
-# not use this file except in compliance with the License. You may obtain
-# a copy of the License at
-#
-# http://www.apache.org/licenses/LICENSE-2.0
-#
-# Unless required by applicable law or agreed to in writing, software
-# distributed under the License is distributed on an "AS IS" BASIS, WITHOUT
-# WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. See the
-# License for the specific language governing permissions and limitations
-# under the License.
-
-"""
-Routines for configuring Openstack Projects
-"""
-
-import logging
-import logging.config
-import logging.handlers
-import optparse
-import os
-import sys
-
-from paste import deploy
-
-DEFAULT_LOG_FORMAT = "%(asctime)s %(levelname)8s [%(name)s] %(message)s"
-DEFAULT_LOG_DATE_FORMAT = "%Y-%m-%d %H:%M:%S"
-
-
-def parse_options(parser, cli_args=None):
- """
- Returns the parsed CLI options, command to run and its arguments, merged
- with any same-named options found in a configuration file.
-
- The function returns a tuple of (options, args), where options is a
- mapping of option key/str(value) pairs, and args is the set of arguments
- (not options) supplied on the command-line.
-
- The reason that the option values are returned as strings only is that
- ConfigParser and paste.deploy only accept string values...
-
- :param parser: The option parser
- :param cli_args: (Optional) Set of arguments to process. If not present,
- sys.argv[1:] is used.
- :retval tuple of (options, args)
- """
-
- (options, args) = parser.parse_args(cli_args)
-
- return (vars(options), args)
-
-
-def add_common_options(parser):
- """
- Given a supplied optparse.OptionParser, adds an OptionGroup that
- represents all common configuration options.
-
- :param parser: optparse.OptionParser
- """
- help_text = "The following configuration options are common to "\
- "this app's programs."
-
- group = optparse.OptionGroup(parser, "Common Options", help_text)
- group.add_option('-v', '--verbose', default=False, dest="verbose",
- action="store_true",
- help="Print more verbose output")
- group.add_option('-d', '--debug', default=False, dest="debug",
- action="store_true",
- help="Print debugging output")
- group.add_option('--config-file', default=None, metavar="PATH",
- help="Path to the config file to use. When not specified "
- "(the default), we generally look at the first "
- "argument specified to be a config file, and if "
- "that is also missing, we search standard "
- "directories for a config file.")
- parser.add_option_group(group)
-
-
-def add_log_options(parser):
- """
- Given a supplied optparse.OptionParser, adds an OptionGroup that
- represents all the configuration options around logging.
-
- :param parser: optparse.OptionParser
- """
- help_text = "The following configuration options are specific to logging "\
- "functionality for this program."
-
- group = optparse.OptionGroup(parser, "Logging Options", help_text)
- group.add_option('--log-config', default=None, metavar="PATH",
- help="If this option is specified, the logging "
- "configuration file specified is used and overrides "
- "any other logging options specified. Please see "
- "the Python logging module documentation for "
- "details on logging configuration files.")
- group.add_option('--log-date-format', metavar="FORMAT",
- default=DEFAULT_LOG_DATE_FORMAT,
- help="Format string for %(asctime)s in log records. "
- "Default: %default")
- group.add_option('--log-file', default=None, metavar="PATH",
- help="(Optional) Name of log file to output to. "
- "If not set, logging will go to stdout.")
- group.add_option("--log-dir", default=None,
- help="(Optional) The directory to keep log files in "
- "(will be prepended to --logfile)")
- group.add_option('--use-syslog', default=False, dest="use_syslog",
- action="store_true",
- help="Use syslog for logging.")
- parser.add_option_group(group)
-
-
-def setup_logging(options, conf):
- """
- Sets up the logging options for a log with supplied name
-
- :param options: Mapping of typed option key/values
- :param conf: Mapping of untyped key/values from config file
- """
-
- if options.get('log_config', None):
- # Use a logging configuration file for all settings...
- if os.path.exists(options['log_config']):
- logging.config.fileConfig(options['log_config'])
- return
- else:
- raise RuntimeError("Unable to locate specified logging "
- "config file: %s" % options['log_config'])
-
- # If either the CLI option or the conf value
- # is True, we set to True
- debug = options.get('debug') or \
- get_option(conf, 'debug', type='bool', default=False)
- verbose = options.get('verbose') or \
- get_option(conf, 'verbose', type='bool', default=False)
- root_logger = logging.root
- if debug:
- root_logger.setLevel(logging.DEBUG)
- elif verbose:
- root_logger.setLevel(logging.INFO)
- else:
- root_logger.setLevel(logging.WARNING)
-
- # Set log configuration from options...
- # Note that we use a hard-coded log format in the options
- # because of Paste.Deploy bug #379
- # http://trac.pythonpaste.org/pythonpaste/ticket/379
- log_format = options.get('log_format', DEFAULT_LOG_FORMAT)
- log_date_format = options.get('log_date_format', DEFAULT_LOG_DATE_FORMAT)
- formatter = logging.Formatter(log_format, log_date_format)
-
- logfile = options.get('log_file')
- if not logfile:
- logfile = conf.get('log_file')
-
- use_syslog = options.get('use_syslog') or \
- get_option(conf, 'use_syslog', type='bool', default=False)
-
- if use_syslog:
- handler = logging.handlers.SysLogHandler(address='/dev/log')
- elif logfile:
- logdir = options.get('log_dir')
- if not logdir:
- logdir = conf.get('log_dir')
- if logdir:
- logfile = os.path.join(logdir, logfile)
- handler = logging.FileHandler(logfile)
- else:
- handler = logging.StreamHandler(sys.stdout)
-
- handler.setFormatter(formatter)
- root_logger.addHandler(handler)
-
-
-def fix_path(path):
- """
- Return the full absolute path
- """
- return os.path.abspath(os.path.expanduser(path))
-
-
-def find_config_file(app_name, options, args, config_dir=None):
- """
- Return the first config file found for an application.
-
- We search for the paste config file in the following order:
- * If --config-file option is used, use that
- * If args[0] is a file, use that
- * Search for $app.conf in standard directories:
- * .
- * ~.config_dir/
- * ~
- * /etc/config_dir
- * /etc
-
- :retval Full path to config file, or None if no config file found
- """
- config_dir = config_dir or app_name
-
- if options.get('config_file'):
- if os.path.exists(options['config_file']):
- return fix_path(options['config_file'])
- elif args:
- if os.path.exists(args[0]):
- return fix_path(args[0])
-
- # Handle standard directory search for $app_name.conf
- config_file_dirs = [fix_path(os.getcwd()),
- fix_path(os.path.join('~', '.' + config_dir)),
- fix_path('~'),
- os.path.join('/etc', config_dir),
- '/etc']
-
- for cfg_dir in config_file_dirs:
- cfg_file = os.path.join(cfg_dir, '%s.conf' % app_name)
- if os.path.exists(cfg_file):
- return cfg_file
-
-
-def load_paste_config(app_name, options, args, config_dir=None):
- """
- Looks for a config file to use for an app and returns the
- config file path and a configuration mapping from a paste config file.
-
- We search for the paste config file in the following order:
- * If --config-file option is used, use that
- * If args[0] is a file, use that
- * Search for $app_name.conf in standard directories:
- * .
- * ~.config_dir/
- * ~
- * /etc/config_dir
- * /etc
-
- :param app_name: Name of the application to load config for, or None.
- None signifies to only load the [DEFAULT] section of
- the config file.
- :param options: Set of typed options returned from parse_options()
- :param args: Command line arguments from argv[1:]
- :retval Tuple of (conf_file, conf)
-
- :raises RuntimeError when config file cannot be located or there was a
- problem loading the configuration file.
- """
- conf_file = find_config_file(app_name, options, args, config_dir)
- if not conf_file:
- raise RuntimeError("Unable to locate any configuration file. "
- "Cannot load application %s" % app_name)
- try:
- conf = deploy.appconfig("config:%s" % conf_file, name=app_name)
- return conf_file, conf
- except Exception, e:
- raise RuntimeError("Error trying to load config %s: %s"
- % (conf_file, e))
-
-
-def load_paste_app(app_name, options, args, config_dir=None):
- """
- Builds and returns a WSGI app from a paste config file.
-
- We search for the paste config file in the following order:
- * If --config-file option is used, use that
- * If args[0] is a file, use that
- * Search for $app_name.conf in standard directories:
- * .
- * ~.config_dir/
- * ~
- * /etc/config_dir
- * /etc
-
- :param app_name: Name of the application to load
- :param options: Set of typed options returned from parse_options()
- :param args: Command line arguments from argv[1:]
-
- :raises RuntimeError when config file cannot be located or application
- cannot be loaded from config file
- """
- conf_file, conf = load_paste_config(app_name, options,
- args, config_dir)
-
- try:
- # Setup logging early, supplying both the CLI options and the
- # configuration mapping from the config file
- setup_logging(options, conf)
-
- # We only update the conf dict for the verbose and debug
- # flags. Everything else must be set up in the conf file...
- debug = options.get('debug') or \
- get_option(conf, 'debug', type='bool', default=False)
- verbose = options.get('verbose') or \
- get_option(conf, 'verbose', type='bool', default=False)
- conf['debug'] = debug
- conf['verbose'] = verbose
-
- # Log the options used when starting if we're in debug mode...
- if debug:
- logger = logging.getLogger(app_name)
- logger.debug("*" * 80)
- logger.debug("Configuration options gathered from config file:")
- logger.debug(conf_file)
- logger.debug("================================================")
- items = dict([(k, v) for k, v in conf.items()
- if k not in ('__file__', 'here')])
- for key, value in sorted(items.items()):
- logger.debug("%(key)-30s %(value)s" % locals())
- logger.debug("*" * 80)
- app = deploy.loadapp("config:%s" % conf_file, name=app_name)
- except (LookupError, ImportError), e:
- raise RuntimeError("Unable to load %(app_name)s from "
- "configuration file %(conf_file)s."
- "\nGot: %(e)r" % locals())
- return conf, app
-
-
-def get_option(options, option, **kwargs):
- if option in options:
- value = options[option]
- type_ = kwargs.get('type', 'str')
- if type_ == 'bool':
- if hasattr(value, 'lower'):
- return value.lower() == 'true'
- else:
- return value
- elif type_ == 'int':
- return int(value)
- elif type_ == 'float':
- return float(value)
- else:
- return value
- elif 'default' in kwargs:
- return kwargs['default']
- else:
- raise KeyError("option '%s' not found" % option)
diff --git a/windc/openstack/oldcommon/context.py b/windc/openstack/oldcommon/context.py
deleted file mode 100644
index a9a16f8e..00000000
--- a/windc/openstack/oldcommon/context.py
+++ /dev/null
@@ -1,40 +0,0 @@
-# vim: tabstop=4 shiftwidth=4 softtabstop=4
-
-# Copyright 2011 OpenStack LLC.
-# All Rights Reserved.
-#
-# Licensed under the Apache License, Version 2.0 (the "License"); you may
-# not use this file except in compliance with the License. You may obtain
-# a copy of the License at
-#
-# http://www.apache.org/licenses/LICENSE-2.0
-#
-# Unless required by applicable law or agreed to in writing, software
-# distributed under the License is distributed on an "AS IS" BASIS, WITHOUT
-# WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. See the
-# License for the specific language governing permissions and limitations
-# under the License.
-
-"""
-Simple class that stores security context information in the web request.
-
-Projects should subclass this class if they wish to enhance the request
-context or provide additional information in their specific WSGI pipeline.
-"""
-
-
-class RequestContext(object):
-
- """
- Stores information about the security context under which the user
- accesses the system, as well as additional request information.
- """
-
- def __init__(self, auth_tok=None, user=None, tenant=None, is_admin=False,
- read_only=False, show_deleted=False):
- self.auth_tok = auth_tok
- self.user = user
- self.tenant = tenant
- self.is_admin = is_admin
- self.read_only = read_only
- self.show_deleted = show_deleted
diff --git a/windc/openstack/oldcommon/extensions.py b/windc/openstack/oldcommon/extensions.py
deleted file mode 100644
index 162a02a0..00000000
--- a/windc/openstack/oldcommon/extensions.py
+++ /dev/null
@@ -1,538 +0,0 @@
-# vim: tabstop=4 shiftwidth=4 softtabstop=4
-
-# Copyright 2011 OpenStack LLC.
-# Copyright 2011 Justin Santa Barbara
-# All Rights Reserved.
-#
-# Licensed under the Apache License, Version 2.0 (the "License"); you may
-# not use this file except in compliance with the License. You may obtain
-# a copy of the License at
-#
-# http://www.apache.org/licenses/LICENSE-2.0
-#
-# Unless required by applicable law or agreed to in writing, software
-# distributed under the License is distributed on an "AS IS" BASIS, WITHOUT
-# WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. See the
-# License for the specific language governing permissions and limitations
-# under the License.
-
-import imp
-import os
-import routes
-import webob.dec
-import webob.exc
-import logging
-from lxml import etree
-
-from openstack.common import exception
-from openstack.common import wsgi
-
-LOG = logging.getLogger('extensions')
-DEFAULT_XMLNS = "http://docs.openstack.org/"
-XMLNS_ATOM = "http://www.w3.org/2005/Atom"
-
-
-class ExtensionDescriptor(object):
- """Base class that defines the contract for extensions.
-
- Note that you don't have to derive from this class to have a valid
- extension; it is purely a convenience.
-
- """
-
- def get_name(self):
- """The name of the extension.
-
- e.g. 'Fox In Socks'
-
- """
- raise NotImplementedError()
-
- def get_alias(self):
- """The alias for the extension.
-
- e.g. 'FOXNSOX'
-
- """
- raise NotImplementedError()
-
- def get_description(self):
- """Friendly description for the extension.
-
- e.g. 'The Fox In Socks Extension'
-
- """
- raise NotImplementedError()
-
- def get_namespace(self):
- """The XML namespace for the extension.
-
- e.g. 'http://www.fox.in.socks/api/ext/pie/v1.0'
-
- """
- raise NotImplementedError()
-
- def get_updated(self):
- """The timestamp when the extension was last updated.
-
- e.g. '2011-01-22T13:25:27-06:00'
-
- """
- # NOTE(justinsb): Not sure of the purpose of this is, vs the XML NS
- raise NotImplementedError()
-
- def get_resources(self):
- """List of extensions.ResourceExtension extension objects.
-
- Resources define new nouns, and are accessible through URLs.
-
- """
- resources = []
- return resources
-
- def get_actions(self):
- """List of extensions.ActionExtension extension objects.
-
- Actions are verbs callable from the API.
-
- """
- actions = []
- return actions
-
- def get_request_extensions(self):
- """List of extensions.RequestException extension objects.
-
- Request extensions are used to handle custom request data.
-
- """
- request_exts = []
- return request_exts
-
-
-class ActionExtensionController(object):
- def __init__(self, application):
- self.application = application
- self.action_handlers = {}
-
- def add_action(self, action_name, handler):
- self.action_handlers[action_name] = handler
-
- def action(self, req, id, body):
- for action_name, handler in self.action_handlers.iteritems():
- if action_name in body:
- return handler(body, req, id)
- # no action handler found (bump to downstream application)
- res = self.application
- return res
-
-
-class ActionExtensionResource(wsgi.Resource):
-
- def __init__(self, application):
- controller = ActionExtensionController(application)
- wsgi.Resource.__init__(self, controller)
-
- def add_action(self, action_name, handler):
- self.controller.add_action(action_name, handler)
-
-
-class RequestExtensionController(object):
-
- def __init__(self, application):
- self.application = application
- self.handlers = []
-
- def add_handler(self, handler):
- self.handlers.append(handler)
-
- def process(self, req, *args, **kwargs):
- res = req.get_response(self.application)
- # currently request handlers are un-ordered
- for handler in self.handlers:
- res = handler(req, res)
- return res
-
-
-class RequestExtensionResource(wsgi.Resource):
-
- def __init__(self, application):
- controller = RequestExtensionController(application)
- wsgi.Resource.__init__(self, controller)
-
- def add_handler(self, handler):
- self.controller.add_handler(handler)
-
-
-class ExtensionsResource(wsgi.Resource):
-
- def __init__(self, extension_manager):
- self.extension_manager = extension_manager
- body_serializers = {'application/xml': ExtensionsXMLSerializer()}
- serializer = wsgi.ResponseSerializer(body_serializers=body_serializers)
- super(ExtensionsResource, self).__init__(self, None, serializer)
-
- def _translate(self, ext):
- ext_data = {}
- ext_data['name'] = ext.get_name()
- ext_data['alias'] = ext.get_alias()
- ext_data['description'] = ext.get_description()
- ext_data['namespace'] = ext.get_namespace()
- ext_data['updated'] = ext.get_updated()
- ext_data['links'] = [] # TODO(dprince): implement extension links
- return ext_data
-
- def index(self, req):
- extensions = []
- for _alias, ext in self.extension_manager.extensions.iteritems():
- extensions.append(self._translate(ext))
- return dict(extensions=extensions)
-
- def show(self, req, id):
- # NOTE(dprince): the extensions alias is used as the 'id' for show
- ext = self.extension_manager.extensions.get(id, None)
- if not ext:
- raise webob.exc.HTTPNotFound(
- _("Extension with alias %s does not exist") % id)
-
- return dict(extension=self._translate(ext))
-
- def delete(self, req, id):
- raise webob.exc.HTTPNotFound()
-
- def create(self, req):
- raise webob.exc.HTTPNotFound()
-
-
-class ExtensionMiddleware(wsgi.Middleware):
- """Extensions middleware for WSGI."""
-
- @classmethod
- def factory(cls, global_config, **local_config):
- """Paste factory."""
- def _factory(app):
- return cls(app, global_config, **local_config)
- return _factory
-
- def _action_ext_resources(self, application, ext_mgr, mapper):
- """Return a dict of ActionExtensionResource-s by collection."""
- action_resources = {}
- for action in ext_mgr.get_actions():
- if not action.collection in action_resources.keys():
- resource = ActionExtensionResource(application)
- mapper.connect("/%s/:(id)/action.:(format)" %
- action.collection,
- action='action',
- controller=resource,
- conditions=dict(method=['POST']))
- mapper.connect("/%s/:(id)/action" %
- action.collection,
- action='action',
- controller=resource,
- conditions=dict(method=['POST']))
- action_resources[action.collection] = resource
-
- return action_resources
-
- def _request_ext_resources(self, application, ext_mgr, mapper):
- """Returns a dict of RequestExtensionResource-s by collection."""
- request_ext_resources = {}
- for req_ext in ext_mgr.get_request_extensions():
- if not req_ext.key in request_ext_resources.keys():
- resource = RequestExtensionResource(application)
- mapper.connect(req_ext.url_route + '.:(format)',
- action='process',
- controller=resource,
- conditions=req_ext.conditions)
-
- mapper.connect(req_ext.url_route,
- action='process',
- controller=resource,
- conditions=req_ext.conditions)
- request_ext_resources[req_ext.key] = resource
-
- return request_ext_resources
-
- def __init__(self, application, config, ext_mgr=None):
- ext_mgr = ext_mgr or ExtensionManager(
- config['api_extensions_path'])
- mapper = routes.Mapper()
-
- # extended resources
- for resource_ext in ext_mgr.get_resources():
- LOG.debug(_('Extended resource: %s'), resource_ext.collection)
- controller_resource = wsgi.Resource(resource_ext.controller,
- resource_ext.deserializer,
- resource_ext.serializer)
- self._map_custom_collection_actions(resource_ext, mapper,
- controller_resource)
- kargs = dict(controller=controller_resource,
- collection=resource_ext.collection_actions,
- member=resource_ext.member_actions)
- if resource_ext.parent:
- kargs['parent_resource'] = resource_ext.parent
- mapper.resource(resource_ext.collection,
- resource_ext.collection, **kargs)
-
- # extended actions
- action_resources = self._action_ext_resources(application, ext_mgr,
- mapper)
- for action in ext_mgr.get_actions():
- LOG.debug(_('Extended action: %s'), action.action_name)
- resource = action_resources[action.collection]
- resource.add_action(action.action_name, action.handler)
-
- # extended requests
- req_controllers = self._request_ext_resources(application, ext_mgr,
- mapper)
- for request_ext in ext_mgr.get_request_extensions():
- LOG.debug(_('Extended request: %s'), request_ext.key)
- controller = req_controllers[request_ext.key]
- controller.add_handler(request_ext.handler)
-
- self._router = routes.middleware.RoutesMiddleware(self._dispatch,
- mapper)
-
- super(ExtensionMiddleware, self).__init__(application)
-
- def _map_custom_collection_actions(self, resource_ext, mapper,
- controller_resource):
- for action, method in resource_ext.collection_actions.iteritems():
- parent = resource_ext.parent
- conditions = dict(method=[method])
- path = "/%s/%s" % (resource_ext.collection, action)
-
- path_prefix = ""
- if parent:
- path_prefix = "/%s/{%s_id}" % (parent["collection_name"],
- parent["member_name"])
-
- with mapper.submapper(controller=controller_resource,
- action=action,
- path_prefix=path_prefix,
- conditions=conditions) as submap:
- submap.connect(path)
- submap.connect("%s.:(format)" % path)
-
- @webob.dec.wsgify(RequestClass=wsgi.Request)
- def __call__(self, req):
- """Route the incoming request with router."""
- req.environ['extended.app'] = self.application
- return self._router
-
- @staticmethod
- @webob.dec.wsgify(RequestClass=wsgi.Request)
- def _dispatch(req):
- """Dispatch the request.
-
- Returns the routed WSGI app's response or defers to the extended
- application.
-
- """
- match = req.environ['wsgiorg.routing_args'][1]
- if not match:
- return req.environ['extended.app']
- app = match['controller']
- return app
-
-
-class ExtensionManager(object):
- """Load extensions from the configured extension path.
-
- See nova/tests/api/openstack/extensions/foxinsocks/extension.py for an
- example extension implementation.
-
- """
-
- def __init__(self, path):
- LOG.debug(_('Initializing extension manager.'))
-
- self.path = path
- self.extensions = {}
- self._load_all_extensions()
-
- def get_resources(self):
- """Returns a list of ResourceExtension objects."""
- resources = []
- extension_resource = ExtensionsResource(self)
- res_ext = ResourceExtension('extensions',
- extension_resource,
- serializer=extension_resource.serializer)
- resources.append(res_ext)
- for alias, ext in self.extensions.iteritems():
- try:
- resources.extend(ext.get_resources())
- except AttributeError:
- # NOTE(dprince): Extension aren't required to have resource
- # extensions
- pass
- return resources
-
- def get_actions(self):
- """Returns a list of ActionExtension objects."""
- actions = []
- for alias, ext in self.extensions.iteritems():
- try:
- actions.extend(ext.get_actions())
- except AttributeError:
- # NOTE(dprince): Extension aren't required to have action
- # extensions
- pass
- return actions
-
- def get_request_extensions(self):
- """Returns a list of RequestExtension objects."""
- request_exts = []
- for alias, ext in self.extensions.iteritems():
- try:
- request_exts.extend(ext.get_request_extensions())
- except AttributeError:
- # NOTE(dprince): Extension aren't required to have request
- # extensions
- pass
- return request_exts
-
- def _check_extension(self, extension):
- """Checks for required methods in extension objects."""
- try:
- LOG.debug(_('Ext name: %s'), extension.get_name())
- LOG.debug(_('Ext alias: %s'), extension.get_alias())
- LOG.debug(_('Ext description: %s'), extension.get_description())
- LOG.debug(_('Ext namespace: %s'), extension.get_namespace())
- LOG.debug(_('Ext updated: %s'), extension.get_updated())
- except AttributeError as ex:
- LOG.exception(_("Exception loading extension: %s"), unicode(ex))
- return False
- return True
-
- def _load_all_extensions(self):
- """Load extensions from the configured path.
-
- Load extensions from the configured path. The extension name is
- constructed from the module_name. If your extension module was named
- widgets.py the extension class within that module should be
- 'Widgets'.
-
- In addition, extensions are loaded from the 'contrib' directory.
-
- See nova/tests/api/openstack/extensions/foxinsocks.py for an example
- extension implementation.
-
- """
- if os.path.exists(self.path):
- self._load_all_extensions_from_path(self.path)
-
- contrib_path = os.path.join(os.path.dirname(__file__), "contrib")
- if os.path.exists(contrib_path):
- self._load_all_extensions_from_path(contrib_path)
-
- def _load_all_extensions_from_path(self, path):
- for f in os.listdir(path):
- LOG.debug(_('Loading extension file: %s'), f)
- mod_name, file_ext = os.path.splitext(os.path.split(f)[-1])
- ext_path = os.path.join(path, f)
- if file_ext.lower() == '.py' and not mod_name.startswith('_'):
- mod = imp.load_source(mod_name, ext_path)
- ext_name = mod_name[0].upper() + mod_name[1:]
- new_ext_class = getattr(mod, ext_name, None)
- if not new_ext_class:
- LOG.warn(_('Did not find expected name '
- '"%(ext_name)s" in %(file)s'),
- {'ext_name': ext_name,
- 'file': ext_path})
- continue
- new_ext = new_ext_class()
- self.add_extension(new_ext)
-
- def add_extension(self, ext):
- # Do nothing if the extension doesn't check out
- if not self._check_extension(ext):
- return
-
- alias = ext.get_alias()
- LOG.debug(_('Loaded extension: %s'), alias)
-
- if alias in self.extensions:
- raise exception.Error("Found duplicate extension: %s" % alias)
- self.extensions[alias] = ext
-
-
-class RequestExtension(object):
- """Extend requests and responses of core nova OpenStack API resources.
-
- Provide a way to add data to responses and handle custom request data
- that is sent to core nova OpenStack API controllers.
-
- """
- def __init__(self, method, url_route, handler):
- self.url_route = url_route
- self.handler = handler
- self.conditions = dict(method=[method])
- self.key = "%s-%s" % (method, url_route)
-
-
-class ActionExtension(object):
- """Add custom actions to core nova OpenStack API resources."""
-
- def __init__(self, collection, action_name, handler):
- self.collection = collection
- self.action_name = action_name
- self.handler = handler
-
-
-class ResourceExtension(object):
- """Add top level resources to the OpenStack API in nova."""
-
- def __init__(self, collection, controller, parent=None,
- collection_actions=None, member_actions=None,
- deserializer=None, serializer=None):
- if not collection_actions:
- collection_actions = {}
- if not member_actions:
- member_actions = {}
- self.collection = collection
- self.controller = controller
- self.parent = parent
- self.collection_actions = collection_actions
- self.member_actions = member_actions
- self.deserializer = deserializer
- self.serializer = serializer
-
-
-class ExtensionsXMLSerializer(wsgi.XMLDictSerializer):
-
- def __init__(self):
- self.nsmap = {None: DEFAULT_XMLNS, 'atom': XMLNS_ATOM}
-
- def show(self, ext_dict):
- ext = etree.Element('extension', nsmap=self.nsmap)
- self._populate_ext(ext, ext_dict['extension'])
- return self._to_xml(ext)
-
- def index(self, exts_dict):
- exts = etree.Element('extensions', nsmap=self.nsmap)
- for ext_dict in exts_dict['extensions']:
- ext = etree.SubElement(exts, 'extension')
- self._populate_ext(ext, ext_dict)
- return self._to_xml(exts)
-
- def _populate_ext(self, ext_elem, ext_dict):
- """Populate an extension xml element from a dict."""
-
- ext_elem.set('name', ext_dict['name'])
- ext_elem.set('namespace', ext_dict['namespace'])
- ext_elem.set('alias', ext_dict['alias'])
- ext_elem.set('updated', ext_dict['updated'])
- desc = etree.Element('description')
- desc.text = ext_dict['description']
- ext_elem.append(desc)
- for link in ext_dict.get('links', []):
- elem = etree.SubElement(ext_elem, '{%s}link' % XMLNS_ATOM)
- elem.set('rel', link['rel'])
- elem.set('href', link['href'])
- elem.set('type', link['type'])
- return ext_elem
-
- def _to_xml(self, root):
- """Convert the xml object to an xml string."""
-
- return etree.tostring(root, encoding='UTF-8')
diff --git a/windc/openstack/oldcommon/middleware/context.py b/windc/openstack/oldcommon/middleware/context.py
deleted file mode 100644
index be7dafee..00000000
--- a/windc/openstack/oldcommon/middleware/context.py
+++ /dev/null
@@ -1,64 +0,0 @@
-# vim: tabstop=4 shiftwidth=4 softtabstop=4
-
-# Copyright 2011 OpenStack LLC.
-# All Rights Reserved.
-#
-# Licensed under the Apache License, Version 2.0 (the "License"); you may
-# not use this file except in compliance with the License. You may obtain
-# a copy of the License at
-#
-# http://www.apache.org/licenses/LICENSE-2.0
-#
-# Unless required by applicable law or agreed to in writing, software
-# distributed under the License is distributed on an "AS IS" BASIS, WITHOUT
-# WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. See the
-# License for the specific language governing permissions and limitations
-# under the License.
-
-"""
-Middleware that attaches a context to the WSGI request
-"""
-
-from openstack.common import utils
-from openstack.common import wsgi
-from openstack.common import context
-
-
-class ContextMiddleware(wsgi.Middleware):
- def __init__(self, app, options):
- self.options = options
- super(ContextMiddleware, self).__init__(app)
-
- def make_context(self, *args, **kwargs):
- """
- Create a context with the given arguments.
- """
-
- # Determine the context class to use
- ctxcls = context.RequestContext
- if 'context_class' in self.options:
- ctxcls = utils.import_class(self.options['context_class'])
-
- return ctxcls(*args, **kwargs)
-
- def process_request(self, req):
- """
- Extract any authentication information in the request and
- construct an appropriate context from it.
- """
- # Use the default empty context, with admin turned on for
- # backwards compatibility
- req.context = self.make_context(is_admin=True)
-
-
-def filter_factory(global_conf, **local_conf):
- """
- Factory method for paste.deploy
- """
- conf = global_conf.copy()
- conf.update(local_conf)
-
- def filter(app):
- return ContextMiddleware(app, conf)
-
- return filter
diff --git a/windc/openstack/oldcommon/utils.py b/windc/openstack/oldcommon/utils.py
deleted file mode 100644
index 1faeab5a..00000000
--- a/windc/openstack/oldcommon/utils.py
+++ /dev/null
@@ -1,210 +0,0 @@
-# vim: tabstop=4 shiftwidth=4 softtabstop=4
-
-# Copyright 2011 OpenStack LLC.
-# All Rights Reserved.
-#
-# Licensed under the Apache License, Version 2.0 (the "License"); you may
-# not use this file except in compliance with the License. You may obtain
-# a copy of the License at
-#
-# http://www.apache.org/licenses/LICENSE-2.0
-#
-# Unless required by applicable law or agreed to in writing, software
-# distributed under the License is distributed on an "AS IS" BASIS, WITHOUT
-# WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. See the
-# License for the specific language governing permissions and limitations
-# under the License.
-
-"""
-System-level utilities and helper functions.
-"""
-
-import datetime
-import logging
-import os
-import random
-import shlex
-import sys
-import types
-
-from eventlet import greenthread
-from eventlet.green import subprocess
-
-from openstack.common import exception
-
-
-TIME_FORMAT = "%Y-%m-%dT%H:%M:%SZ"
-LOG = logging.getLogger(__name__)
-
-
-def int_from_bool_as_string(subject):
- """
- Interpret a string as a boolean and return either 1 or 0.
-
- Any string value in:
- ('True', 'true', 'On', 'on', '1')
- is interpreted as a boolean True.
-
- Useful for JSON-decoded stuff and config file parsing
- """
- return bool_from_string(subject) and 1 or 0
-
-
-def bool_from_string(subject):
- """
- Interpret a string as a boolean.
-
- Any string value in:
- ('True', 'true', 'On', 'on', '1')
- is interpreted as a boolean True.
-
- Useful for JSON-decoded stuff and config file parsing
- """
- if isinstance(subject, types.BooleanType):
- return subject
- if isinstance(subject, types.StringTypes):
- if subject.strip().lower() in ('true', 'on', '1'):
- return True
- return False
-
-
-def execute(*cmd, **kwargs):
- """
- Helper method to execute command with optional retry.
-
- :cmd Passed to subprocess.Popen.
- :process_input Send to opened process.
- :check_exit_code Defaults to 0. Raise exception.ProcessExecutionError
- unless program exits with this code.
- :delay_on_retry True | False. Defaults to True. If set to True, wait a
- short amount of time before retrying.
- :attempts How many times to retry cmd.
- :run_as_root True | False. Defaults to False. If set to True,
- the command is prefixed by the command specified
- in the root_helper kwarg.
- :root_helper command to prefix all cmd's with
-
- :raises exception.Error on receiving unknown arguments
- :raises exception.ProcessExecutionError
- """
-
- process_input = kwargs.pop('process_input', None)
- check_exit_code = kwargs.pop('check_exit_code', 0)
- delay_on_retry = kwargs.pop('delay_on_retry', True)
- attempts = kwargs.pop('attempts', 1)
- run_as_root = kwargs.pop('run_as_root', False)
- root_helper = kwargs.pop('root_helper', '')
- if len(kwargs):
- raise exception.Error(_('Got unknown keyword args '
- 'to utils.execute: %r') % kwargs)
- if run_as_root:
- cmd = shlex.split(root_helper) + list(cmd)
- cmd = map(str, cmd)
-
- while attempts > 0:
- attempts -= 1
- try:
- LOG.debug(_('Running cmd (subprocess): %s'), ' '.join(cmd))
- _PIPE = subprocess.PIPE # pylint: disable=E1101
- obj = subprocess.Popen(cmd,
- stdin=_PIPE,
- stdout=_PIPE,
- stderr=_PIPE,
- close_fds=True)
- result = None
- if process_input is not None:
- result = obj.communicate(process_input)
- else:
- result = obj.communicate()
- obj.stdin.close() # pylint: disable=E1101
- _returncode = obj.returncode # pylint: disable=E1101
- if _returncode:
- LOG.debug(_('Result was %s') % _returncode)
- if type(check_exit_code) == types.IntType \
- and _returncode != check_exit_code:
- (stdout, stderr) = result
- raise exception.ProcessExecutionError(
- exit_code=_returncode,
- stdout=stdout,
- stderr=stderr,
- cmd=' '.join(cmd))
- return result
- except exception.ProcessExecutionError:
- if not attempts:
- raise
- else:
- LOG.debug(_('%r failed. Retrying.'), cmd)
- if delay_on_retry:
- greenthread.sleep(random.randint(20, 200) / 100.0)
- finally:
- # NOTE(termie): this appears to be necessary to let the subprocess
- # call clean something up in between calls, without
- # it two execute calls in a row hangs the second one
- greenthread.sleep(0)
-
-
-def import_class(import_str):
- """Returns a class from a string including module and class"""
- mod_str, _sep, class_str = import_str.rpartition('.')
- try:
- __import__(mod_str)
- return getattr(sys.modules[mod_str], class_str)
- except (ImportError, ValueError, AttributeError):
- raise exception.NotFound('Class %s cannot be found' % class_str)
-
-
-def import_object(import_str):
- """Returns an object including a module or module and class"""
- try:
- __import__(import_str)
- return sys.modules[import_str]
- except ImportError:
- return import_class(import_str)
-
-
-def isotime(at=None):
- if not at:
- at = datetime.datetime.utcnow()
- return at.strftime(TIME_FORMAT)
-
-
-def parse_isotime(timestr):
- return datetime.datetime.strptime(timestr, TIME_FORMAT)
-
-
-def parse_mailmap(mailmap='.mailmap'):
- mapping = {}
- if os.path.exists(mailmap):
- fp = open(mailmap, 'r')
- for l in fp:
- l = l.strip()
- if not l.startswith('#') and ' ' in l:
- canonical_email, alias = l.split(' ')
- mapping[alias] = canonical_email
- return mapping
-
-
-def str_dict_replace(s, mapping):
- for s1, s2 in mapping.iteritems():
- s = s.replace(s1, s2)
- return s
-
-
-def utcnow():
- """Overridable version of utils.utcnow."""
- if utcnow.override_time:
- return utcnow.override_time
- return datetime.datetime.utcnow()
-
-
-utcnow.override_time = None
-
-
-def set_time_override(override_time=datetime.datetime.utcnow()):
- """Override utils.utcnow to return a constant time."""
- utcnow.override_time = override_time
-
-
-def clear_time_override():
- """Remove the overridden time."""
- utcnow.override_time = None
diff --git a/windc/run_tests.log b/windc/run_tests.log
deleted file mode 100644
index dc224f3f..00000000
--- a/windc/run_tests.log
+++ /dev/null
@@ -1,3 +0,0 @@
-nose.config: INFO: Ignoring files matching ['^\\.', '^_', '^setup\\.py$']
-nose.selector: INFO: /Users/gokrokve/Keero/windc/run_tests.sh is executable; skipped
-2013-02-11 13:35:01,989 DEBUG Initialized with method overriding = True, and path info altering = True
diff --git a/windc/run_tests.py b/windc/run_tests.py
deleted file mode 100644
index 8f59fe25..00000000
--- a/windc/run_tests.py
+++ /dev/null
@@ -1,293 +0,0 @@
-#!/usr/bin/env python
-# vim: tabstop=4 shiftwidth=4 softtabstop=4
-
-# Copyright 2010 OpenStack, LLC
-# All Rights Reserved.
-#
-# Licensed under the Apache License, Version 2.0 (the "License");
-# you may not use this file except in compliance with the License.
-# You may obtain a copy of the License at
-#
-# http://www.apache.org/licenses/LICENSE-2.0
-#
-# Unless required by applicable law or agreed to in writing, software
-# distributed under the License is distributed on an "AS IS" BASIS,
-# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
-# See the License for the specific language governing permissions and
-# limitations under the License.
-
-# Colorizer Code is borrowed from Twisted:
-# Copyright (c) 2001-2010 Twisted Matrix Laboratories.
-#
-# Permission is hereby granted, free of charge, to any person obtaining
-# a copy of this software and associated documentation files (the
-# "Software"), to deal in the Software without restriction, including
-# without limitation the rights to use, copy, modify, merge, publish,
-# distribute, sublicense, and/or sell copies of the Software, and to
-# permit persons to whom the Software is furnished to do so, subject to
-# the following conditions:
-#
-# The above copyright notice and this permission notice shall be
-# included in all copies or substantial portions of the Software.
-#
-# THE SOFTWARE IS PROVIDED "AS IS", WITHOUT WARRANTY OF ANY KIND,
-# EXPRESS OR IMPLIED, INCLUDING BUT NOT LIMITED TO THE WARRANTIES OF
-# MERCHANTABILITY, FITNESS FOR A PARTICULAR PURPOSE AND
-# NONINFRINGEMENT. IN NO EVENT SHALL THE AUTHORS OR COPYRIGHT HOLDERS BE
-# LIABLE FOR ANY CLAIM, DAMAGES OR OTHER LIABILITY, WHETHER IN AN ACTION
-# OF CONTRACT, TORT OR OTHERWISE, ARISING FROM, OUT OF OR IN CONNECTION
-# WITH THE SOFTWARE OR THE USE OR OTHER DEALINGS IN THE SOFTWARE.
-
-"""
-Unittest runner for balancer
-
-To run all test::
- python run_tests.py
-
-To run a single test::
- python run_tests.py test_stores:TestSwiftBackend.test_get
-
-To run a single test module::
- python run_tests.py test_stores
-"""
-
-import gettext
-import logging
-import os
-import unittest
-import sys
-
-gettext.install('windc', unicode=1)
-
-from nose import config
-from nose import result
-from nose import core
-
-
-class _AnsiColorizer(object):
- """
- A colorizer is an object that loosely wraps around a stream, allowing
- callers to write text to the stream in a particular color.
-
- Colorizer classes must implement C{supported()} and C{write(text, color)}.
- """
- _colors = dict(black=30, red=31, green=32, yellow=33,
- blue=34, magenta=35, cyan=36, white=37)
-
- def __init__(self, stream):
- self.stream = stream
-
- def supported(cls, stream=sys.stdout):
- """
- A class method that returns True if the current platform supports
- coloring terminal output using this method. Returns False otherwise.
- """
- if not stream.isatty():
- return False # auto color only on TTYs
- try:
- import curses
- except ImportError:
- return False
- else:
- try:
- try:
- return curses.tigetnum("colors") > 2
- except curses.error:
- curses.setupterm()
- return curses.tigetnum("colors") > 2
- except:
- raise
- # guess false in case of error
- return False
- supported = classmethod(supported)
-
- def write(self, text, color):
- """
- Write the given text to the stream in the given color.
-
- @param text: Text to be written to the stream.
-
- @param color: A string label for a color. e.g. 'red', 'white'.
- """
- color = self._colors[color]
- self.stream.write('\x1b[%s;1m%s\x1b[0m' % (color, text))
-
-
-class _Win32Colorizer(object):
- """
- See _AnsiColorizer docstring.
- """
- def __init__(self, stream):
- from win32console import GetStdHandle, STD_OUT_HANDLE, \
- FOREGROUND_RED, FOREGROUND_BLUE, FOREGROUND_GREEN, \
- FOREGROUND_INTENSITY
- red, green, blue, bold = (FOREGROUND_RED, FOREGROUND_GREEN,
- FOREGROUND_BLUE, FOREGROUND_INTENSITY)
- self.stream = stream
- self.screenBuffer = GetStdHandle(STD_OUT_HANDLE)
- self._colors = {
- 'normal': red | green | blue,
- 'red': red | bold,
- 'green': green | bold,
- 'blue': blue | bold,
- 'yellow': red | green | bold,
- 'magenta': red | blue | bold,
- 'cyan': green | blue | bold,
- 'white': red | green | blue | bold}
-
- def supported(cls, stream=sys.stdout):
- try:
- import win32console
- screenBuffer = win32console.GetStdHandle(
- win32console.STD_OUT_HANDLE)
- except ImportError:
- return False
- import pywintypes
- try:
- screenBuffer.SetConsoleTextAttribute(
- win32console.FOREGROUND_RED |
- win32console.FOREGROUND_GREEN |
- win32console.FOREGROUND_BLUE)
- except pywintypes.error:
- return False
- else:
- return True
- supported = classmethod(supported)
-
- def write(self, text, color):
- color = self._colors[color]
- self.screenBuffer.SetConsoleTextAttribute(color)
- self.stream.write(text)
- self.screenBuffer.SetConsoleTextAttribute(self._colors['normal'])
-
-
-class _NullColorizer(object):
- """
- See _AnsiColorizer docstring.
- """
- def __init__(self, stream):
- self.stream = stream
-
- def supported(cls, stream=sys.stdout):
- return True
- supported = classmethod(supported)
-
- def write(self, text, color):
- self.stream.write(text)
-
-
-class WindcTestResult(result.TextTestResult):
- def __init__(self, *args, **kw):
- result.TextTestResult.__init__(self, *args, **kw)
- self._last_case = None
- self.colorizer = None
- # NOTE(vish, tfukushima): reset stdout for the terminal check
- stdout = sys.stdout
- sys.stdout = sys.__stdout__
- for colorizer in [_Win32Colorizer, _AnsiColorizer, _NullColorizer]:
- if colorizer.supported():
- self.colorizer = colorizer(self.stream)
- break
- sys.stdout = stdout
-
- def getDescription(self, test):
- return str(test)
-
- # NOTE(vish, tfukushima): copied from unittest with edit to add color
- def addSuccess(self, test):
- unittest.TestResult.addSuccess(self, test)
- if self.showAll:
- self.colorizer.write("OK", 'green')
- self.stream.writeln()
- elif self.dots:
- self.stream.write('.')
- self.stream.flush()
-
- # NOTE(vish, tfukushima): copied from unittest with edit to add color
- def addFailure(self, test, err):
- unittest.TestResult.addFailure(self, test, err)
- if self.showAll:
- self.colorizer.write("FAIL", 'red')
- self.stream.writeln()
- elif self.dots:
- self.stream.write('F')
- self.stream.flush()
-
- # NOTE(vish, tfukushima): copied from unittest with edit to add color
- def addError(self, test, err):
- """
- Overrides normal addError to add support for errorClasses.
- If the exception is a registered class, the error will be added
- to the list for that class, not errors.
- """
- stream = getattr(self, 'stream', None)
- ec, ev, tb = err
- try:
- exc_info = self._exc_info_to_string(err, test)
- except TypeError:
- # This is for compatibility with Python 2.3.
- exc_info = self._exc_info_to_string(err)
- for cls, (storage, label, isfail) in self.errorClasses.items():
- if result.isclass(ec) and issubclass(ec, cls):
- if isfail:
- test.passwd = False
- storage.append((test, exc_info))
- # Might get patched into a streamless result
- if stream is not None:
- if self.showAll:
- message = [label]
- detail = result._exception_detail(err[1])
- if detail:
- message.append(detail)
- stream.writeln(": ".join(message))
- elif self.dots:
- stream.write(label[:1])
- return
- self.errors.append((test, exc_info))
- test.passed = False
- if stream is not None:
- if self.showAll:
- self.colorizer.write("ERROR", 'red')
- self.stream.writeln()
- elif self.dots:
- stream.write('E')
-
- def startTest(self, test):
- unittest.TestResult.startTest(self, test)
- current_case = test.test.__class__.__name__
-
- if self.showAll:
- if current_case != self._last_case:
- self.stream.writeln(current_case)
- self._last_case = current_case
-
- self.stream.write(
- ' %s' % str(test.test._testMethodName).ljust(60))
- self.stream.flush()
-
-
-class WindcTestRunner(core.TextTestRunner):
- def _makeResult(self):
- return WindcTestResult(self.stream,
- self.descriptions,
- self.verbosity,
- self.config)
-
-
-if __name__ == '__main__':
- logger = logging.getLogger()
- hdlr = logging.StreamHandler()
- formatter = logging.Formatter('%(asctime)s %(levelname)s %(message)s')
- hdlr.setFormatter(formatter)
- logger.addHandler(hdlr)
- logger.setLevel(logging.INFO)
-
- c = config.Config(stream=sys.stdout,
- env=os.environ,
- verbosity=3,
- plugins=core.DefaultPluginManager())
-
- runner = WindcTestRunner(stream=c.stream,
- verbosity=c.verbosity,
- config=c)
- sys.exit(not core.run(config=c, testRunner=runner))
diff --git a/windc/run_tests.sh b/windc/run_tests.sh
deleted file mode 100755
index 2fb93e1d..00000000
--- a/windc/run_tests.sh
+++ /dev/null
@@ -1,129 +0,0 @@
-#!/bin/bash
-
-function usage {
- echo "Usage: $0 [OPTION]..."
- echo "Run Loadbalancer's test suite(s)"
- echo ""
- echo " -V, --virtual-env Always use virtualenv. Install automatically if not present"
- echo " -N, --no-virtual-env Don't use virtualenv. Run tests in local environment"
- echo " -f, --force Force a clean re-build of the virtual environment. Useful when dependencies have been added."
- echo " --unittests-only Run unit tests only, exclude functional tests."
- echo " -c, --coverage Generate coverage report"
- echo " -p, --pep8 Just run pep8"
- echo " -h, --help Print this usage message"
- echo ""
- echo "Note: with no options specified, the script will try to run the tests in a virtual environment,"
- echo " If no virtualenv is found, the script will ask if you would like to create one. If you "
- echo " prefer to run tests NOT in a virtual environment, simply pass the -N option."
- exit
-}
-
-function process_option {
- case "$1" in
- -h|--help) usage;;
- -V|--virtual-env) let always_venv=1; let never_venv=0;;
- -N|--no-virtual-env) let always_venv=0; let never_venv=1;;
- -p|--pep8) let just_pep8=1;;
- -f|--force) let force=1;;
- --unittests-only) noseopts="$noseopts --exclude-dir=windc/tests/functional";;
- -c|--coverage) coverage=1;;
- -*) noseopts="$noseopts $1";;
- *) noseargs="$noseargs $1"
- esac
-}
-
-venv=.venv
-with_venv=tools/with_venv.sh
-always_venv=0
-never_venv=0
-force=0
-noseargs=
-noseopts=
-wrapper=""
-just_pep8=0
-coverage=0
-
-for arg in "$@"; do
- process_option $arg
-done
-
-# If enabled, tell nose to collect coverage data
-if [ $coverage -eq 1 ]; then
- noseopts="$noseopts --with-coverage --cover-package=windc --cover-inclusive"
-fi
-
-function run_tests {
- # Just run the test suites in current environment
- ${wrapper} $NOSETESTS 2> run_tests.log
-}
-
-function run_pep8 {
- echo "Running pep8 ..."
- PEP8_OPTIONS="--exclude=$PEP8_EXCLUDE --repeat"
- PEP8_INCLUDE="bin/* windc tools setup.py run_tests.py"
- ${wrapper} pep8 $PEP8_OPTIONS $PEP8_INCLUDE
- PEP_RESULT=$?
- case "$TERM" in
- *color* ) function out { printf "\033[3%d;1m%s\033[m\n" "$1" "$2"; } ;;
- * ) function out { printf "%s\n" "$2"; } ;;
- esac
- if [ $PEP_RESULT -eq 0 ]; then
- out 2 "PEP8 OK"
- else
- out 1 "PEP8 FAIL"
- fi
- return $PEP_RESULT
-}
-
-
-NOSETESTS="python run_tests.py $noseopts $noseargs"
-
-if [ $never_venv -eq 0 ]
-then
- # Remove the virtual environment if --force used
- if [ $force -eq 1 ]; then
- echo "Cleaning virtualenv..."
- rm -rf ${venv}
- fi
- if [ -e ${venv} ]; then
- wrapper="${with_venv}"
- else
- if [ $always_venv -eq 1 ]; then
- # Automatically install the virtualenv
- python tools/install_venv.py || exit 1
- wrapper="${with_venv}"
- else
- echo -e "No virtual environment found...create one? (Y/n) \c"
- read use_ve
- if [ "x$use_ve" = "xY" -o "x$use_ve" = "x" -o "x$use_ve" = "xy" ]; then
- # Install the virtualenv and run the test suite in it
- python tools/install_venv.py || exit 1
- wrapper=${with_venv}
- fi
- fi
- fi
-fi
-
-# Delete old coverage data from previous runs
-if [ $coverage -eq 1 ]; then
- ${wrapper} coverage erase
-fi
-
-if [ $just_pep8 -eq 1 ]; then
- run_pep8
- exit $?
-fi
-
-run_tests
-TEST_RESULT=$?
-
-if [ -z "$noseargs" ]; then
- run_pep8 || exit 1
-fi
-
-if [ $coverage -eq 1 ]; then
- echo "Generating coverage report in covhtml/"
- ${wrapper} coverage html -d covhtml -i --include='windc/*' --omit='windc/db/migrate_repo*,windc/common*,windc/tests*'
-fi
-
-exit $TEST_RESULT
diff --git a/windc/setup.py b/windc/setup.py
deleted file mode 100644
index 3265bcab..00000000
--- a/windc/setup.py
+++ /dev/null
@@ -1,90 +0,0 @@
-#!/usr/bin/python
-# vim: tabstop=4 shiftwidth=4 softtabstop=4
-
-# Copyright 2011 OpenStack LLC.
-# All Rights Reserved.
-#
-# Licensed under the Apache License, Version 2.0 (the "License"); you may
-# not use this file except in compliance with the License. You may obtain
-# a copy of the License at
-#
-# http://www.apache.org/licenses/LICENSE-2.0
-#
-# Unless required by applicable law or agreed to in writing, software
-# distributed under the License is distributed on an "AS IS" BASIS, WITHOUT
-# WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. See the
-# License for the specific language governing permissions and limitations
-# under the License.
-
-import os
-import subprocess
-
-from setuptools import setup, find_packages
-from setuptools.command.sdist import sdist
-
-from windc import version
-
-
-if os.path.isdir('.bzr'):
- with open("windc/vcsversion.py", 'w') as version_file:
- vcs_cmd = subprocess.Popen(["bzr", "version-info", "--python"],
- stdout=subprocess.PIPE)
- vcsversion = vcs_cmd.communicate()[0]
- version_file.write(vcsversion)
-
-
-class local_sdist(sdist):
- """Customized sdist hook - builds the ChangeLog file from VC first"""
-
- def run(self):
- if os.path.isdir('.bzr'):
- # We're in a bzr branch
-
- log_cmd = subprocess.Popen(["bzr", "log", "--gnu"],
- stdout=subprocess.PIPE)
- changelog = log_cmd.communicate()[0]
- with open("ChangeLog", "w") as changelog_file:
- changelog_file.write(changelog)
- sdist.run(self)
-
-cmdclass = {'sdist': local_sdist}
-
-# If Sphinx is installed on the box running setup.py,
-# enable setup.py to build the documentation, otherwise,
-# just ignore it
-try:
- from sphinx.setup_command import BuildDoc
-
- class local_BuildDoc(BuildDoc):
- def run(self):
- for builder in ['html', 'man']:
- self.builder = builder
- self.finalize_options()
- BuildDoc.run(self)
- cmdclass['build_sphinx'] = local_BuildDoc
-
-except:
- pass
-
-
-setup(
- name='windc',
- version=version.canonical_version_string(),
- description='The WinDC project provides a simple WSGI server for Windows Environment Management',
- license='Apache License (2.0)',
- author='OpenStack',
- author_email='openstack@lists.launchpad.net',
- url='http://windc.openstack.org/',
- packages=find_packages(exclude=['tests', 'bin']),
- test_suite='nose.collector',
- cmdclass=cmdclass,
- include_package_data=True,
- classifiers=[
- 'Development Status :: 4 - Beta',
- 'License :: OSI Approved :: Apache Software License',
- 'Operating System :: POSIX :: Linux',
- 'Programming Language :: Python :: 2.6',
- 'Environment :: No Input/Output (Daemon)',
- ],
- scripts=['bin/windc',
- 'bin/windc-api'])
diff --git a/windc/tests/manual/createDataCenter.sh b/windc/tests/manual/createDataCenter.sh
deleted file mode 100755
index 2c1e5315..00000000
--- a/windc/tests/manual/createDataCenter.sh
+++ /dev/null
@@ -1,4 +0,0 @@
-#!/bin/bash
-
-URL=http://localhost:8082/foo/datacenters
-curl -v -H "Content-Type: application/json" -X POST -d@createDataCenterParameters$1 $URL
diff --git a/windc/tests/manual/createDataCenterParameters b/windc/tests/manual/createDataCenterParameters
deleted file mode 100644
index 6200231d..00000000
--- a/windc/tests/manual/createDataCenterParameters
+++ /dev/null
@@ -1,7 +0,0 @@
-{
-"name": "Test Data Center 2",
-"type": "SingleZone",
-"version":"1.1",
-"KMS":"172.16.1.2",
-"WSUS":"172.16.1.3"
-}
diff --git a/windc/tests/manual/createService.sh b/windc/tests/manual/createService.sh
deleted file mode 100755
index 7a508884..00000000
--- a/windc/tests/manual/createService.sh
+++ /dev/null
@@ -1,4 +0,0 @@
-#!/bin/bash
-
-URL=http://localhost:8082/foo/datacenters/$1/services
-curl -v -H "Content-Type: application/json" -X POST -d@createServiceParameters$2 $URL
diff --git a/windc/tests/manual/createServiceParameters b/windc/tests/manual/createServiceParameters
deleted file mode 100644
index 0954f925..00000000
--- a/windc/tests/manual/createServiceParameters
+++ /dev/null
@@ -1,8 +0,0 @@
-{
-"type": "active_directory_service",
-"zones": ["zone1"],
-"domain": "ACME.cloud",
-"AdminUser": "Admin",
-"AdminPassword": "StrongPassword",
-"DomainControllerNames": ["AD-DC001"]
-}
diff --git a/windc/tests/manual/listDataCenter.sh b/windc/tests/manual/listDataCenter.sh
deleted file mode 100755
index bb956800..00000000
--- a/windc/tests/manual/listDataCenter.sh
+++ /dev/null
@@ -1 +0,0 @@
-curl -X GET http://localhost:8082/foo/datacenters
diff --git a/windc/tests/unit/test_base_driver.py b/windc/tests/unit/test_base_driver.py
deleted file mode 100644
index dafdf733..00000000
--- a/windc/tests/unit/test_base_driver.py
+++ /dev/null
@@ -1,14 +0,0 @@
-import unittest
-import mock
-
-
-from windc.api.v1.router import API
-
-
-class TestBaseDriver(unittest.TestCase):
- def setUp(self):
- super(TestBaseDriver, self).setUp()
- self.conf = mock.Mock()
-
- def testAPI(self):
- api = API(None)
diff --git a/windc/tools/install_venv.py b/windc/tools/install_venv.py
deleted file mode 100644
index c3b81718..00000000
--- a/windc/tools/install_venv.py
+++ /dev/null
@@ -1,154 +0,0 @@
-# vim: tabstop=4 shiftwidth=4 softtabstop=4
-
-# Copyright 2010 United States Government as represented by the
-# Administrator of the National Aeronautics and Space Administration.
-# All Rights Reserved.
-#
-# Copyright 2010 OpenStack LLC.
-#
-# Licensed under the Apache License, Version 2.0 (the "License"); you may
-# not use this file except in compliance with the License. You may obtain
-# a copy of the License at
-#
-# http://www.apache.org/licenses/LICENSE-2.0
-#
-# Unless required by applicable law or agreed to in writing, software
-# distributed under the License is distributed on an "AS IS" BASIS, WITHOUT
-# WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. See the
-# License for the specific language governing permissions and limitations
-# under the License.
-
-"""
-Installation script for Glance's development virtualenv
-"""
-
-import os
-import subprocess
-import sys
-
-
-ROOT = os.path.dirname(os.path.dirname(os.path.realpath(__file__)))
-VENV = os.path.join(ROOT, '.venv')
-PIP_REQUIRES = os.path.join(ROOT, 'tools', 'pip-requires')
-TEST_REQUIRES = os.path.join(ROOT, 'tools', 'test-requires')
-
-
-def die(message, *args):
- print >> sys.stderr, message % args
- sys.exit(1)
-
-
-def run_command(cmd, redirect_output=True, check_exit_code=True):
- """
- Runs a command in an out-of-process shell, returning the
- output of that command. Working directory is ROOT.
- """
- if redirect_output:
- stdout = subprocess.PIPE
- else:
- stdout = None
-
- proc = subprocess.Popen(cmd, cwd=ROOT, stdout=stdout)
- output = proc.communicate()[0]
- if check_exit_code and proc.returncode != 0:
- die('Command "%s" failed.\n%s', ' '.join(cmd), output)
- return output
-
-
-HAS_EASY_INSTALL = bool(run_command(['which', 'easy_install'],
- check_exit_code=False).strip())
-HAS_VIRTUALENV = bool(run_command(['which', 'virtualenv'],
- check_exit_code=False).strip())
-
-
-def check_dependencies():
- """Make sure virtualenv is in the path."""
-
- if not HAS_VIRTUALENV:
- print 'not found.'
- # Try installing it via easy_install...
- if HAS_EASY_INSTALL:
- print 'Installing virtualenv via easy_install...',
- if not run_command(['which', 'easy_install']):
- die('ERROR: virtualenv not found.\n\n'
- 'Balancer development requires virtualenv, please install'
- ' it using your favorite package management tool')
- print 'done.'
- print 'done.'
-
-
-def create_virtualenv(venv=VENV):
- """
- Creates the virtual environment and installs PIP only into the
- virtual environment
- """
- print 'Creating venv...',
- run_command(['virtualenv', '-q', '--no-site-packages', VENV])
- print 'done.'
- print 'Installing pip in virtualenv...',
- if not run_command(['tools/with_venv.sh', 'easy_install',
- 'pip>1.0']).strip():
- die("Failed to install pip.")
- print 'done.'
-
-
-def pip_install(*args):
- run_command(['tools/with_venv.sh',
- 'pip', 'install', '--upgrade'] + list(args),
- redirect_output=False)
-
-
-def install_dependencies(venv=VENV):
- print 'Installing dependencies with pip (this can take a while)...'
-
- pip_install('pip')
-
- pip_install('-r', PIP_REQUIRES)
- pip_install('-r', TEST_REQUIRES)
-
- # Tell the virtual env how to "import glance"
- py_ver = _detect_python_version(venv)
- pthfile = os.path.join(venv, "lib", py_ver,
- "site-packages", "balancer.pth")
- f = open(pthfile, 'w')
- f.write("%s\n" % ROOT)
-
-
-def _detect_python_version(venv):
- lib_dir = os.path.join(venv, "lib")
- for pathname in os.listdir(lib_dir):
- if pathname.startswith('python'):
- return pathname
- raise Exception('Unable to detect Python version')
-
-
-def print_help():
- help = """
- Glance development environment setup is complete.
-
- Glance development uses virtualenv to track and manage Python dependencies
- while in development and testing.
-
- To activate the Glance virtualenv for the extent of your current shell session
- you can run:
-
- $ source .venv/bin/activate
-
- Or, if you prefer, you can run commands in the virtualenv on a case by case
- basis by running:
-
- $ tools/with_venv.sh
-
- Also, make test will automatically use the virtualenv.
- """
- print help
-
-
-def main(argv):
- check_dependencies()
- create_virtualenv()
- install_dependencies()
- print_help()
-
-if __name__ == '__main__':
- main(sys.argv)
diff --git a/windc/tools/pip-requires b/windc/tools/pip-requires
deleted file mode 100644
index 20109d9f..00000000
--- a/windc/tools/pip-requires
+++ /dev/null
@@ -1,23 +0,0 @@
-# The greenlet package must be compiled with gcc and needs
-# the Python.h headers. Make sure you install the python-dev
-# package to get the right headers...
-greenlet>=0.3.1
-
-SQLAlchemy<=0.7.9
-anyjson
-eventlet>=0.9.12
-PasteDeploy
-Routes
-webob==1.0.8
-wsgiref
-argparse
-sqlalchemy-migrate>=0.7.2
-httplib2
-kombu
-iso8601>=0.1.4
-PyChef
-# For paste.util.template used in keystone.common.template
-Paste
-
-passlib
-puka
diff --git a/windc/tools/test-requires b/windc/tools/test-requires
deleted file mode 100644
index 0e7a44da..00000000
--- a/windc/tools/test-requires
+++ /dev/null
@@ -1,16 +0,0 @@
-# Packages needed for dev testing
-# For translations processing
-Babel
-
-# Needed for testing
-unittest2
-mock==0.8.0
-nose
-nose-exclude
-nosexcover
-#openstack.nose_plugin
-pep8==1.0.1
-sphinx>=1.1.2
-paramiko
-ipaddr
-pysqlite
diff --git a/windc/windc/api/v1/__init__.py b/windc/windc/api/v1/__init__.py
deleted file mode 100644
index e69de29b..00000000
diff --git a/windc/windc/api/v1/datacenters.py b/windc/windc/api/v1/datacenters.py
deleted file mode 100644
index 9e050623..00000000
--- a/windc/windc/api/v1/datacenters.py
+++ /dev/null
@@ -1,76 +0,0 @@
-# vim: tabstop=4 shiftwidth=4 softtabstop=4
-
-# Copyright 2011 OpenStack LLC.
-# All Rights Reserved.
-#
-# Licensed under the Apache License, Version 2.0 (the "License"); you may
-# not use this file except in compliance with the License. You may obtain
-# a copy of the License at
-#
-# http://www.apache.org/licenses/LICENSE-2.0
-#
-# Unless required by applicable law or agreed to in writing, software
-# distributed under the License is distributed on an "AS IS" BASIS, WITHOUT
-# WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. See the
-# License for the specific language governing permissions and limitations
-# under the License.
-
-import logging
-
-from openstack.common import wsgi
-
-from windc import utils
-from windc.core import api as core_api
-from windc.db import api as db_api
-
-LOG = logging.getLogger(__name__)
-
-
-class Datacenters_Controller(object):
- def __init__(self, conf):
- LOG.debug("Creating data centers controller with config:"
- "datacenters.py %s", conf)
- self.conf = conf
-
- @utils.verify_tenant
- def index(self, req, tenant_id):
- LOG.debug("Got index request. Request: %s", req)
- result = core_api.dc_get_index(self.conf, tenant_id)
- LOG.debug("Got list of datacenters: %s", result)
- return {'datacenters': result}
-
- @utils.http_success_code(202)
- @utils.verify_tenant
- def create(self, req, tenant_id, body):
- LOG.debug("Got create request. Request: %s", req)
- #here we need to decide which device should be used
- params = body.copy()
- LOG.debug("Headers: %s", req.headers)
- # We need to create DataCenter object and return its id
- params['tenant_id'] = tenant_id
- dc_id = core_api.create_dc(self.conf, params)
- return {'datacenter': {'id': dc_id}}
-
- @utils.verify_tenant
- def delete(self, req, tenant_id, datacenter_id):
- LOG.debug("Got delete request. Request: %s", req)
- core_api.delete_dc(self.conf, tenant_id, datacenter_id)
-
- @utils.verify_tenant
- def show(self, req, tenant_id, datacenter_id):
- LOG.debug("Got datacenter info request. Request: %s", req)
- result = core_api.dc_get_data(self.conf, tenant_id, datacenter_id)
- return {'datacenter': result}
-
- @utils.verify_tenant
- def update(self, req, tenant_id, datacenter_id, body):
- LOG.debug("Got update request. Request: %s", req)
- core_api.update_dc(self.conf, tenant_id, datacenter_id, body)
- return {'datacenter': {'id': dc_id}}
-
-
-def create_resource(conf):
- """Datacenters resource factory method"""
- deserializer = wsgi.JSONRequestDeserializer()
- serializer = wsgi.JSONResponseSerializer()
- return wsgi.Resource(Datacenters_Controller(conf), deserializer, serializer)
diff --git a/windc/windc/api/v1/filters.py b/windc/windc/api/v1/filters.py
deleted file mode 100644
index 9212626a..00000000
--- a/windc/windc/api/v1/filters.py
+++ /dev/null
@@ -1,42 +0,0 @@
-# vim: tabstop=4 shiftwidth=4 softtabstop=4
-
-# Copyright 2012, Piston Cloud Computing, Inc.
-# All Rights Reserved.
-#
-# Licensed under the Apache License, Version 2.0 (the "License");
-# you may not use this file except in compliance with the License.
-# You may obtain a copy of the License at
-#
-# http://www.apache.org/licenses/LICENSE-2.0
-#
-# Unless required by applicable law or agreed to in writing, software
-# distributed under the License is distributed on an "AS IS" BASIS,
-# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
-# See the License for the specific language governing permissions and
-# limitations under the License.
-
-
-def validate(filter, value):
- return FILTER_FUNCTIONS.get(filter, lambda v: True)(value)
-
-
-def validate_int_in_range(min=0, max=None):
- def _validator(v):
- try:
- if max is None:
- return min <= int(v)
- return min <= int(v) <= max
- except ValueError:
- return False
- return _validator
-
-
-def validate_boolean(v):
- return v.lower() in ('none', 'true', 'false', '1', '0')
-
-
-FILTER_FUNCTIONS = {'size_max': validate_int_in_range(), # build validator
- 'size_min': validate_int_in_range(), # build validator
- 'min_ram': validate_int_in_range(), # build validator
- 'protected': validate_boolean,
- 'is_public': validate_boolean, }
diff --git a/windc/windc/api/v1/router.py b/windc/windc/api/v1/router.py
deleted file mode 100644
index 79acc716..00000000
--- a/windc/windc/api/v1/router.py
+++ /dev/null
@@ -1,56 +0,0 @@
-# vim: tabstop=4 shiftwidth=4 softtabstop=4
-# Copyright 2011 OpenStack LLC.
-# All Rights Reserved.
-#
-# Licensed under the Apache License, Version 2.0 (the "License"); you may
-# not use this file except in compliance with the License. You may obtain
-# a copy of the License at
-#
-# http://www.apache.org/licenses/LICENSE-2.0
-#
-# Unless required by applicable law or agreed to in writing, software
-# distributed under the License is distributed on an "AS IS" BASIS, WITHOUT
-# WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. See the
-# License for the specific language governing permissions and limitations
-# under the License.
-
-import logging
-
-import routes
-
-from windc.api.v1 import datacenters
-from windc.api.v1 import services
-from openstack.common import wsgi
-
-
-LOG = logging.getLogger(__name__)
-
-
-class API(wsgi.Router):
-
- """WSGI router for windc v1 API requests."""
-
- def __init__(self, conf, **local_conf):
- self.conf = conf
- mapper = routes.Mapper()
- tenant_mapper = mapper.submapper(path_prefix="/{tenant_id}")
- datacenter_resource = datacenters.create_resource(self.conf)
- datacenter_collection = tenant_mapper.collection(
- "datacenters", "datacenter",
- controller=datacenter_resource,
- member_prefix="/{datacenter_id}",
- formatted=False)
- service_resource = services.create_resource(self.conf)
- service_collection = datacenter_collection.member.\
- collection('services','service',
- controller=service_resource,
- member_prefix="/{service_id}",
- formatted=False)
- service_collection.member.connect("/{status}",
- action="changeServiceStatus",
- conditions={'method': ["PUT"]})
- mapper.connect("/servicetypes",
- controller=datacenter_resource,
- action="show_servicetypes",
- conditions={'method': ["GET"]})
- super(API, self).__init__(mapper)
diff --git a/windc/windc/api/v1/services.py b/windc/windc/api/v1/services.py
deleted file mode 100644
index 0a05593b..00000000
--- a/windc/windc/api/v1/services.py
+++ /dev/null
@@ -1,83 +0,0 @@
-# vim: tabstop=4 shiftwidth=4 softtabstop=4
-
-# Copyright 2011 OpenStack LLC.
-# All Rights Reserved.
-#
-# Licensed under the Apache License, Version 2.0 (the "License"); you may
-# not use this file except in compliance with the License. You may obtain
-# a copy of the License at
-#
-# http://www.apache.org/licenses/LICENSE-2.0
-#
-# Unless required by applicable law or agreed to in writing, software
-# distributed under the License is distributed on an "AS IS" BASIS, WITHOUT
-# WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. See the
-# License for the specific language governing permissions and limitations
-# under the License.
-
-import logging
-
-from openstack.common import wsgi
-
-from windc import utils
-from windc.core import api as core_api
-from windc.db import api as db_api
-
-LOG = logging.getLogger(__name__)
-
-
-class Services_Controller(object):
- def __init__(self, conf):
- LOG.debug("Creating services controller with config:"
- "services.py %s", conf)
- self.conf = conf
-
- @utils.verify_tenant
- def index(self, req, tenant_id, datacenter_id):
- LOG.debug("Got index request. Request: %s", req)
- result = core_api.service_get_index(self.conf, tenant_id,
- datacenter_id)
- return {'services': result}
-
- @utils.http_success_code(202)
- @utils.verify_tenant
- def create(self, req, tenant_id, datacenter_id, body):
- LOG.debug("Got create request. Request: %s", req)
- #here we need to decide which device should be used
- params = body.copy()
- LOG.debug("Headers: %s", req.headers)
- # We need to create Service object and return its id
- params['tenant_id'] = tenant_id
- params['datacenter_id'] = datacenter_id
- params['type'] = 'active_directory_service'
- service_id = core_api.create_service(self.conf, params)
- return {'service': {'id': service_id}}
-
- @utils.http_success_code(204)
- @utils.verify_tenant
- def delete(self, req, tenant_id, datacenter_id, service_id):
- LOG.debug("Got delete request. Request: %s", req)
- core_api.delete_service(self.conf, tenant_id,
- datacenter_id, service_id)
-
- @utils.verify_tenant
- def show(self, req, tenant_id, datacenter_id, service_id):
- LOG.debug("Got loadbalancerr info request. Request: %s", req)
- result = core_api.service_get_data(self.conf, tenant_id,
- datacenter_id, service_id)
- return {'service': result}
-
- @utils.http_success_code(202)
- @utils.verify_tenant
- def update(self, req, tenant_id, datacenter_id, service_id, body):
- LOG.debug("Got update request. Request: %s", req)
- core_api.update_service(self.conf, tenant_id, datacenter_id,
- service_id, body)
- return {'service': {'id': service_id}}
-
-
-def create_resource(conf):
- """Services resource factory method"""
- deserializer = wsgi.JSONRequestDeserializer()
- serializer = wsgi.JSONResponseSerializer()
- return wsgi.Resource(Services_Controller(conf), deserializer, serializer)
diff --git a/windc/windc/api/versions.py b/windc/windc/api/versions.py
deleted file mode 100644
index d6516a4b..00000000
--- a/windc/windc/api/versions.py
+++ /dev/null
@@ -1,66 +0,0 @@
-# vim: tabstop=4 shiftwidth=4 softtabstop=4
-
-# Copyright 2011 OpenStack LLC.
-# All Rights Reserved.
-#
-# Licensed under the Apache License, Version 2.0 (the "License"); you may
-# not use this file except in compliance with the License. You may obtain
-# a copy of the License at
-#
-# http://www.apache.org/licenses/LICENSE-2.0
-#
-# Unless required by applicable law or agreed to in writing, software
-# distributed under the License is distributed on an "AS IS" BASIS, WITHOUT
-# WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. See the
-# License for the specific language governing permissions and limitations
-# under the License.
-
-"""
-Controller that returns information on the Glance API versions
-"""
-
-import httplib
-import json
-
-import webob.dec
-
-
-class Controller(object):
-
- """
- A controller that produces information on the Glance API versions.
- """
-
- def __init__(self, conf):
- self.conf = conf
-
- @webob.dec.wsgify
- def __call__(self, req):
- """Respond to a request for all OpenStack API versions."""
- version_objs = [
- {
- "id": "v1.0",
- "status": "CURRENT",
- "links": [
- {
- "rel": "self",
- "href": self.get_href(req)}]},
- {
- "id": "v1.1",
- "status": "SUPPORTED",
- "links": [
- {
- "rel": "self",
- "href": self.get_href(req)}]}]
-
- body = json.dumps(dict(versions=version_objs))
-
- response = webob.Response(request=req,
- status=httplib.MULTIPLE_CHOICES,
- content_type='application/json')
- response.body = body
-
- return response
-
- def get_href(self, req):
- return "%s/v1/" % req.host_url
diff --git a/windc/windc/common/cfg.py b/windc/windc/common/cfg.py
deleted file mode 100644
index c63cd157..00000000
--- a/windc/windc/common/cfg.py
+++ /dev/null
@@ -1,1135 +0,0 @@
-# vim: tabstop=4 shiftwidth=4 softtabstop=4
-
-# Copyright 2011 Red Hat, Inc.
-#
-# Licensed under the Apache License, Version 2.0 (the "License"); you may
-# not use this file except in compliance with the License. You may obtain
-# a copy of the License at
-#
-# http://www.apache.org/licenses/LICENSE-2.0
-#
-# Unless required by applicable law or agreed to in writing, software
-# distributed under the License is distributed on an "AS IS" BASIS, WITHOUT
-# WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. See the
-# License for the specific language governing permissions and limitations
-# under the License.
-
-r"""
-Configuration options which may be set on the command line or in config files.
-
-The schema for each option is defined using the Opt sub-classes e.g.
-
- common_opts = [
- cfg.StrOpt('bind_host',
- default='0.0.0.0',
- help='IP address to listen on'),
- cfg.IntOpt('bind_port',
- default=9292,
- help='Port number to listen on')
- ]
-
-Options can be strings, integers, floats, booleans, lists or 'multi strings':
-
- enabled_apis_opt = \
- cfg.ListOpt('enabled_apis',
- default=['ec2', 'osapi'],
- help='List of APIs to enable by default')
-
- DEFAULT_EXTENSIONS = [
- 'nova.api.openstack.contrib.standard_extensions'
- ]
- osapi_extension_opt = \
- cfg.MultiStrOpt('osapi_extension',
- default=DEFAULT_EXTENSIONS)
-
-Option schemas are registered with with the config manager at runtime, but
-before the option is referenced:
-
- class ExtensionManager(object):
-
- enabled_apis_opt = cfg.ListOpt(...)
-
- def __init__(self, conf):
- self.conf = conf
- self.conf.register_opt(enabled_apis_opt)
- ...
-
- def _load_extensions(self):
- for ext_factory in self.conf.osapi_extension:
- ....
-
-A common usage pattern is for each option schema to be defined in the module or
-class which uses the option:
-
- opts = ...
-
- def add_common_opts(conf):
- conf.register_opts(opts)
-
- def get_bind_host(conf):
- return conf.bind_host
-
- def get_bind_port(conf):
- return conf.bind_port
-
-An option may optionally be made available via the command line. Such options
-must registered with the config manager before the command line is parsed (for
-the purposes of --help and CLI arg validation):
-
- cli_opts = [
- cfg.BoolOpt('verbose',
- short='v',
- default=False,
- help='Print more verbose output'),
- cfg.BoolOpt('debug',
- short='d',
- default=False,
- help='Print debugging output'),
- ]
-
- def add_common_opts(conf):
- conf.register_cli_opts(cli_opts)
-
-The config manager has a single CLI option defined by default, --config-file:
-
- class ConfigOpts(object):
-
- config_file_opt = \
- MultiStrOpt('config-file',
- ...
-
- def __init__(self, ...):
- ...
- self.register_cli_opt(self.config_file_opt)
-
-Option values are parsed from any supplied config files using SafeConfigParser.
-If none are specified, a default set is used e.g. balancer-api.conf and
-balancer-common.conf:
-
- balancer-api.conf:
- [DEFAULT]
- bind_port = 9292
-
- balancer-common.conf:
- [DEFAULT]
- bind_host = 0.0.0.0
-
-Option values in config files override those on the command line. Config files
-are parsed in order, with values in later files overriding those in earlier
-files.
-
-The parsing of CLI args and config files is initiated by invoking the config
-manager e.g.
-
- conf = ConfigOpts()
- conf.register_opt(BoolOpt('verbose', ...))
- conf(sys.argv[1:])
- if conf.verbose:
- ...
-
-Options can be registered as belonging to a group:
-
- rabbit_group = cfg.OptionGroup(name='rabbit',
- title='RabbitMQ options')
-
- rabbit_host_opt = \
- cfg.StrOpt('host',
- group='rabbit',
- default='localhost',
- help='IP/hostname to listen on'),
- rabbit_port_opt = \
- cfg.IntOpt('port',
- default=5672,
- help='Port number to listen on')
- rabbit_ssl_opt = \
- conf.BoolOpt('use_ssl',
- default=False,
- help='Whether to support SSL connections')
-
- def register_rabbit_opts(conf):
- conf.register_group(rabbit_group)
- # options can be registered under a group in any of these ways:
- conf.register_opt(rabbit_host_opt)
- conf.register_opt(rabbit_port_opt, group='rabbit')
- conf.register_opt(rabbit_ssl_opt, group=rabbit_group)
-
-If no group is specified, options belong to the 'DEFAULT' section of config
-files:
-
- balancer-api.conf:
- [DEFAULT]
- bind_port = 9292
- ...
-
- [rabbit]
- host = localhost
- port = 5672
- use_ssl = False
- userid = guest
- password = guest
- virtual_host = /
-
-Command-line options in a group are automatically prefixed with the group name:
-
- --rabbit-host localhost --rabbit-use-ssl False
-
-Option values in the default group are referenced as attributes/properties on
-the config manager; groups are also attributes on the config manager, with
-attributes for each of the options associated with the group:
-
- server.start(app, conf.bind_port, conf.bind_host, conf)
-
- self.connection = kombu.connection.BrokerConnection(
- hostname=conf.rabbit.host,
- port=conf.rabbit.port,
- ...)
-
-Option values may reference other values using PEP 292 string substitution:
-
- opts = [
- cfg.StrOpt('state_path',
- default=os.path.join(os.path.dirname(__file__), '../'),
- help='Top-level directory for maintaining nova state'),
- cfg.StrOpt('sqlite_db',
- default='nova.sqlite',
- help='file name for sqlite'),
- cfg.StrOpt('sql_connection',
- default='sqlite:///$state_path/$sqlite_db',
- help='connection string for sql database'),
- ]
-
-Note that interpolation can be avoided by using '$$'.
-"""
-
-import sys
-import ConfigParser
-import copy
-import optparse
-import os
-import string
-
-
-class Error(Exception):
- """Base class for cfg exceptions."""
-
- def __init__(self, msg=None):
- self.msg = msg
-
- def __str__(self):
- return self.msg
-
-
-class ArgsAlreadyParsedError(Error):
- """Raised if a CLI opt is registered after parsing."""
-
- def __str__(self):
- ret = "arguments already parsed"
- if self.msg:
- ret += ": " + self.msg
- return ret
-
-
-class NoSuchOptError(Error):
- """Raised if an opt which doesn't exist is referenced."""
-
- def __init__(self, opt_name, group=None):
- self.opt_name = opt_name
- self.group = group
-
- def __str__(self):
- if self.group is None:
- return "no such option: %s" % self.opt_name
- else:
- return "no such option in group %s: %s" % (self.group.name,
- self.opt_name)
-
-
-class NoSuchGroupError(Error):
- """Raised if a group which doesn't exist is referenced."""
-
- def __init__(self, group_name):
- self.group_name = group_name
-
- def __str__(self):
- return "no such group: %s" % self.group_name
-
-
-class DuplicateOptError(Error):
- """Raised if multiple opts with the same name are registered."""
-
- def __init__(self, opt_name):
- self.opt_name = opt_name
-
- def __str__(self):
- return "duplicate option: %s" % self.opt_name
-
-
-class TemplateSubstitutionError(Error):
- """Raised if an error occurs substituting a variable in an opt value."""
-
- def __str__(self):
- return "template substitution error: %s" % self.msg
-
-
-class ConfigFilesNotFoundError(Error):
- """Raised if one or more config files are not found."""
-
- def __init__(self, config_files):
- self.config_files = config_files
-
- def __str__(self):
- return 'Failed to read some config files: %s' % \
- string.join(self.config_files, ',')
-
-
-class ConfigFileParseError(Error):
- """Raised if there is an error parsing a config file."""
-
- def __init__(self, config_file, msg):
- self.config_file = config_file
- self.msg = msg
-
- def __str__(self):
- return 'Failed to parse %s: %s' % (self.config_file, self.msg)
-
-
-class ConfigFileValueError(Error):
- """Raised if a config file value does not match its opt type."""
- pass
-
-
-def find_config_files(project=None, prog=None, filetype="conf"):
- """Return a list of default configuration files.
-
- We default to two config files: [${project}.conf, ${prog}.conf]
-
- And we look for those config files in the following directories:
-
- ~/.${project}/
- ~/
- /etc/${project}/
- /etc/
-
- We return an absolute path for (at most) one of each the default config
- files, for the topmost directory it exists in.
-
- For example, if project=foo, prog=bar and /etc/foo/foo.conf, /etc/bar.conf
- and ~/.foo/bar.conf all exist, then we return ['/etc/foo/foo.conf',
- '~/.foo/bar.conf']
-
- If no project name is supplied, we only look for ${prog.conf}.
-
- :param project: an optional project name
- :param prog: the program name, defaulting to the basename of sys.argv[0]
- """
- if prog is None:
- prog = os.path.basename(sys.argv[0])
-
- fix_path = lambda p: os.path.abspath(os.path.expanduser(p))
-
- cfg_dirs = [
- fix_path(os.path.join('~', '.' + project)) if project else None,
- fix_path('~'),
- os.path.join('/etc', project) if project else None,
- '/etc',
- 'etc',
- ]
- cfg_dirs = filter(bool, cfg_dirs)
-
- def search_dirs(dirs, basename):
- for d in dirs:
- path = os.path.join(d, basename)
- if os.path.exists(path):
- return path
-
- config_files = []
-
- if project:
- project_config = search_dirs(cfg_dirs, '%s.%s' % (project, filetype))
- config_files.append(project_config)
-
- config_files.append(search_dirs(cfg_dirs, '%s.%s' % (prog, filetype)))
-
- return filter(bool, config_files)
-
-
-def _is_opt_registered(opts, opt):
- """Check whether an opt with the same name is already registered.
-
- The same opt may be registered multiple times, with only the first
- registration having any effect. However, it is an error to attempt
- to register a different opt with the same name.
-
- :param opts: the set of opts already registered
- :param opt: the opt to be registered
- :returns: True if the opt was previously registered, False otherwise
- :raises: DuplicateOptError if a naming conflict is detected
- """
- if opt.dest in opts:
- if opts[opt.dest]['opt'] is not opt:
- raise DuplicateOptError(opt.name)
- return True
- else:
- return False
-
-
-class Opt(object):
-
- """Base class for all configuration options.
-
- An Opt object has no public methods, but has a number of public string
- properties:
-
- name:
- the name of the option, which may include hyphens
- dest:
- the (hyphen-less) ConfigOpts property which contains the option value
- short:
- a single character CLI option name
- default:
- the default value of the option
- metavar:
- the name shown as the argument to a CLI option in --help output
- help:
- an string explaining how the options value is used
- """
-
- def __init__(self, name, dest=None, short=None,
- default=None, metavar=None, help=None):
- """Construct an Opt object.
-
- The only required parameter is the option's name. However, it is
- common to also supply a default and help string for all options.
-
- :param name: the option's name
- :param dest: the name of the corresponding ConfigOpts property
- :param short: a single character CLI option name
- :param default: the default value of the option
- :param metavar: the option argument to show in --help
- :param help: an explanation of how the option is used
- """
- self.name = name
- if dest is None:
- self.dest = self.name.replace('-', '_')
- else:
- self.dest = dest
- self.short = short
- self.default = default
- self.metavar = metavar
- self.help = help
-
- def _get_from_config_parser(self, cparser, section):
- """Retrieves the option value from a ConfigParser object.
-
- This is the method ConfigOpts uses to look up the option value from
- config files. Most opt types override this method in order to perform
- type appropriate conversion of the returned value.
-
- :param cparser: a ConfigParser object
- :param section: a section name
- """
- return cparser.get(section, self.dest)
-
- def _add_to_cli(self, parser, group=None):
- """Makes the option available in the command line interface.
-
- This is the method ConfigOpts uses to add the opt to the CLI interface
- as appropriate for the opt type. Some opt types may extend this method,
- others may just extend the helper methods it uses.
-
- :param parser: the CLI option parser
- :param group: an optional OptGroup object
- """
- container = self._get_optparse_container(parser, group)
- kwargs = self._get_optparse_kwargs(group)
- prefix = self._get_optparse_prefix('', group)
- self._add_to_optparse(container, self.name, self.short, kwargs, prefix)
-
- def _add_to_optparse(self, container, name, short, kwargs, prefix=''):
- """Add an option to an optparse parser or group.
-
- :param container: an optparse.OptionContainer object
- :param name: the opt name
- :param short: the short opt name
- :param kwargs: the keyword arguments for add_option()
- :param prefix: an optional prefix to prepend to the opt name
- :raises: DuplicateOptError if a naming confict is detected
- """
- args = ['--' + prefix + name]
- if short:
- args += ['-' + short]
- for a in args:
- if container.has_option(a):
- raise DuplicateOptError(a)
- container.add_option(*args, **kwargs)
-
- def _get_optparse_container(self, parser, group):
- """Returns an optparse.OptionContainer.
-
- :param parser: an optparse.OptionParser
- :param group: an (optional) OptGroup object
- :returns: an optparse.OptionGroup if a group is given, else the parser
- """
- if group is not None:
- return group._get_optparse_group(parser)
- else:
- return parser
-
- def _get_optparse_kwargs(self, group, **kwargs):
- """Build a dict of keyword arguments for optparse's add_option().
-
- Most opt types extend this method to customize the behaviour of the
- options added to optparse.
-
- :param group: an optional group
- :param kwargs: optional keyword arguments to add to
- :returns: a dict of keyword arguments
- """
- dest = self.dest
- if group is not None:
- dest = group.name + '_' + dest
- kwargs.update({
- 'dest': dest,
- 'metavar': self.metavar,
- 'help': self.help,
- })
- return kwargs
-
- def _get_optparse_prefix(self, prefix, group):
- """Build a prefix for the CLI option name, if required.
-
- CLI options in a group are prefixed with the group's name in order
- to avoid conflicts between similarly named options in different
- groups.
-
- :param prefix: an existing prefix to append to (e.g. 'no' or '')
- :param group: an optional OptGroup object
- :returns: a CLI option prefix including the group name, if appropriate
- """
- if group is not None:
- return group.name + '-' + prefix
- else:
- return prefix
-
-
-class StrOpt(Opt):
- """
- String opts do not have their values transformed and are returned as
- str objects.
- """
- pass
-
-
-class BoolOpt(Opt):
-
- """
- Bool opts are set to True or False on the command line using --optname or
- --noopttname respectively.
-
- In config files, boolean values are case insensitive and can be set using
- 1/0, yes/no, true/false or on/off.
- """
-
- def _get_from_config_parser(self, cparser, section):
- """Retrieve the opt value as a boolean from ConfigParser."""
- return cparser.getboolean(section, self.dest)
-
- def _add_to_cli(self, parser, group=None):
- """Extends the base class method to add the --nooptname option."""
- super(BoolOpt, self)._add_to_cli(parser, group)
- self._add_inverse_to_optparse(parser, group)
-
- def _add_inverse_to_optparse(self, parser, group):
- """Add the --nooptname option to the option parser."""
- container = self._get_optparse_container(parser, group)
- kwargs = self._get_optparse_kwargs(group, action='store_false')
- prefix = self._get_optparse_prefix('no', group)
- kwargs["help"] = "The inverse of --" + self.name
- self._add_to_optparse(container, self.name, None, kwargs, prefix)
-
- def _get_optparse_kwargs(self, group, action='store_true', **kwargs):
- """Extends the base optparse keyword dict for boolean options."""
- return super(BoolOpt,
- self)._get_optparse_kwargs(group, action=action, **kwargs)
-
-
-class IntOpt(Opt):
-
- """Int opt values are converted to integers using the int() builtin."""
-
- def _get_from_config_parser(self, cparser, section):
- """Retrieve the opt value as a integer from ConfigParser."""
- return cparser.getint(section, self.dest)
-
- def _get_optparse_kwargs(self, group, **kwargs):
- """Extends the base optparse keyword dict for integer options."""
- return super(IntOpt,
- self)._get_optparse_kwargs(group, type='int', **kwargs)
-
-
-class FloatOpt(Opt):
-
- """Float opt values are converted to floats using the float() builtin."""
-
- def _get_from_config_parser(self, cparser, section):
- """Retrieve the opt value as a float from ConfigParser."""
- return cparser.getfloat(section, self.dest)
-
- def _get_optparse_kwargs(self, group, **kwargs):
- """Extends the base optparse keyword dict for float options."""
- return super(FloatOpt,
- self)._get_optparse_kwargs(group, type='float', **kwargs)
-
-
-class ListOpt(Opt):
-
- """
- List opt values are simple string values separated by commas. The opt value
- is a list containing these strings.
- """
-
- def _get_from_config_parser(self, cparser, section):
- """Retrieve the opt value as a list from ConfigParser."""
- return cparser.get(section, self.dest).split(',')
-
- def _get_optparse_kwargs(self, group, **kwargs):
- """Extends the base optparse keyword dict for list options."""
- return super(ListOpt,
- self)._get_optparse_kwargs(group,
- type='string',
- action='callback',
- callback=self._parse_list,
- **kwargs)
-
- def _parse_list(self, option, opt, value, parser):
- """An optparse callback for parsing an option value into a list."""
- setattr(parser.values, self.dest, value.split(','))
-
-
-class MultiStrOpt(Opt):
-
- """
- Multistr opt values are string opts which may be specified multiple times.
- The opt value is a list containing all the string values specified.
- """
-
- def _get_from_config_parser(self, cparser, section):
- """Retrieve the opt value as a multistr from ConfigParser."""
- # FIXME(markmc): values spread across the CLI and multiple
- # config files should be appended
- value = \
- super(MultiStrOpt, self)._get_from_config_parser(cparser, section)
- return value if value is None else [value]
-
- def _get_optparse_kwargs(self, group, **kwargs):
- """Extends the base optparse keyword dict for multi str options."""
- return super(MultiStrOpt,
- self)._get_optparse_kwargs(group, action='append')
-
-
-class OptGroup(object):
-
- """
- Represents a group of opts.
-
- CLI opts in the group are automatically prefixed with the group name.
-
- Each group corresponds to a section in config files.
-
- An OptGroup object has no public methods, but has a number of public string
- properties:
-
- name:
- the name of the group
- title:
- the group title as displayed in --help
- help:
- the group description as displayed in --help
- """
-
- def __init__(self, name, title=None, help=None):
- """Constructs an OptGroup object.
-
- :param name: the group name
- :param title: the group title for --help
- :param help: the group description for --help
- """
- self.name = name
- if title is None:
- self.title = "%s options" % title
- else:
- self.title = title
- self.help = help
-
- self._opts = {} # dict of dicts of {opt:, override:, default:)
- self._optparse_group = None
-
- def _register_opt(self, opt):
- """Add an opt to this group.
-
- :param opt: an Opt object
- :returns: False if previously registered, True otherwise
- :raises: DuplicateOptError if a naming conflict is detected
- """
- if _is_opt_registered(self._opts, opt):
- return False
-
- self._opts[opt.dest] = {'opt': opt, 'override': None, 'default': None}
-
- return True
-
- def _get_optparse_group(self, parser):
- """Build an optparse.OptionGroup for this group."""
- if self._optparse_group is None:
- self._optparse_group = \
- optparse.OptionGroup(parser, self.title, self.help)
- return self._optparse_group
-
-
-class ConfigOpts(object):
-
- """
- Config options which may be set on the command line or in config files.
-
- ConfigOpts is a configuration option manager with APIs for registering
- option schemas, grouping options, parsing option values and retrieving
- the values of options.
- """
-
- def __init__(self,
- project=None,
- prog=None,
- version=None,
- usage=None,
- default_config_files=None):
- """Construct a ConfigOpts object.
-
- Automatically registers the --config-file option with either a supplied
- list of default config files, or a list from find_config_files().
-
- :param project: the toplevel project name, used to locate config files
- :param prog: the name of the program (defaults to sys.argv[0] basename)
- :param version: the program version (for --version)
- :param usage: a usage string (%prog will be expanded)
- :param default_config_files: config files to use by default
- """
- if prog is None:
- prog = os.path.basename(sys.argv[0])
-
- if default_config_files is None:
- default_config_files = find_config_files(project, prog)
-
- self.project = project
- self.prog = prog
- self.version = version
- self.usage = usage
- self.default_config_files = default_config_files
-
- self._opts = {} # dict of dicts of (opt:, override:, default:)
- self._groups = {}
-
- self._args = None
- self._cli_values = {}
-
- self._oparser = optparse.OptionParser(prog=self.prog,
- version=self.version,
- usage=self.usage)
- self._cparser = None
-
- self.register_cli_opt(\
- MultiStrOpt('config-file',
- default=self.default_config_files,
- metavar='PATH',
- help='Path to a config file to use. Multiple config '
- 'files can be specified, with values in later '
- 'files taking precedence. The default files used '
- 'are: %s' % (self.default_config_files, )))
-
- def __call__(self, args=None):
- """Parse command line arguments and config files.
-
- Calling a ConfigOpts object causes the supplied command line arguments
- and config files to be parsed, causing opt values to be made available
- as attributes of the object.
-
- The object may be called multiple times, each time causing the previous
- set of values to be overwritten.
-
- :params args: command line arguments (defaults to sys.argv[1:])
- :returns: the list of arguments left over after parsing options
- :raises: SystemExit, ConfigFilesNotFoundError, ConfigFileParseError
- """
- self.reset()
-
- self._args = args
-
- (values, args) = self._oparser.parse_args(self._args)
-
- self._cli_values = vars(values)
-
- if self.config_file:
- self._parse_config_files(self.config_file)
-
- return args
-
- def __getattr__(self, name):
- """Look up an option value and perform string substitution.
-
- :param name: the opt name (or 'dest', more precisely)
- :returns: the option value (after string subsititution) or a GroupAttr
- :raises: NoSuchOptError,ConfigFileValueError,TemplateSubstitutionError
- """
- return self._substitute(self._get(name))
-
- def reset(self):
- """Reset the state of the object to before it was called."""
- self._args = None
- self._cli_values = None
- self._cparser = None
-
- def register_opt(self, opt, group=None):
- """Register an option schema.
-
- Registering an option schema makes any option value which is previously
- or subsequently parsed from the command line or config files available
- as an attribute of this object.
-
- :param opt: an instance of an Opt sub-class
- :param group: an optional OptGroup object or group name
- :return: False if the opt was already register, True otherwise
- :raises: DuplicateOptError
- """
- if group is not None:
- return self._get_group(group)._register_opt(opt)
-
- if _is_opt_registered(self._opts, opt):
- return False
-
- self._opts[opt.dest] = {'opt': opt, 'override': None, 'default': None}
-
- return True
-
- def register_opts(self, opts, group=None):
- """Register multiple option schemas at once."""
- for opt in opts:
- self.register_opt(opt, group)
-
- def register_cli_opt(self, opt, group=None):
- """Register a CLI option schema.
-
- CLI option schemas must be registered before the command line and
- config files are parsed. This is to ensure that all CLI options are
- show in --help and option validation works as expected.
-
- :param opt: an instance of an Opt sub-class
- :param group: an optional OptGroup object or group name
- :return: False if the opt was already register, True otherwise
- :raises: DuplicateOptError, ArgsAlreadyParsedError
- """
- if self._args != None:
- raise ArgsAlreadyParsedError("cannot register CLI option")
-
- if not self.register_opt(opt, group):
- return False
-
- if group is not None:
- group = self._get_group(group)
-
- opt._add_to_cli(self._oparser, group)
-
- return True
-
- def register_cli_opts(self, opts, group=None):
- """Register multiple CLI option schemas at once."""
- for opt in opts:
- self.register_cli_opt(opt, group)
-
- def register_group(self, group):
- """Register an option group.
-
- An option group must be registered before options can be registered
- with the group.
-
- :param group: an OptGroup object
- """
- if group.name in self._groups:
- return
-
- self._groups[group.name] = copy.copy(group)
-
- def set_override(self, name, override, group=None):
- """Override an opt value.
-
- Override the command line, config file and default values of a
- given option.
-
- :param name: the name/dest of the opt
- :param override: the override value
- :param group: an option OptGroup object or group name
- :raises: NoSuchOptError, NoSuchGroupError
- """
- opt_info = self._get_opt_info(name, group)
- opt_info['override'] = override
-
- def set_default(self, name, default, group=None):
- """Override an opt's default value.
-
- Override the default value of given option. A command line or
- config file value will still take precedence over this default.
-
- :param name: the name/dest of the opt
- :param default: the default value
- :param group: an option OptGroup object or group name
- :raises: NoSuchOptError, NoSuchGroupError
- """
- opt_info = self._get_opt_info(name, group)
- opt_info['default'] = default
-
- def log_opt_values(self, logger, lvl):
- """Log the value of all registered opts.
-
- It's often useful for an app to log its configuration to a log file at
- startup for debugging. This method dumps to the entire config state to
- the supplied logger at a given log level.
-
- :param logger: a logging.Logger object
- :param lvl: the log level (e.g. logging.DEBUG) arg to logger.log()
- """
- logger.log(lvl, "*" * 80)
- logger.log(lvl, "Configuration options gathered from:")
- logger.log(lvl, "command line args: %s", self._args)
- logger.log(lvl, "config files: %s", self.config_file)
- logger.log(lvl, "=" * 80)
-
- for opt_name in sorted(self._opts):
- logger.log(lvl, "%-30s = %s", opt_name, getattr(self, opt_name))
-
- for group_name in self._groups:
- group_attr = self.GroupAttr(self, group_name)
- for opt_name in sorted(self._groups[group_name]._opts):
- logger.log(lvl, "%-30s = %s",
- "%s.%s" % (group_name, opt_name),
- getattr(group_attr, opt_name))
-
- logger.log(lvl, "*" * 80)
-
- def print_usage(self, file=None):
- """Print the usage message for the current program."""
- self._oparser.print_usage(file)
-
- def _get(self, name, group=None):
- """Look up an option value.
-
- :param name: the opt name (or 'dest', more precisely)
- :param group: an option OptGroup
- :returns: the option value, or a GroupAttr object
- :raises: NoSuchOptError, NoSuchGroupError, ConfigFileValueError,
- TemplateSubstitutionError
- """
- if group is None and name in self._groups:
- return self.GroupAttr(self, name)
-
- if group is not None:
- group = self._get_group(group)
-
- info = self._get_opt_info(name, group)
- default, opt, override = map(lambda k: info[k], sorted(info.keys()))
-
- if override is not None:
- return override
-
- if self._cparser is not None:
- section = group.name if group is not None else 'DEFAULT'
- try:
- return opt._get_from_config_parser(self._cparser, section)
- except (ConfigParser.NoOptionError,
- ConfigParser.NoSectionError):
- pass
- except ValueError, ve:
- raise ConfigFileValueError(str(ve))
-
- name = name if group is None else group.name + '_' + name
- value = self._cli_values.get(name, None)
- if value is not None:
- return value
-
- if default is not None:
- return default
-
- return opt.default
-
- def _substitute(self, value):
- """Perform string template substitution.
-
- Substititue any template variables (e.g. $foo, ${bar}) in the supplied
- string value(s) with opt values.
-
- :param value: the string value, or list of string values
- :returns: the substituted string(s)
- """
- if isinstance(value, list):
- return [self._substitute(i) for i in value]
- elif isinstance(value, str):
- tmpl = string.Template(value)
- return tmpl.safe_substitute(self.StrSubWrapper(self))
- else:
- return value
-
- def _get_group(self, group_or_name):
- """Looks up a OptGroup object.
-
- Helper function to return an OptGroup given a parameter which can
- either be the group's name or an OptGroup object.
-
- The OptGroup object returned is from the internal dict of OptGroup
- objects, which will be a copy of any OptGroup object that users of
- the API have access to.
-
- :param group_or_name: the group's name or the OptGroup object itself
- :raises: NoSuchGroupError
- """
- if isinstance(group_or_name, OptGroup):
- group_name = group_or_name.name
- else:
- group_name = group_or_name
-
- if not group_name in self._groups:
- raise NoSuchGroupError(group_name)
-
- return self._groups[group_name]
-
- def _get_opt_info(self, opt_name, group=None):
- """Return the (opt, override, default) dict for an opt.
-
- :param opt_name: an opt name/dest
- :param group: an optional group name or OptGroup object
- :raises: NoSuchOptError, NoSuchGroupError
- """
- if group is None:
- opts = self._opts
- else:
- group = self._get_group(group)
- opts = group._opts
-
- if not opt_name in opts:
- raise NoSuchOptError(opt_name, group)
-
- return opts[opt_name]
-
- def _parse_config_files(self, config_files):
- """Parse the supplied configuration files.
-
- :raises: ConfigFilesNotFoundError, ConfigFileParseError
- """
- self._cparser = ConfigParser.SafeConfigParser()
-
- try:
- read_ok = self._cparser.read(config_files)
- except ConfigParser.ParsingError, cpe:
- raise ConfigFileParseError(cpe.filename, cpe.message)
-
- if read_ok != config_files:
- not_read_ok = filter(lambda f: f not in read_ok, config_files)
- raise ConfigFilesNotFoundError(not_read_ok)
-
- class GroupAttr(object):
-
- """
- A helper class representing the option values of a group as attributes.
- """
-
- def __init__(self, conf, group):
- """Construct a GroupAttr object.
-
- :param conf: a ConfigOpts object
- :param group: a group name or OptGroup object
- """
- self.conf = conf
- self.group = group
-
- def __getattr__(self, name):
- """Look up an option value and perform template substitution."""
- return self.conf._substitute(self.conf._get(name, self.group))
-
- class StrSubWrapper(object):
-
- """
- A helper class exposing opt values as a dict for string substitution.
- """
-
- def __init__(self, conf):
- """Construct a StrSubWrapper object.
-
- :param conf: a ConfigOpts object
- """
- self.conf = conf
-
- def __getitem__(self, key):
- """Look up an opt value from the ConfigOpts object.
-
- :param key: an opt name
- :returns: an opt value
- :raises: TemplateSubstitutionError if attribute is a group
- """
- value = getattr(self.conf, key)
- if isinstance(value, self.conf.GroupAttr):
- raise TemplateSubstitutionError(
- 'substituting group %s not supported' % key)
- return value
-
-
-class CommonConfigOpts(ConfigOpts):
-
- DEFAULT_LOG_FORMAT = ('%(asctime)s %(process)d %(levelname)8s '
- '[%(name)s] %(message)s')
- DEFAULT_LOG_DATE_FORMAT = "%Y-%m-%d %H:%M:%S"
-
- common_cli_opts = [
- BoolOpt('debug',
- short='d',
- default=False,
- help='Print debugging output'),
- BoolOpt('verbose',
- short='v',
- default=False,
- help='Print more verbose output'),
- ]
-
- logging_cli_opts = [
- StrOpt('log-config',
- metavar='PATH',
- help='If this option is specified, the logging configuration '
- 'file specified is used and overrides any other logging '
- 'options specified. Please see the Python logging module '
- 'documentation for details on logging configuration '
- 'files.'),
- StrOpt('log-format',
- default=DEFAULT_LOG_FORMAT,
- metavar='FORMAT',
- help='A logging.Formatter log message format string which may '
- 'use any of the available logging.LogRecord attributes. '
- 'Default: %default'),
- StrOpt('log-date-format',
- default=DEFAULT_LOG_DATE_FORMAT,
- metavar='DATE_FORMAT',
- help='Format string for %(asctime)s in log records. '
- 'Default: %default'),
- StrOpt('log-file',
- metavar='PATH',
- help='(Optional) Name of log file to output to. '
- 'If not set, logging will go to stdout.'),
- StrOpt('log-dir',
- help='(Optional) The directory to keep log files in '
- '(will be prepended to --logfile)'),
- BoolOpt('use-syslog',
- default=False,
- help='Use syslog for logging.'),
- StrOpt('syslog-log-facility',
- default='LOG_USER',
- help='syslog facility to receive log lines')
- ]
-
- def __init__(self, **kwargs):
- super(CommonConfigOpts, self).__init__(**kwargs)
- self.register_cli_opts(self.common_cli_opts)
- self.register_cli_opts(self.logging_cli_opts)
diff --git a/windc/windc/common/client.py b/windc/windc/common/client.py
deleted file mode 100644
index 6f383a3d..00000000
--- a/windc/windc/common/client.py
+++ /dev/null
@@ -1,605 +0,0 @@
-# vim: tabstop=4 shiftwidth=4 softtabstop=4
-
-# Copyright 2010-2011 OpenStack, LLC
-# All Rights Reserved.
-#
-# Licensed under the Apache License, Version 2.0 (the "License"); you may
-# not use this file except in compliance with the License. You may obtain
-# a copy of the License at
-#
-# http://www.apache.org/licenses/LICENSE-2.0
-#
-# Unless required by applicable law or agreed to in writing, software
-# distributed under the License is distributed on an "AS IS" BASIS, WITHOUT
-# WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. See the
-# License for the specific language governing permissions and limitations
-# under the License.
-
-# HTTPSClientAuthConnection code comes courtesy of ActiveState website:
-# http://code.activestate.com/recipes/
-# 577548-https-httplib-client-connection-with-certificate-v/
-
-import collections
-import errno
-import functools
-import httplib
-import os
-import select
-import urllib
-import urlparse
-
-try:
- from eventlet.green import socket, ssl
-except ImportError:
- import socket
- import ssl
-
-try:
- import sendfile
- SENDFILE_SUPPORTED = True
-except ImportError:
- SENDFILE_SUPPORTED = False
-
-#from glance.common import auth
-#from glance.common import exception, utils
-
-
-# common chunk size for get and put
-CHUNKSIZE = 65536
-
-
-def handle_unauthorized(func):
- """
- Wrap a function to re-authenticate and retry.
- """
- @functools.wraps(func)
- def wrapped(self, *args, **kwargs):
- try:
- return func(self, *args, **kwargs)
- except exception.NotAuthorized:
- self._authenticate(force_reauth=True)
- return func(self, *args, **kwargs)
- return wrapped
-
-
-def handle_redirects(func):
- """
- Wrap the _do_request function to handle HTTP redirects.
- """
- MAX_REDIRECTS = 5
-
- @functools.wraps(func)
- def wrapped(self, method, url, body, headers):
- for _ in xrange(MAX_REDIRECTS):
- try:
- return func(self, method, url, body, headers)
- except exception.RedirectException as redirect:
- if redirect.url is None:
- raise exception.InvalidRedirect()
- url = redirect.url
- raise exception.MaxRedirectsExceeded(redirects=MAX_REDIRECTS)
- return wrapped
-
-
-class ImageBodyIterator(object):
-
- """
- A class that acts as an iterator over an image file's
- chunks of data. This is returned as part of the result
- tuple from `glance.client.Client.get_image`
- """
-
- def __init__(self, source):
- """
- Constructs the object from a readable image source
- (such as an HTTPResponse or file-like object)
- """
- self.source = source
-
- def __iter__(self):
- """
- Exposes an iterator over the chunks of data in the
- image file.
- """
- while True:
- chunk = self.source.read(CHUNKSIZE)
- if chunk:
- yield chunk
- else:
- break
-
-
-class SendFileIterator:
- """
- Emulate iterator pattern over sendfile, in order to allow
- send progress be followed by wrapping the iteration.
- """
- def __init__(self, connection, body):
- self.connection = connection
- self.body = body
- self.offset = 0
- self.sending = True
-
- def __iter__(self):
- class OfLength:
- def __init__(self, len):
- self.len = len
-
- def __len__(self):
- return self.len
-
- while self.sending:
- try:
- sent = sendfile.sendfile(self.connection.sock.fileno(),
- self.body.fileno(),
- self.offset,
- CHUNKSIZE)
- except OSError as e:
- # suprisingly, sendfile may fail transiently instead of
- # blocking, in which case we select on the socket in order
- # to wait on its return to a writeable state before resuming
- # the send loop
- if e.errno in (errno.EAGAIN, errno.EBUSY):
- wlist = [self.connection.sock.fileno()]
- rfds, wfds, efds = select.select([], wlist, [])
- if wfds:
- continue
- raise
-
- self.sending = (sent != 0)
- self.offset += sent
- yield OfLength(sent)
-
-
-class HTTPSClientAuthConnection(httplib.HTTPSConnection):
- """
- Class to make a HTTPS connection, with support for
- full client-based SSL Authentication
-
- :see http://code.activestate.com/recipes/
- 577548-https-httplib-client-connection-with-certificate-v/
- """
-
- def __init__(self, host, port, key_file, cert_file,
- ca_file, timeout=None, insecure=False):
- httplib.HTTPSConnection.__init__(self, host, port, key_file=key_file,
- cert_file=cert_file)
- self.key_file = key_file
- self.cert_file = cert_file
- self.ca_file = ca_file
- self.timeout = timeout
- self.insecure = insecure
-
- def connect(self):
- """
- Connect to a host on a given (SSL) port.
- If ca_file is pointing somewhere, use it to check Server Certificate.
-
- Redefined/copied and extended from httplib.py:1105 (Python 2.6.x).
- This is needed to pass cert_reqs=ssl.CERT_REQUIRED as parameter to
- ssl.wrap_socket(), which forces SSL to check server certificate against
- our client certificate.
- """
- sock = socket.create_connection((self.host, self.port), self.timeout)
- if self._tunnel_host:
- self.sock = sock
- self._tunnel()
- # Check CA file unless 'insecure' is specificed
- if self.insecure is True:
- self.sock = ssl.wrap_socket(sock, self.key_file, self.cert_file,
- cert_reqs=ssl.CERT_NONE)
- else:
- self.sock = ssl.wrap_socket(sock, self.key_file, self.cert_file,
- ca_certs=self.ca_file,
- cert_reqs=ssl.CERT_REQUIRED)
-
-
-class BaseClient(object):
-
- """A base client class"""
-
- DEFAULT_PORT = 80
- DEFAULT_DOC_ROOT = None
- # Standard CA file locations for Debian/Ubuntu, RedHat/Fedora,
- # Suse, FreeBSD/OpenBSD
- DEFAULT_CA_FILE_PATH = '/etc/ssl/certs/ca-certificates.crt:'\
- '/etc/pki/tls/certs/ca-bundle.crt:'\
- '/etc/ssl/ca-bundle.pem:'\
- '/etc/ssl/cert.pem'
-
- OK_RESPONSE_CODES = (
- httplib.OK,
- httplib.CREATED,
- httplib.ACCEPTED,
- httplib.NO_CONTENT,
- )
-
- REDIRECT_RESPONSE_CODES = (
- httplib.MOVED_PERMANENTLY,
- httplib.FOUND,
- httplib.SEE_OTHER,
- httplib.USE_PROXY,
- httplib.TEMPORARY_REDIRECT,
- )
-
- def __init__(self, host, port=None, use_ssl=False, auth_tok=None,
- creds=None, doc_root=None, key_file=None,
- cert_file=None, ca_file=None, insecure=False,
- configure_via_auth=True):
- """
- Creates a new client to some service.
-
- :param host: The host where service resides
- :param port: The port where service resides
- :param use_ssl: Should we use HTTPS?
- :param auth_tok: The auth token to pass to the server
- :param creds: The credentials to pass to the auth plugin
- :param doc_root: Prefix for all URLs we request from host
- :param key_file: Optional PEM-formatted file that contains the private
- key.
- If use_ssl is True, and this param is None (the
- default), then an environ variable
- GLANCE_CLIENT_KEY_FILE is looked for. If no such
- environ variable is found, ClientConnectionError
- will be raised.
- :param cert_file: Optional PEM-formatted certificate chain file.
- If use_ssl is True, and this param is None (the
- default), then an environ variable
- GLANCE_CLIENT_CERT_FILE is looked for. If no such
- environ variable is found, ClientConnectionError
- will be raised.
- :param ca_file: Optional CA cert file to use in SSL connections
- If use_ssl is True, and this param is None (the
- default), then an environ variable
- GLANCE_CLIENT_CA_FILE is looked for.
- :param insecure: Optional. If set then the server's certificate
- will not be verified.
- """
- self.host = host
- self.port = port or self.DEFAULT_PORT
- self.use_ssl = use_ssl
- self.auth_tok = auth_tok
- self.creds = creds or {}
- self.connection = None
- self.configure_via_auth = configure_via_auth
- # doc_root can be a nullstring, which is valid, and why we
- # cannot simply do doc_root or self.DEFAULT_DOC_ROOT below.
- self.doc_root = (doc_root if doc_root is not None
- else self.DEFAULT_DOC_ROOT)
- self.auth_plugin = self.make_auth_plugin(self.creds)
-
- self.key_file = key_file
- self.cert_file = cert_file
- self.ca_file = ca_file
- self.insecure = insecure
- self.connect_kwargs = self.get_connect_kwargs()
-
- def get_connect_kwargs(self):
- connect_kwargs = {}
- if self.use_ssl:
- if self.key_file is None:
- self.key_file = os.environ.get('BALANCER_CLIENT_KEY_FILE')
- if self.cert_file is None:
- self.cert_file = os.environ.get('BALANCER_CLIENT_CERT_FILE')
- if self.ca_file is None:
- self.ca_file = os.environ.get('BALANCER_CLIENT_CA_FILE')
-
- # Check that key_file/cert_file are either both set or both unset
- if self.cert_file is not None and self.key_file is None:
- msg = _("You have selected to use SSL in connecting, "
- "and you have supplied a cert, "
- "however you have failed to supply either a "
- "key_file parameter or set the "
- "BALANCER_CLIENT_KEY_FILE environ variable")
- raise exception.ClientConnectionError(msg)
-
- if self.key_file is not None and self.cert_file is None:
- msg = _("You have selected to use SSL in connecting, "
- "and you have supplied a key, "
- "however you have failed to supply either a "
- "cert_file parameter or set the "
- "BALANCER_CLIENT_CERT_FILE environ variable")
- raise exception.ClientConnectionError(msg)
-
- if (self.key_file is not None and
- not os.path.exists(self.key_file)):
- msg = _("The key file you specified %s does not "
- "exist") % self.key_file
- raise exception.ClientConnectionError(msg)
- connect_kwargs['key_file'] = self.key_file
-
- if (self.cert_file is not None and
- not os.path.exists(self.cert_file)):
- msg = _("The cert file you specified %s does not "
- "exist") % self.cert_file
- raise exception.ClientConnectionError(msg)
- connect_kwargs['cert_file'] = self.cert_file
-
- if (self.ca_file is not None and
- not os.path.exists(self.ca_file)):
- msg = _("The CA file you specified %s does not "
- "exist") % self.ca_file
- raise exception.ClientConnectionError(msg)
-
- if self.ca_file is None:
- for ca in self.DEFAULT_CA_FILE_PATH.split(":"):
- if os.path.exists(ca):
- self.ca_file = ca
- break
-
- connect_kwargs['ca_file'] = self.ca_file
- connect_kwargs['insecure'] = self.insecure
-
- return connect_kwargs
-
- def set_auth_token(self, auth_tok):
- """
- Updates the authentication token for this client connection.
- """
- # FIXME(sirp): Nova image/glance.py currently calls this. Since this
- # method isn't really doing anything useful[1], we should go ahead and
- # rip it out, first in Nova, then here. Steps:
- #
- # 1. Change auth_tok in Glance to auth_token
- # 2. Change image/glance.py in Nova to use client.auth_token
- # 3. Remove this method
- #
- # [1] http://mail.python.org/pipermail/tutor/2003-October/025932.html
- self.auth_tok = auth_tok
-
- def configure_from_url(self, url):
- """
- Setups the connection based on the given url.
-
- The form is:
-
- ://:port/doc_root
- """
- parsed = urlparse.urlparse(url)
- self.use_ssl = parsed.scheme == 'https'
- self.host = parsed.hostname
- self.port = parsed.port or 80
- self.doc_root = parsed.path
-
- # ensure connection kwargs are re-evaluated after the service catalog
- # publicURL is parsed for potential SSL usage
- self.connect_kwargs = self.get_connect_kwargs()
-
- def make_auth_plugin(self, creds):
- """
- Returns an instantiated authentication plugin.
- """
- strategy = creds.get('strategy', 'noauth')
- plugin = auth.get_plugin_from_strategy(strategy, creds)
- return plugin
-
- def get_connection_type(self):
- """
- Returns the proper connection type
- """
- if self.use_ssl:
- return HTTPSClientAuthConnection
- else:
- return httplib.HTTPConnection
-
- def _authenticate(self, force_reauth=False):
- """
- Use the authentication plugin to authenticate and set the auth token.
-
- :param force_reauth: For re-authentication to bypass cache.
- """
- auth_plugin = self.auth_plugin
-
- if not auth_plugin.is_authenticated or force_reauth:
- auth_plugin.authenticate()
-
- self.auth_tok = auth_plugin.auth_token
-
- management_url = auth_plugin.management_url
- if management_url and self.configure_via_auth:
- self.configure_from_url(management_url)
-
- @handle_unauthorized
- def do_request(self, method, action, body=None, headers=None,
- params=None):
- """
- Make a request, returning an HTTP response object.
-
- :param method: HTTP verb (GET, POST, PUT, etc.)
- :param action: Requested path to append to self.doc_root
- :param body: Data to send in the body of the request
- :param headers: Headers to send with the request
- :param params: Key/value pairs to use in query string
- :returns: HTTP response object
- """
- if not self.auth_tok:
- self._authenticate()
-
- url = self._construct_url(action, params)
- return self._do_request(method=method, url=url, body=body,
- headers=headers)
-
- def _construct_url(self, action, params=None):
- """
- Create a URL object we can use to pass to _do_request().
- """
- path = '/'.join([self.doc_root or '', action.lstrip('/')])
- scheme = "https" if self.use_ssl else "http"
- netloc = "%s:%d" % (self.host, self.port)
-
- if isinstance(params, dict):
- for (key, value) in params.items():
- if value is None:
- del params[key]
- query = urllib.urlencode(params)
- else:
- query = None
-
- return urlparse.ParseResult(scheme, netloc, path, '', query, '')
-
- @handle_redirects
- def _do_request(self, method, url, body, headers):
- """
- Connects to the server and issues a request. Handles converting
- any returned HTTP error status codes to OpenStack/Glance exceptions
- and closing the server connection. Returns the result data, or
- raises an appropriate exception.
-
- :param method: HTTP method ("GET", "POST", "PUT", etc...)
- :param url: urlparse.ParsedResult object with URL information
- :param body: data to send (as string, filelike or iterable),
- or None (default)
- :param headers: mapping of key/value pairs to add as headers
-
- :note
-
- If the body param has a read attribute, and method is either
- POST or PUT, this method will automatically conduct a chunked-transfer
- encoding and use the body as a file object or iterable, transferring
- chunks of data using the connection's send() method. This allows large
- objects to be transferred efficiently without buffering the entire
- body in memory.
- """
- if url.query:
- path = url.path + "?" + url.query
- else:
- path = url.path
-
- try:
- connection_type = self.get_connection_type()
- headers = headers or {}
-
- if 'x-auth-token' not in headers and self.auth_tok:
- headers['x-auth-token'] = self.auth_tok
-
- c = connection_type(url.hostname, url.port, **self.connect_kwargs)
-
- def _pushing(method):
- return method.lower() in ('post', 'put')
-
- def _simple(body):
- return body is None or isinstance(body, basestring)
-
- def _filelike(body):
- return hasattr(body, 'read')
-
- def _sendbody(connection, iter):
- connection.endheaders()
- for sent in iter:
- # iterator has done the heavy lifting
- pass
-
- def _chunkbody(connection, iter):
- connection.putheader('Transfer-Encoding', 'chunked')
- connection.endheaders()
- for chunk in iter:
- connection.send('%x\r\n%s\r\n' % (len(chunk), chunk))
- connection.send('0\r\n\r\n')
-
- # Do a simple request or a chunked request, depending
- # on whether the body param is file-like or iterable and
- # the method is PUT or POST
- #
- if not _pushing(method) or _simple(body):
- # Simple request...
- c.request(method, path, body, headers)
- elif _filelike(body) or self._iterable(body):
- c.putrequest(method, path)
-
- for header, value in headers.items():
- c.putheader(header, value)
-
- iter = self.image_iterator(c, headers, body)
-
- if self._sendable(body):
- # send actual file without copying into userspace
- _sendbody(c, iter)
- else:
- # otherwise iterate and chunk
- _chunkbody(c, iter)
- else:
- raise TypeError('Unsupported image type: %s' % body.__class__)
-
- res = c.getresponse()
- status_code = self.get_status_code(res)
- if status_code in self.OK_RESPONSE_CODES:
- return res
- elif status_code in self.REDIRECT_RESPONSE_CODES:
- raise exception.RedirectException(res.getheader('Location'))
- elif status_code == httplib.UNAUTHORIZED:
- raise exception.NotAuthorized(res.read())
- elif status_code == httplib.FORBIDDEN:
- raise exception.NotAuthorized(res.read())
- elif status_code == httplib.NOT_FOUND:
- raise exception.NotFound(res.read())
- elif status_code == httplib.CONFLICT:
- raise exception.Duplicate(res.read())
- elif status_code == httplib.BAD_REQUEST:
- raise exception.Invalid(res.read())
- elif status_code == httplib.MULTIPLE_CHOICES:
- raise exception.MultipleChoices(body=res.read())
- elif status_code == httplib.INTERNAL_SERVER_ERROR:
- raise Exception("Internal Server error: %s" % res.read())
- else:
- raise Exception("Unknown error occurred! %s" % res.read())
-
- except (socket.error, IOError), e:
- raise exception.ClientConnectionError(e)
-
- def _seekable(self, body):
- # pipes are not seekable, avoids sendfile() failure on e.g.
- # cat /path/to/image | glance add ...
- # or where add command is launched via popen
- try:
- os.lseek(body.fileno(), 0, os.SEEK_SET)
- return True
- except OSError as e:
- return (e.errno != errno.ESPIPE)
-
- def _sendable(self, body):
- return (SENDFILE_SUPPORTED and hasattr(body, 'fileno') and
- self._seekable(body) and not self.use_ssl)
-
- def _iterable(self, body):
- return isinstance(body, collections.Iterable)
-
- def image_iterator(self, connection, headers, body):
- if self._sendable(body):
- return SendFileIterator(connection, body)
- elif self._iterable(body):
- return utils.chunkreadable(body)
- else:
- return ImageBodyIterator(body)
-
- def get_status_code(self, response):
- """
- Returns the integer status code from the response, which
- can be either a Webob.Response (used in testing) or httplib.Response
- """
- if hasattr(response, 'status_int'):
- return response.status_int
- else:
- return response.status
-
- def _extract_params(self, actual_params, allowed_params):
- """
- Extract a subset of keys from a dictionary. The filters key
- will also be extracted, and each of its values will be returned
- as an individual param.
-
- :param actual_params: dict of keys to filter
- :param allowed_params: list of keys that 'actual_params' will be
- reduced to
- :retval subset of 'params' dict
- """
- try:
- # expect 'filters' param to be a dict here
- result = dict(actual_params.get('filters'))
- except TypeError:
- result = {}
-
- for allowed_param in allowed_params:
- if allowed_param in actual_params:
- result[allowed_param] = actual_params[allowed_param]
-
- return result
diff --git a/windc/windc/common/config.py b/windc/windc/common/config.py
deleted file mode 100644
index 79b4e236..00000000
--- a/windc/windc/common/config.py
+++ /dev/null
@@ -1,181 +0,0 @@
-#!/usr/bin/env python
-# vim: tabstop=4 shiftwidth=4 softtabstop=4
-
-# Copyright 2011 OpenStack LLC.
-# All Rights Reserved.
-#
-# Licensed under the Apache License, Version 2.0 (the "License"); you may
-# not use this file except in compliance with the License. You may obtain
-# a copy of the License at
-#
-# http://www.apache.org/licenses/LICENSE-2.0
-#
-# Unless required by applicable law or agreed to in writing, software
-# distributed under the License is distributed on an "AS IS" BASIS, WITHOUT
-# WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. See the
-# License for the specific language governing permissions and limitations
-# under the License.
-
-"""
-Routines for configuring balancer
-"""
-
-import logging
-import logging.config
-import logging.handlers
-import os
-import sys
-
-from windc.common import cfg
-from windc.common import wsgi
-from windc import version
-
-
-paste_deploy_group = cfg.OptGroup('paste_deploy')
-paste_deploy_opts = [
- cfg.StrOpt('flavor'),
- cfg.StrOpt('config_file')
- ]
-
-
-class WindcConfigOpts(cfg.CommonConfigOpts):
- def __init__(self, default_config_files=None, **kwargs):
- super(WindcConfigOpts, self).__init__(
- project='windc',
- version='%%prog %s' % version.version_string(),
- default_config_files=default_config_files,
- **kwargs)
-
-
-class WindcCacheConfigOpts(WindcConfigOpts):
-
- def __init__(self, **kwargs):
- config_files = cfg.find_config_files(project='windc',
- prog='windc-cache')
- super(BalancerCacheConfigOpts, self).__init__(config_files, **kwargs)
-
-
-def setup_logging(conf):
- """
- Sets up the logging options for a log with supplied name
-
- :param conf: a cfg.ConfOpts object
- """
-
- if conf.log_config:
- # Use a logging configuration file for all settings...
- if os.path.exists(conf.log_config):
- logging.config.fileConfig(conf.log_config)
- return
- else:
- raise RuntimeError("Unable to locate specified logging "
- "config file: %s" % conf.log_config)
-
- root_logger = logging.root
- if conf.debug:
- root_logger.setLevel(logging.DEBUG)
- elif conf.verbose:
- root_logger.setLevel(logging.INFO)
- else:
- root_logger.setLevel(logging.WARNING)
-
- formatter = logging.Formatter(conf.log_format, conf.log_date_format)
-
- if conf.use_syslog:
- try:
- facility = getattr(logging.handlers.SysLogHandler,
- conf.syslog_log_facility)
- except AttributeError:
- raise ValueError(_("Invalid syslog facility"))
-
- handler = logging.handlers.SysLogHandler(address='/dev/log',
- facility=facility)
- elif conf.log_file:
- logfile = conf.log_file
- if conf.log_dir:
- logfile = os.path.join(conf.log_dir, logfile)
- handler = logging.handlers.WatchedFileHandler(logfile)
- else:
- handler = logging.StreamHandler(sys.stdout)
-
- handler.setFormatter(formatter)
- root_logger.addHandler(handler)
-
-
-def _register_paste_deploy_opts(conf):
- """
- Idempotent registration of paste_deploy option group
-
- :param conf: a cfg.ConfigOpts object
- """
- conf.register_group(paste_deploy_group)
- conf.register_opts(paste_deploy_opts, group=paste_deploy_group)
-
-
-def _get_deployment_flavor(conf):
- """
- Retrieve the paste_deploy.flavor config item, formatted appropriately
- for appending to the application name.
-
- :param conf: a cfg.ConfigOpts object
- """
- _register_paste_deploy_opts(conf)
- flavor = conf.paste_deploy.flavor
- return '' if not flavor else ('-' + flavor)
-
-
-def _get_deployment_config_file(conf):
- """
- Retrieve the deployment_config_file config item, formatted as an
- absolute pathname.
-
- :param conf: a cfg.ConfigOpts object
- """
- _register_paste_deploy_opts(conf)
- config_file = conf.paste_deploy.config_file
- if not config_file:
- # Assume paste config is in a paste.ini file corresponding
- # to the last config file
- path = conf.config_file[-1].replace(".conf", "-paste.ini")
- else:
- path = config_file
- return os.path.abspath(path)
-
-
-def load_paste_app(conf, app_name=None):
- """
- Builds and returns a WSGI app from a paste config file.
-
- We assume the last config file specified in the supplied ConfigOpts
- object is the paste config file.
-
- :param conf: a cfg.ConfigOpts object
- :param app_name: name of the application to load
-
- :raises RuntimeError when config file cannot be located or application
- cannot be loaded from config file
- """
- if app_name is None:
- app_name = conf.prog
-
- # append the deployment flavor to the application name,
- # in order to identify the appropriate paste pipeline
- app_name += _get_deployment_flavor(conf)
-
- conf_file = _get_deployment_config_file(conf)
-
- try:
- # Setup logging early
- setup_logging(conf)
-
- app = wsgi.paste_deploy_app(conf_file, app_name, conf)
-
- # Log the options used when starting if we're in debug mode...
- if conf.debug:
- conf.log_opt_values(logging.getLogger(app_name), logging.DEBUG)
-
- return app
- except (LookupError, ImportError), e:
- raise RuntimeError("Unable to load %(app_name)s from "
- "configuration file %(conf_file)s."
- "\nGot: %(e)r" % locals())
diff --git a/windc/windc/common/context.py b/windc/windc/common/context.py
deleted file mode 100644
index 5a69410d..00000000
--- a/windc/windc/common/context.py
+++ /dev/null
@@ -1,130 +0,0 @@
-# vim: tabstop=4 shiftwidth=4 softtabstop=4
-
-# Copyright 2011 OpenStack LLC.
-# All Rights Reserved.
-#
-# Licensed under the Apache License, Version 2.0 (the "License"); you may
-# not use this file except in compliance with the License. You may obtain
-# a copy of the License at
-#
-# http://www.apache.org/licenses/LICENSE-2.0
-#
-# Unless required by applicable law or agreed to in writing, software
-# distributed under the License is distributed on an "AS IS" BASIS, WITHOUT
-# WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. See the
-# License for the specific language governing permissions and limitations
-# under the License.
-
-from balancer.common import cfg
-from balancer.common import exception
-from balancer.common import utils
-from balancer.common import wsgi
-
-
-class RequestContext(object):
- """
- Stores information about the security context under which the user
- accesses the system, as well as additional request information.
- """
-
- def __init__(self, auth_tok=None, user=None, user_id=None, tenant=None,
- tenant_id=None, roles=None, is_admin=False, read_only=False,
- show_deleted=False, owner_is_tenant=True):
- self.auth_tok = auth_tok
- self.user = user
- self.user_id = user_id
- self.tenant = tenant
- self.tenant_id = tenant_id
- self.roles = roles or []
- self.is_admin = is_admin
- self.read_only = read_only
- self._show_deleted = show_deleted
- self.owner_is_tenant = owner_is_tenant
-
- @property
- def owner(self):
- """Return the owner to correlate with an image."""
- return self.tenant if self.owner_is_tenant else self.user
-
- @property
- def show_deleted(self):
- """Admins can see deleted by default"""
- if self._show_deleted or self.is_admin:
- return True
- return False
-
-
-class ContextMiddleware(wsgi.Middleware):
-
- opts = [
- cfg.BoolOpt('owner_is_tenant', default=True),
- ]
-
- def __init__(self, app, conf, **local_conf):
- self.conf = conf
- self.conf.register_opts(self.opts)
-
- # Determine the context class to use
- self.ctxcls = RequestContext
- if 'context_class' in local_conf:
- self.ctxcls = utils.import_class(local_conf['context_class'])
-
- super(ContextMiddleware, self).__init__(app)
-
- def make_context(self, *args, **kwargs):
- """
- Create a context with the given arguments.
- """
- kwargs.setdefault('owner_is_tenant', self.conf.owner_is_tenant)
-
- return self.ctxcls(*args, **kwargs)
-
- def process_request(self, req):
- """
- Extract any authentication information in the request and
- construct an appropriate context from it.
-
- A few scenarios exist:
-
- 1. If X-Auth-Token is passed in, then consult TENANT and ROLE headers
- to determine permissions.
-
- 2. An X-Auth-Token was passed in, but the Identity-Status is not
- confirmed. For now, just raising a NotAuthorized exception.
-
- 3. X-Auth-Token is omitted. If we were using Keystone, then the
- tokenauth middleware would have rejected the request, so we must be
- using NoAuth. In that case, assume that is_admin=True.
- """
- # TODO(sirp): should we be using the balancer_tokeauth shim from
- # Keystone here? If we do, we need to make sure it handles the NoAuth
- # case
- auth_tok = req.headers.get('X-Auth-Token',
- req.headers.get('X-Storage-Token'))
- if auth_tok:
- if req.headers.get('X-Identity-Status') == 'Confirmed':
- # 1. Auth-token is passed, check other headers
- user = req.headers.get('X-User-Name')
- user_id = req.headers.get('X-User-Id')
- tenant = req.headers.get('X-Tenant-Name')
- tenant_id = req.headers.get('X-Tenant-Id')
- roles = [r.strip()
- for r in req.headers.get('X-Role', '').split(',')]
- is_admin = any(role.lower() == 'admin' for role in roles)
- else:
- # 2. Indentity-Status not confirmed
- # FIXME(sirp): not sure what the correct behavior in this case
- # is; just raising NotAuthorized for now
- raise exception.NotAuthorized()
- else:
- # 3. Auth-token is ommited, assume NoAuth
- user = None
- user_id = None
- tenant = None
- tenant_id = None
- roles = []
- is_admin = True
-
- req.context = self.make_context(auth_tok=auth_tok, user=user,
- user_id=user_id, tenant=tenant, tenant_id=tenant_id,
- roles=roles, is_admin=is_admin)
diff --git a/windc/windc/common/exception.py b/windc/windc/common/exception.py
deleted file mode 100644
index 24dcf619..00000000
--- a/windc/windc/common/exception.py
+++ /dev/null
@@ -1,184 +0,0 @@
-# vim: tabstop=4 shiftwidth=4 softtabstop=4
-
-# Copyright 2010 United States Government as represented by the
-# Administrator of the National Aeronautics and Space Administration.
-# All Rights Reserved.
-#
-# Licensed under the Apache License, Version 2.0 (the "License"); you may
-# not use this file except in compliance with the License. You may obtain
-# a copy of the License at
-#
-# http://www.apache.org/licenses/LICENSE-2.0
-#
-# Unless required by applicable law or agreed to in writing, software
-# distributed under the License is distributed on an "AS IS" BASIS, WITHOUT
-# WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. See the
-# License for the specific language governing permissions and limitations
-# under the License.
-
-"""Glance exception subclasses"""
-
-import urlparse
-
-
-class RedirectException(Exception):
- def __init__(self, url):
- self.url = urlparse.urlparse(url)
-
-
-class GlanceException(Exception):
- """
- Base Glance Exception
-
- To correctly use this class, inherit from it and define
- a 'message' property. That message will get printf'd
- with the keyword arguments provided to the constructor.
- """
- message = "An unknown exception occurred"
-
- def __init__(self, *args, **kwargs):
- try:
- self._error_string = self.message % kwargs
- except Exception:
- # at least get the core message out if something happened
- self._error_string = self.message
- if len(args) > 0:
- # If there is a non-kwarg parameter, assume it's the error
- # message or reason description and tack it on to the end
- # of the exception message
- # Convert all arguments into their string representations...
- args = ["%s" % arg for arg in args]
- self._error_string = (self._error_string +
- "\nDetails: %s" % '\n'.join(args))
-
- def __str__(self):
- return self._error_string
-
-
-class MissingArgumentError(GlanceException):
- message = "Missing required argument."
-
-
-class MissingCredentialError(GlanceException):
- message = "Missing required credential: %(required)s"
-
-
-class BadAuthStrategy(GlanceException):
- message = "Incorrect auth strategy, expected \"%(expected)s\" but "
-
-
-class NotFound(GlanceException):
- message = "An object with the specified identifier was not found."
-
-
-class UnknownScheme(GlanceException):
- message = "Unknown scheme '%(scheme)s' found in URI"
-
-
-class BadStoreUri(GlanceException):
- message = "The Store URI %(uri)s was malformed. Reason: %(reason)s"
-
-
-class Duplicate(GlanceException):
- message = "An object with the same identifier already exists."
-
-
-class StorageFull(GlanceException):
- message = "There is not enough disk space on the image storage media."
-
-
-class StorageWriteDenied(GlanceException):
- message = "Permission to write image storage media denied."
-
-
-class ImportFailure(GlanceException):
- message = "Failed to import requested object/class: '%(import_str)s'. \
- Reason: %(reason)s"
-
-
-class AuthBadRequest(GlanceException):
- message = "Connect error/bad request to Auth service at URL %(url)s."
-
-
-class AuthUrlNotFound(GlanceException):
- message = "Auth service at URL %(url)s not found."
-
-
-class AuthorizationFailure(GlanceException):
- message = "Authorization failed."
-
-
-class NotAuthorized(GlanceException):
- message = "You are not authorized to complete this action."
-
-
-class NotAuthorizedPublicImage(NotAuthorized):
- message = "You are not authorized to complete this action."
-
-
-class Invalid(GlanceException):
- message = "Data supplied was not valid."
-
-
-class AuthorizationRedirect(GlanceException):
- message = "Redirecting to %(uri)s for authorization."
-
-
-class DatabaseMigrationError(GlanceException):
- message = "There was an error migrating the database."
-
-
-class ClientConnectionError(GlanceException):
- message = "There was an error connecting to a server"
-
-
-class ClientConfigurationError(GlanceException):
- message = "There was an error configuring the client."
-
-
-class MultipleChoices(GlanceException):
- message = "The request returned a 302 Multiple Choices. This generally "
-
-
-class InvalidContentType(GlanceException):
- message = "Invalid content type %(content_type)s"
-
-
-class BadRegistryConnectionConfiguration(GlanceException):
- message = "Registry was not configured correctly on API server. "
-
-
-class BadStoreConfiguration(GlanceException):
- message = "Store %(store_name)s could not be configured correctly. "
-
-
-class BadDriverConfiguration(GlanceException):
- message = "Driver %(driver_name)s could not be configured correctly. "
-
-
-class StoreDeleteNotSupported(GlanceException):
- message = "Deleting images from this store is not supported."
-
-
-class StoreAddDisabled(GlanceException):
- message = "Configuration for store failed. Adding images to this "
-
-
-class InvalidNotifierStrategy(GlanceException):
- message = "'%(strategy)s' is not an available notifier strategy."
-
-
-class MaxRedirectsExceeded(GlanceException):
- message = "Maximum redirects (%(redirects)s) was exceeded."
-
-
-class InvalidRedirect(GlanceException):
- message = "Received invalid HTTP redirect."
-
-
-class NoServiceEndpoint(GlanceException):
- message = "Response from Keystone does not contain a Glance endpoint."
-
-
-class RegionAmbiguity(GlanceException):
- message = "Multiple 'image' service matches for region %(region)s. This "
diff --git a/windc/windc/common/policy.py b/windc/windc/common/policy.py
deleted file mode 100644
index 1579409e..00000000
--- a/windc/windc/common/policy.py
+++ /dev/null
@@ -1,182 +0,0 @@
-# vim: tabstop=4 shiftwidth=4 softtabstop=4
-
-# Copyright (c) 2011 OpenStack, LLC.
-# All Rights Reserved.
-#
-# Licensed under the Apache License, Version 2.0 (the "License"); you may
-# not use this file except in compliance with the License. You may obtain
-# a copy of the License at
-#
-# http://www.apache.org/licenses/LICENSE-2.0
-#
-# Unless required by applicable law or agreed to in writing, software
-# distributed under the License is distributed on an "AS IS" BASIS, WITHOUT
-# WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. See the
-# License for the specific language governing permissions and limitations
-# under the License.
-
-"""Common Policy Engine Implementation"""
-
-import json
-
-
-class NotAuthorized(Exception):
- pass
-
-
-_BRAIN = None
-
-
-def set_brain(brain):
- """Set the brain used by enforce().
-
- Defaults use Brain() if not set.
-
- """
- global _BRAIN
- _BRAIN = brain
-
-
-def reset():
- """Clear the brain used by enforce()."""
- global _BRAIN
- _BRAIN = None
-
-
-def enforce(match_list, target_dict, credentials_dict):
- """Enforces authorization of some rules against credentials.
-
- :param match_list: nested tuples of data to match against
- The basic brain supports three types of match lists:
- 1) rules
- looks like: ('rule:compute:get_instance',)
- Retrieves the named rule from the rules dict and recursively
- checks against the contents of the rule.
- 2) roles
- looks like: ('role:compute:admin',)
- Matches if the specified role is in credentials_dict['roles'].
- 3) generic
- ('tenant_id:%(tenant_id)s',)
- Substitutes values from the target dict into the match using
- the % operator and matches them against the creds dict.
-
- Combining rules:
- The brain returns True if any of the outer tuple of rules match
- and also True if all of the inner tuples match. You can use this to
- perform simple boolean logic. For example, the following rule would
- return True if the creds contain the role 'admin' OR the if the
- tenant_id matches the target dict AND the the creds contains the
- role 'compute_sysadmin':
-
- {
- "rule:combined": (
- 'role:admin',
- ('tenant_id:%(tenant_id)s', 'role:compute_sysadmin')
- )
- }
-
-
- Note that rule and role are reserved words in the credentials match, so
- you can't match against properties with those names. Custom brains may
- also add new reserved words. For example, the HttpBrain adds http as a
- reserved word.
-
- :param target_dict: dict of object properties
- Target dicts contain as much information as we can about the object being
- operated on.
-
- :param credentials_dict: dict of actor properties
- Credentials dicts contain as much information as we can about the user
- performing the action.
-
- :raises NotAuthorized if the check fails
-
- """
- global _BRAIN
- if not _BRAIN:
- _BRAIN = Brain()
- if not _BRAIN.check(match_list, target_dict, credentials_dict):
- raise NotAuthorized()
-
-
-class Brain(object):
- """Implements policy checking."""
- @classmethod
- def load_json(cls, data, default_rule=None):
- """Init a brain using json instead of a rules dictionary."""
- rules_dict = json.loads(data)
- return cls(rules=rules_dict, default_rule=default_rule)
-
- def __init__(self, rules=None, default_rule=None):
- self.rules = rules or {}
- self.default_rule = default_rule
-
- def add_rule(self, key, match):
- self.rules[key] = match
-
- def _check(self, match, target_dict, cred_dict):
- match_kind, match_value = match.split(':', 1)
- try:
- f = getattr(self, '_check_%s' % match_kind)
- except AttributeError:
- if not self._check_generic(match, target_dict, cred_dict):
- return False
- else:
- if not f(match_value, target_dict, cred_dict):
- return False
- return True
-
- def check(self, match_list, target_dict, cred_dict):
- """Checks authorization of some rules against credentials.
-
- Detailed description of the check with examples in policy.enforce().
-
- :param match_list: nested tuples of data to match against
- :param target_dict: dict of object properties
- :param credentials_dict: dict of actor properties
-
- :returns: True if the check passes
-
- """
- if not match_list:
- return True
- for and_list in match_list:
- if isinstance(and_list, basestring):
- and_list = (and_list,)
- if all([self._check(item, target_dict, cred_dict)
- for item in and_list]):
- return True
- return False
-
- def _check_rule(self, match, target_dict, cred_dict):
- """Recursively checks credentials based on the brains rules."""
- try:
- new_match_list = self.rules[match]
- except KeyError:
- if self.default_rule and match != self.default_rule:
- new_match_list = ('rule:%s' % self.default_rule,)
- else:
- return False
-
- return self.check(new_match_list, target_dict, cred_dict)
-
- def _check_role(self, match, target_dict, cred_dict):
- """Check that there is a matching role in the cred dict."""
- return match in cred_dict['roles']
-
- def _check_generic(self, match, target_dict, cred_dict):
- """Check an individual match.
-
- Matches look like:
-
- tenant:%(tenant_id)s
- role:compute:admin
-
- """
-
- # TODO(termie): do dict inspection via dot syntax
- match = match % target_dict
- key, value = match.split(':', 1)
- if key in cred_dict:
- return value == cred_dict[key]
- return False
diff --git a/windc/windc/common/utils.py b/windc/windc/common/utils.py
deleted file mode 100644
index 2910816b..00000000
--- a/windc/windc/common/utils.py
+++ /dev/null
@@ -1,421 +0,0 @@
-# vim: tabstop=4 shiftwidth=4 softtabstop=4
-
-# Copyright 2010 United States Government as represented by the
-# Administrator of the National Aeronautics and Space Administration.
-# All Rights Reserved.
-#
-# Licensed under the Apache License, Version 2.0 (the "License"); you may
-# not use this file except in compliance with the License. You may obtain
-# a copy of the License at
-#
-# http://www.apache.org/licenses/LICENSE-2.0
-#
-# Unless required by applicable law or agreed to in writing, software
-# distributed under the License is distributed on an "AS IS" BASIS, WITHOUT
-# WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. See the
-# License for the specific language governing permissions and limitations
-# under the License.
-
-"""
-System-level utilities and helper functions.
-"""
-
-import datetime
-import errno
-import logging
-import os
-import platform
-import subprocess
-import sys
-import uuid
-
-import iso8601
-
-from windc.common import exception
-
-
-LOG = logging.getLogger(__name__)
-
-TIME_FORMAT = "%Y-%m-%dT%H:%M:%S"
-
-
-class Singleton:
- """
- A non-thread-safe helper class to ease implementing singletons.
- This should be used as a decorator -- not a metaclass -- to the
- class that should be a singleton.
-
- The decorated class can define one `__init__` function that
- takes only the `self` argument. Other than that, there are
- no restrictions that apply to the decorated class.
-
- To get the singleton instance, use the `Instance` method. Trying
- to use `__call__` will result in a `TypeError` being raised.
-
- Limitations: The decorated class cannot be inherited from and the
- type of the singleton instance cannot be checked with `isinstance`..
-
- """
-
- def __init__(self, decorated):
- self._decorated = decorated
-
- def Instance(self, conf):
- """
- Returns the singleton instance. Upon its first call, it creates a
- new instance of the decorated class and calls its `__init__` method.
- On all subsequent calls, the already created instance is returned.
-
- """
- try:
- return self._instance
- except AttributeError:
- self._instance = self._decorated(conf)
- return self._instance
-
- def __call__(self):
- """
- Call method that raises an exception in order to prevent creation
- of multiple instances of the singleton. The `Instance` method should
- be used instead.
-
- """
- raise TypeError(
- 'Singletons must be accessed through the `Instance` method.')
-
-
-def checkNone(obj):
- if bool(obj):
- if obj != 'None':
- return True
- return False
-
-
-def chunkreadable(iter, chunk_size=65536):
- """
- Wrap a readable iterator with a reader yielding chunks of
- a preferred size, otherwise leave iterator unchanged.
-
- :param iter: an iter which may also be readable
- :param chunk_size: maximum size of chunk
- """
- return chunkiter(iter, chunk_size) if hasattr(iter, 'read') else iter
-
-
-def chunkiter(fp, chunk_size=65536):
- """
- Return an iterator to a file-like obj which yields fixed size chunks
-
- :param fp: a file-like object
- :param chunk_size: maximum size of chunk
- """
- while True:
- chunk = fp.read(chunk_size)
- if chunk:
- yield chunk
- else:
- break
-
-
-def image_meta_to_http_headers(image_meta):
- """
- Returns a set of image metadata into a dict
- of HTTP headers that can be fed to either a Webob
- Request object or an httplib.HTTP(S)Connection object
-
- :param image_meta: Mapping of image metadata
- """
- headers = {}
- for k, v in image_meta.items():
- if v is not None:
- if k == 'properties':
- for pk, pv in v.items():
- if pv is not None:
- headers["x-image-meta-property-%s"
- % pk.lower()] = unicode(pv)
- else:
- headers["x-image-meta-%s" % k.lower()] = unicode(v)
- return headers
-
-
-def add_features_to_http_headers(features, headers):
- """
- Adds additional headers representing balancer features to be enabled.
-
- :param headers: Base set of headers
- :param features: Map of enabled features
- """
- if features:
- for k, v in features.items():
- if v is not None:
- headers[k.lower()] = unicode(v)
-
-
-def get_image_meta_from_headers(response):
- """
- Processes HTTP headers from a supplied response that
- match the x-image-meta and x-image-meta-property and
- returns a mapping of image metadata and properties
-
- :param response: Response to process
- """
- result = {}
- properties = {}
-
- if hasattr(response, 'getheaders'): # httplib.HTTPResponse
- headers = response.getheaders()
- else: # webob.Response
- headers = response.headers.items()
-
- for key, value in headers:
- key = str(key.lower())
- if key.startswith('x-image-meta-property-'):
- field_name = key[len('x-image-meta-property-'):].replace('-', '_')
- properties[field_name] = value or None
- elif key.startswith('x-image-meta-'):
- field_name = key[len('x-image-meta-'):].replace('-', '_')
- result[field_name] = value or None
- result['properties'] = properties
- if 'size' in result:
- try:
- result['size'] = int(result['size'])
- except ValueError:
- raise exception.Invalid
- for key in ('is_public', 'deleted', 'protected'):
- if key in result:
- result[key] = bool_from_header_value(result[key])
- return result
-
-
-def bool_from_header_value(value):
- """
- Returns True if value is a boolean True or the
- string 'true', case-insensitive, False otherwise
- """
- if isinstance(value, bool):
- return value
- elif isinstance(value, (basestring, unicode)):
- if str(value).lower() == 'true':
- return True
- return False
-
-
-def bool_from_string(subject):
- """
- Interpret a string as a boolean.
-
- Any string value in:
- ('True', 'true', 'On', 'on', '1')
- is interpreted as a boolean True.
-
- Useful for JSON-decoded stuff and config file parsing
- """
- if isinstance(subject, bool):
- return subject
- elif isinstance(subject, int):
- return subject == 1
- if hasattr(subject, 'startswith'): # str or unicode...
- if subject.strip().lower() in ('true', 'on', '1'):
- return True
- return False
-
-
-def import_class(import_str):
- """Returns a class from a string including module and class"""
- mod_str, _sep, class_str = import_str.rpartition('.')
- try:
- __import__(mod_str)
- return getattr(sys.modules[mod_str], class_str)
- except (ImportError, ValueError, AttributeError), e:
- raise exception.ImportFailure(import_str=import_str,
- reason=e)
-
-
-def import_object(import_str):
- """Returns an object including a module or module and class"""
- try:
- __import__(import_str)
- return sys.modules[import_str]
- except ImportError:
- cls = import_class(import_str)
- return cls()
-
-
-def generate_uuid():
- return str(uuid.uuid4())
-
-
-def is_uuid_like(value):
- try:
- uuid.UUID(value)
- return True
- except Exception:
- return False
-
-
-def isotime(at=None):
- """Stringify time in ISO 8601 format"""
- if not at:
- at = datetime.datetime.utcnow()
- str = at.strftime(TIME_FORMAT)
- tz = at.tzinfo.tzname(None) if at.tzinfo else 'UTC'
- str += ('Z' if tz == 'UTC' else tz)
- return str
-
-
-def parse_isotime(timestr):
- """Parse time from ISO 8601 format"""
- try:
- return iso8601.parse_date(timestr)
- except iso8601.ParseError as e:
- raise ValueError(e.message)
- except TypeError as e:
- raise ValueError(e.message)
-
-
-def normalize_time(timestamp):
- """Normalize time in arbitrary timezone to UTC"""
- offset = timestamp.utcoffset()
- return timestamp.replace(tzinfo=None) - offset if offset else timestamp
-
-
-def safe_mkdirs(path):
- try:
- os.makedirs(path)
- except OSError, e:
- if e.errno != errno.EEXIST:
- raise
-
-
-def safe_remove(path):
- try:
- os.remove(path)
- except OSError, e:
- if e.errno != errno.ENOENT:
- raise
-
-
-class PrettyTable(object):
- """Creates an ASCII art table for use in bin/balancer
-
- Example:
-
- ID Name Size Hits
- --- ----------------- ------------ -----
- 122 image 22 0
- """
- def __init__(self):
- self.columns = []
-
- def add_column(self, width, label="", just='l'):
- """Add a column to the table
-
- :param width: number of characters wide the column should be
- :param label: column heading
- :param just: justification for the column, 'l' for left,
- 'r' for right
- """
- self.columns.append((width, label, just))
-
- def make_header(self):
- label_parts = []
- break_parts = []
- for width, label, _ in self.columns:
- # NOTE(sirp): headers are always left justified
- label_part = self._clip_and_justify(label, width, 'l')
- label_parts.append(label_part)
-
- break_part = '-' * width
- break_parts.append(break_part)
-
- label_line = ' '.join(label_parts)
- break_line = ' '.join(break_parts)
- return '\n'.join([label_line, break_line])
-
- def make_row(self, *args):
- row = args
- row_parts = []
- for data, (width, _, just) in zip(row, self.columns):
- row_part = self._clip_and_justify(data, width, just)
- row_parts.append(row_part)
-
- row_line = ' '.join(row_parts)
- return row_line
-
- @staticmethod
- def _clip_and_justify(data, width, just):
- # clip field to column width
- clipped_data = str(data)[:width]
-
- if just == 'r':
- # right justify
- justified = clipped_data.rjust(width)
- else:
- # left justify
- justified = clipped_data.ljust(width)
-
- return justified
-
-
-def get_terminal_size():
-
- def _get_terminal_size_posix():
- import fcntl
- import struct
- import termios
-
- height_width = None
-
- try:
- height_width = struct.unpack('hh', fcntl.ioctl(sys.stderr.fileno(),
- termios.TIOCGWINSZ,
- struct.pack('HH', 0, 0)))
- except:
- pass
-
- if not height_width:
- try:
- p = subprocess.Popen(['stty', 'size'],
- shell=False,
- stdout=subprocess.PIPE)
- return tuple(int(x) for x in p.communicate()[0].split())
- except:
- pass
-
- return height_width
-
- def _get_terminal_size_win32():
- try:
- from ctypes import windll, create_string_buffer
- handle = windll.kernel32.GetStdHandle(-12)
- csbi = create_string_buffer(22)
- res = windll.kernel32.GetConsoleScreenBufferInfo(handle, csbi)
- except:
- return None
- if res:
- import struct
- unpack_tmp = struct.unpack("hhhhHhhhhhh", csbi.raw)
- (bufx, bufy, curx, cury, wattr,
- left, top, right, bottom, maxx, maxy) = unpack_tmp
- height = bottom - top + 1
- width = right - left + 1
- return (height, width)
- else:
- return None
-
- def _get_terminal_size_unknownOS():
- raise NotImplementedError
-
- func = {'posix': _get_terminal_size_posix,
- 'win32': _get_terminal_size_win32}
-
- height_width = func.get(platform.os.name, _get_terminal_size_unknownOS)()
-
- if height_width == None:
- raise exception.Invalid()
-
- for i in height_width:
- if not isinstance(i, int) or i <= 0:
- raise exception.Invalid()
-
- return height_width[0], height_width[1]
diff --git a/windc/windc/common/wsgi.py b/windc/windc/common/wsgi.py
deleted file mode 100644
index 3f1c6b5b..00000000
--- a/windc/windc/common/wsgi.py
+++ /dev/null
@@ -1,652 +0,0 @@
-# vim: tabstop=4 shiftwidth=4 softtabstop=4
-
-# Copyright 2010 United States Government as represented by the
-# Administrator of the National Aeronautics and Space Administration.
-# Copyright 2010 OpenStack LLC.
-# All Rights Reserved.
-#
-# Licensed under the Apache License, Version 2.0 (the "License"); you may
-# not use this file except in compliance with the License. You may obtain
-# a copy of the License at
-#
-# http://www.apache.org/licenses/LICENSE-2.0
-#
-# Unless required by applicable law or agreed to in writing, software
-# distributed under the License is distributed on an "AS IS" BASIS, WITHOUT
-# WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. See the
-# License for the specific language governing permissions and limitations
-# under the License.
-
-"""
-Utility methods for working with WSGI servers
-"""
-
-import datetime
-import errno
-import json
-import logging
-import os
-import signal
-import sys
-import time
-
-import eventlet
-import eventlet.greenio
-from eventlet.green import socket, ssl
-import eventlet.wsgi
-from paste import deploy
-import routes
-import routes.middleware
-import webob.dec
-import webob.exc
-
-from windc.common import cfg
-from windc.common import exception
-from windc.common import utils
-
-
-bind_opts = [
- cfg.StrOpt('bind_host', default='localhost'),
- cfg.IntOpt('bind_port'),
-]
-
-socket_opts = [
- cfg.IntOpt('backlog', default=4096),
- cfg.StrOpt('cert_file'),
- cfg.StrOpt('key_file'),
-]
-
-workers_opt = cfg.IntOpt('workers', default=0)
-
-
-class WritableLogger(object):
- """A thin wrapper that responds to `write` and logs."""
-
- def __init__(self, logger, level=logging.DEBUG):
- self.logger = logger
- self.level = level
-
- def write(self, msg):
- self.logger.log(self.level, msg.strip("\n"))
-
-
-def get_bind_addr(conf, default_port=None):
- """Return the host and port to bind to."""
- conf.register_opts(bind_opts)
- return (conf.bind_host, conf.bind_port or default_port)
-
-
-def get_socket(conf, default_port):
- """
- Bind socket to bind ip:port in conf
-
- note: Mostly comes from Swift with a few small changes...
-
- :param conf: a cfg.ConfigOpts object
- :param default_port: port to bind to if none is specified in conf
-
- :returns : a socket object as returned from socket.listen or
- ssl.wrap_socket if conf specifies cert_file
- """
- bind_addr = get_bind_addr(conf, default_port)
-
- # TODO(jaypipes): eventlet's greened socket module does not actually
- # support IPv6 in getaddrinfo(). We need to get around this in the
- # future or monitor upstream for a fix
- address_family = [addr[0] for addr in socket.getaddrinfo(bind_addr[0],
- bind_addr[1], socket.AF_UNSPEC, socket.SOCK_STREAM)
- if addr[0] in (socket.AF_INET, socket.AF_INET6)][0]
-
- conf.register_opts(socket_opts)
-
- cert_file = conf.cert_file
- key_file = conf.key_file
- use_ssl = cert_file or key_file
- if use_ssl and (not cert_file or not key_file):
- raise RuntimeError(_("When running server in SSL mode, you must "
- "specify both a cert_file and key_file "
- "option value in your configuration file"))
-
- sock = None
- retry_until = time.time() + 30
- while not sock and time.time() < retry_until:
- try:
- sock = eventlet.listen(bind_addr, backlog=conf.backlog,
- family=address_family)
- if use_ssl:
- sock = ssl.wrap_socket(sock, certfile=cert_file,
- keyfile=key_file)
- except socket.error, err:
- if err.args[0] != errno.EADDRINUSE:
- raise
- eventlet.sleep(0.1)
- if not sock:
- raise RuntimeError(_("Could not bind to %s:%s after trying for 30 "
- "seconds") % bind_addr)
- sock.setsockopt(socket.SOL_SOCKET, socket.SO_REUSEADDR, 1)
- # in my experience, sockets can hang around forever without keepalive
- sock.setsockopt(socket.SOL_SOCKET, socket.SO_KEEPALIVE, 1)
-
- # This option isn't available in the OS X version of eventlet
- if hasattr(socket, 'TCP_KEEPIDLE'):
- sock.setsockopt(socket.IPPROTO_TCP, socket.TCP_KEEPIDLE, 600)
-
- return sock
-
-
-class Server(object):
- """Server class to manage multiple WSGI sockets and applications."""
-
- def __init__(self, threads=1000):
- self.threads = threads
- self.children = []
- self.running = True
-
- def start(self, application, conf, default_port):
- """
- Run a WSGI server with the given application.
-
- :param application: The application to run in the WSGI server
- :param conf: a cfg.ConfigOpts object
- :param default_port: Port to bind to if none is specified in conf
- """
- def kill_children(*args):
- """Kills the entire process group."""
- self.logger.error(_('SIGTERM received'))
- signal.signal(signal.SIGTERM, signal.SIG_IGN)
- self.running = False
- os.killpg(0, signal.SIGTERM)
-
- def hup(*args):
- """
- Shuts down the server, but allows running requests to complete
- """
- self.logger.error(_('SIGHUP received'))
- signal.signal(signal.SIGHUP, signal.SIG_IGN)
- self.running = False
-
- self.application = application
- self.sock = get_socket(conf, default_port)
- conf.register_opt(workers_opt)
-
- self.logger = logging.getLogger('eventlet.wsgi.server')
-
- if conf.workers == 0:
- # Useful for profiling, test, debug etc.
- self.pool = eventlet.GreenPool(size=self.threads)
- self.pool.spawn_n(self._single_run, application, self.sock)
- return
-
- self.logger.info(_("Starting %d workers") % conf.workers)
- signal.signal(signal.SIGTERM, kill_children)
- signal.signal(signal.SIGHUP, hup)
- while len(self.children) < conf.workers:
- self.run_child()
-
- def wait_on_children(self):
- while self.running:
- try:
- pid, status = os.wait()
- if os.WIFEXITED(status) or os.WIFSIGNALED(status):
- self.logger.error(_('Removing dead child %s') % pid)
- self.children.remove(pid)
- self.run_child()
- except OSError, err:
- if err.errno not in (errno.EINTR, errno.ECHILD):
- raise
- except KeyboardInterrupt:
- sys.exit(1)
- self.logger.info(_('Caught keyboard interrupt. Exiting.'))
- break
- eventlet.greenio.shutdown_safe(self.sock)
- self.sock.close()
- self.logger.debug(_('Exited'))
-
- def wait(self):
- """Wait until all servers have completed running."""
- try:
- if self.children:
- self.wait_on_children()
- else:
- self.pool.waitall()
- except KeyboardInterrupt:
- pass
-
- def run_child(self):
- pid = os.fork()
- if pid == 0:
- signal.signal(signal.SIGHUP, signal.SIG_DFL)
- signal.signal(signal.SIGTERM, signal.SIG_DFL)
- self.run_server()
- self.logger.info(_('Child %d exiting normally') % os.getpid())
- return
- else:
- self.logger.info(_('Started child %s') % pid)
- self.children.append(pid)
-
- def run_server(self):
- """Run a WSGI server."""
- eventlet.wsgi.HttpProtocol.default_request_version = "HTTP/1.0"
- eventlet.hubs.use_hub('poll')
- eventlet.patcher.monkey_patch(all=False, socket=True)
- self.pool = eventlet.GreenPool(size=self.threads)
- try:
- eventlet.wsgi.server(self.sock, self.application,
- log=WritableLogger(self.logger), custom_pool=self.pool)
- except socket.error, err:
- if err[0] != errno.EINVAL:
- raise
- self.pool.waitall()
-
- def _single_run(self, application, sock):
- """Start a WSGI server in a new green thread."""
- self.logger.info(_("Starting single process server"))
- eventlet.wsgi.server(sock, application, custom_pool=self.pool,
- log=WritableLogger(self.logger))
-
-
-class Middleware(object):
- """
- Base WSGI middleware wrapper. These classes require an application to be
- initialized that will be called next. By default the middleware will
- simply call its wrapped app, or you can override __call__ to customize its
- behavior.
- """
-
- def __init__(self, application):
- self.application = application
-
- def process_request(self, req):
- """
- Called on each request.
-
- If this returns None, the next application down the stack will be
- executed. If it returns a response then that response will be returned
- and execution will stop here.
-
- """
- return None
-
- def process_response(self, response):
- """Do whatever you'd like to the response."""
- return response
-
- @webob.dec.wsgify
- def __call__(self, req):
- response = self.process_request(req)
- if response:
- return response
- response = req.get_response(self.application)
- return self.process_response(response)
-
-
-class Debug(Middleware):
- """
- Helper class that can be inserted into any WSGI application chain
- to get information about the request and response.
- """
-
- @webob.dec.wsgify
- def __call__(self, req):
- print ("*" * 40) + " REQUEST ENVIRON"
- for key, value in req.environ.items():
- print key, "=", value
- print
- resp = req.get_response(self.application)
-
- print ("*" * 40) + " RESPONSE HEADERS"
- for (key, value) in resp.headers.iteritems():
- print key, "=", value
- print
-
- resp.app_iter = self.print_generator(resp.app_iter)
-
- return resp
-
- @staticmethod
- def print_generator(app_iter):
- """
- Iterator that prints the contents of a wrapper string iterator
- when iterated.
- """
- print ("*" * 40) + " BODY"
- for part in app_iter:
- sys.stdout.write(part)
- sys.stdout.flush()
- yield part
- print
-
-
-class Router(object):
- """
- WSGI middleware that maps incoming requests to WSGI apps.
- """
-
- def __init__(self, mapper):
- """
- Create a router for the given routes.Mapper.
-
- Each route in `mapper` must specify a 'controller', which is a
- WSGI app to call. You'll probably want to specify an 'action' as
- well and have your controller be a wsgi.Controller, who will route
- the request to the action method.
-
- Examples:
- mapper = routes.Mapper()
- sc = ServerController()
-
- # Explicit mapping of one route to a controller+action
- mapper.connect(None, "/svrlist", controller=sc, action="list")
-
- # Actions are all implicitly defined
- mapper.resource("server", "servers", controller=sc)
-
- # Pointing to an arbitrary WSGI app. You can specify the
- # {path_info:.*} parameter so the target app can be handed just that
- # section of the URL.
- mapper.connect(None, "/v1.0/{path_info:.*}", controller=BlogApp())
- """
- self.map = mapper
- self._router = routes.middleware.RoutesMiddleware(self._dispatch,
- self.map)
-
- @webob.dec.wsgify
- def __call__(self, req):
- """
- Route the incoming request to a controller based on self.map.
- If no match, return a 404.
- """
- return self._router
-
- @staticmethod
- @webob.dec.wsgify
- def _dispatch(req):
- """
- Called by self._router after matching the incoming request to a route
- and putting the information into req.environ. Either returns 404
- or the routed WSGI app's response.
- """
- match = req.environ['wsgiorg.routing_args'][1]
- if not match:
- return webob.exc.HTTPNotFound()
- app = match['controller']
- return app
-
-
-class Request(webob.Request):
- """Add some Openstack API-specific logic to the base webob.Request."""
-
- def best_match_content_type(self):
- """Determine the requested response content-type."""
- supported = ('application/json',)
- bm = self.accept.best_match(supported)
- return bm or 'application/json'
-
- def get_content_type(self, allowed_content_types):
- """Determine content type of the request body."""
- if not "Content-Type" in self.headers:
- raise exception.InvalidContentType(content_type=None)
-
- content_type = self.content_type
-
- if content_type not in allowed_content_types:
- raise exception.InvalidContentType(content_type=content_type)
- else:
- return content_type
-
-
-class JSONRequestDeserializer(object):
- def has_body(self, request):
- """
- Returns whether a Webob.Request object will possess an entity body.
-
- :param request: Webob.Request object
- """
- if 'transfer-encoding' in request.headers:
- return True
- elif request.content_length > 0:
- return True
-
- return False
-
- def from_json(self, datastring):
- return json.loads(datastring)
-
- def default(self, request):
- if self.has_body(request):
- return {'body': self.from_json(request.body)}
- else:
- return {}
-
-
-class JSONResponseSerializer(object):
-
- def to_json(self, data):
- def sanitizer(obj):
- if isinstance(obj, datetime.datetime):
- return obj.isoformat()
- return obj
-
- return json.dumps(data, default=sanitizer)
-
- def default(self, response, result):
- response.content_type = 'application/json'
- response.body = self.to_json(result)
-
-
-class Resource(object):
- """
- WSGI app that handles (de)serialization and controller dispatch.
-
- Reads routing information supplied by RoutesMiddleware and calls
- the requested action method upon its deserializer, controller,
- and serializer. Those three objects may implement any of the basic
- controller action methods (create, update, show, index, delete)
- along with any that may be specified in the api router. A 'default'
- method may also be implemented to be used in place of any
- non-implemented actions. Deserializer methods must accept a request
- argument and return a dictionary. Controller methods must accept a
- request argument. Additionally, they must also accept keyword
- arguments that represent the keys returned by the Deserializer. They
- may raise a webob.exc exception or return a dict, which will be
- serialized by requested content type.
- """
- def __init__(self, controller, deserializer, serializer):
- """
- :param controller: object that implement methods created by routes lib
- :param deserializer: object that supports webob request deserialization
- through controller-like actions
- :param serializer: object that supports webob response serialization
- through controller-like actions
- """
- self.controller = controller
- self.serializer = serializer
- self.deserializer = deserializer
-
- @webob.dec.wsgify(RequestClass=Request)
- def __call__(self, request):
- """WSGI method that controls (de)serialization and method dispatch."""
- action_args = self.get_action_args(request.environ)
- action = action_args.pop('action', None)
-
- deserialized_request = self.dispatch(self.deserializer,
- action, request)
- action_args.update(deserialized_request)
-
- action_result = self.dispatch(self.controller, action,
- request, **action_args)
- try:
- response = webob.Response(request=request)
- self.dispatch(self.serializer, action, response, action_result)
- return response
-
- # return unserializable result (typically a webob exc)
- except Exception:
- return action_result
-
- def dispatch(self, obj, action, *args, **kwargs):
- """Find action-specific method on self and call it."""
- try:
- method = getattr(obj, action)
- except AttributeError:
- method = getattr(obj, 'default')
-
- return method(*args, **kwargs)
-
- def get_action_args(self, request_environment):
- """Parse dictionary created by routes library."""
- try:
- args = request_environment['wsgiorg.routing_args'][1].copy()
- except Exception:
- return {}
-
- try:
- del args['controller']
- except KeyError:
- pass
-
- try:
- del args['format']
- except KeyError:
- pass
-
- return args
-
-
-class BasePasteFactory(object):
-
- """A base class for paste app and filter factories.
-
- Sub-classes must override the KEY class attribute and provide
- a __call__ method.
- """
-
- KEY = None
-
- def __init__(self, conf):
- self.conf = conf
-
- def __call__(self, global_conf, **local_conf):
- raise NotImplementedError
-
- def _import_factory(self, local_conf):
- """Import an app/filter class.
-
- Lookup the KEY from the PasteDeploy local conf and import the
- class named there. This class can then be used as an app or
- filter factory.
-
- Note we support the : format.
-
- Note also that if you do e.g.
-
- key =
- value
-
- then ConfigParser returns a value with a leading newline, so
- we strip() the value before using it.
- """
- class_name = local_conf[self.KEY].replace(':', '.').strip()
- return utils.import_class(class_name)
-
-
-class AppFactory(BasePasteFactory):
-
- """A Generic paste.deploy app factory.
-
- This requires balancer.app_factory to be set to a callable which returns a
- WSGI app when invoked. The format of the name is : e.g.
-
- [app:apiv1app]
- paste.app_factory = balancer.common.wsgi:app_factory
- balancer.app_factory = balancer.api.v1:API
-
- The WSGI app constructor must accept a ConfigOpts object and a local config
- dict as its two arguments.
- """
- print "DEBUG AppFactory start\n"
- KEY = 'windc.app_factory'
-
- def __call__(self, global_conf, **local_conf):
- """The actual paste.app_factory protocol method."""
- print "DEBUG Call factory"
- factory = self._import_factory(local_conf)
- return factory(self.conf, **local_conf)
-
-
-class FilterFactory(AppFactory):
-
- """A Generic paste.deploy filter factory.
-
- This requires balancer.filter_factory to be set to a callable which returns
- a WSGI filter when invoked. The format is : e.g.
-
- [filter:cache]
- paste.filter_factory = balancer.common.wsgi:filter_factory
- balancer.filter_factory = balancer.api.middleware.cache:CacheFilter
-
- The WSGI filter constructor must accept a WSGI app, a ConfigOpts object and
- a local config dict as its three arguments.
- """
-
- KEY = 'windc.filter_factory'
-
- def __call__(self, global_conf, **local_conf):
- """The actual paste.filter_factory protocol method."""
- factory = self._import_factory(local_conf)
-
- def filter(app):
- return factory(app, self.conf, **local_conf)
-
- return filter
-
-
-def setup_paste_factories(conf):
- """Set up the generic paste app and filter factories.
-
- Set things up so that:
-
- paste.app_factory = balancer.common.wsgi:app_factory
-
- and
-
- paste.filter_factory = balancer.common.wsgi:filter_factory
-
- work correctly while loading PasteDeploy configuration.
-
- The app factories are constructed at runtime to allow us to pass a
- ConfigOpts object to the WSGI classes.
-
- :param conf: a ConfigOpts object
- """
- print "DEBUG Setup Factories\n"
- global app_factory, filter_factory
- app_factory = AppFactory(conf)
- filter_factory = FilterFactory(conf)
-
-
-def teardown_paste_factories():
- """Reverse the effect of setup_paste_factories()."""
- global app_factory, filter_factory
- del app_factory
- del filter_factory
-
-
-def paste_deploy_app(paste_config_file, app_name, conf):
- """Load a WSGI app from a PasteDeploy configuration.
-
- Use deploy.loadapp() to load the app from the PasteDeploy configuration,
- ensuring that the supplied ConfigOpts object is passed to the app and
- filter constructors.
-
- :param paste_config_file: a PasteDeploy config file
- :param app_name: the name of the app/pipeline to load from the file
- :param conf: a ConfigOpts object to supply to the app and its filters
- :returns: the WSGI app
- """
- print "DEBUG Paste deploy\n"
- setup_paste_factories(conf)
- try:
- return deploy.loadapp("config:%s" % paste_config_file, name=app_name)
- finally:
- teardown_paste_factories()
diff --git a/windc/windc/core/api.py b/windc/windc/core/api.py
deleted file mode 100644
index c9d1160b..00000000
--- a/windc/windc/core/api.py
+++ /dev/null
@@ -1,99 +0,0 @@
-# vim: tabstop=4 shiftwidth=4 softtabstop=4
-
-# Copyright 2011 OpenStack LLC.
-# All Rights Reserved.
-#
-# Licensed under the Apache License, Version 2.0 (the "License"); you may
-# not use this file except in compliance with the License. You may obtain
-# a copy of the License at
-#
-# http://www.apache.org/licenses/LICENSE-2.0
-#
-# Unless required by applicable law or agreed to in writing, software
-# distributed under the License is distributed on an "AS IS" BASIS, WITHOUT
-# WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. See the
-# License for the specific language governing permissions and limitations
-# under the License.
-
-from windc.db import api as db_api
-from windc.core import change_events as events
-
-def dc_get_index(conf, tenant_id):
- dcs = db_api.datacenter_get_all(conf, tenant_id)
- dc_list = [db_api.unpack_extra(dc) for dc in dcs]
- return dc_list
- pass
-
-def create_dc(conf, params):
- # We need to pack all attributes which are not defined by the model explicitly
- dc_params = db_api.datacenter_pack_extra(params)
- dc = db_api.datacenter_create(conf, dc_params)
- event = events.Event(events.SCOPE_DATACENTER_CHANGE, events.ACTION_ADD)
- events.change_event(conf, event, dc)
- return dc.id
- pass
-
-def delete_dc(conf, tenant_id, datacenter_id):
- dc = db_api.datacenter_get(conf, tenant_id, datacenter_id)
- event = events.Event(events.SCOPE_DATACENTER_CHANGE, events.ACTION_DELETE)
- events.change_event(conf, event, dc)
- db_api.datacenter_destroy(conf, datacenter_id)
- pass
-
-def dc_get_data(conf, tenant_id, datacenter_id):
- dc = db_api.datacenter_get(conf, tenant_id, datacenter_id)
- dc_data = db_api.unpack_extra(dc)
- return dc_data
- pass
-
-def update_dc(conf, tenant_id, datacenter_id, body):
- dc = db_api.datacenter_get(conf, tenant_id, datacenter_id)
- old_dc = copy.deepcopy(dc)
- db_api.pack_update(dc, body)
- dc = db_api.datacenter_update(conf, datacenter_id, dc)
- event = events.Event(events.SCOPE_DATACENTER_CHANGE,
- events.ACTION_MODIFY)
- event.previous_state = old_dc
- events.change_event(conf, event, dc)
- pass
-
-def service_get_index(conf, tenant_id, datacenter_id):
- srvcs = db_api.service_get_all_by_datacenter_id(conf, tenant_id,
- datacenter_id)
- srv_list = [db_api.unpack_extra(srv) for srv in srvcs]
- return srv_list
- pass
-
-def create_service(conf, params):
- # We need to pack all attributes which are not defined
- # by the model explicitly
- srv_params = db_api.service_pack_extra(params)
- srv = db_api.service_create(conf, srv_params)
- event = events.Event(events.SCOPE_SERVICE_CHANGE, events.ACTION_ADD)
- events.change_event(conf, event, srv)
- return srv.id
- pass
-
-def delete_service(conf, tenant_id, datacenter_id, service_id):
- srv = db_api.service_get(conf, service_id, tenant_id)
- srv_data = db_api.unpack_extra(srv)
- event = events.Event(events.SCOPE_SERVICE_CHANGE, events.ACTION_DELETE)
- events.change_event(conf, event, srv)
- db_api.service_destroy(conf,service_id)
- pass
-
-def service_get_data(conf, tenant_id, datacenter_id, service_id):
- srv = db_api.service_get(conf, service_id, tenant_id)
- srv_data = db_api.unpack_extra(srv)
- return srv_data
- pass
-
-def update_service(conf, tenant_id, datacenter_id, service_id, body):
- srv = db_api.service_get(conf, service_id, tenant_id)
- old_srv = copy.deepcopy(srv)
- db_api.pack_update(srv, body)
- srv = db_api.service_update(conf, service_id, srv)
- event = events.Event(events.SCOPE_SERVICE_CHANGE, events.ACTION_MODIFY)
- event.previous_state = old_srv
- events.change_event(conf, event, srv)
- pass
diff --git a/windc/windc/core/builder_set.py b/windc/windc/core/builder_set.py
deleted file mode 100644
index 0f366018..00000000
--- a/windc/windc/core/builder_set.py
+++ /dev/null
@@ -1,72 +0,0 @@
-# vim: tabstop=4 shiftwidth=4 softtabstop=4
-
-# Copyright 2011 OpenStack LLC.
-# All Rights Reserved.
-#
-# Licensed under the Apache License, Version 2.0 (the "License"); you may
-# not use this file except in compliance with the License. You may obtain
-# a copy of the License at
-#
-# http://www.apache.org/licenses/LICENSE-2.0
-#
-# Unless required by applicable law or agreed to in writing, software
-# distributed under the License is distributed on an "AS IS" BASIS, WITHOUT
-# WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. See the
-# License for the specific language governing permissions and limitations
-# under the License.
-
-from builder import Builder
-
-import imp
-import os
-import sys, glob
-import logging
-import traceback
-
-LOG = logging.getLogger(__name__)
-global builders
-
-def load_from_file(filepath, conf):
- class_inst = None
-
- mod_name,file_ext = os.path.splitext(os.path.split(filepath)[-1])
-
- if file_ext.lower() == '.py':
- py_mod = imp.load_source(mod_name, filepath)
-
- elif file_ext.lower() == '.pyc':
- py_mod = imp.load_compiled(mod_name, filepath)
-
- if hasattr(py_mod, mod_name):
- callable = getattr(__import__(mod_name),mod_name)
- class_inst = callable(conf)
-
- return class_inst
-
-
-class BuilderSet:
- def __init__(self):
- self.path = './windc/core/builders'
- sys.path.append(self.path)
- self.set = {}
-
- def load(self, conf):
-
- files = glob.glob(self.path+'/*.py')
-
- for file in files:
- LOG.debug("Trying to load builder from file: %s", file)
- try:
- builder = load_from_file(file, conf)
- LOG.info("Buider '%s' loaded.", builder.name)
- self.set[builder.type] = builder
- except:
- exc_type, exc_value, exc_traceback = sys.exc_info()
- LOG.error('Can`t load builder from the file %s. Skip it.', file)
- LOG.debug(repr(traceback.format_exception(exc_type, exc_value,
- exc_traceback)))
-
-
- def reload(self):
- self.set = {}
- self.load()
diff --git a/windc/windc/core/builders/ActiveDirectory.py b/windc/windc/core/builders/ActiveDirectory.py
deleted file mode 100644
index 2bae2a01..00000000
--- a/windc/windc/core/builders/ActiveDirectory.py
+++ /dev/null
@@ -1,210 +0,0 @@
-# vim: tabstop=4 shiftwidth=4 softtabstop=4
-
-# Copyright 2011 OpenStack LLC.
-# All Rights Reserved.
-#
-# Licensed under the Apache License, Version 2.0 (the "License"); you may
-# not use this file except in compliance with the License. You may obtain
-# a copy of the License at
-#
-# http://www.apache.org/licenses/LICENSE-2.0
-#
-# Unless required by applicable law or agreed to in writing, software
-# distributed under the License is distributed on an "AS IS" BASIS, WITHOUT
-# WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. See the
-# License for the specific language governing permissions and limitations
-# under the License.
-
-
-import logging
-import uuid
-import os
-from sphinx.ext.autosummary import generate
-
-LOG = logging.getLogger(__name__)
-
-from windc.core.builder import Builder
-from windc.core import change_events as events
-from windc.db import api as db_api
-from windc.core.templates import Template
-from windc.core import commands as command_api
-import json
-from windc.common import cfg
-from random import choice
-
-chars = 'abcdefghklmnopqrstvwxyz2345689'
-
-
-class ActiveDirectory(Builder):
- def __init__(self, conf):
- self.name = "Active Directory Builder"
- self.type = "active_directory_service"
- self.version = 1
- self.conf = conf
-
- conf.register_group(cfg.OptGroup(name="rabbitmq"))
- conf.register_opts([
- cfg.StrOpt('host', default='10.0.0.1'),
- cfg.StrOpt('vhost', default='keero'),
- ], group="rabbitmq")
-
-
- def build(self, context, event, data, executor):
- dc = db_api.unpack_extra(data)
- if event.scope == events.SCOPE_SERVICE_CHANGE:
- LOG.info ("Got service change event. Analysing..")
- if self.do_analysis(context, event, dc):
- self.plan_changes(context, event, dc)
-
- self.submit_commands(context, event, dc, executor)
- else:
- LOG.debug("Not in my scope. Skip event.")
- pass
-
- def generate(self, length):
- return ''.join(choice(chars) for _ in range(length))
-
- def do_analysis(self, context, event, data):
- LOG.debug("Doing analysis for data: %s", data)
- print data
-
- context['zones'] = ['a1']
- if data['type'] == self.type:
- LOG.debug("It is a service which I should build.")
- datacenter_id = data['datacenter_id']
- dc = db_api.datacenter_get(context['conf'],data['tenant_id'],
- data['datacenter_id'])
- datacenter = db_api.unpack_extra(dc)
- context['stack_name']=datacenter['name']
- return True
- else:
- return False
-
- def plan_changes(self, context, event, data):
- # Here we can plan multiple command execution.
- # It might be Heat call command, then chef call command and other
- #
- LOG.debug("Plan changes...")
- self.prepare_template(context, event, data)
- # self.chef_configuration(context, event, data)
- # context['commands'].append(self.deploy_template_command(context, event, data))
- # context['commands'].append(self.chef_configuration_command(context, event, data))
- pass
-
- def prepare_template(self, context, event, data):
- LOG.debug("Prepare CloudFormation Template...")
- # template = Template()
- # template.add_description('Base template for Active Directory deployment')
- # sec_grp = template.create_security_group('Security group for AD')
- # rule = template.create_securitygroup_rule('tcp','3389','3389','0.0.0.0/0')
- # template.add_rule_to_securitygroup(sec_grp, rule)
- # template.add_resource('ADSecurityGroup', sec_grp)
- #
- # instance = template.create_instance()
- # instance_name= 'AD-DC001'
- # template.add_security_group(instance, 'ADSecurityGroup')
- # template.add_resource(instance_name, instance)
- #
- # template.add_output_value(instance_name+'-IP',{"Fn::GetAtt" : [instance_name,'PublicIp']},
- # 'Public IP for the domain controller.')
-
- print "-------------------"
- print data
- print "-------------------"
- print context
- print "********"
- try:
- print self.conf.rabbitmq.vhost
- except Exception, ex:
- print ex
- print "********"
-
- with open('data/Windows.template', 'r') as f:
- read_data = f.read()
-
- template = json.loads(read_data)
-
- instance_template = template['Resources']['InstanceTemplate']
-
- del template['Resources']['InstanceTemplate']
- context['instances'] = []
- context['template_arguments'] = {
- "KeyName": "keero-linux-keys",
- "InstanceType": "m1.medium",
- "ImageName": "ws-2012-full-agent"
- }
-
- for i in range(data['dc_count']):
- instance_name = 'dc' + str(i) + "x" + self.generate(9)
- context['instances'].append(instance_name)
- template['Resources'][instance_name] = instance_template
-
- context['template']=template
- pass
-
- def deploy_template_command(self, context, event, data, executor):
- LOG.debug("Creating CloudFormation Template deployment command...")
- #print context['template'].to_json()
- LOG.debug(context['template'])
- if not os.path.exists("templates"):
- os.mkdir("templates")
- fname = "templates/"+str(uuid.uuid4())
- print "Saving template to", fname
- f=open(fname, "w")
- f.write(json.dumps(context['template']))
- f.close()
- context['template_name']=fname
- command = command_api.Command(command_api.TEMPLATE_DEPLOYMENT_COMMAND, context)
- executor.execute(command)
-
- def chef_configuration(self, context, event, data):
- LOG.debug("Creating Chef configuration...")
- context['Role'] = 'pdc'
- pass
-
- def transform(self, path, map):
- with open(path, 'r') as f:
- read_data = f.read()
-
- template = json.loads(read_data)
- if 'Commands' in template:
- for command in template['Commands']:
- if 'Arguments' in command:
- for argument, argument_value in command['Arguments'].items():
- if isinstance(argument_value, (str, unicode)) and argument_value.startswith("@"):
- command['Arguments'][argument] = map[argument_value[1:]]
-
- return json.dumps(template)
-
- def deploy_execution_plan(self, context, event, data, executor):
- i = 0
- for instance in context['instances']:
- i += 1
- if i == 1:
- files = ["data/CreatePrimaryDC.json"]
- else:
- files = []
-
- for file in files:
- queueData = {
- "queueName" : str("%s-%s" % (context['stack_name'], instance)),
- "resultQueueName": "-execution-results",
- "body": self.transform(file, data)
- }
- command = command_api.Command(command_api.EXECUTION_PLAN_DEPLOYMENT_COMMAND, context, queueData)
- executor.execute(command)
-
-
-
-
- def chef_configuration_command(self, context, event, data):
- LOG.debug("Creating Chef configuration command...")
- command = command_api.Command(command_api.CHEF_COMMAND, context)
- return command
-
- def submit_commands(self, context, event, data, executor):
- LOG.debug("Submit commands for execution...")
- self.deploy_template_command(context, event, data, executor)
- self.deploy_execution_plan(context, event, data, executor)
- print "Commands submitted"
- pass
diff --git a/windc/windc/core/builders/DataCenter.py b/windc/windc/core/builders/DataCenter.py
deleted file mode 100644
index d425bae7..00000000
--- a/windc/windc/core/builders/DataCenter.py
+++ /dev/null
@@ -1,37 +0,0 @@
-# vim: tabstop=4 shiftwidth=4 softtabstop=4
-
-# Copyright 2011 OpenStack LLC.
-# All Rights Reserved.
-#
-# Licensed under the Apache License, Version 2.0 (the "License"); you may
-# not use this file except in compliance with the License. You may obtain
-# a copy of the License at
-#
-# http://www.apache.org/licenses/LICENSE-2.0
-#
-# Unless required by applicable law or agreed to in writing, software
-# distributed under the License is distributed on an "AS IS" BASIS, WITHOUT
-# WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. See the
-# License for the specific language governing permissions and limitations
-# under the License.
-
-
-import logging
-LOG = logging.getLogger(__name__)
-
-from windc.core.builder import Builder
-from windc.core import change_events as events
-
-class DataCenter(Builder):
- def __init__(self, conf):
- self.name = "Data Center Builder"
- self.type = "datacenter"
- self.version = 1
-
- def build(self, context, event, data, executor):
- if event.scope == events.SCOPE_DATACENTER_CHANGE:
- LOG.info ("Got Data Center change event. Analysing...")
- else:
- LOG.debug("Not in my scope. Skip event.")
- pass
-
diff --git a/windc/windc/core/change_events.py b/windc/windc/core/change_events.py
deleted file mode 100644
index c73ea29e..00000000
--- a/windc/windc/core/change_events.py
+++ /dev/null
@@ -1,55 +0,0 @@
-# vim: tabstop=4 shiftwidth=4 softtabstop=4
-
-# Copyright 2011 OpenStack LLC.
-# All Rights Reserved.
-#
-# Licensed under the Apache License, Version 2.0 (the "License"); you may
-# not use this file except in compliance with the License. You may obtain
-# a copy of the License at
-#
-# http://www.apache.org/licenses/LICENSE-2.0
-#
-# Unless required by applicable law or agreed to in writing, software
-# distributed under the License is distributed on an "AS IS" BASIS, WITHOUT
-# WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. See the
-# License for the specific language governing permissions and limitations
-# under the License.
-
-
-import logging
-LOG = logging.getLogger(__name__)
-
-from windc.core import builder_set
-from windc.core import builder
-from windc.drivers import command_executor
-#Declare events types
-
-SCOPE_SERVICE_CHANGE = "Service"
-SCOPE_DATACENTER_CHANGE = "Datacenter"
-SCOPE_VM_CHANGE = "VMChange"
-
-ACTION_ADD = "Add"
-ACTION_MODIFY = "Modify"
-ACTION_DELETE = "Delete"
-
-class Event:
- scope = None
- action = None
- previous_state = None
- def __init__(self, scope, action):
- self.scope = scope
- self.action = action
-
-def change_event(conf, event, data):
- LOG.info("Change event of type: %s ", event)
- context = builder.create_context()
- context['conf'] = conf
- executor = command_executor.Executor(conf)
- for builder_type in builder_set.builders.set:
- builder_instance = builder_set.builders.set[builder_type]
- builder_instance.build(context, event, data, executor)
-
-
-
-
-
diff --git a/windc/windc/core/service_status.py b/windc/windc/core/service_status.py
deleted file mode 100644
index 564d5a12..00000000
--- a/windc/windc/core/service_status.py
+++ /dev/null
@@ -1,25 +0,0 @@
-# vim: tabstop=4 shiftwidth=4 softtabstop=4
-
-# Copyright (c) 2011 X.commerce, a business unit of eBay Inc.
-# Copyright 2010 United States Government as represented by the
-# Administrator of the National Aeronautics and Space Administration.
-# Copyright 2011 Piston Cloud Computing, Inc.
-# All Rights Reserved.
-#
-# Licensed under the Apache License, Version 2.0 (the "License"); you may
-# not use this file except in compliance with the License. You may obtain
-# a copy of the License at
-#
-# http://www.apache.org/licenses/LICENSE-2.0
-#
-# Unless required by applicable law or agreed to in writing, software
-# distributed under the License is distributed on an "AS IS" BASIS, WITHOUT
-# WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. See the
-# License for the specific language governing permissions and limitations
-# under the License.
-"""Possible load balancer statuses."""
-
-BUILD = "BUILD"
-ACTIVE = "ACTIVE"
-PENDING_UPDATE = "PENDING_UPDATE"
-ERROR = "ERROR"
diff --git a/windc/windc/core/templates.py b/windc/windc/core/templates.py
deleted file mode 100644
index 47a7c90d..00000000
--- a/windc/windc/core/templates.py
+++ /dev/null
@@ -1,107 +0,0 @@
-# vim: tabstop=4 shiftwidth=4 softtabstop=4
-
-# Copyright 2011 OpenStack LLC.
-# All Rights Reserved.
-#
-# Licensed under the Apache License, Version 2.0 (the "License"); you may
-# not use this file except in compliance with the License. You may obtain
-# a copy of the License at
-#
-# http://www.apache.org/licenses/LICENSE-2.0
-#
-# Unless required by applicable law or agreed to in writing, software
-# distributed under the License is distributed on an "AS IS" BASIS, WITHOUT
-# WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. See the
-# License for the specific language governing permissions and limitations
-# under the License.
-
-
-import logging
-from windc.common.wsgi import JSONResponseSerializer
-LOG = logging.getLogger(__name__)
-
-class Template:
- def __init__(self):
- self.content = {'AWSTemplateFormatVersion':'2010-09-09', 'Description':'',
- 'Parameters':{}}
- self.content['Mappings'] = {
- "AWSInstanceType2Arch" : {
- "t1.micro" : { "Arch" : "32" },
- "m1.small" : { "Arch" : "32" },
- "m1.large" : { "Arch" : "64" },
- "m1.xlarge" : { "Arch" : "64" },
- "m2.xlarge" : { "Arch" : "64" },
- "m2.2xlarge" : { "Arch" : "64" },
- "m2.4xlarge" : { "Arch" : "64" },
- "c1.medium" : { "Arch" : "32" },
- "c1.xlarge" : { "Arch" : "64" },
- "cc1.4xlarge" : { "Arch" : "64" }
- },
- "DistroArch2AMI": {
- "F16" : { "32" : "F16-i386-cfntools", "64" : "F16-x86_64-cfntools" },
- "F17" : { "32" : "F17-i386-cfntools", "64" : "F17-x86_64-cfntools" },
- "U10" : { "32" : "U10-i386-cfntools", "64" : "U10-x86_64-cfntools" },
- "RHEL-6.1": { "32" : "rhel61-i386-cfntools", "64" : "rhel61-x86_64-cfntools" },
- "RHEL-6.2": { "32" : "rhel62-i386-cfntools", "64" : "rhel62-x86_64-cfntools" },
- "RHEL-6.3": { "32" : "rhel63-i386-cfntools", "64" : "rhel63-x86_64-cfntools" }
- }
- }
- self.content['Resources'] = {}
- self.content['Outputs'] = {}
-
- def to_json(self):
- serializer = JSONResponseSerializer()
- json = serializer.to_json(self.content)
- return json
-
-
- def empty_template(self):
- pass
-
- def add_description(self, description):
- self.content['Description'] = description
-
- def add_parameter(self, name, parameter):
- self.content['Parameters'].update({name : parameter})
-
- def add_resource(self, name, resource):
- self.content['Resources'].update({name : resource})
-
- def create_parameter(self, defult, type, decription):
- parameter = {'Default':default, 'Type':type, 'Description':description}
- return parameter
-
- def create_security_group(self, description):
- sec_grp = {'Type':'AWS::EC2::SecurityGroup'}
- sec_grp['Properties'] = {}
- sec_grp['Properties']['GroupDescription'] = description
- sec_grp['Properties']['SecurityGroupIngress'] = []
- return sec_grp
-
- def add_rule_to_securitygroup(self, grp, rule):
- grp['Properties']['SecurityGroupIngress'].append(rule)
-
- def create_securitygroup_rule(self, proto, f_port, t_port, cidr):
- rule = {'IpProtocol':proto, 'FromPort':f_port, 'ToPort':t_port,'CidrIp': cidr}
- return rule
-
- def create_instance(self):
- instance = {'Type':'AWS::EC2::Instance','Metadata':{},'Properties':{}}
- instance['Properties']['ImageId'] = 'U10-x86_64-cfntools'
- instance['Properties']['SecurityGroups']=[]
- instance['Properties']['KeyName'] = 'keero-linux-keys'
- instance['Properties']['InstanceType'] = 'm1.small'
- return instance
-
- def add_security_group(self, instance, grp_name):
- instance['Properties']['SecurityGroups'].append({'Ref': grp_name})
-
- def add_output_value(self, name, value, description):
- self.content['Outputs'].update({name:{'Value':value, 'Description':description}})
-
- def get_content(self):
- return self.content
-
-
-
-
diff --git a/windc/windc/db/__init__.py b/windc/windc/db/__init__.py
deleted file mode 100644
index dd5411af..00000000
--- a/windc/windc/db/__init__.py
+++ /dev/null
@@ -1 +0,0 @@
-from windc.db.api import *
diff --git a/windc/windc/db/api.py b/windc/windc/db/api.py
deleted file mode 100644
index 1e06f780..00000000
--- a/windc/windc/db/api.py
+++ /dev/null
@@ -1,164 +0,0 @@
-# vim: tabstop=4 shiftwidth=4 softtabstop=4
-
-# Copyright (c) 2011 X.commerce, a business unit of eBay Inc.
-# Copyright 2010 United States Government as represented by the
-# Administrator of the National Aeronautics and Space Administration.
-# Copyright 2011 Piston Cloud Computing, Inc.
-# All Rights Reserved.
-#
-# Licensed under the Apache License, Version 2.0 (the "License"); you may
-# not use this file except in compliance with the License. You may obtain
-# a copy of the License at
-#
-# http://www.apache.org/licenses/LICENSE-2.0
-#
-# Unless required by applicable law or agreed to in writing, software
-# distributed under the License is distributed on an "AS IS" BASIS, WITHOUT
-# WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. See the
-# License for the specific language governing permissions and limitations
-# under the License.
-"""Database storage API."""
-
-import functools
-import datetime
-
-from windc.db import models
-from windc.db.session import get_session
-from windc import exception
-
-
-
-# XXX(akscram): pack_ and unpack_ are helper methods to compatibility
-def pack_extra(model, values):
- obj_ref = model()
- pack_update(obj_ref, values)
- return obj_ref
-
-
-def unpack_extra(obj_ref):
- obj_dict = dict(obj_ref.iteritems())
- obj_dict.update(obj_dict.pop('extra', None) or {})
- return obj_dict
-
-
-def pack_update(obj_ref, values):
- obj_dict = values.copy()
- for k, v in values.iteritems():
- if k in obj_ref.keys():
- obj_ref[k] = obj_dict.pop(k)
- if obj_dict:
- if obj_ref['extra'] is not None:
- obj_ref['extra'].update(obj_dict)
- else:
- obj_ref['extra'] = obj_dict.copy()
-
-
-datacenter_pack_extra = functools.partial(pack_extra, models.DataCenter)
-service_pack_extra = functools.partial(pack_extra, models.Service)
-
-
-# Datacenter
-
-
-def datacenter_get(conf, tenant_id, datacenter_id, session=None):
- session = session or get_session(conf)
- datacenter_ref = session.query(models.DataCenter).\
- filter_by(id=datacenter_id).first()
- if not datacenter_ref:
- raise exception.DeviceNotFound(datacenter_id=datacenter_id)
- return datacenter_ref
-
-
-def datacenter_get_all(conf, tenant_id):
- session = get_session(conf)
- query = session.query(models.DataCenter).\
- filter_by(tenant_id=tenant_id)
- return query.all()
-
-
-def datacenter_create(conf, values):
- session = get_session(conf)
- with session.begin():
- datacenter_ref = models.DataCenter()
- datacenter_ref.update(values)
- session.add(datacenter_ref)
- return datacenter_ref
-
-
-def datacenter_update(conf, datacenter_id, values):
- session = get_session(conf)
- with session.begin():
- datacenter_ref = session.query(models.DataCenter).\
- filter_by(id=datacenter_id).first()
- datacenter_ref.update(values)
- return datacenter_ref
-
-
-def datacenter_destroy(conf, datacenter_id):
- session = get_session(conf)
- with session.begin():
- datacenter_ref = session.query(models.DataCenter).\
- filter_by(id=datacenter_id).first()
- session.delete(datacenter_ref)
- return datacenter_ref
-
-# Service
-
-
-def service_get(conf, service_id, tenant_id=None, session=None):
- session = session or get_session(conf)
- query = session.query(models.Service).filter_by(id=service_id)
- if tenant_id:
- query = query.filter_by(tenant_id=tenant_id)
- service_ref = query.first()
- if not service_ref:
- raise exception.ServiceNotFound(service_ref=service_ref)
- return service_ref
-
-def service_get_all_by_datacenter_id(conf, tenant_id, datacenter_id):
- session = get_session(conf)
- query = session.query(models.Service).filter_by(datacenter_id=datacenter_id)
- return query.all()
-
-def service_create(conf, values):
- session = get_session(conf)
- with session.begin():
- service_ref = models.Service()
- service_ref.update(values)
- session.add(service_ref)
- return service_ref
-
-def service_update(conf, service_id, values):
- session = get_session(conf)
- with session.begin():
- service_ref = service_get(conf, service_id, session=session)
- service_ref.update(values)
- service_ref['updated_at'] = datetime.datetime.utcnow()
- return service_ref
-
-def service_destroy(conf, service_id):
- session = get_session(conf)
- with session.begin():
- service_ref = service_get(conf, service_id, session=session)
- session.delete(service_ref)
-
-def service_get_all_by_project(conf, tenant_id):
- session = get_session(conf)
- query = session.query(models.Service).filter_by(tenant_id=tenant_id)
- return query.all()
-
-def service_get_all_by_vm_id(conf, tenant_id, vm_id):
- session = get_session(conf)
- query = session.query(models.Service).distinct().\
- filter_by(tenant_id=tenant_id).\
- filter(vm_id == vm_id)
- return query.all()
-
-def service_count_active_by_datacenter(conf, datacenter_id):
- session = get_session(conf)
- with session.begin():
- service_count = session.query(models.Service).\
- filter_by(datacenter_id=datacenter_id).\
- filter_by(status=service_status.ACTIVE).\
- count()
- return service_count
diff --git a/windc/windc/db/base.py b/windc/windc/db/base.py
deleted file mode 100644
index bf1c2f75..00000000
--- a/windc/windc/db/base.py
+++ /dev/null
@@ -1,76 +0,0 @@
-# vim: tabstop=4 shiftwidth=4 softtabstop=4
-
-# Copyright (c) 2011 X.commerce, a business unit of eBay Inc.
-# Copyright 2010 United States Government as represented by the
-# Administrator of the National Aeronautics and Space Administration.
-# Copyright 2011 Piston Cloud Computing, Inc.
-# All Rights Reserved.
-#
-# Licensed under the Apache License, Version 2.0 (the "License"); you may
-# not use this file except in compliance with the License. You may obtain
-# a copy of the License at
-#
-# http://www.apache.org/licenses/LICENSE-2.0
-#
-# Unless required by applicable law or agreed to in writing, software
-# distributed under the License is distributed on an "AS IS" BASIS, WITHOUT
-# WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. See the
-# License for the specific language governing permissions and limitations
-# under the License.
-"""Base classes and custome fields for balancer models."""
-
-import json
-
-from sqlalchemy.ext.declarative import declarative_base
-from sqlalchemy.orm import object_mapper
-from sqlalchemy.types import TypeDecorator
-from sqlalchemy import Text
-
-
-Base = declarative_base()
-
-
-class DictBase(object):
- def to_dict(self):
- return dict(self.iteritems())
-
- def __setitem__(self, key, value):
- setattr(self, key, value)
-
- def __getitem__(self, key):
- return getattr(self, key)
-
- def get(self, key, default=None):
- return getattr(self, key, default)
-
- def __iter__(self):
- return (col.name for col in object_mapper(self).columns)
-
- def keys(self):
- return list(self)
-
- def update(self, values):
- for key, value in values.iteritems():
- if isinstance(value, dict):
- value = value.copy()
- setattr(self, key, value)
-
- def iteritems(self):
- items = []
- for key in self:
- value = getattr(self, key)
- if isinstance(value, dict):
- value = value.copy()
- items.append((key, value))
- return iter(items)
-
-
-class JsonBlob(TypeDecorator):
-
- impl = Text
-
- def process_bind_param(self, value, dialect):
- return json.dumps(value)
-
- def process_result_value(self, value, dialect):
- return json.loads(value)
diff --git a/windc/windc/db/migrate_repo/README b/windc/windc/db/migrate_repo/README
deleted file mode 100644
index 6218f8ca..00000000
--- a/windc/windc/db/migrate_repo/README
+++ /dev/null
@@ -1,4 +0,0 @@
-This is a database migration repository.
-
-More information at
-http://code.google.com/p/sqlalchemy-migrate/
diff --git a/windc/windc/db/migrate_repo/__init__.py b/windc/windc/db/migrate_repo/__init__.py
deleted file mode 100644
index e69de29b..00000000
diff --git a/windc/windc/db/migrate_repo/manage.py b/windc/windc/db/migrate_repo/manage.py
deleted file mode 100644
index 39fa3892..00000000
--- a/windc/windc/db/migrate_repo/manage.py
+++ /dev/null
@@ -1,5 +0,0 @@
-#!/usr/bin/env python
-from migrate.versioning.shell import main
-
-if __name__ == '__main__':
- main(debug='False')
diff --git a/windc/windc/db/migrate_repo/migrate.cfg b/windc/windc/db/migrate_repo/migrate.cfg
deleted file mode 100644
index 15815220..00000000
--- a/windc/windc/db/migrate_repo/migrate.cfg
+++ /dev/null
@@ -1,25 +0,0 @@
-[db_settings]
-# Used to identify which repository this database is versioned under.
-# You can use the name of your project.
-repository_id=windc
-
-# The name of the database table used to track the schema version.
-# This name shouldn't already be used by your project.
-# If this is changed once a database is under version control, you'll need to
-# change the table name in each database too.
-version_table=migrate_version
-
-# When committing a change script, Migrate will attempt to generate the
-# sql for all supported databases; normally, if one of them fails - probably
-# because you don't have that database installed - it is ignored and the
-# commit continues, perhaps ending successfully.
-# Databases in this list MUST compile successfully during a commit, or the
-# entire commit will fail. List the databases your application will actually
-# be using to ensure your updates to that database work properly.
-# This must be a list; example: ['postgres','sqlite']
-required_dbs=[]
-
-# When creating new change scripts, Migrate will stamp the new script with
-# a version number. By default this is latest_version + 1. You can set this
-# to 'true' to tell Migrate to use the UTC timestamp instead.
-use_timestamp_numbering=False
diff --git a/windc/windc/db/migrate_repo/versions/001_Add_initial_tables.py b/windc/windc/db/migrate_repo/versions/001_Add_initial_tables.py
deleted file mode 100644
index 12003287..00000000
--- a/windc/windc/db/migrate_repo/versions/001_Add_initial_tables.py
+++ /dev/null
@@ -1,41 +0,0 @@
-from sqlalchemy.schema import MetaData, Table, Column, ForeignKey
-from sqlalchemy.types import Integer, String, Text, DateTime
-
-
-meta = MetaData()
-
-Table('datacenter', meta,
- Column('id', String(32), primary_key=True),
- Column('name', String(255)),
- Column('type', String(255)),
- Column('version', String(255)),
- Column('tenant_id',String(100)),
- Column('KMS', String(80)),
- Column('WSUS', String(80)),
- Column('extra', Text()),
-)
-
-Table('service', meta,
- Column('id', String(32), primary_key=True),
- Column('datacenter_id', String(32), ForeignKey('datacenter.id')),
- Column('name', String(255)),
- Column('type', String(40)),
- Column('status', String(255)),
- Column('tenant_id', String(40)),
- Column('created_at', DateTime, nullable=False),
- Column('updated_at', DateTime, nullable=False),
- Column('deployed', String(40)),
- Column('vm_id',String(40)),
- Column('extra', Text()),
-)
-
-
-
-def upgrade(migrate_engine):
- meta.bind = migrate_engine
- meta.create_all()
-
-
-def downgrade(migrate_engine):
- meta.bind = migrate_engine
- meta.drop_all()
diff --git a/windc/windc/db/migrate_repo/versions/__init__.py b/windc/windc/db/migrate_repo/versions/__init__.py
deleted file mode 100644
index e69de29b..00000000
diff --git a/windc/windc/db/models.py b/windc/windc/db/models.py
deleted file mode 100644
index aa755ab9..00000000
--- a/windc/windc/db/models.py
+++ /dev/null
@@ -1,81 +0,0 @@
-# vim: tabstop=4 shiftwidth=4 softtabstop=4
-
-#Copyright by Mirantis Inc.
-#
-# Licensed under the Apache License, Version 2.0 (the "License"); you may
-# not use this file except in compliance with the License. You may obtain
-# a copy of the License at
-#
-# http://www.apache.org/licenses/LICENSE-2.0
-#
-# Unless required by applicable law or agreed to in writing, software
-# distributed under the License is distributed on an "AS IS" BASIS, WITHOUT
-# WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. See the
-# License for the specific language governing permissions and limitations
-# under the License.
-"""SQLAlchemy models for balancer data."""
-
-import datetime
-import uuid
-
-from sqlalchemy.orm import relationship, backref
-from sqlalchemy import (Column, ForeignKey, Integer, String, Boolean,
- DateTime)
-
-from windc.db.base import Base, DictBase, JsonBlob
-
-
-def create_uuid():
- return uuid.uuid4().hex
-
-
-class DataCenter(DictBase, Base):
- """
- Represents a data center - a Windows Environment with different
- services in it.
- """
-
- __tablename__ = 'datacenter'
- id = Column(String(32), primary_key=True, default=create_uuid)
- name = Column(String(255))
- type = Column(String(255))
- version = Column(String(255))
- tenant_id = Column(String(100))
- KMS = Column(String(80))
- WSUS = Column(String(80))
- extra = Column(JsonBlob())
-
-
-class Service(DictBase, Base):
- """
- Represents an instance of service.
-
- :var name: string
- :var type: string - type of service (e.g. Active Directory)
- :var tenant_id: string - OpenStack tenant ID
- :var extra: dictionary - additional attributes
- """
-
- __tablename__ = 'service'
- id = Column(String(32), primary_key=True, default=create_uuid)
- datacenter_id = Column(String(32), ForeignKey('datacenter.id'))
- name = Column(String(255))
- type = Column(String(40))
- status = Column(String(40))
- tenant_id = Column(String(40))
- created_at = Column(DateTime, default=datetime.datetime.utcnow,
- nullable=False)
- updated_at = Column(DateTime, default=datetime.datetime.utcnow,
- onupdate=datetime.datetime.utcnow,
- nullable=False)
- deployed = Column(String(40))
- vm_id = Column(String(40))
- extra = Column(JsonBlob())
- datacenter = relationship(DataCenter,
- backref=backref('service', order_by=id),
- uselist=False)
-
-def register_models(engine):
- """Create tables for models."""
-
- Base.metadata.create_all(engine)
diff --git a/windc/windc/db/session.py b/windc/windc/db/session.py
deleted file mode 100644
index 19161768..00000000
--- a/windc/windc/db/session.py
+++ /dev/null
@@ -1,122 +0,0 @@
-# vim: tabstop=4 shiftwidth=4 softtabstop=4
-
-# Copyright (c) 2011 X.commerce, a business unit of eBay Inc.
-# Copyright 2010 United States Government as represented by the
-# Administrator of the National Aeronautics and Space Administration.
-# Copyright 2011 Piston Cloud Computing, Inc.
-# All Rights Reserved.
-#
-# Licensed under the Apache License, Version 2.0 (the "License"); you may
-# not use this file except in compliance with the License. You may obtain
-# a copy of the License at
-#
-# http://www.apache.org/licenses/LICENSE-2.0
-#
-# Unless required by applicable law or agreed to in writing, software
-# distributed under the License is distributed on an "AS IS" BASIS, WITHOUT
-# WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. See the
-# License for the specific language governing permissions and limitations
-# under the License.
-"""Session management functions."""
-
-import os
-import logging
-
-from migrate.versioning import api as versioning_api
-from migrate import exceptions as versioning_exceptions
-from sqlalchemy import create_engine
-from sqlalchemy.engine.url import make_url
-from sqlalchemy.orm import sessionmaker
-from sqlalchemy.pool import NullPool
-from sqlalchemy.exc import DisconnectionError
-
-from windc.common import cfg
-from windc.db import migrate_repo
-
-
-DB_GROUP_NAME = 'sql'
-DB_OPTIONS = (
- cfg.IntOpt('idle_timeout', default=3600),
- cfg.StrOpt('connection', default='sqlite:///windc.sqlite'),
-)
-
-MAKER = None
-ENGINE = None
-
-
-class MySQLPingListener(object):
- """
- Ensures that MySQL connections checked out of the
- pool are alive.
-
- Borrowed from:
- http://groups.google.com/group/sqlalchemy/msg/a4ce563d802c929f
-
- Error codes caught:
- * 2006 MySQL server has gone away
- * 2013 Lost connection to MySQL server during query
- * 2014 Commands out of sync; you can't run this command now
- * 2045 Can't open shared memory; no answer from server (%lu)
- * 2055 Lost connection to MySQL server at '%s', system error: %d
-
- from http://dev.mysql.com/doc/refman/5.6/en/error-messages-client.html
- """
-
- def checkout(self, dbapi_con, con_record, con_proxy):
- try:
- dbapi_con.cursor().execute('select 1')
- except dbapi_con.OperationalError, ex:
- if ex.args[0] in (2006, 2013, 2014, 2045, 2055):
- logging.warn('Got mysql server has gone away: %s', ex)
- raise DisconnectionError("Database server went away")
- else:
- raise
-
-
-def get_session(conf, autocommit=True, expire_on_commit=False):
- """Return a SQLAlchemy session."""
- global MAKER
-
- if MAKER is None:
- MAKER = sessionmaker(autocommit=autocommit,
- expire_on_commit=expire_on_commit)
- engine = get_engine(conf)
- MAKER.configure(bind=engine)
- session = MAKER()
- return session
-
-
-def get_engine(conf):
- """Return a SQLAlchemy engine."""
- global ENGINE
-
- register_conf_opts(conf)
- connection_url = make_url(conf.sql.connection)
- if ENGINE is None or not ENGINE.url == connection_url:
- engine_args = {'pool_recycle': conf.sql.idle_timeout,
- 'echo': False,
- 'convert_unicode': True
- }
- if 'sqlite' in connection_url.drivername:
- engine_args['poolclass'] = NullPool
- if 'mysql' in connection_url.drivername:
- engine_args['listeners'] = [MySQLPingListener()]
- ENGINE = create_engine(conf.sql.connection, **engine_args)
- return ENGINE
-
-
-def register_conf_opts(conf, options=DB_OPTIONS, group=DB_GROUP_NAME):
- """Register database options."""
-
- conf.register_group(cfg.OptGroup(name=group))
- conf.register_opts(options, group=group)
-
-
-def sync(conf):
- register_conf_opts(conf)
- repo_path = os.path.abspath(os.path.dirname(migrate_repo.__file__))
- try:
- versioning_api.upgrade(conf.sql.connection, repo_path)
- except versioning_exceptions.DatabaseNotControlledError:
- versioning_api.version_control(conf.sql.connection, repo_path)
- versioning_api.upgrade(conf.sql.connection, repo_path)
diff --git a/windc/windc/drivers/__init__.py b/windc/windc/drivers/__init__.py
deleted file mode 100644
index e69de29b..00000000
diff --git a/windc/windc/drivers/command_executor.py b/windc/windc/drivers/command_executor.py
deleted file mode 100644
index f675a0ab..00000000
--- a/windc/windc/drivers/command_executor.py
+++ /dev/null
@@ -1,40 +0,0 @@
-# vim: tabstop=4 shiftwidth=4 softtabstop=4
-
-# Copyright (c) 2011 X.commerce, a business unit of eBay Inc.
-# Copyright 2010 United States Government as represented by the
-# Administrator of the National Aeronautics and Space Administration.
-# Copyright 2011 Piston Cloud Computing, Inc.
-# All Rights Reserved.
-#
-# Licensed under the Apache License, Version 2.0 (the "License"); you may
-# not use this file except in compliance with the License. You may obtain
-# a copy of the License at
-#
-# http://www.apache.org/licenses/LICENSE-2.0
-#
-# Unless required by applicable law or agreed to in writing, software
-# distributed under the License is distributed on an "AS IS" BASIS, WITHOUT
-# WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. See the
-# License for the specific language governing permissions and limitations
-# under the License.
-
-from windc.core import commands as commands_api
-from windc.drivers import openstack_heat
-from windc.drivers import windows_agent
-
-class Executor:
-
- map = {commands_api.TEMPLATE_DEPLOYMENT_COMMAND : openstack_heat.Heat}
-
- def __init__(self, conf):
- self._conf = conf
-
- def execute(self, command):
- if command.type == commands_api.TEMPLATE_DEPLOYMENT_COMMAND:
- executor = openstack_heat.Heat()
- return executor.execute(command)
- elif command.type == commands_api.EXECUTION_PLAN_DEPLOYMENT_COMMAND:
- executor = windows_agent.Agent(self._conf)
- return executor.execute(command)
-
-
diff --git a/windc/windc/drivers/openstack_heat.py b/windc/windc/drivers/openstack_heat.py
deleted file mode 100644
index dd50fda8..00000000
--- a/windc/windc/drivers/openstack_heat.py
+++ /dev/null
@@ -1,43 +0,0 @@
-# vim: tabstop=4 shiftwidth=4 softtabstop=4
-
-# Copyright (c) 2011 X.commerce, a business unit of eBay Inc.
-# Copyright 2010 United States Government as represented by the
-# Administrator of the National Aeronautics and Space Administration.
-# Copyright 2011 Piston Cloud Computing, Inc.
-# All Rights Reserved.
-#
-# Licensed under the Apache License, Version 2.0 (the "License"); you may
-# not use this file except in compliance with the License. You may obtain
-# a copy of the License at
-#
-# http://www.apache.org/licenses/LICENSE-2.0
-#
-# Unless required by applicable law or agreed to in writing, software
-# distributed under the License is distributed on an "AS IS" BASIS, WITHOUT
-# WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. See the
-# License for the specific language governing permissions and limitations
-# under the License.
-
-#from heatclient import Client
-from subprocess import call
-
-import logging
-LOG = logging.getLogger(__name__)
-
-class Heat:
-
- def __init__(self):
- pass
-
- def execute(self, command):
-# client = Client('1',OS_IMAGE_ENDPOINT, OS_TENANT_ID)
- LOG.debug('Calling heat script to execute template')
- arguments = ";".join(['%s=%s' % (key, value) for (key, value) in command.context['template_arguments'].items()])
- call([
- "./heat_run","stack-create",
- "-f" + command.context['template_name'],
- "-P" + arguments,
- command.context['stack_name']
- ])
- pass
-
diff --git a/windc/windc/drivers/windows_agent.py b/windc/windc/drivers/windows_agent.py
deleted file mode 100644
index e446b05e..00000000
--- a/windc/windc/drivers/windows_agent.py
+++ /dev/null
@@ -1,66 +0,0 @@
-# vim: tabstop=4 shiftwidth=4 softtabstop=4
-
-# Copyright (c) 2011 X.commerce, a business unit of eBay Inc.
-# Copyright 2010 United States Government as represented by the
-# Administrator of the National Aeronautics and Space Administration.
-# Copyright 2011 Piston Cloud Computing, Inc.
-# All Rights Reserved.
-#
-# Licensed under the Apache License, Version 2.0 (the "License"); you may
-# not use this file except in compliance with the License. You may obtain
-# a copy of the License at
-#
-# http://www.apache.org/licenses/LICENSE-2.0
-#
-# Unless required by applicable law or agreed to in writing, software
-# distributed under the License is distributed on an "AS IS" BASIS, WITHOUT
-# WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. See the
-# License for the specific language governing permissions and limitations
-# under the License.
-import traceback
-
-import puka
-
-import logging
-import sys
-
-LOG = logging.getLogger(__name__)
-
-class Agent(object):
-
- def __init__(self, conf):
- self._conf = conf
-
- def execute(self, command):
- try:
- client = puka.Client("amqp://keero:keero@%s/%s" % (
- self._conf.rabbitmq.host, self._conf.rabbitmq.vhost))
- promise = client.connect()
- client.wait(promise)
-
-
- promise = client.queue_declare(queue=command.data['queueName'], durable=True)
- client.wait(promise)
-
- promise = client.queue_declare(queue=command.data['resultQueueName'], durable=True)
- client.wait(promise)
-
- promise = client.basic_publish(exchange='', routing_key=command.data['queueName'],
- body=command.data['body'])
- client.wait(promise)
-
- consume_promise = client.basic_consume(queue=command.data['resultQueueName'])
- result = client.wait(consume_promise)
-
- result_msg = result['body']
- client.basic_ack(result)
- client.basic_cancel(consume_promise)
-
- promise = client.close()
- client.wait(promise)
-
- return result_msg
- except Exception:
- exc_type, exc_value, exc_traceback = sys.exc_info()
- print exc_type, exc_value, exc_traceback
- print traceback.format_exc()
diff --git a/windc/windc/exception.py b/windc/windc/exception.py
deleted file mode 100644
index 3144b6ed..00000000
--- a/windc/windc/exception.py
+++ /dev/null
@@ -1,50 +0,0 @@
-# vim: tabstop=4 shiftwidth=4 softtabstop=4
-
-# Copyright (c) 2011 X.commerce, a business unit of eBay Inc.
-# Copyright 2010 United States Government as represented by the
-# Administrator of the National Aeronautics and Space Administration.
-# Copyright 2011 Piston Cloud Computing, Inc.
-# All Rights Reserved.
-#
-# Licensed under the Apache License, Version 2.0 (the "License"); you may
-# not use this file except in compliance with the License. You may obtain
-# a copy of the License at
-#
-# http://www.apache.org/licenses/LICENSE-2.0
-#
-# Unless required by applicable law or agreed to in writing, software
-# distributed under the License is distributed on an "AS IS" BASIS, WITHOUT
-# WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. See the
-# License for the specific language governing permissions and limitations
-# under the License.
-"""Balancer base exception handling."""
-
-import webob.exc as exception
-
-
-class NotFound(exception.HTTPNotFound):
- message = 'Resource not found.'
-
- def __init__(self, message=None, **kwargs):
- super(NotFound, self).__init__(message)
- self.kwargs = kwargs
-
-
-class DeviceNotFound(NotFound):
- message = 'Device not found'
-
-
-class NoValidDevice(NotFound):
- message = 'Suitable device not found'
-
-
-class ServiceNotFound(NotFound):
- message = 'LoadBalancer not found'
-
-
-class DeviceConflict(exception.HTTPConflict):
- message = 'Conflict while device deleting'
-
- def __init__(self, message=None, **kwargs):
- super(DeviceConflict, self).__init__(message)
- self.kwargs = kwargs
diff --git a/windc/windc/utils.py b/windc/windc/utils.py
deleted file mode 100644
index c0531726..00000000
--- a/windc/windc/utils.py
+++ /dev/null
@@ -1,66 +0,0 @@
-import contextlib
-import functools
-import logging
-import sys
-
-import webob.exc
-
-LOG = logging.getLogger(__name__)
-
-
-def http_success_code(code):
- """Attaches response code to a method.
-
- This decorator associates a response code with a method. Note
- that the function attributes are directly manipulated; the method
- is not wrapped.
- """
-
- def decorator(func):
- func.wsgi_code = code
- return func
- return decorator
-
-
-def verify_tenant(func):
- @functools.wraps(func)
- def __inner(self, req, tenant_id, *args, **kwargs):
- if hasattr(req, 'context') and tenant_id != req.context.tenant_id:
- LOG.info('User is not authorized to access this tenant.')
- raise webob.exc.HTTPUnauthorized
- return func(self, req, tenant_id, *args, **kwargs)
- return __inner
-
-
-def require_admin(func):
- @functools.wraps(func)
- def __inner(self, req, *args, **kwargs):
- if hasattr(req, 'context') and not req.context.is_admin:
- LOG.info('User has no admin priviledges.')
- raise webob.exc.HTTPUnauthorized
- return func(self, req, *args, **kwargs)
- return __inner
-
-
-@contextlib.contextmanager
-def save_and_reraise_exception():
- """Save current exception, run some code and then re-raise.
-
- In some cases the exception context can be cleared, resulting in None
- being attempted to be reraised after an exception handler is run. This
- can happen when eventlet switches greenthreads or when running an
- exception handler, code raises and catches an exception. In both
- cases the exception context will be cleared.
-
- To work around this, we save the exception state, run handler code, and
- then re-raise the original exception. If another exception occurs, the
- saved exception is logged and the new exception is reraised.
- """
- type_, value, traceback = sys.exc_info()
- try:
- yield
- except Exception:
- LOG.error('Original exception being dropped',
- exc_info=(type_, value, traceback))
- raise
- raise type_, value, traceback
diff --git a/windc/windc/version.py b/windc/windc/version.py
deleted file mode 100644
index 943c019a..00000000
--- a/windc/windc/version.py
+++ /dev/null
@@ -1,49 +0,0 @@
-# vim: tabstop=4 shiftwidth=4 softtabstop=4
-
-# Copyright 2011 OpenStack LLC.
-# All Rights Reserved.
-#
-# Licensed under the Apache License, Version 2.0 (the "License"); you may
-# not use this file except in compliance with the License. You may obtain
-# a copy of the License at
-#
-# http://www.apache.org/licenses/LICENSE-2.0
-#
-# Unless required by applicable law or agreed to in writing, software
-# distributed under the License is distributed on an "AS IS" BASIS, WITHOUT
-# WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. See the
-# License for the specific language governing permissions and limitations
-# under the License.
-
-"""Determine version of Skeleton library"""
-
-try:
- from skeleton.vcsversion import version_info
-except ImportError:
- version_info = {'branch_nick': u'LOCALBRANCH',
- 'revision_id': 'LOCALREVISION',
- 'revno': 0}
-
-SKELETON_VERSION = ['2011', '3']
-YEAR, COUNT = SKELETON_VERSION
-
-FINAL = False # This becomes true at Release Candidate time
-
-
-def canonical_version_string():
- return '.'.join([YEAR, COUNT])
-
-
-def version_string():
- if FINAL:
- return canonical_version_string()
- else:
- return '%s-dev' % (canonical_version_string(),)
-
-
-def vcs_version_string():
- return "%s:%s" % (version_info['branch_nick'], version_info['revision_id'])
-
-
-def version_string_with_vcs():
- return "%s-%s" % (canonical_version_string(), vcs_version_string())