diff --git a/.gitignore b/.gitignore index ec91120..834401e 100644 --- a/.gitignore +++ b/.gitignore @@ -1,3 +1,4 @@ *.pyc *.egg-info .tox +doc/source/sourcecode diff --git a/CONTRIBUTING.rst b/CONTRIBUTING.rst new file mode 100644 index 0000000..0b24608 --- /dev/null +++ b/CONTRIBUTING.rst @@ -0,0 +1,17 @@ +If you would like to contribute to the development of OpenStack, +you must follow the steps in the "If you're a developer, start here" +section of this page: + + http://wiki.openstack.org/HowToContribute + +Once those steps have been completed, changes to OpenStack +should be submitted for review via the Gerrit tool, following +the workflow documented at: + + http://wiki.openstack.org/GerritWorkflow + +Pull requests submitted through GitHub will be ignored. + +Bugs should be filed on Launchpad, not GitHub: + + https://bugs.launchpad.net/kwapi diff --git a/LICENSE b/LICENSE new file mode 100644 index 0000000..68c771a --- /dev/null +++ b/LICENSE @@ -0,0 +1,176 @@ + + Apache License + Version 2.0, January 2004 + http://www.apache.org/licenses/ + + TERMS AND CONDITIONS FOR USE, REPRODUCTION, AND DISTRIBUTION + + 1. Definitions. + + "License" shall mean the terms and conditions for use, reproduction, + and distribution as defined by Sections 1 through 9 of this document. + + "Licensor" shall mean the copyright owner or entity authorized by + the copyright owner that is granting the License. + + "Legal Entity" shall mean the union of the acting entity and all + other entities that control, are controlled by, or are under common + control with that entity. For the purposes of this definition, + "control" means (i) the power, direct or indirect, to cause the + direction or management of such entity, whether by contract or + otherwise, or (ii) ownership of fifty percent (50%) or more of the + outstanding shares, or (iii) beneficial ownership of such entity. + + "You" (or "Your") shall mean an individual or Legal Entity + exercising permissions granted by this License. + + "Source" form shall mean the preferred form for making modifications, + including but not limited to software source code, documentation + source, and configuration files. + + "Object" form shall mean any form resulting from mechanical + transformation or translation of a Source form, including but + not limited to compiled object code, generated documentation, + and conversions to other media types. + + "Work" shall mean the work of authorship, whether in Source or + Object form, made available under the License, as indicated by a + copyright notice that is included in or attached to the work + (an example is provided in the Appendix below). + + "Derivative Works" shall mean any work, whether in Source or Object + form, that is based on (or derived from) the Work and for which the + editorial revisions, annotations, elaborations, or other modifications + represent, as a whole, an original work of authorship. For the purposes + of this License, Derivative Works shall not include works that remain + separable from, or merely link (or bind by name) to the interfaces of, + the Work and Derivative Works thereof. + + "Contribution" shall mean any work of authorship, including + the original version of the Work and any modifications or additions + to that Work or Derivative Works thereof, that is intentionally + submitted to Licensor for inclusion in the Work by the copyright owner + or by an individual or Legal Entity authorized to submit on behalf of + the copyright owner. 
For the purposes of this definition, "submitted" + means any form of electronic, verbal, or written communication sent + to the Licensor or its representatives, including but not limited to + communication on electronic mailing lists, source code control systems, + and issue tracking systems that are managed by, or on behalf of, the + Licensor for the purpose of discussing and improving the Work, but + excluding communication that is conspicuously marked or otherwise + designated in writing by the copyright owner as "Not a Contribution." + + "Contributor" shall mean Licensor and any individual or Legal Entity + on behalf of whom a Contribution has been received by Licensor and + subsequently incorporated within the Work. + + 2. Grant of Copyright License. Subject to the terms and conditions of + this License, each Contributor hereby grants to You a perpetual, + worldwide, non-exclusive, no-charge, royalty-free, irrevocable + copyright license to reproduce, prepare Derivative Works of, + publicly display, publicly perform, sublicense, and distribute the + Work and such Derivative Works in Source or Object form. + + 3. Grant of Patent License. Subject to the terms and conditions of + this License, each Contributor hereby grants to You a perpetual, + worldwide, non-exclusive, no-charge, royalty-free, irrevocable + (except as stated in this section) patent license to make, have made, + use, offer to sell, sell, import, and otherwise transfer the Work, + where such license applies only to those patent claims licensable + by such Contributor that are necessarily infringed by their + Contribution(s) alone or by combination of their Contribution(s) + with the Work to which such Contribution(s) was submitted. If You + institute patent litigation against any entity (including a + cross-claim or counterclaim in a lawsuit) alleging that the Work + or a Contribution incorporated within the Work constitutes direct + or contributory patent infringement, then any patent licenses + granted to You under this License for that Work shall terminate + as of the date such litigation is filed. + + 4. Redistribution. You may reproduce and distribute copies of the + Work or Derivative Works thereof in any medium, with or without + modifications, and in Source or Object form, provided that You + meet the following conditions: + + (a) You must give any other recipients of the Work or + Derivative Works a copy of this License; and + + (b) You must cause any modified files to carry prominent notices + stating that You changed the files; and + + (c) You must retain, in the Source form of any Derivative Works + that You distribute, all copyright, patent, trademark, and + attribution notices from the Source form of the Work, + excluding those notices that do not pertain to any part of + the Derivative Works; and + + (d) If the Work includes a "NOTICE" text file as part of its + distribution, then any Derivative Works that You distribute must + include a readable copy of the attribution notices contained + within such NOTICE file, excluding those notices that do not + pertain to any part of the Derivative Works, in at least one + of the following places: within a NOTICE text file distributed + as part of the Derivative Works; within the Source form or + documentation, if provided along with the Derivative Works; or, + within a display generated by the Derivative Works, if and + wherever such third-party notices normally appear. 
The contents + of the NOTICE file are for informational purposes only and + do not modify the License. You may add Your own attribution + notices within Derivative Works that You distribute, alongside + or as an addendum to the NOTICE text from the Work, provided + that such additional attribution notices cannot be construed + as modifying the License. + + You may add Your own copyright statement to Your modifications and + may provide additional or different license terms and conditions + for use, reproduction, or distribution of Your modifications, or + for any such Derivative Works as a whole, provided Your use, + reproduction, and distribution of the Work otherwise complies with + the conditions stated in this License. + + 5. Submission of Contributions. Unless You explicitly state otherwise, + any Contribution intentionally submitted for inclusion in the Work + by You to the Licensor shall be under the terms and conditions of + this License, without any additional terms or conditions. + Notwithstanding the above, nothing herein shall supersede or modify + the terms of any separate license agreement you may have executed + with Licensor regarding such Contributions. + + 6. Trademarks. This License does not grant permission to use the trade + names, trademarks, service marks, or product names of the Licensor, + except as required for reasonable and customary use in describing the + origin of the Work and reproducing the content of the NOTICE file. + + 7. Disclaimer of Warranty. Unless required by applicable law or + agreed to in writing, Licensor provides the Work (and each + Contributor provides its Contributions) on an "AS IS" BASIS, + WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or + implied, including, without limitation, any warranties or conditions + of TITLE, NON-INFRINGEMENT, MERCHANTABILITY, or FITNESS FOR A + PARTICULAR PURPOSE. You are solely responsible for determining the + appropriateness of using or redistributing the Work and assume any + risks associated with Your exercise of permissions under this License. + + 8. Limitation of Liability. In no event and under no legal theory, + whether in tort (including negligence), contract, or otherwise, + unless required by applicable law (such as deliberate and grossly + negligent acts) or agreed to in writing, shall any Contributor be + liable to You for damages, including any direct, indirect, special, + incidental, or consequential damages of any character arising as a + result of this License or out of the use or inability to use the + Work (including but not limited to damages for loss of goodwill, + work stoppage, computer failure or malfunction, or any and all + other commercial damages or losses), even if such Contributor + has been advised of the possibility of such damages. + + 9. Accepting Warranty or Additional Liability. While redistributing + the Work or Derivative Works thereof, You may choose to offer, + and charge a fee for, acceptance of support, warranty, indemnity, + or other liability obligations and/or rights consistent with this + License. However, in accepting such obligations, You may act only + on Your own behalf and on Your sole responsibility, not on behalf + of any other Contributor, and only if You agree to indemnify, + defend, and hold each Contributor harmless for any liability + incurred by, or claims asserted against, such Contributor by reason + of your accepting any such warranty or additional liability. 
+ diff --git a/MANIFEST.in b/MANIFEST.in index 18bfdc0..fd1306a 100644 --- a/MANIFEST.in +++ b/MANIFEST.in @@ -1 +1,5 @@ -include etc/kwapi/*.conf +include AUTHORS +include CONTRIBUTING.rst +exclude .gitignore +exclude .gitreview +global-exclude *.pyc diff --git a/README b/README deleted file mode 100644 index 2de5f63..0000000 --- a/README +++ /dev/null @@ -1,5 +0,0 @@ -Energy Efficiency Architecture for XLcloud project. - -Blueprint: http://www.xlcloud.org/bin/view/XLcloudProjectManagement/EEA - -License: Apache. diff --git a/README.rst b/README.rst new file mode 100644 index 0000000..3091678 --- /dev/null +++ b/README.rst @@ -0,0 +1,8 @@ +Kwapi +===== + +Documentation for the project can be found at: + https://kwapi.readthedocs.org + +The project home is at: + http://launchpad.net/kwapi diff --git a/bin/kwapi-api b/bin/kwapi-api deleted file mode 100755 index 05d61ce..0000000 --- a/bin/kwapi-api +++ /dev/null @@ -1,41 +0,0 @@ -#!/usr/bin/env python -# -*- coding: utf-8 -*- -# -# Author: François Rossigneux -# -# Licensed under the Apache License, Version 2.0 (the "License"); you may -# not use this file except in compliance with the License. You may obtain -# a copy of the License at -# -# http://www.apache.org/licenses/LICENSE-2.0 -# -# Unless required by applicable law or agreed to in writing, software -# distributed under the License is distributed on an "AS IS" BASIS, WITHOUT -# WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. See the -# License for the specific language governing permissions and limitations -# under the License. - -import sys - -from oslo.config import cfg - -from kwapi.plugins.api import app -from kwapi.openstack.common import log - -app_opts = [ - cfg.IntOpt('api_port', - required=True, - ), -] - -cfg.CONF.register_opts(app_opts) - -if __name__ == '__main__': - cfg.CONF(sys.argv[1:], - project='kwapi', - default_config_files=['/etc/kwapi/api.conf'] - ) - log.setup('kwapi') - log.setup('keystoneclient') - root = app.make_app() - root.run(host='0.0.0.0', port=cfg.CONF.api_port) diff --git a/bin/kwapi-drivers b/bin/kwapi-drivers deleted file mode 100755 index a20ee01..0000000 --- a/bin/kwapi-drivers +++ /dev/null @@ -1,40 +0,0 @@ -#!/usr/bin/env python -# -*- coding: utf-8 -*- -# -# Author: François Rossigneux -# -# Licensed under the Apache License, Version 2.0 (the "License"); you may -# not use this file except in compliance with the License. You may obtain -# a copy of the License at -# -# http://www.apache.org/licenses/LICENSE-2.0 -# -# Unless required by applicable law or agreed to in writing, software -# distributed under the License is distributed on an "AS IS" BASIS, WITHOUT -# WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. See the -# License for the specific language governing permissions and limitations -# under the License. 
- -import sys -import signal - -from oslo.config import cfg - -from kwapi.drivers import driver_manager -from kwapi.openstack.common import log - -if __name__ == "__main__": - cfg.CONF(sys.argv[1:], - project='kwapi', - default_config_files=['/etc/kwapi/drivers.conf']) - log.setup('kwapi') - - driver_manager.start_zmq_server() - driver_manager.load_all_drivers() - driver_manager.check_drivers_alive() - - signal.signal(signal.SIGTERM, driver_manager.signal_handler) - try: - signal.pause() - except KeyboardInterrupt: - driver_manager.terminate() diff --git a/bin/kwapi-rrd b/bin/kwapi-rrd deleted file mode 100755 index 5e73182..0000000 --- a/bin/kwapi-rrd +++ /dev/null @@ -1,39 +0,0 @@ -#!/usr/bin/env python -# -*- coding: utf-8 -*- -# -# Author: François Rossigneux -# -# Licensed under the Apache License, Version 2.0 (the "License"); you may -# not use this file except in compliance with the License. You may obtain -# a copy of the License at -# -# http://www.apache.org/licenses/LICENSE-2.0 -# -# Unless required by applicable law or agreed to in writing, software -# distributed under the License is distributed on an "AS IS" BASIS, WITHOUT -# WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. See the -# License for the specific language governing permissions and limitations -# under the License. - -import sys - -from oslo.config import cfg - -from kwapi.plugins.rrd import app -from kwapi.openstack.common import log - -app_opts = [ - cfg.IntOpt('rrd_port', - required=True, - ), -] - -cfg.CONF.register_opts(app_opts) - -if __name__ == '__main__': - cfg.CONF(sys.argv[1:], - project='kwapi', - default_config_files=['/etc/kwapi/rrd.conf']) - log.setup('kwapi') - root = app.make_app() - root.run(host='0.0.0.0', port=cfg.CONF.rrd_port) diff --git a/doc/Makefile b/doc/Makefile new file mode 100644 index 0000000..811013e --- /dev/null +++ b/doc/Makefile @@ -0,0 +1,157 @@ +# Makefile for Sphinx documentation +# + +# You can set these variables from the command line. +SPHINXOPTS = +SPHINXBUILD = sphinx-build +PAPER = +BUILDDIR = build + +# Internal variables. 
+PAPEROPT_a4 = -D latex_paper_size=a4 +PAPEROPT_letter = -D latex_paper_size=letter +ALLSPHINXOPTS = -d $(BUILDDIR)/doctrees $(PAPEROPT_$(PAPER)) $(SPHINXOPTS) source +# the i18n builder cannot share the environment and doctrees with the others +I18NSPHINXOPTS = $(PAPEROPT_$(PAPER)) $(SPHINXOPTS) source + +.PHONY: help clean html dirhtml singlehtml pickle json htmlhelp qthelp devhelp epub latex latexpdf text man changes linkcheck doctest gettext + +help: + @echo "Please use \`make ' where is one of" + @echo " html to make standalone HTML files" + @echo " dirhtml to make HTML files named index.html in directories" + @echo " singlehtml to make a single large HTML file" + @echo " pickle to make pickle files" + @echo " json to make JSON files" + @echo " htmlhelp to make HTML files and a HTML help project" + @echo " qthelp to make HTML files and a qthelp project" + @echo " devhelp to make HTML files and a Devhelp project" + @echo " epub to make an epub" + @echo " latex to make LaTeX files, you can set PAPER=a4 or PAPER=letter" + @echo " latexpdf to make LaTeX files and run them through pdflatex" + @echo " text to make text files" + @echo " man to make manual pages" + @echo " texinfo to make Texinfo files" + @echo " info to make Texinfo files and run them through makeinfo" + @echo " gettext to make PO message catalogs" + @echo " changes to make an overview of all changed/added/deprecated items" + @echo " linkcheck to check all external links for integrity" + @echo " doctest to run all doctests embedded in the documentation (if enabled)" + +clean: + -rm -rf $(BUILDDIR)/* + +html: check-dependencies + $(SPHINXBUILD) -b html $(ALLSPHINXOPTS) $(BUILDDIR)/html + @echo + @echo "Build finished. The HTML pages are in $(BUILDDIR)/html." + +.PHONY: check-dependencies +check-dependencies: + @python -c 'import sphinxcontrib.autohttp.flask' >/dev/null 2>&1 || (echo "ERROR: Missing Sphinx dependencies. Run: pip install sphinxcontrib-httpdomain" && exit 1) + +dirhtml: + $(SPHINXBUILD) -b dirhtml $(ALLSPHINXOPTS) $(BUILDDIR)/dirhtml + @echo + @echo "Build finished. The HTML pages are in $(BUILDDIR)/dirhtml." + +singlehtml: + $(SPHINXBUILD) -b singlehtml $(ALLSPHINXOPTS) $(BUILDDIR)/singlehtml + @echo + @echo "Build finished. The HTML page is in $(BUILDDIR)/singlehtml." + +pickle: + $(SPHINXBUILD) -b pickle $(ALLSPHINXOPTS) $(BUILDDIR)/pickle + @echo + @echo "Build finished; now you can process the pickle files." + +json: + $(SPHINXBUILD) -b json $(ALLSPHINXOPTS) $(BUILDDIR)/json + @echo + @echo "Build finished; now you can process the JSON files." + +htmlhelp: + $(SPHINXBUILD) -b htmlhelp $(ALLSPHINXOPTS) $(BUILDDIR)/htmlhelp + @echo + @echo "Build finished; now you can run HTML Help Workshop with the" \ + ".hhp project file in $(BUILDDIR)/htmlhelp." + +qthelp: + $(SPHINXBUILD) -b qthelp $(ALLSPHINXOPTS) $(BUILDDIR)/qthelp + @echo + @echo "Build finished; now you can run "qcollectiongenerator" with the" \ + ".qhcp project file in $(BUILDDIR)/qthelp, like this:" + @echo "# qcollectiongenerator $(BUILDDIR)/qthelp/Kwapi.qhcp" + @echo "To view the help file:" + @echo "# assistant -collectionFile $(BUILDDIR)/qthelp/Kwapi.qhc" + +devhelp: + $(SPHINXBUILD) -b devhelp $(ALLSPHINXOPTS) $(BUILDDIR)/devhelp + @echo + @echo "Build finished." + @echo "To view the help file:" + @echo "# mkdir -p $$HOME/.local/share/devhelp/Kwapi" + @echo "# ln -s $(BUILDDIR)/devhelp $$HOME/.local/share/devhelp/Kwapi" + @echo "# devhelp" + +epub: + $(SPHINXBUILD) -b epub $(ALLSPHINXOPTS) $(BUILDDIR)/epub + @echo + @echo "Build finished. 
The epub file is in $(BUILDDIR)/epub." + +latex: + $(SPHINXBUILD) -b latex $(ALLSPHINXOPTS) $(BUILDDIR)/latex + @echo + @echo "Build finished; the LaTeX files are in $(BUILDDIR)/latex." + @echo "Run \`make' in that directory to run these through (pdf)latex" \ + "(use \`make latexpdf' here to do that automatically)." + +latexpdf: + $(SPHINXBUILD) -b latex $(ALLSPHINXOPTS) $(BUILDDIR)/latex + @echo "Running LaTeX files through pdflatex..." + $(MAKE) -C $(BUILDDIR)/latex all-pdf + @echo "pdflatex finished; the PDF files are in $(BUILDDIR)/latex." + +text: + $(SPHINXBUILD) -b text $(ALLSPHINXOPTS) $(BUILDDIR)/text + @echo + @echo "Build finished. The text files are in $(BUILDDIR)/text." + +man: + $(SPHINXBUILD) -b man $(ALLSPHINXOPTS) $(BUILDDIR)/man + @echo + @echo "Build finished. The manual pages are in $(BUILDDIR)/man." + +texinfo: + $(SPHINXBUILD) -b texinfo $(ALLSPHINXOPTS) $(BUILDDIR)/texinfo + @echo + @echo "Build finished. The Texinfo files are in $(BUILDDIR)/texinfo." + @echo "Run \`make' in that directory to run these through makeinfo" \ + "(use \`make info' here to do that automatically)." + +info: + $(SPHINXBUILD) -b texinfo $(ALLSPHINXOPTS) $(BUILDDIR)/texinfo + @echo "Running Texinfo files through makeinfo..." + make -C $(BUILDDIR)/texinfo info + @echo "makeinfo finished; the Info files are in $(BUILDDIR)/texinfo." + +gettext: + $(SPHINXBUILD) -b gettext $(I18NSPHINXOPTS) $(BUILDDIR)/locale + @echo + @echo "Build finished. The message catalogs are in $(BUILDDIR)/locale." + +changes: + $(SPHINXBUILD) -b changes $(ALLSPHINXOPTS) $(BUILDDIR)/changes + @echo + @echo "The overview file is in $(BUILDDIR)/changes." + +linkcheck: + $(SPHINXBUILD) -b linkcheck $(ALLSPHINXOPTS) $(BUILDDIR)/linkcheck + @echo + @echo "Link check complete; look for any errors in the above output " \ + "or in $(BUILDDIR)/linkcheck/output.txt." + +doctest: + $(SPHINXBUILD) -b doctest $(ALLSPHINXOPTS) $(BUILDDIR)/doctest + @echo "Testing of doctests in the sources finished, look at the " \ + "results in $(BUILDDIR)/doctest/output.txt." diff --git a/doc/source/_templates/.placeholder b/doc/source/_templates/.placeholder new file mode 100644 index 0000000..e69de29 diff --git a/doc/source/architecture.rst b/doc/source/architecture.rst new file mode 100644 index 0000000..cd3644a --- /dev/null +++ b/doc/source/architecture.rst @@ -0,0 +1,202 @@ +.. + Copyright 2013 François Rossigneux (Inria) + + Licensed under the Apache License, Version 2.0 (the "License"); you may + not use this file except in compliance with the License. You may obtain + a copy of the License at + + http://www.apache.org/licenses/LICENSE-2.0 + + Unless required by applicable law or agreed to in writing, software + distributed under the License is distributed on an "AS IS" BASIS, WITHOUT + WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. See the + License for the specific language governing permissions and limitations + under the License. + +.. _architecture: + +=================== +System Architecture +=================== + +Overview of the global layered architecture: + +.. image:: ./layered_architecture.png + +Kwapi drivers +============= + +Kwapi supports different kinds of wattmeters (IPMI, Eaton PDU, Wattsup, etc). +Wattmeters communicate via IP networks or serial links. Each wattmeter has one +or more sensors (probes). Wattmeters send their values quite often (each +second), and they are listen by wattmeter drivers. Wattmeter drivers are +derived from a Driver superclass, itself derived from Thread. So drivers are +threads. 
At least one driver thread is instantiated for each wattmeter. Their
+constructors take as arguments a list of probe IDs, and kwargs (specific
+arguments).
+
+The roles of a driver thread are:
+
+#. Setting up the wattmeter.
+#. Listening and decoding received data.
+#. Calling a driver superclass method with the measurements as an argument.
+   This method appends a signature to the measurements and publishes them on
+   the bus.
+
+Message format:
+
+.. image:: ./message_format.png
+   :width: 675px
+
+Driver manager
+--------------
+
+The driver manager is the loader and the checker of the driver threads. It
+loads all drivers according to the configuration file, and regularly checks
+that the driver threads are alive. In case of a crash, the event is logged and
+the driver thread is reloaded. A driver could crash, for example, if a
+technician unplugs a wattmeter.
+
+Bus
+---
+
+Currently, the internal Kwapi bus is ZeroMQ. Publishers are the driver
+threads, and subscribers are the plugins.
+
+Kwapi plugins
+=============
+
+Kwapi API plugin
+----------------
+
+The API plugin allows the Ceilometer pollster to get consumption data through
+a REST API. This plugin contains a collector that computes kWh, and an API
+based on Flask.
+
+Collector
+^^^^^^^^^
+
+The collector stores these values for each probe:
+
+.. image:: ./collector.png
+   :width: 675px
+
+Fields:
+ * Probe ID: could be the hostname of the monitored machine, but it is a bit
+   more complicated because a probe can monitor several machines (PDU).
+ * Timestamp: updated when a new value is received.
+ * kWh: computed by taking into account the new watt value and the elapsed
+   time since the previous update. It allows Ceilometer to compute the average
+   consumption over a given duration (knowing the kWh consumed and the time
+   elapsed since its last check).
+ * Watts: gives the instantaneous consumption of a device, without having to
+   query a probe twice in a small interval to deduce it. This is especially
+   useful if a probe has a large refresh interval: there is no need to wait
+   for its next value.
+
+No history is kept because Ceilometer already has a storage architecture. The
+collector is cleaned periodically to prevent a deleted probe from being stored
+indefinitely in the collector. So when a probe has not been updated for a long
+time, it is deleted.
+
+API
+^^^
+
+==== =========================== ===================================== ================================================
+Verb URL                         Parameters                            Expected result
+==== =========================== ===================================== ================================================
+GET  /v1/                                                              Returns detailed information about this specific version of the API.
+GET  /v1/probe-ids/                                                    Returns all known probe IDs.
+GET  /v1/probes/                                                       Returns all information about all known probes.
+GET  /v1/probes/<probe>/         probe id                              Returns all information about this probe (id, timestamp, kWh, W).
+GET  /v1/probes/<probe>/<meter>/ probe id, meter { timestamp, kwh, w } Returns the probe meter value.
+==== =========================== ===================================== ================================================
+
+Authentication
+^^^^^^^^^^^^^^
+
+The pollster provides a token (X-Auth-Token). The API plugin checks the token
+(Keystone request), and if the token is valid, the requested data are sent.
+Responses are not signed because Ceilometer trusts the Kwapi plugin.
+
+Ceilometer pollster
+^^^^^^^^^^^^^^^^^^^
+
+The API plugin is queried by a Ceilometer pollster. The Ceilometer pollster is
+started periodically by the Ceilometer central agent. It knows the Kwapi URL
+by doing a Keystone request (endpoint-get). It queries probe values through
+the Kwapi API, using the GET /v1/probes/ call, so that it gets all detailed
+information about all probes in just one query. For each probe, it creates a
+counter object and publishes it on the Ceilometer bus.
+
+Published counters:
+ * Energy (cumulative type): represents kWh.
+ * Power (gauge type): represents watts.
+
+Counter timestamps are Kwapi timestamps, so that Ceilometer doesn't store
+wrong data if a probe is not updated. Ceilometer correctly handles the case
+where a probe value is reset (the kWh decreases), thanks to its cumulative
+type.
+
+Kwapi RRD plugin
+----------------
+
+Web interface
+^^^^^^^^^^^^^
+
+The visualization plugin provides a web interface with power consumption
+graphs. It is based on Flask and RRDtool.
+
+==== =========================== ========================================================= ==========================================
+Verb URL                         Parameters                                                Expected result
+==== =========================== ========================================================= ==========================================
+GET  /last/<period>/             period { minute, hour, day, week, month, year }           Returns a webpage with a summary graph and all probe graphs.
+GET  /probe/<probe>/             probe id                                                  Returns a webpage with all graphs about this probe (all periods).
+GET  /graph/<period>/            period { minute, hour, day, week, month, year }           Returns a summary graph about this period.
+GET  /graph/<period>/<probe>/    period { minute, hour, day, week, month, year }, probe id Returns a graph about this probe.
+==== =========================== ========================================================= ==========================================
+
+Webpage with a summary graph and all probe graphs:
+
+.. image:: ./webpage.png
+   :width: 675px
+
+In the menu bar, you can choose the period for which you want to display
+graphs (last minute, hour, day, week, month or year). By clicking on a probe,
+you can display all the graphs available for this probe, with different
+resolutions.
+
+Graphs
+^^^^^^
+
+The summary graph shows the total power consumption (sum of all the probes).
+Each colour corresponds to a probe.
+
+The legend contains:
+ * Minimum, maximum, average and last power consumption.
+ * Energy consumed (kWh).
+ * Cost.
+
+File sizes:
+ * RRD file: 10 KB.
+ * Probe graph: 12 KB.
+ * Summary graph: 24 KB.
+
+A cache mechanism prevents graphs from being rebuilt uselessly.
+
+Kwapi forwarder
+===============
+
+The forwarder aims at decreasing network traffic: if multiple plugins listen
+to the same probe, the metric is sent only once on the network, and the
+forwarder duplicates it and sends a copy to each listener. The forwarder can
+also be installed on a gateway machine, in order to connect isolated networks.
+
+The following diagram shows these two features:
+
+.. image:: ./bus.png
+   :width: 675px
+
+Using the forwarder is optional, and the plugins can be configured to
+subscribe directly to the drivers. Subscribing directly, without using the
+forwarder, is recommended if the drivers and the plugins are running on the
+same machine.
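+
+As an illustration of the subscription model described above, here is a
+minimal sketch of a custom subscriber connecting to the bus with pyzmq. It is
+not part of Kwapi itself: the endpoint is the default ``probes_endpoint``
+value from the configuration, and the message layout is not decoded here; the
+raw payload is simply printed.
+
+.. code-block:: python
+
+    import zmq
+
+    # Default drivers endpoint (see drivers.conf). Use the forwarder endpoint
+    # (ipc:///tmp/kwapi-forwarder) instead if the forwarder is deployed.
+    endpoint = 'ipc:///tmp/kwapi-drivers'
+
+    context = zmq.Context()
+    subscriber = context.socket(zmq.SUB)
+    subscriber.connect(endpoint)
+    subscriber.setsockopt(zmq.SUBSCRIBE, '')  # subscribe to all probes
+
+    while True:
+        # Each message contains the signed measurements published by a driver.
+        print subscriber.recv()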
diff --git a/doc/source/bus.png b/doc/source/bus.png new file mode 100644 index 0000000..9abc7ee Binary files /dev/null and b/doc/source/bus.png differ diff --git a/doc/source/collector.png b/doc/source/collector.png new file mode 100644 index 0000000..60396ed Binary files /dev/null and b/doc/source/collector.png differ diff --git a/doc/source/conf.py b/doc/source/conf.py new file mode 100644 index 0000000..8b534cd --- /dev/null +++ b/doc/source/conf.py @@ -0,0 +1,419 @@ +# -*- coding: utf-8 -*- +# +# Kwapi documentation build configuration file, created by +# sphinx-quickstart on Thu Oct 27 11:38:59 2011. +# +# This file is execfile()d with the current directory set to its +# containing dir. +# +# Note that not all possible configuration values are present in this +# autogenerated file. +# +# All configuration values have a default; values that are commented out +# serve to show the default. + +import sys +import os + +BASE_DIR = os.path.dirname(os.path.abspath(__file__)) +ROOT = os.path.abspath(os.path.join(BASE_DIR, "..", "..")) + +sys.path.insert(0, ROOT) +sys.path.insert(0, BASE_DIR) + +# This is required for ReadTheDocs.org, but isn't a bad idea anyway. +os.environ['DJANGO_SETTINGS_MODULE'] = 'openstack_dashboard.settings' + + +def write_autodoc_index(): + + def find_autodoc_modules(module_name, sourcedir): + """Return a list of modules in the SOURCE directory.""" + modlist = [] + os.chdir(os.path.join(sourcedir, module_name)) + print "SEARCHING %s" % sourcedir + for root, dirs, files in os.walk("."): + for filename in files: + if filename.endswith(".py"): + # remove the pieces of the root + elements = root.split(os.path.sep) + # replace the leading "." with the module name + elements[0] = module_name + # and get the base module name + base, extension = os.path.splitext(filename) + if not (base == "__init__"): + elements.append(base) + result = ".".join(elements) + #print result + modlist.append(result) + return modlist + + RSTDIR = os.path.abspath(os.path.join(BASE_DIR, "sourcecode")) + SRCS = {'kwapi': ROOT} + + EXCLUDED_MODULES = ['kwapi.tests'] + CURRENT_SOURCES = {} + + if not(os.path.exists(RSTDIR)): + os.mkdir(RSTDIR) + CURRENT_SOURCES[RSTDIR] = ['autoindex.rst'] + + INDEXOUT = open(os.path.join(RSTDIR, "autoindex.rst"), "w") + INDEXOUT.write("=================\n") + INDEXOUT.write("Source Code Index\n") + INDEXOUT.write("=================\n") + + for modulename, path in SRCS.items(): + sys.stdout.write("Generating source documentation for %s\n" % + modulename) + INDEXOUT.write("\n%s\n" % modulename.capitalize()) + INDEXOUT.write("%s\n" % ("=" * len(modulename),)) + INDEXOUT.write(".. toctree::\n") + INDEXOUT.write(" :maxdepth: 1\n") + INDEXOUT.write("\n") + + MOD_DIR = os.path.join(RSTDIR, modulename) + CURRENT_SOURCES[MOD_DIR] = [] + if not(os.path.exists(MOD_DIR)): + os.mkdir(MOD_DIR) + for module in find_autodoc_modules(modulename, path): + if any([module.startswith(exclude) + for exclude + in EXCLUDED_MODULES]): + print "Excluded module %s." 
% module + print EXCLUDED_MODULES[0] + print module.startswith(EXCLUDED_MODULES[0]) + continue + mod_path = os.path.join(path, *module.split(".")) + generated_file = os.path.join(MOD_DIR, "%s.rst" % module) + + INDEXOUT.write(" %s/%s\n" % (modulename, module)) + + # Find the __init__.py module if this is a directory + if os.path.isdir(mod_path): + source_file = ".".join((os.path.join(mod_path, "__init__"), + "py",)) + else: + source_file = ".".join((os.path.join(mod_path), "py")) + + CURRENT_SOURCES[MOD_DIR].append("%s.rst" % module) + # Only generate a new file if the source has changed or we don't + # have a doc file to begin with. + if not os.access(generated_file, os.F_OK) or \ + os.stat(generated_file).st_mtime < \ + os.stat(source_file).st_mtime: + print "Module %s updated, generating new documentation." \ + % module + FILEOUT = open(generated_file, "w") + header = "The :mod:`%s` Module" % module + FILEOUT.write("%s\n" % ("=" * len(header),)) + FILEOUT.write("%s\n" % header) + FILEOUT.write("%s\n" % ("=" * len(header),)) + FILEOUT.write(".. automodule:: %s\n" % module) + FILEOUT.write(" :members:\n") + FILEOUT.write(" :undoc-members:\n") + FILEOUT.write(" :show-inheritance:\n") + FILEOUT.write(" :noindex:\n") + FILEOUT.close() + + INDEXOUT.close() + + # Delete auto-generated .rst files for sources which no longer exist + for directory, subdirs, files in list(os.walk(RSTDIR)): + for old_file in files: + if old_file not in CURRENT_SOURCES.get(directory, []): + print "Removing outdated file for %s" % old_file + os.remove(os.path.join(directory, old_file)) + + +write_autodoc_index() + +# If extensions (or modules to document with autodoc) are in another directory, +# add these directories to sys.path here. If the directory is relative to the +# documentation root, use os.path.abspath to make it absolute, like shown here. +#sys.path.insert(0, os.path.abspath('.')) + +# -- General configuration ---------------------------------------------------- + +# If your documentation needs a minimal Sphinx version, state it here. +#needs_sphinx = '1.0' + +# Add any Sphinx extension module names here, as strings. +# They can be extensions coming with Sphinx (named 'sphinx.ext.*') +# or your custom ones. +extensions = [ + 'sphinx.ext.autodoc', + 'sphinx.ext.intersphinx', + 'sphinx.ext.todo', + 'sphinxcontrib.autohttp.flask', + 'wsmeext.sphinxext', + 'sphinx.ext.coverage', + 'sphinx.ext.pngmath', + 'sphinx.ext.viewcode', + 'sphinxcontrib.pecanwsme.rest', + 'oslo.sphinx', +] + +wsme_protocols = ['restjson', 'restxml'] + +todo_include_todos = True + +# Add any paths that contain templates here, relative to this directory. +if os.getenv('HUDSON_PUBLISH_DOCS'): + templates_path = ['_ga', '_templates'] +else: + templates_path = ['_templates'] + +# The suffix of source filenames. +source_suffix = '.rst' + +# The encoding of source files. +#source_encoding = 'utf-8-sig' + +# The master toctree document. +master_doc = 'index' + +# General information about the project. +project = u'Kwapi' +copyright = u'2013, OpenStack, LLC' + +# The language for content autogenerated by Sphinx. Refer to documentation +# for a list of supported languages. +#language = None + +# There are two options for replacing |today|: either, you set today to some +# non-false value, then it is used: +#today = '' +# Else, today_fmt is used as the format for a strftime call. +#today_fmt = '%B %d, %Y' + +# List of patterns, relative to source directory, that match files and +# directories to ignore when looking for source files. 
+exclude_patterns = ['**/#*', '**~', '**/#*#'] + +# The reST default role (used for this markup: `text`) +# to use for all documents. +#default_role = None + +# If true, '()' will be appended to :func: etc. cross-reference text. +#add_function_parentheses = True + +# If true, the current module name will be prepended to all description +# unit titles (such as .. function::). +#add_module_names = True + +# If true, sectionauthor and moduleauthor directives will be shown in the +# output. They are ignored by default. +show_authors = False + +# The name of the Pygments (syntax highlighting) style to use. +pygments_style = 'sphinx' + +# A list of ignored prefixes for module index sorting. +#modindex_common_prefix = [] + +primary_domain = 'py' +nitpicky = False + + +# -- Options for HTML output -------------------------------------------------- + +# The theme to use for HTML and HTML Help pages. See the documentation for +# a list of builtin themes. +# html_theme_path = ['.'] +# html_theme = '_theme' + +# Theme options are theme-specific and customize the look and feel of a theme +# further. For a list of options available for each theme, see the +# documentation. +html_theme_options = { + "nosidebar": "false" +} + +# Add any paths that contain custom themes here, relative to this directory. +#html_theme_path = [] + +# The name for this set of Sphinx documents. If None, it defaults to +# " v documentation". +#html_title = None + +# A shorter title for the navigation bar. Default is the same as html_title. +#html_short_title = None + +# The name of an image file (relative to this directory) to place at the top +# of the sidebar. +#html_logo = None + +# The name of an image file (within the static path) to use as favicon of the +# docs. This file should be a Windows icon file (.ico) being 16x16 or 32x32 +# pixels large. +#html_favicon = None + +# Add any paths that contain custom static files (such as style sheets) here, +# relative to this directory. They are copied after the builtin static files, +# so a file named "default.css" will overwrite the builtin "default.css". +html_static_path = ['_static'] + +# If not '', a 'Last updated on:' timestamp is inserted at every page bottom, +# using the given strftime format. +#html_last_updated_fmt = '%b %d, %Y' +git_cmd = "git log --pretty=format:'%ad, commit %h' --date=local -n1" +html_last_updated_fmt = os.popen(git_cmd).read() + +# If true, SmartyPants will be used to convert quotes and dashes to +# typographically correct entities. +#html_use_smartypants = True + +# Custom sidebar templates, maps document names to template names. +#html_sidebars = {} + +# Additional templates that should be rendered to pages, maps page names to +# template names. +#html_additional_pages = {} + +# If false, no module index is generated. +#html_domain_indices = True + +# If false, no index is generated. +#html_use_index = True + +# If true, the index is split into individual pages for each letter. +#html_split_index = False + +# If true, links to the reST sources are added to the pages. +#html_show_sourcelink = True + +# If true, "Created using Sphinx" is shown in the HTML footer. Default is True. +#html_show_sphinx = True + +# If true, "(C) Copyright ..." is shown in the HTML footer. Default is True. +#html_show_copyright = True + +# If true, an OpenSearch description file will be output, and all pages will +# contain a tag referring to it. The value of this option must be the +# base URL from which the finished HTML is served. 
+#html_use_opensearch = '' + +# This is the file name suffix for HTML files (e.g. ".xhtml"). +#html_file_suffix = None + +# Output file base name for HTML help builder. +htmlhelp_basename = 'Kwapidoc' + + +# -- Options for LaTeX output ------------------------------------------------- + +latex_elements = { + # The paper size ('letterpaper' or 'a4paper'). + #'papersize': 'letterpaper', + + # The font size ('10pt', '11pt' or '12pt'). + #'pointsize': '10pt', + + # Additional stuff for the LaTeX preamble. + #'preamble': '', +} + +# Grouping the document tree into LaTeX files. List of tuples +# (source start file, target name, title, author, documentclass +# [howto/manual]). +latex_documents = [ + ('index', 'Kwapi.tex', u'Kwapi Documentation', + u'OpenStack, LLC', 'manual'), +] + +# The name of an image file (relative to this directory) to place at the top of +# the title page. +#latex_logo = None + +# For "manual" documents, if this is true, then toplevel headings are parts, +# not chapters. +#latex_use_parts = False + +# If true, show page references after internal links. +#latex_show_pagerefs = False + +# If true, show URL addresses after external links. +#latex_show_urls = False + +# Documents to append as an appendix to all manuals. +#latex_appendices = [] + +# If false, no module index is generated. +#latex_domain_indices = True + + +# -- Options for manual page output ------------------------------------------- + +# One entry per manual page. List of tuples +# (source start file, name, description, authors, manual section). +man_pages = [ + ('index', 'kwapi', u'Kwapi Documentation', + [u'OpenStack'], 1) +] + +# If true, show URL addresses after external links. +#man_show_urls = False + + +# -- Options for Texinfo output ----------------------------------------------- + +# Grouping the document tree into Texinfo files. List of tuples +# (source start file, target name, title, author, +# dir menu entry, description, category) +texinfo_documents = [ + ('index', 'Kwapi', u'Kwapi Documentation', u'OpenStack', + 'Kwapi', 'One line description of project.', 'Miscellaneous'), +] + +# Documents to append as an appendix to all manuals. +#texinfo_appendices = [] + +# If false, no module index is generated. +#texinfo_domain_indices = True + +# How to display URL addresses: 'footnote', 'no', or 'inline'. +#texinfo_show_urls = 'footnote' + + +# -- Options for Epub output -------------------------------------------------- + +# Bibliographic Dublin Core info. +epub_title = u'Kwapi' +epub_author = u'OpenStack' +epub_publisher = u'OpenStack' +epub_copyright = u'2013, OpenStack' + +# The language of the text. It defaults to the language option +# or en if the language is not set. +#epub_language = '' + +# The scheme of the identifier. Typical schemes are ISBN or URL. +#epub_scheme = '' + +# The unique identifier of the text. This can be an ISBN number +# or the project homepage. +#epub_identifier = '' + +# A unique identification for the text. +#epub_uid = '' + +# A tuple containing the cover image and cover page html template filenames. +#epub_cover = () + +# HTML files that should be inserted before the pages created by sphinx. +# The format is a list of tuples containing the path and title. +#epub_pre_files = [] + +# HTML files shat should be inserted after the pages created by sphinx. +# The format is a list of tuples containing the path and title. +#epub_post_files = [] + +# A list of files that should not be packed into the epub file. 
+#epub_exclude_files = [] + +# The depth of the table of contents in toc.ncx. +#epub_tocdepth = 3 + +# Allow duplicate toc entries. +#epub_tocdup = True diff --git a/doc/source/configuration.rst b/doc/source/configuration.rst new file mode 100644 index 0000000..ed8427c --- /dev/null +++ b/doc/source/configuration.rst @@ -0,0 +1,161 @@ +.. + Copyright 2013 François Rossigneux (Inria) + + Licensed under the Apache License, Version 2.0 (the "License"); you may + not use this file except in compliance with the License. You may obtain + a copy of the License at + + http://www.apache.org/licenses/LICENSE-2.0 + + Unless required by applicable law or agreed to in writing, software + distributed under the License is distributed on an "AS IS" BASIS, WITHOUT + WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. See the + License for the specific language governing permissions and limitations + under the License. + +===================== +Configuration Options +===================== + +Kwapi drivers specific +====================== + +The following table lists the Kwapi drivers specific options in the drivers +configuration file. Please note that Kwapi uses openstack-common extensively, +which requires that the other parameters are set appropriately. For information +we are listing the configuration elements that we use after the Kwapi drivers +specific elements. + +=============================== ==================================== ============================================================== +Parameter Default Note +=============================== ==================================== ============================================================== +probes_endpoint ipc:///tmp/kwapi-drivers Endpoint where the drivers send their measurements + ipc:// or tcp://: +enable_signing true Enable message signing between drivers and plugins +metering_secret change this or be hacked Secret value for signing metering messages +check_drivers_interval 60 Check drivers at the specified interval and restart them if + they are crashed +=============================== ==================================== ============================================================== + +The configuration file contains a section for each wattmeter. + +A sample configuration file can be found in `drivers.conf`_. + +.. _drivers.conf: https://github.com/stackforge/kwapi/blob/master/etc/kwapi/drivers.conf + +Kwapi plugin API specific +========================= + +The following table lists the Kwapi API specific options in the API +configuration file. Please note that Kwapi uses openstack-common extensively, +which requires that the other parameters are set appropriately. For information +we are listing the configuration elements that we use after the Kwapi API +specific elements. 
+ +=============================== ==================================== ============================================================== +Parameter Default Note +=============================== ==================================== ============================================================== +api_port 5000 API port +probes_endpoint ipc:///tmp/kwapi-forwarder Endpoint where the measurements are received +signature_checking true Enable the verification of signed metering messages +driver_metering_secret change this or be hacked Secret value for verifying signed metering messages +acl_enabled true Check the Keystone tokens provided by the clients +policy_file /etc/kwapi/policy.json Policy file +cleaning_interval 300 Delete the probes that have not been updated during the + specified interval +=============================== ==================================== ============================================================== + +A sample configuration file can be found in `api.conf`_. + +.. _api.conf: https://github.com/stackforge/kwapi/blob/master/etc/kwapi/api.conf + +Keystone Middleware Authentication +---------------------------------- + +The following table lists the Keystone middleware authentication options which are used to get admin token. +Please note that these options need to be under [keystone_authtoken] section. + +=============================== ==================================== ============================================================== +Parameter Default Note +=============================== ==================================== ============================================================== +auth_host The host providing the Keystone service API endpoint for + validating and requesting tokens +auth_port 35357 The port used to validate tokens +auth_protocol https The protocol used to validate tokens +auth_uri auth_protocol://auth_host:auth_port The full URI used to validate tokens +admin_token Either this or the following three options are required. If + set, this is a single shared secret with the Keystone + configuration used to validate tokens. +admin_user User name for retrieving admin token +admin_password Password for retrieving admin token +admin_tenant_name Tenant name for retrieving admin token +signing_dir The cache directory for signing certificate +certfile Required if Keystone server requires client cert +keyfile Required if Keystone server requires client cert. This can be + the same as certfile if the certfile includes the private key. +=============================== ==================================== ============================================================== + +Kwapi plugin RRD specific +========================= + +The following table lists the Kwapi RRD specific options in the RRD +configuration file. Please note that Kwapi uses openstack-common extensively, +which requires that the other parameters are set appropriately. For information +we are listing the configuration elements that we use after the Kwapi RRD +specific elements. 
+ +=============================== ==================================== ============================================================== +Parameter Default Note +=============================== ==================================== ============================================================== +rrd_port 8080 Port used to display webpages +probes_endpoint ipc:///tmp/kwapi-forwarder Endpoint where the measurements are received +signature_checking true Enable the verification of signed metering messages +driver_metering_secret change this or be hacked Secret value for verifying signed metering messages +png_dir /var/lib/kwapi/kwapi-png The directory where are stored PNG files +rrd_dir /var/lib/kwapi/kwapi-rrd The directory where are stored RRD files +currency € The currency symbol used in graphs +kwh_price 0.125 The kWh price used in graphs +hue 100 The hue of the graphs +max_watts 200 The maximum value of the summary graph +refresh_interval 5 The webpage auto-refresh interval +=============================== ==================================== ============================================================== + +A sample configuration file can be found in `rrd.conf`_. + +.. _rrd.conf: https://github.com/stackforge/kwapi/blob/master/etc/kwapi/rrd.conf + +General options +=============== + +The following is the list of openstack-common options that we use: + +=========================== ==================================== ============================================================== +Parameter Default Note +=========================== ==================================== ============================================================== +log_file Log output to a named file +verbose true Print more verbose output +=========================== ==================================== ============================================================== + +Kwapi forwarder specific +========================= + +The following table lists the Kwapi forwarder specific options in the forwarder +configuration file. Please note that Kwapi uses openstack-common extensively, +which requires that the other parameters are set appropriately. For information +we are listing the configuration elements that we use after the Kwapi forwarder +specific elements. + +=============================== ==================================== ============================================================== +Parameter Default Note +=============================== ==================================== ============================================================== +forwarder_endpoint ipc:///tmp/kwapi-forwarder Endpoint where the measurements are forwarded and where the + plugins subscriptions are received +probes_endpoint ipc:///tmp/kwapi-drivers Endpoint where the drivers send their measurements. + ipc:// or tcp://: +=============================== ==================================== ============================================================== + +The configuration file contains a section for each wattmeter. + +A sample configuration file can be found in `forwarder.conf`_. + +.. _forwarder.conf: https://github.com/stackforge/kwapi/blob/master/etc/kwapi/forwarder.conf diff --git a/doc/source/contributing/areas.rst b/doc/source/contributing/areas.rst new file mode 100644 index 0000000..39928d6 --- /dev/null +++ b/doc/source/contributing/areas.rst @@ -0,0 +1,39 @@ +.. + Copyright 2013 François Rossigneux (Inria) + + Licensed under the Apache License, Version 2.0 (the "License"); you may + not use this file except in compliance with the License. 
You may obtain + a copy of the License at + + http://www.apache.org/licenses/LICENSE-2.0 + + Unless required by applicable law or agreed to in writing, software + distributed under the License is distributed on an "AS IS" BASIS, WITHOUT + WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. See the + License for the specific language governing permissions and limitations + under the License. + +=================== +Areas to Contribute +=================== + +Drivers +======= + +Kwapi aims at supporting various wattmeters. If you have a non-supported +wattmeter, you can easily contribute by writing a new one. + + +Plugins +======= + +Kwapi plugins process the metrics. You can contribute by writing new plugins to +bring new functionnalities. + +Testing +======= + +The first version of Kwapi has not yet unit tests and has not seen much +run-time in real environments. Setting up a copy of Kwapi to monitor a real +OpenStack installation or to perform some load testing would be especially +helpful. diff --git a/doc/source/contributing/index.rst b/doc/source/contributing/index.rst new file mode 100644 index 0000000..339bf42 --- /dev/null +++ b/doc/source/contributing/index.rst @@ -0,0 +1,25 @@ +.. + Copyright 2013 François Rossigneux (Inria) + + Licensed under the Apache License, Version 2.0 (the "License"); you may + not use this file except in compliance with the License. You may obtain + a copy of the License at + + http://www.apache.org/licenses/LICENSE-2.0 + + Unless required by applicable law or agreed to in writing, software + distributed under the License is distributed on an "AS IS" BASIS, WITHOUT + WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. See the + License for the specific language governing permissions and limitations + under the License. + +============================ + Contributing to Kwapi +============================ + +.. toctree:: + + user + resources + areas + source diff --git a/doc/source/contributing/resources.rst b/doc/source/contributing/resources.rst new file mode 100644 index 0000000..8983260 --- /dev/null +++ b/doc/source/contributing/resources.rst @@ -0,0 +1,29 @@ +.. + Copyright 2013 François Rossigneux (Inria) + + Licensed under the Apache License, Version 2.0 (the "License"); you may + not use this file except in compliance with the License. You may obtain + a copy of the License at + + http://www.apache.org/licenses/LICENSE-2.0 + + Unless required by applicable law or agreed to in writing, software + distributed under the License is distributed on an "AS IS" BASIS, WITHOUT + WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. See the + License for the specific language governing permissions and limitations + under the License. + +.. _resources: + +======================= +Project Hosting Details +======================= + +:Bug tracker: https://bugs.launchpad.net/kwapi +:Mailing list: http://lists.openstack.org/cgi-bin/mailman/listinfo/openstack-dev (prefix subjects with ``[energy]`` for faster responses) +:Code Hosting: https://github.com/stackforge/kwapi +:Code Review: https://review.openstack.org/#/q/status:open+project:stackforge/kwapi,n,z + +.. seealso:: + + * :ref:`user` diff --git a/doc/source/contributing/source.rst b/doc/source/contributing/source.rst new file mode 100644 index 0000000..7ec5c09 --- /dev/null +++ b/doc/source/contributing/source.rst @@ -0,0 +1,55 @@ +.. 
+ Copyright 2013 François Rossigneux (Inria) + + Licensed under the Apache License, Version 2.0 (the "License"); you may + not use this file except in compliance with the License. You may obtain + a copy of the License at + + http://www.apache.org/licenses/LICENSE-2.0 + + Unless required by applicable law or agreed to in writing, software + distributed under the License is distributed on an "AS IS" BASIS, WITHOUT + WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. See the + License for the specific language governing permissions and limitations + under the License. + +======================= +Working with the Source +======================= + +Setting up a Development Sandbox +================================ + +1. Set up a server or virtual machine to run OpenStack using + devstack_. + +.. _devstack: http://www.devstack.org/ + +2. Clone the kwapi project to the machine:: + + $ cd /opt/stack + $ git clone https://github.com/stackforge/kwapi.git + $ cd ./kwapi + +3. Once this is done, you need to setup the review process:: + + $ git remote add gerrit ssh://@review.openstack.org:29418/stackforge/kwapi.git + +4. If you are preparing a patch, create a topic branch and switch to + it before making any changes:: + + $ git checkout -b TOPIC-BRANCH + +Code Reviews +============ + +Kwapi uses the OpenStack review process for all code and +developer documentation contributions. Code reviews are managed +through gerrit. + +.. seealso:: + + * http://wiki.openstack.org/GerritWorkflow + * `OpenStack Gerrit instance`_. + +.. _OpenStack Gerrit instance: https://review.openstack.org/#/q/status:open+project:openstack/kwapi,n,z diff --git a/doc/source/contributing/user.rst b/doc/source/contributing/user.rst new file mode 100644 index 0000000..aaf7803 --- /dev/null +++ b/doc/source/contributing/user.rst @@ -0,0 +1,45 @@ +.. + Copyright 2013 François Rossigneux (Inria) + + Licensed under the Apache License, Version 2.0 (the "License"); you may + not use this file except in compliance with the License. You may obtain + a copy of the License at + + http://www.apache.org/licenses/LICENSE-2.0 + + Unless required by applicable law or agreed to in writing, software + distributed under the License is distributed on an "AS IS" BASIS, WITHOUT + WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. See the + License for the specific language governing permissions and limitations + under the License. + +.. _user: + +=================== +Joining the Project +=================== + +Contributor License Agreement +============================= + +In order to contribute to the Kwapi project, you need to have +signed OpenStack's contributor's agreement. + +.. seealso:: + + * http://wiki.openstack.org/HowToContribute + * http://wiki.openstack.org/CLA + +LaunchPad Project +================= + +Most of the tools used for OpenStack depend on a launchpad.net ID for +authentication. After signing up for a launchpad account, join the +"openstack" team to have access to the mailing list and receive +notifications of important events. + +.. seealso:: + + * http://launchpad.net + * http://launchpad.net/kwapi + * http://launchpad.net/~openstack diff --git a/doc/source/glossary.rst b/doc/source/glossary.rst new file mode 100644 index 0000000..afa52d3 --- /dev/null +++ b/doc/source/glossary.rst @@ -0,0 +1,36 @@ +.. + Copyright 2013 François Rossigneux (Inria) + + Licensed under the Apache License, Version 2.0 (the "License"); you may + not use this file except in compliance with the License. 
You may obtain + a copy of the License at + + http://www.apache.org/licenses/LICENSE-2.0 + + Unless required by applicable law or agreed to in writing, software + distributed under the License is distributed on an "AS IS" BASIS, WITHOUT + WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. See the + License for the specific language governing permissions and limitations + under the License. + +======== +Glossary +======== + +.. glossary:: + + driver + Software thread that queries a wattmeter and sends the results to + the plugins. + + forwarder + Component that forwards plugin subscriptions and metrics. + Used to minimize network traffic, or to connect isolated networks + through a gateway. + + plugin + A component that subscribes to drivers and processes their metrics (for + example, triggering an action whenever a meter reaches a certain + threshold). + + probe + A wattmeter sensor. A wattmeter can have a single probe (typically IPMI + cards) or multiple probes (typically PDUs). diff --git a/doc/source/index.rst b/doc/source/index.rst new file mode 100644 index 0000000..5fcf349 --- /dev/null +++ b/doc/source/index.rst @@ -0,0 +1,65 @@ +.. + Copyright 2013 François Rossigneux (Inria) + + Licensed under the Apache License, Version 2.0 (the "License"); you may + not use this file except in compliance with the License. You may obtain + a copy of the License at + + http://www.apache.org/licenses/LICENSE-2.0 + + Unless required by applicable law or agreed to in writing, software + distributed under the License is distributed on an "AS IS" BASIS, WITHOUT + WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. See the + License for the specific language governing permissions and limitations + under the License. + +=========================================== +Welcome to Kwapi's developer documentation! +=========================================== + +Kwapi is a framework designed for acquiring energy consumption metrics. It +can upload metrics from various wattmeters to Ceilometer. + +Its architecture is based on a layer of drivers, which retrieve measurements +from wattmeters, and a layer of plugins that collect and process them. The +communication between these two layers goes through a bus. In the case of a +distributed architecture, a plugin can listen to several drivers at remote +locations. + +Drivers and plugins are easily extensible, making it possible to support other +types of wattmeters and to provide other services. + +What is the purpose of the project and vision for it? +===================================================== + +Kwapi can be used for: + * Energy monitoring of data centers + * Usage-based billing + * Efficient scheduling + +It aims to support various wattmeters while remaining scalable and easily +extensible. + +This documentation offers information on how Kwapi works and how to contribute +to the project. + +Table of contents +================= + +.. toctree:: + :maxdepth: 2 + + install + architecture + configuration + + contributing/index + glossary + +.. update index + +Indices and tables +================== + +* :ref:`genindex` +* :ref:`modindex` +* :ref:`search` diff --git a/doc/source/install.rst b/doc/source/install.rst new file mode 100644 index 0000000..1cff234 --- /dev/null +++ b/doc/source/install.rst @@ -0,0 +1,50 @@ +.. + Copyright 2013 François Rossigneux (Inria) + + Licensed under the Apache License, Version 2.0 (the "License"); you may + not use this file except in compliance with the License.
You may obtain + a copy of the License at + + http://www.apache.org/licenses/LICENSE-2.0 + + Unless required by applicable law or agreed to in writing, software + distributed under the License is distributed on an "AS IS" BASIS, WITHOUT + WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. See the + License for the specific language governing permissions and limitations + under the License. + +========== +Installing +========== + +Installing Kwapi +================ + +1. Clone the Kwapi git repository to the management server:: + + $ git clone https://github.com/stackforge/kwapi.git + +2. As a user with ``root`` permissions or ``sudo`` privileges, run the + Kwapi installer and copy the configuration files:: + + $ pip install kwapi + $ cp -r kwapi/etc/kwapi /etc/ + +Running Kwapi services +====================== + + Start the drivers on all the machines that can access wattmeters:: + + $ kwapi-drivers + + Start the forwarder on a remote machine (optional):: + + $ kwapi-forwarder + + Start the API plugin if you want to use Ceilometer:: + + $ kwapi-api + + Start the RRD plugin if you want to display graphs in a web browser:: + + $ kwapi-rrd diff --git a/doc/source/layered_architecture.png b/doc/source/layered_architecture.png new file mode 100644 index 0000000..26522af Binary files /dev/null and b/doc/source/layered_architecture.png differ diff --git a/doc/source/message_format.png b/doc/source/message_format.png new file mode 100644 index 0000000..9333791 Binary files /dev/null and b/doc/source/message_format.png differ diff --git a/doc/source/webpage.png b/doc/source/webpage.png new file mode 100644 index 0000000..879b53d Binary files /dev/null and b/doc/source/webpage.png differ diff --git a/kwapi/drivers/driver_manager.py b/kwapi/drivers/driver_manager.py index b7f4cc2..2b263cd 100644 --- a/kwapi/drivers/driver_manager.py +++ b/kwapi/drivers/driver_manager.py @@ -18,6 +18,7 @@ import ast import signal +import sys import thread from threading import Lock, Timer, Thread @@ -148,3 +149,21 @@ def terminate(): join_threads.append(join_thread) for join_thread in join_threads: join_thread.join() + + +def start(): + """Starts Kwapi drivers.""" + cfg.CONF(sys.argv[1:], + project='kwapi', + default_config_files=['/etc/kwapi/drivers.conf']) + log.setup('kwapi') + + start_zmq_server() + load_all_drivers() + check_drivers_alive() + + signal.signal(signal.SIGTERM, signal_handler) + try: + signal.pause() + except KeyboardInterrupt: + terminate() diff --git a/bin/kwapi-forwarder b/kwapi/forwarder.py similarity index 97% rename from bin/kwapi-forwarder rename to kwapi/forwarder.py index b2f0a49..9a02ad7 100644 --- a/bin/kwapi-forwarder +++ b/kwapi/forwarder.py @@ -64,7 +64,8 @@ def signal_handler(signum, frame): raise KeyboardInterrupt -if __name__ == '__main__': +def start(): + """Starts Kwapi forwarder.""" cfg.CONF(sys.argv[1:], project='kwapi', default_config_files=['/etc/kwapi/forwarder.conf'] diff --git a/kwapi/openstack/common/excutils.py b/kwapi/openstack/common/excutils.py new file mode 100644 index 0000000..f26f9be --- /dev/null +++ b/kwapi/openstack/common/excutils.py @@ -0,0 +1,98 @@ +# vim: tabstop=4 shiftwidth=4 softtabstop=4 + +# Copyright 2011 OpenStack Foundation. +# Copyright 2012, Red Hat, Inc. +# +# Licensed under the Apache License, Version 2.0 (the "License"); you may +# not use this file except in compliance with the License. 
You may obtain +# a copy of the License at +# +# http://www.apache.org/licenses/LICENSE-2.0 +# +# Unless required by applicable law or agreed to in writing, software +# distributed under the License is distributed on an "AS IS" BASIS, WITHOUT +# WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. See the +# License for the specific language governing permissions and limitations +# under the License. + +""" +Exception related utilities. +""" + +import logging +import sys +import time +import traceback + +from kwapi.openstack.common.gettextutils import _ # noqa + + +class save_and_reraise_exception(object): + """Save current exception, run some code and then re-raise. + + In some cases the exception context can be cleared, resulting in None + being attempted to be re-raised after an exception handler is run. This + can happen when eventlet switches greenthreads or when running an + exception handler, code raises and catches an exception. In both + cases the exception context will be cleared. + + To work around this, we save the exception state, run handler code, and + then re-raise the original exception. If another exception occurs, the + saved exception is logged and the new exception is re-raised. + + In some cases the caller may not want to re-raise the exception, and + for those circumstances this context provides a reraise flag that + can be used to suppress the exception. For example: + + except Exception: + with save_and_reraise_exception() as ctxt: + decide_if_need_reraise() + if not should_be_reraised: + ctxt.reraise = False + """ + def __init__(self): + self.reraise = True + + def __enter__(self): + self.type_, self.value, self.tb, = sys.exc_info() + return self + + def __exit__(self, exc_type, exc_val, exc_tb): + if exc_type is not None: + logging.error(_('Original exception being dropped: %s'), + traceback.format_exception(self.type_, + self.value, + self.tb)) + return False + if self.reraise: + raise self.type_, self.value, self.tb + + +def forever_retry_uncaught_exceptions(infunc): + def inner_func(*args, **kwargs): + last_log_time = 0 + last_exc_message = None + exc_count = 0 + while True: + try: + return infunc(*args, **kwargs) + except Exception as exc: + if exc.message == last_exc_message: + exc_count += 1 + else: + exc_count = 1 + # Do not log any more frequently than once a minute unless + # the exception message changes + cur_time = int(time.time()) + if (cur_time - last_log_time > 60 or + exc.message != last_exc_message): + logging.exception( + _('Unexpected exception occurred %d time(s)... ' + 'retrying.') % exc_count) + last_log_time = cur_time + last_exc_message = exc.message + exc_count = 0 + # This should be a very rare event. In case it isn't, do + # a sleep. + time.sleep(1) + return inner_func diff --git a/kwapi/openstack/common/fileutils.py b/kwapi/openstack/common/fileutils.py new file mode 100644 index 0000000..2283490 --- /dev/null +++ b/kwapi/openstack/common/fileutils.py @@ -0,0 +1,110 @@ +# vim: tabstop=4 shiftwidth=4 softtabstop=4 + +# Copyright 2011 OpenStack Foundation. +# All Rights Reserved. +# +# Licensed under the Apache License, Version 2.0 (the "License"); you may +# not use this file except in compliance with the License. You may obtain +# a copy of the License at +# +# http://www.apache.org/licenses/LICENSE-2.0 +# +# Unless required by applicable law or agreed to in writing, software +# distributed under the License is distributed on an "AS IS" BASIS, WITHOUT +# WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. 
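A minimal usage sketch for the ``save_and_reraise_exception`` context manager added above; the ``resource`` object and its methods are placeholders for whatever cleanup the caller needs::

    from kwapi.openstack.common import excutils


    def activate(resource):
        try:
            resource.activate()
        except Exception:
            with excutils.save_and_reraise_exception() as ctxt:
                # Cleanup runs first; the original exception is re-raised
                # when the with-block exits.
                resource.cleanup()
                if not resource.critical:
                    # Callers can opt out of the re-raise via the context.
                    ctxt.reraise = False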
See the +# License for the specific language governing permissions and limitations +# under the License. + + +import contextlib +import errno +import os + +from kwapi.openstack.common import excutils +from kwapi.openstack.common.gettextutils import _ # noqa +from kwapi.openstack.common import log as logging + +LOG = logging.getLogger(__name__) + +_FILE_CACHE = {} + + +def ensure_tree(path): + """Create a directory (and any ancestor directories required) + + :param path: Directory to create + """ + try: + os.makedirs(path) + except OSError as exc: + if exc.errno == errno.EEXIST: + if not os.path.isdir(path): + raise + else: + raise + + +def read_cached_file(filename, force_reload=False): + """Read from a file if it has been modified. + + :param force_reload: Whether to reload the file. + :returns: A tuple with a boolean specifying if the data is fresh + or not. + """ + global _FILE_CACHE + + if force_reload and filename in _FILE_CACHE: + del _FILE_CACHE[filename] + + reloaded = False + mtime = os.path.getmtime(filename) + cache_info = _FILE_CACHE.setdefault(filename, {}) + + if not cache_info or mtime > cache_info.get('mtime', 0): + LOG.debug(_("Reloading cached file %s") % filename) + with open(filename) as fap: + cache_info['data'] = fap.read() + cache_info['mtime'] = mtime + reloaded = True + return (reloaded, cache_info['data']) + + +def delete_if_exists(path): + """Delete a file, but ignore file not found error. + + :param path: File to delete + """ + + try: + os.unlink(path) + except OSError as e: + if e.errno == errno.ENOENT: + return + else: + raise + + +@contextlib.contextmanager +def remove_path_on_error(path): + """Protect code that wants to operate on PATH atomically. + Any exception will cause PATH to be removed. + + :param path: File to work with + """ + try: + yield + except Exception: + with excutils.save_and_reraise_exception(): + delete_if_exists(path) + + +def file_open(*args, **kwargs): + """Open file + + see built-in file() documentation for more details + + Note: The reason this is kept in a separate module is to easily + be able to provide a stub module that doesn't alter system + state at all (for unit tests) + """ + return file(*args, **kwargs) diff --git a/kwapi/openstack/common/gettextutils.py b/kwapi/openstack/common/gettextutils.py index 4f8e677..7912b9f 100644 --- a/kwapi/openstack/common/gettextutils.py +++ b/kwapi/openstack/common/gettextutils.py @@ -2,6 +2,7 @@ # Copyright 2012 Red Hat, Inc. # All Rights Reserved. +# Copyright 2013 IBM Corp. # # Licensed under the Apache License, Version 2.0 (the "License"); you may # not use this file except in compliance with the License. You may obtain @@ -23,11 +24,236 @@ Usual usage in an openstack.common module: from kwapi.openstack.common.gettextutils import _ """ +import copy import gettext +import logging.handlers +import os +import re +import UserString +import six -t = gettext.translation('openstack-common', 'locale', fallback=True) +_localedir = os.environ.get('kwapi'.upper() + '_LOCALEDIR') +_t = gettext.translation('kwapi', localedir=_localedir, fallback=True) def _(msg): - return t.ugettext(msg) + return _t.ugettext(msg) + + +def install(domain): + """Install a _() function using the given translation domain. + + Given a translation domain, install a _() function using gettext's + install() function. + + The main difference from gettext.install() is that we allow + overriding the default localedir (e.g. /usr/share/locale) using + a translation-domain-specific environment variable (e.g. + NOVA_LOCALEDIR). 
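A short sketch of how the ``fileutils`` helpers above can be used; the paths are illustrative and must exist (or be creatable) on the machine running the snippet::

    from kwapi.openstack.common import fileutils

    # Create a directory tree; an existing directory is not an error.
    fileutils.ensure_tree('/tmp/kwapi-scratch')

    # Read through the module-level cache; `reloaded` is True only when the
    # cached copy was refreshed because the file's mtime changed.
    reloaded, data = fileutils.read_cached_file('/etc/kwapi/drivers.conf')

    # Remove a partially written file if an exception escapes the block.
    path = '/tmp/kwapi-scratch/drivers.conf.bak'
    with fileutils.remove_path_on_error(path):
        with open(path, 'w') as out:
            out.write(data)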
+ """ + gettext.install(domain, + localedir=os.environ.get(domain.upper() + '_LOCALEDIR'), + unicode=True) + + +""" +Lazy gettext functionality. + +The following is an attempt to introduce a deferred way +to do translations on messages in OpenStack. We attempt to +override the standard _() function and % (format string) operation +to build Message objects that can later be translated when we have +more information. Also included is an example LogHandler that +translates Messages to an associated locale, effectively allowing +many logs, each with their own locale. +""" + + +def get_lazy_gettext(domain): + """Assemble and return a lazy gettext function for a given domain. + + Factory method for a project/module to get a lazy gettext function + for its own translation domain (i.e. nova, glance, cinder, etc.) + """ + + def _lazy_gettext(msg): + """Create and return a Message object. + + Message encapsulates a string so that we can translate it later when + needed. + """ + return Message(msg, domain) + + return _lazy_gettext + + +class Message(UserString.UserString, object): + """Class used to encapsulate translatable messages.""" + def __init__(self, msg, domain): + # _msg is the gettext msgid and should never change + self._msg = msg + self._left_extra_msg = '' + self._right_extra_msg = '' + self.params = None + self.locale = None + self.domain = domain + + @property + def data(self): + # NOTE(mrodden): this should always resolve to a unicode string + # that best represents the state of the message currently + + localedir = os.environ.get(self.domain.upper() + '_LOCALEDIR') + if self.locale: + lang = gettext.translation(self.domain, + localedir=localedir, + languages=[self.locale], + fallback=True) + else: + # use system locale for translations + lang = gettext.translation(self.domain, + localedir=localedir, + fallback=True) + + full_msg = (self._left_extra_msg + + lang.ugettext(self._msg) + + self._right_extra_msg) + + if self.params is not None: + full_msg = full_msg % self.params + + return six.text_type(full_msg) + + def _save_dictionary_parameter(self, dict_param): + full_msg = self.data + # look for %(blah) fields in string; + # ignore %% and deal with the + # case where % is first character on the line + keys = re.findall('(?:[^%]|^)%\((\w*)\)[a-z]', full_msg) + + # if we don't find any %(blah) blocks but have a %s + if not keys and re.findall('(?:[^%]|^)%[a-z]', full_msg): + # apparently the full dictionary is the parameter + params = copy.deepcopy(dict_param) + else: + params = {} + for key in keys: + try: + params[key] = copy.deepcopy(dict_param[key]) + except TypeError: + # cast uncopyable thing to unicode string + params[key] = unicode(dict_param[key]) + + return params + + def _save_parameters(self, other): + # we check for None later to see if + # we actually have parameters to inject, + # so encapsulate if our parameter is actually None + if other is None: + self.params = (other, ) + elif isinstance(other, dict): + self.params = self._save_dictionary_parameter(other) + else: + # fallback to casting to unicode, + # this will handle the problematic python code-like + # objects that cannot be deep-copied + try: + self.params = copy.deepcopy(other) + except TypeError: + self.params = unicode(other) + + return self + + # overrides to be more string-like + def __unicode__(self): + return self.data + + def __str__(self): + return self.data.encode('utf-8') + + def __getstate__(self): + to_copy = ['_msg', '_right_extra_msg', '_left_extra_msg', + 'domain', 'params', 'locale'] + new_dict 
= self.__dict__.fromkeys(to_copy) + for attr in to_copy: + new_dict[attr] = copy.deepcopy(self.__dict__[attr]) + + return new_dict + + def __setstate__(self, state): + for (k, v) in state.items(): + setattr(self, k, v) + + # operator overloads + def __add__(self, other): + copied = copy.deepcopy(self) + copied._right_extra_msg += other.__str__() + return copied + + def __radd__(self, other): + copied = copy.deepcopy(self) + copied._left_extra_msg += other.__str__() + return copied + + def __mod__(self, other): + # do a format string to catch and raise + # any possible KeyErrors from missing parameters + self.data % other + copied = copy.deepcopy(self) + return copied._save_parameters(other) + + def __mul__(self, other): + return self.data * other + + def __rmul__(self, other): + return other * self.data + + def __getitem__(self, key): + return self.data[key] + + def __getslice__(self, start, end): + return self.data.__getslice__(start, end) + + def __getattribute__(self, name): + # NOTE(mrodden): handle lossy operations that we can't deal with yet + # These override the UserString implementation, since UserString + # uses our __class__ attribute to try and build a new message + # after running the inner data string through the operation. + # At that point, we have lost the gettext message id and can just + # safely resolve to a string instead. + ops = ['capitalize', 'center', 'decode', 'encode', + 'expandtabs', 'ljust', 'lstrip', 'replace', 'rjust', 'rstrip', + 'strip', 'swapcase', 'title', 'translate', 'upper', 'zfill'] + if name in ops: + return getattr(self.data, name) + else: + return UserString.UserString.__getattribute__(self, name) + + +class LocaleHandler(logging.Handler): + """Handler that can have a locale associated to translate Messages. + + A quick example of how to utilize the Message class above. + LocaleHandler takes a locale and a target logging.Handler object + to forward LogRecord objects to after translating the internal Message. + """ + + def __init__(self, locale, target): + """Initialize a LocaleHandler + + :param locale: locale to use for translating messages + :param target: logging.Handler object to forward + LogRecord objects to after translation + """ + logging.Handler.__init__(self) + self.locale = locale + self.target = target + + def emit(self, record): + if isinstance(record.msg, Message): + # set the locale and resolve to a string + record.msg.locale = self.locale + + self.target.emit(record) diff --git a/kwapi/openstack/common/importutils.py b/kwapi/openstack/common/importutils.py new file mode 100644 index 0000000..7a303f9 --- /dev/null +++ b/kwapi/openstack/common/importutils.py @@ -0,0 +1,68 @@ +# vim: tabstop=4 shiftwidth=4 softtabstop=4 + +# Copyright 2011 OpenStack Foundation. +# All Rights Reserved. +# +# Licensed under the Apache License, Version 2.0 (the "License"); you may +# not use this file except in compliance with the License. You may obtain +# a copy of the License at +# +# http://www.apache.org/licenses/LICENSE-2.0 +# +# Unless required by applicable law or agreed to in writing, software +# distributed under the License is distributed on an "AS IS" BASIS, WITHOUT +# WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. See the +# License for the specific language governing permissions and limitations +# under the License. + +""" +Import related utilities and helper functions. 
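A sketch of the lazy-translation machinery introduced above: ``get_lazy_gettext()`` returns a ``_()`` that builds ``Message`` objects, and a ``LocaleHandler`` renders them in a target locale only when a log record is emitted. The logger name and locale below are illustrative::

    import logging

    from kwapi.openstack.common import gettextutils

    # _() now returns Message objects instead of translated strings.
    _ = gettextutils.get_lazy_gettext('kwapi')

    msg = _('Driver %s exited unexpectedly') % 'ipmi'   # still untranslated

    # Translate Messages to a locale right before handing them to the
    # real handler.
    handler = gettextutils.LocaleHandler('fr_FR', logging.StreamHandler())
    log = logging.getLogger('kwapi.demo')
    log.addHandler(handler)
    log.error(msg)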
+""" + +import sys +import traceback + + +def import_class(import_str): + """Returns a class from a string including module and class.""" + mod_str, _sep, class_str = import_str.rpartition('.') + try: + __import__(mod_str) + return getattr(sys.modules[mod_str], class_str) + except (ValueError, AttributeError): + raise ImportError('Class %s cannot be found (%s)' % + (class_str, + traceback.format_exception(*sys.exc_info()))) + + +def import_object(import_str, *args, **kwargs): + """Import a class and return an instance of it.""" + return import_class(import_str)(*args, **kwargs) + + +def import_object_ns(name_space, import_str, *args, **kwargs): + """Tries to import object from default namespace. + + Imports a class and return an instance of it, first by trying + to find the class in a default namespace, then failing back to + a full path if not found in the default namespace. + """ + import_value = "%s.%s" % (name_space, import_str) + try: + return import_class(import_value)(*args, **kwargs) + except ImportError: + return import_class(import_str)(*args, **kwargs) + + +def import_module(import_str): + """Import a module.""" + __import__(import_str) + return sys.modules[import_str] + + +def try_import(import_str, default=None): + """Try to import a module and if it fails return default.""" + try: + return import_module(import_str) + except ImportError: + return default diff --git a/kwapi/openstack/common/jsonutils.py b/kwapi/openstack/common/jsonutils.py index 901a1b4..d804088 100644 --- a/kwapi/openstack/common/jsonutils.py +++ b/kwapi/openstack/common/jsonutils.py @@ -38,11 +38,24 @@ import functools import inspect import itertools import json +import types import xmlrpclib +import netaddr +import six + from kwapi.openstack.common import timeutils +_nasty_type_tests = [inspect.ismodule, inspect.isclass, inspect.ismethod, + inspect.isfunction, inspect.isgeneratorfunction, + inspect.isgenerator, inspect.istraceback, inspect.isframe, + inspect.iscode, inspect.isbuiltin, inspect.isroutine, + inspect.isabstract] + +_simple_types = (types.NoneType, int, basestring, bool, float, long) + + def to_primitive(value, convert_instances=False, convert_datetime=True, level=0, max_depth=3): """Convert a complex object into primitives. @@ -58,19 +71,32 @@ def to_primitive(value, convert_instances=False, convert_datetime=True, Therefore, convert_instances=True is lossy ... be aware. """ - nasty = [inspect.ismodule, inspect.isclass, inspect.ismethod, - inspect.isfunction, inspect.isgeneratorfunction, - inspect.isgenerator, inspect.istraceback, inspect.isframe, - inspect.iscode, inspect.isbuiltin, inspect.isroutine, - inspect.isabstract] - for test in nasty: - if test(value): - return unicode(value) + # handle obvious types first - order of basic types determined by running + # full tests on nova project, resulting in the following counts: + # 572754 + # 460353 + # 379632 + # 274610 + # 199918 + # 114200 + # 51817 + # 26164 + # 6491 + # 283 + # 19 + if isinstance(value, _simple_types): + return value - # value of itertools.count doesn't get caught by inspects - # above and results in infinite loop when list(value) is called. + if isinstance(value, datetime.datetime): + if convert_datetime: + return timeutils.strtime(value) + else: + return value + + # value of itertools.count doesn't get caught by nasty_type_tests + # and results in infinite loop when list(value) is called. if type(value) == itertools.count: - return unicode(value) + return six.text_type(value) # FIXME(vish): Workaround for LP bug 852095. 
Without this workaround, # tests that raise an exception in a mocked method that @@ -91,17 +117,18 @@ def to_primitive(value, convert_instances=False, convert_datetime=True, convert_datetime=convert_datetime, level=level, max_depth=max_depth) + if isinstance(value, dict): + return dict((k, recursive(v)) for k, v in value.iteritems()) + elif isinstance(value, (list, tuple)): + return [recursive(lv) for lv in value] + # It's not clear why xmlrpclib created their own DateTime type, but # for our purposes, make it a datetime type which is explicitly # handled if isinstance(value, xmlrpclib.DateTime): value = datetime.datetime(*tuple(value.timetuple())[:6]) - if isinstance(value, (list, tuple)): - return [recursive(v) for v in value] - elif isinstance(value, dict): - return dict((k, recursive(v)) for k, v in value.iteritems()) - elif convert_datetime and isinstance(value, datetime.datetime): + if convert_datetime and isinstance(value, datetime.datetime): return timeutils.strtime(value) elif hasattr(value, 'iteritems'): return recursive(dict(value.iteritems()), level=level + 1) @@ -111,12 +138,16 @@ def to_primitive(value, convert_instances=False, convert_datetime=True, # Likely an instance of something. Watch for cycles. # Ignore class member vars. return recursive(value.__dict__, level=level + 1) + elif isinstance(value, netaddr.IPAddress): + return six.text_type(value) else: + if any(test(value) for test in _nasty_type_tests): + return six.text_type(value) return value except TypeError: # Class objects are tricky since they may define something like # __iter__ defined but it isn't callable as list(). - return unicode(value) + return six.text_type(value) def dumps(value, default=to_primitive, **kwargs): diff --git a/kwapi/openstack/common/local.py b/kwapi/openstack/common/local.py index 8bdc837..f1bfc82 100644 --- a/kwapi/openstack/common/local.py +++ b/kwapi/openstack/common/local.py @@ -1,6 +1,6 @@ # vim: tabstop=4 shiftwidth=4 softtabstop=4 -# Copyright 2011 OpenStack LLC. +# Copyright 2011 OpenStack Foundation. # All Rights Reserved. # # Licensed under the Apache License, Version 2.0 (the "License"); you may diff --git a/kwapi/openstack/common/log.py b/kwapi/openstack/common/log.py index f8dba33..9d07c05 100644 --- a/kwapi/openstack/common/log.py +++ b/kwapi/openstack/common/log.py @@ -1,6 +1,6 @@ # vim: tabstop=4 shiftwidth=4 softtabstop=4 -# Copyright 2011 OpenStack LLC. +# Copyright 2011 OpenStack Foundation. # Copyright 2010 United States Government as represented by the # Administrator of the National Aeronautics and Space Administration. # All Rights Reserved. @@ -29,26 +29,24 @@ It also allows setting of formatting information through conf. 
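``dumps()`` keeps ``to_primitive()`` as its default serializer, so values such as datetimes are converted instead of raising ``TypeError``; a small sketch with an illustrative payload::

    import datetime

    from kwapi.openstack.common import jsonutils

    payload = {
        'probe': 'pdu-1',
        'watts': 42.7,
        'timestamp': datetime.datetime(2013, 7, 1, 12, 0, 0),
    }

    # The datetime is handed to to_primitive() and serialized as a string.
    print jsonutils.dumps(payload)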
""" -import cStringIO import inspect import itertools import logging import logging.config import logging.handlers import os -import stat import sys import traceback from oslo.config import cfg +from six import moves -from kwapi.openstack.common.gettextutils import _ +from kwapi.openstack.common.gettextutils import _ # noqa +from kwapi.openstack.common import importutils from kwapi.openstack.common import jsonutils from kwapi.openstack.common import local -from kwapi.openstack.common import notifier -_DEFAULT_LOG_FORMAT = "%(asctime)s %(levelname)8s [%(name)s] %(message)s" _DEFAULT_LOG_DATE_FORMAT = "%Y-%m-%d %H:%M:%S" common_cli_opts = [ @@ -73,11 +71,14 @@ logging_cli_opts = [ 'documentation for details on logging configuration ' 'files.'), cfg.StrOpt('log-format', - default=_DEFAULT_LOG_FORMAT, + default=None, metavar='FORMAT', - help='A logging.Formatter log message format string which may ' + help='DEPRECATED. ' + 'A logging.Formatter log message format string which may ' 'use any of the available logging.LogRecord attributes. ' - 'Default: %(default)s'), + 'This option is deprecated. Please use ' + 'logging_context_format_string and ' + 'logging_default_format_string instead.'), cfg.StrOpt('log-date-format', default=_DEFAULT_LOG_DATE_FORMAT, metavar='DATE_FORMAT', @@ -87,11 +88,11 @@ logging_cli_opts = [ metavar='PATH', deprecated_name='logfile', help='(Optional) Name of log file to output to. ' - 'If not set, logging will go to stdout.'), + 'If no default is set, logging will go to stdout.'), cfg.StrOpt('log-dir', deprecated_name='logdir', - help='(Optional) The directory to keep log files in ' - '(will be prepended to --log-file)'), + help='(Optional) The base directory used for relative ' + '--log-file paths'), cfg.BoolOpt('use-syslog', default=False, help='Use syslog for logging.'), @@ -103,17 +104,14 @@ logging_cli_opts = [ generic_log_opts = [ cfg.BoolOpt('use_stderr', default=True, - help='Log output to standard error'), - cfg.StrOpt('logfile_mode', - default='0644', - help='Default file mode used when creating log files'), + help='Log output to standard error') ] log_opts = [ cfg.StrOpt('logging_context_format_string', - default='%(asctime)s.%(msecs)03d %(levelname)s %(name)s ' - '[%(request_id)s %(user)s %(tenant)s] %(instance)s' - '%(message)s', + default='%(asctime)s.%(msecs)03d %(process)d %(levelname)s ' + '%(name)s [%(request_id)s %(user)s %(tenant)s] ' + '%(instance)s%(message)s', help='format string to use for log messages with context'), cfg.StrOpt('logging_default_format_string', default='%(asctime)s.%(msecs)03d %(process)d %(levelname)s ' @@ -210,7 +208,27 @@ def _get_log_file_path(binary=None): return '%s.log' % (os.path.join(logdir, binary),) -class ContextAdapter(logging.LoggerAdapter): +class BaseLoggerAdapter(logging.LoggerAdapter): + + def audit(self, msg, *args, **kwargs): + self.log(logging.AUDIT, msg, *args, **kwargs) + + +class LazyAdapter(BaseLoggerAdapter): + def __init__(self, name='unknown', version='unknown'): + self._logger = None + self.extra = {} + self.name = name + self.version = version + + @property + def logger(self): + if not self._logger: + self._logger = getLogger(self.name, self.version) + return self._logger + + +class ContextAdapter(BaseLoggerAdapter): warn = logging.LoggerAdapter.warning def __init__(self, logger, project_name, version_string): @@ -218,8 +236,9 @@ class ContextAdapter(logging.LoggerAdapter): self.project = project_name self.version = version_string - def audit(self, msg, *args, **kwargs): - self.log(logging.AUDIT, msg, 
*args, **kwargs) + @property + def handlers(self): + return self.logger.handlers def deprecated(self, msg, *args, **kwargs): stdmsg = _("Deprecated: %s") % msg @@ -303,17 +322,6 @@ class JSONFormatter(logging.Formatter): return jsonutils.dumps(message) -class PublishErrorsHandler(logging.Handler): - def emit(self, record): - if ('kwapi.openstack.common.notifier.log_notifier' in - CONF.notification_driver): - return - notifier.api.notify(None, 'error.publisher', - 'error_notification', - notifier.api.ERROR, - dict(error=record.msg)) - - def _create_logging_excepthook(product_name): def logging_excepthook(type, value, tb): extra = {} @@ -323,12 +331,32 @@ def _create_logging_excepthook(product_name): return logging_excepthook +class LogConfigError(Exception): + + message = _('Error loading logging config %(log_config)s: %(err_msg)s') + + def __init__(self, log_config, err_msg): + self.log_config = log_config + self.err_msg = err_msg + + def __str__(self): + return self.message % dict(log_config=self.log_config, + err_msg=self.err_msg) + + +def _load_log_config(log_config): + try: + logging.config.fileConfig(log_config) + except moves.configparser.Error as exc: + raise LogConfigError(log_config, str(exc)) + + def setup(product_name): """Setup logging.""" if CONF.log_config: - logging.config.fileConfig(CONF.log_config) + _load_log_config(CONF.log_config) else: - _setup_logging_from_conf(product_name) + _setup_logging_from_conf() sys.excepthook = _create_logging_excepthook(product_name) @@ -362,8 +390,8 @@ def _find_facility_from_conf(): return facility -def _setup_logging_from_conf(product_name): - log_root = getLogger(product_name).logger +def _setup_logging_from_conf(): + log_root = getLogger(None).logger for handler in log_root.handlers: log_root.removeHandler(handler) @@ -378,11 +406,6 @@ def _setup_logging_from_conf(product_name): filelog = logging.handlers.WatchedFileHandler(logpath) log_root.addHandler(filelog) - mode = int(CONF.logfile_mode, 8) - st = os.stat(logpath) - if st.st_mode != (stat.S_IFREG | mode): - os.chmod(logpath, mode) - if CONF.use_stderr: streamlog = ColorHandler() log_root.addHandler(streamlog) @@ -394,14 +417,22 @@ def _setup_logging_from_conf(product_name): log_root.addHandler(streamlog) if CONF.publish_errors: - log_root.addHandler(PublishErrorsHandler(logging.ERROR)) + handler = importutils.import_object( + "kwapi.openstack.common.log_handler.PublishErrorsHandler", + logging.ERROR) + log_root.addHandler(handler) + datefmt = CONF.log_date_format for handler in log_root.handlers: - datefmt = CONF.log_date_format + # NOTE(alaski): CONF.log_format overrides everything currently. This + # should be deprecated in favor of context aware formatting. 
if CONF.log_format: handler.setFormatter(logging.Formatter(fmt=CONF.log_format, datefmt=datefmt)) - handler.setFormatter(LegacyFormatter(datefmt=datefmt)) + log_root.info('Deprecated: log_format is now deprecated and will ' + 'be removed in the next release') + else: + handler.setFormatter(ContextFormatter(datefmt=datefmt)) if CONF.debug: log_root.setLevel(logging.DEBUG) @@ -410,14 +441,11 @@ def _setup_logging_from_conf(product_name): else: log_root.setLevel(logging.WARNING) - level = logging.NOTSET for pair in CONF.default_log_levels: mod, _sep, level_name = pair.partition('=') level = logging.getLevelName(level_name) logger = logging.getLogger(mod) logger.setLevel(level) - for handler in log_root.handlers: - logger.addHandler(handler) _loggers = {} @@ -430,6 +458,16 @@ def getLogger(name='unknown', version='unknown'): return _loggers[name] +def getLazyLogger(name='unknown', version='unknown'): + """Returns lazy logger. + + Creates a pass-through logger that does not create the real logger + until it is really needed and delegates all calls to the real logger + once it is created. + """ + return LazyAdapter(name, version) + + class WritableLogger(object): """A thin wrapper that responds to `write` and logs.""" @@ -441,7 +479,7 @@ class WritableLogger(object): self.logger.log(self.level, msg) -class LegacyFormatter(logging.Formatter): +class ContextFormatter(logging.Formatter): """A context.RequestContext aware formatter configured through flags. The flags used to set format strings are: logging_context_format_string @@ -482,7 +520,7 @@ class LegacyFormatter(logging.Formatter): if not record: return logging.Formatter.formatException(self, exc_info) - stringbuffer = cStringIO.StringIO() + stringbuffer = moves.StringIO() traceback.print_exception(exc_info[0], exc_info[1], exc_info[2], None, stringbuffer) lines = stringbuffer.getvalue().split('\n') diff --git a/kwapi/openstack/common/notifier/__init__.py b/kwapi/openstack/common/notifier/__init__.py deleted file mode 100644 index 482d54e..0000000 --- a/kwapi/openstack/common/notifier/__init__.py +++ /dev/null @@ -1,14 +0,0 @@ -# Copyright 2011 OpenStack LLC. -# All Rights Reserved. -# -# Licensed under the Apache License, Version 2.0 (the "License"); you may -# not use this file except in compliance with the License. You may obtain -# a copy of the License at -# -# http://www.apache.org/licenses/LICENSE-2.0 -# -# Unless required by applicable law or agreed to in writing, software -# distributed under the License is distributed on an "AS IS" BASIS, WITHOUT -# WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. See the -# License for the specific language governing permissions and limitations -# under the License. diff --git a/kwapi/openstack/common/notifier/api.py b/kwapi/openstack/common/notifier/api.py deleted file mode 100644 index d25dbe9..0000000 --- a/kwapi/openstack/common/notifier/api.py +++ /dev/null @@ -1,183 +0,0 @@ -# Copyright 2011 OpenStack LLC. -# All Rights Reserved. -# -# Licensed under the Apache License, Version 2.0 (the "License"); you may -# not use this file except in compliance with the License. You may obtain -# a copy of the License at -# -# http://www.apache.org/licenses/LICENSE-2.0 -# -# Unless required by applicable law or agreed to in writing, software -# distributed under the License is distributed on an "AS IS" BASIS, WITHOUT -# WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. See the -# License for the specific language governing permissions and limitations -# under the License. 
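A sketch of how an entry point is expected to wire up this logging module, mirroring ``driver_manager.start()`` earlier in this patch; the messages are illustrative and the ``audit()`` call relies on the AUDIT level this module registers::

    import sys

    from oslo.config import cfg

    from kwapi.openstack.common import log

    # Parse options (including the logging options registered by this
    # module), then configure logging for the project.
    cfg.CONF(sys.argv[1:], project='kwapi')
    log.setup('kwapi')

    LOG = log.getLogger(__name__)
    LOG.info('drivers loaded')
    LOG.audit('probe attached')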
- -import uuid - -from oslo.config import cfg - -from kwapi.openstack.common import context -from kwapi.openstack.common.gettextutils import _ -from kwapi.openstack.common import importutils -from kwapi.openstack.common import jsonutils -from kwapi.openstack.common import log as logging -from kwapi.openstack.common import timeutils - - -LOG = logging.getLogger(__name__) - -notifier_opts = [ - cfg.MultiStrOpt('notification_driver', - default=[], - deprecated_name='list_notifier_drivers', - help='Driver or drivers to handle sending notifications'), - cfg.StrOpt('default_notification_level', - default='INFO', - help='Default notification level for outgoing notifications'), - cfg.StrOpt('default_publisher_id', - default='$host', - help='Default publisher_id for outgoing notifications'), -] - -CONF = cfg.CONF -CONF.register_opts(notifier_opts) - -WARN = 'WARN' -INFO = 'INFO' -ERROR = 'ERROR' -CRITICAL = 'CRITICAL' -DEBUG = 'DEBUG' - -log_levels = (DEBUG, WARN, INFO, ERROR, CRITICAL) - - -class BadPriorityException(Exception): - pass - - -def notify_decorator(name, fn): - """ decorator for notify which is used from utils.monkey_patch() - - :param name: name of the function - :param function: - object of the function - :returns: function -- decorated function - - """ - def wrapped_func(*args, **kwarg): - body = {} - body['args'] = [] - body['kwarg'] = {} - for arg in args: - body['args'].append(arg) - for key in kwarg: - body['kwarg'][key] = kwarg[key] - - ctxt = context.get_context_from_function_and_args(fn, args, kwarg) - notify(ctxt, - CONF.default_publisher_id, - name, - CONF.default_notification_level, - body) - return fn(*args, **kwarg) - return wrapped_func - - -def publisher_id(service, host=None): - if not host: - host = CONF.host - return "%s.%s" % (service, host) - - -def notify(context, publisher_id, event_type, priority, payload): - """Sends a notification using the specified driver - - :param publisher_id: the source worker_type.host of the message - :param event_type: the literal type of event (ex. Instance Creation) - :param priority: patterned after the enumeration of Python logging - levels in the set (DEBUG, WARN, INFO, ERROR, CRITICAL) - :param payload: A python dictionary of attributes - - Outgoing message format includes the above parameters, and appends the - following: - - message_id - a UUID representing the id for this notification - - timestamp - the GMT timestamp the notification was sent at - - The composite message will be constructed as a dictionary of the above - attributes, which will then be sent via the transport mechanism defined - by the driver. - - Message example:: - - {'message_id': str(uuid.uuid4()), - 'publisher_id': 'compute.host1', - 'timestamp': timeutils.utcnow(), - 'priority': 'WARN', - 'event_type': 'compute.create_instance', - 'payload': {'instance_id': 12, ... }} - - """ - if priority not in log_levels: - raise BadPriorityException( - _('%s not in valid priorities') % priority) - - # Ensure everything is JSON serializable. - payload = jsonutils.to_primitive(payload, convert_instances=True) - - msg = dict(message_id=str(uuid.uuid4()), - publisher_id=publisher_id, - event_type=event_type, - priority=priority, - payload=payload, - timestamp=str(timeutils.utcnow())) - - for driver in _get_drivers(): - try: - driver.notify(context, msg) - except Exception as e: - LOG.exception(_("Problem '%(e)s' attempting to " - "send to notification system. 
" - "Payload=%(payload)s") - % dict(e=e, payload=payload)) - - -_drivers = None - - -def _get_drivers(): - """Instantiate, cache, and return drivers based on the CONF.""" - global _drivers - if _drivers is None: - _drivers = {} - for notification_driver in CONF.notification_driver: - add_driver(notification_driver) - - return _drivers.values() - - -def add_driver(notification_driver): - """Add a notification driver at runtime.""" - # Make sure the driver list is initialized. - _get_drivers() - if isinstance(notification_driver, basestring): - # Load and add - try: - driver = importutils.import_module(notification_driver) - _drivers[notification_driver] = driver - except ImportError: - LOG.exception(_("Failed to load notifier %s. " - "These notifications will not be sent.") % - notification_driver) - else: - # Driver is already loaded; just add the object. - _drivers[notification_driver] = notification_driver - - -def _reset_drivers(): - """Used by unit tests to reset the drivers.""" - global _drivers - _drivers = None diff --git a/kwapi/openstack/common/notifier/log_notifier.py b/kwapi/openstack/common/notifier/log_notifier.py deleted file mode 100644 index 84b4a9b..0000000 --- a/kwapi/openstack/common/notifier/log_notifier.py +++ /dev/null @@ -1,35 +0,0 @@ -# Copyright 2011 OpenStack LLC. -# All Rights Reserved. -# -# Licensed under the Apache License, Version 2.0 (the "License"); you may -# not use this file except in compliance with the License. You may obtain -# a copy of the License at -# -# http://www.apache.org/licenses/LICENSE-2.0 -# -# Unless required by applicable law or agreed to in writing, software -# distributed under the License is distributed on an "AS IS" BASIS, WITHOUT -# WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. See the -# License for the specific language governing permissions and limitations -# under the License. - -from oslo.config import cfg - -from kwapi.openstack.common import jsonutils -from kwapi.openstack.common import log as logging - - -CONF = cfg.CONF - - -def notify(_context, message): - """Notifies the recipient of the desired event given the model. - Log notifications using openstack's default logging system""" - - priority = message.get('priority', - CONF.default_notification_level) - priority = priority.lower() - logger = logging.getLogger( - 'kwapi.openstack.common.notification.%s' % - message['event_type']) - getattr(logger, priority)(jsonutils.dumps(message)) diff --git a/kwapi/openstack/common/notifier/no_op_notifier.py b/kwapi/openstack/common/notifier/no_op_notifier.py deleted file mode 100644 index ee1ddbd..0000000 --- a/kwapi/openstack/common/notifier/no_op_notifier.py +++ /dev/null @@ -1,19 +0,0 @@ -# Copyright 2011 OpenStack LLC. -# All Rights Reserved. -# -# Licensed under the Apache License, Version 2.0 (the "License"); you may -# not use this file except in compliance with the License. You may obtain -# a copy of the License at -# -# http://www.apache.org/licenses/LICENSE-2.0 -# -# Unless required by applicable law or agreed to in writing, software -# distributed under the License is distributed on an "AS IS" BASIS, WITHOUT -# WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. See the -# License for the specific language governing permissions and limitations -# under the License. 
- - -def notify(_context, message): - """Notifies the recipient of the desired event given the model""" - pass diff --git a/kwapi/openstack/common/notifier/rabbit_notifier.py b/kwapi/openstack/common/notifier/rabbit_notifier.py deleted file mode 100644 index 1ceebb3..0000000 --- a/kwapi/openstack/common/notifier/rabbit_notifier.py +++ /dev/null @@ -1,29 +0,0 @@ -# Copyright 2012 Red Hat, Inc. -# All Rights Reserved. -# -# Licensed under the Apache License, Version 2.0 (the "License"); you may -# not use this file except in compliance with the License. You may obtain -# a copy of the License at -# -# http://www.apache.org/licenses/LICENSE-2.0 -# -# Unless required by applicable law or agreed to in writing, software -# distributed under the License is distributed on an "AS IS" BASIS, WITHOUT -# WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. See the -# License for the specific language governing permissions and limitations -# under the License. - - -from kwapi.openstack.common.gettextutils import _ -from kwapi.openstack.common import log as logging -from kwapi.openstack.common.notifier import rpc_notifier - -LOG = logging.getLogger(__name__) - - -def notify(context, message): - """Deprecated in Grizzly. Please use rpc_notifier instead.""" - - LOG.deprecated(_("The rabbit_notifier is now deprecated." - " Please use rpc_notifier instead.")) - rpc_notifier.notify(context, message) diff --git a/kwapi/openstack/common/notifier/rpc_notifier.py b/kwapi/openstack/common/notifier/rpc_notifier.py deleted file mode 100644 index 98c41d1..0000000 --- a/kwapi/openstack/common/notifier/rpc_notifier.py +++ /dev/null @@ -1,46 +0,0 @@ -# Copyright 2011 OpenStack LLC. -# All Rights Reserved. -# -# Licensed under the Apache License, Version 2.0 (the "License"); you may -# not use this file except in compliance with the License. You may obtain -# a copy of the License at -# -# http://www.apache.org/licenses/LICENSE-2.0 -# -# Unless required by applicable law or agreed to in writing, software -# distributed under the License is distributed on an "AS IS" BASIS, WITHOUT -# WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. See the -# License for the specific language governing permissions and limitations -# under the License. - -from oslo.config import cfg - -from kwapi.openstack.common import context as req_context -from kwapi.openstack.common.gettextutils import _ -from kwapi.openstack.common import log as logging -from kwapi.openstack.common import rpc - -LOG = logging.getLogger(__name__) - -notification_topic_opt = cfg.ListOpt( - 'notification_topics', default=['notifications', ], - help='AMQP topic used for openstack notifications') - -CONF = cfg.CONF -CONF.register_opt(notification_topic_opt) - - -def notify(context, message): - """Sends a notification via RPC""" - if not context: - context = req_context.get_admin_context() - priority = message.get('priority', - CONF.default_notification_level) - priority = priority.lower() - for topic in CONF.notification_topics: - topic = '%s.%s' % (topic, priority) - try: - rpc.notify(context, topic, message) - except Exception: - LOG.exception(_("Could not send notification to %(topic)s. " - "Payload=%(message)s"), locals()) diff --git a/kwapi/openstack/common/notifier/rpc_notifier2.py b/kwapi/openstack/common/notifier/rpc_notifier2.py deleted file mode 100644 index a6e6650..0000000 --- a/kwapi/openstack/common/notifier/rpc_notifier2.py +++ /dev/null @@ -1,52 +0,0 @@ -# Copyright 2011 OpenStack LLC. -# All Rights Reserved. 
-# -# Licensed under the Apache License, Version 2.0 (the "License"); you may -# not use this file except in compliance with the License. You may obtain -# a copy of the License at -# -# http://www.apache.org/licenses/LICENSE-2.0 -# -# Unless required by applicable law or agreed to in writing, software -# distributed under the License is distributed on an "AS IS" BASIS, WITHOUT -# WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. See the -# License for the specific language governing permissions and limitations -# under the License. - -'''messaging based notification driver, with message envelopes''' - -from oslo.config import cfg - -from kwapi.openstack.common import context as req_context -from kwapi.openstack.common.gettextutils import _ -from kwapi.openstack.common import log as logging -from kwapi.openstack.common import rpc - -LOG = logging.getLogger(__name__) - -notification_topic_opt = cfg.ListOpt( - 'topics', default=['notifications', ], - help='AMQP topic(s) used for openstack notifications') - -opt_group = cfg.OptGroup(name='rpc_notifier2', - title='Options for rpc_notifier2') - -CONF = cfg.CONF -CONF.register_group(opt_group) -CONF.register_opt(notification_topic_opt, opt_group) - - -def notify(context, message): - """Sends a notification via RPC""" - if not context: - context = req_context.get_admin_context() - priority = message.get('priority', - CONF.default_notification_level) - priority = priority.lower() - for topic in CONF.rpc_notifier2.topics: - topic = '%s.%s' % (topic, priority) - try: - rpc.notify(context, topic, message, envelope=True) - except Exception: - LOG.exception(_("Could not send notification to %(topic)s. " - "Payload=%(message)s"), locals()) diff --git a/kwapi/openstack/common/notifier/test_notifier.py b/kwapi/openstack/common/notifier/test_notifier.py deleted file mode 100644 index 5e34880..0000000 --- a/kwapi/openstack/common/notifier/test_notifier.py +++ /dev/null @@ -1,22 +0,0 @@ -# Copyright 2011 OpenStack LLC. -# All Rights Reserved. -# -# Licensed under the Apache License, Version 2.0 (the "License"); you may -# not use this file except in compliance with the License. You may obtain -# a copy of the License at -# -# http://www.apache.org/licenses/LICENSE-2.0 -# -# Unless required by applicable law or agreed to in writing, software -# distributed under the License is distributed on an "AS IS" BASIS, WITHOUT -# WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. See the -# License for the specific language governing permissions and limitations -# under the License. - - -NOTIFICATIONS = [] - - -def notify(_context, message): - """Test notifier, stores notifications in memory for unittests.""" - NOTIFICATIONS.append(message) diff --git a/kwapi/openstack/common/policy.py b/kwapi/openstack/common/policy.py index 2e3fb7a..8a24cc5 100644 --- a/kwapi/openstack/common/policy.py +++ b/kwapi/openstack/common/policy.py @@ -1,6 +1,6 @@ # vim: tabstop=4 shiftwidth=4 softtabstop=4 -# Copyright (c) 2012 OpenStack, LLC. +# Copyright (c) 2012 OpenStack Foundation. # All Rights Reserved. # # Licensed under the Apache License, Version 2.0 (the "License"); you may @@ -57,33 +57,48 @@ as it allows particular rules to be explicitly disabled. 
""" import abc -import logging import re import urllib - import urllib2 -from kwapi.openstack.common.gettextutils import _ -from kwapi.openstack.common import jsonutils +from oslo.config import cfg +import six +from kwapi.openstack.common import fileutils +from kwapi.openstack.common.gettextutils import _ # noqa +from kwapi.openstack.common import jsonutils +from kwapi.openstack.common import log as logging + +policy_opts = [ + cfg.StrOpt('policy_file', + default='policy.json', + help=_('JSON file containing policy')), + cfg.StrOpt('policy_default_rule', + default='default', + help=_('Rule enforced when requested rule is not found')), +] + +CONF = cfg.CONF +CONF.register_opts(policy_opts) LOG = logging.getLogger(__name__) - -_rules = None _checks = {} +class PolicyNotAuthorized(Exception): + + def __init__(self, rule): + msg = _("Policy doesn't allow %s to be performed.") % rule + super(PolicyNotAuthorized, self).__init__(msg) + + class Rules(dict): - """ - A store for rules. Handles the default_rule setting directly. - """ + """A store for rules. Handles the default_rule setting directly.""" @classmethod def load_json(cls, data, default_rule=None): - """ - Allow loading of JSON rule data. - """ + """Allow loading of JSON rule data.""" # Suck in the JSON data and parse the rules rules = dict((k, parse_rule(v)) for k, v in @@ -123,87 +138,157 @@ class Rules(dict): return jsonutils.dumps(out_rules, indent=4) -# Really have to figure out a way to deprecate this -def set_rules(rules): - """Set the rules in use for policy checks.""" +class Enforcer(object): + """Responsible for loading and enforcing rules. - global _rules - - _rules = rules - - -# Ditto -def reset(): - """Clear the rules used for policy checks.""" - - global _rules - - _rules = None - - -def check(rule, target, creds, exc=None, *args, **kwargs): - """ - Checks authorization of a rule against the target and credentials. - - :param rule: The rule to evaluate. - :param target: As much information about the object being operated - on as possible, as a dictionary. - :param creds: As much information about the user performing the - action as possible, as a dictionary. - :param exc: Class of the exception to raise if the check fails. - Any remaining arguments passed to check() (both - positional and keyword arguments) will be passed to - the exception class. If exc is not provided, returns - False. - - :return: Returns False if the policy does not allow the action and - exc is not provided; otherwise, returns a value that - evaluates to True. Note: for rules using the "case" - expression, this True value will be the specified string - from the expression. + :param policy_file: Custom policy file to use, if none is + specified, `CONF.policy_file` will be + used. + :param rules: Default dictionary / Rules to use. It will be + considered just in the first instantiation. If + `load_rules(True)`, `clear()` or `set_rules(True)` + is called this will be overwritten. + :param default_rule: Default rule to use, CONF.default_rule will + be used if none is specified. 
""" - # Allow the rule to be a Check tree - if isinstance(rule, BaseCheck): - result = rule(target, creds) - elif not _rules: - # No rules to reference means we're going to fail closed - result = False - else: - try: - # Evaluate the rule - result = _rules[rule](target, creds) - except KeyError: - # If the rule doesn't exist, fail closed + def __init__(self, policy_file=None, rules=None, default_rule=None): + self.rules = Rules(rules) + self.default_rule = default_rule or CONF.policy_default_rule + + self.policy_path = None + self.policy_file = policy_file or CONF.policy_file + + def set_rules(self, rules, overwrite=True): + """Create a new Rules object based on the provided dict of rules. + + :param rules: New rules to use. It should be an instance of dict. + :param overwrite: Whether to overwrite current rules or update them + with the new rules. + """ + + if not isinstance(rules, dict): + raise TypeError(_("Rules must be an instance of dict or Rules, " + "got %s instead") % type(rules)) + + if overwrite: + self.rules = Rules(rules) + else: + self.update(rules) + + def clear(self): + """Clears Enforcer rules, policy's cache and policy's path.""" + self.set_rules({}) + self.policy_path = None + + def load_rules(self, force_reload=False): + """Loads policy_path's rules. + + Policy file is cached and will be reloaded if modified. + + :param force_reload: Whether to overwrite current rules. + """ + + if not self.policy_path: + self.policy_path = self._get_policy_path() + + reloaded, data = fileutils.read_cached_file(self.policy_path, + force_reload=force_reload) + + if reloaded: + rules = Rules.load_json(data, self.default_rule) + self.set_rules(rules) + LOG.debug(_("Rules successfully reloaded")) + + def _get_policy_path(self): + """Locate the policy json data file. + + :param policy_file: Custom policy file to locate. + + :returns: The policy path + + :raises: ConfigFilesNotFoundError if the file couldn't + be located. + """ + policy_file = CONF.find_file(self.policy_file) + + if policy_file: + return policy_file + + raise cfg.ConfigFilesNotFoundError(path=CONF.policy_file) + + def enforce(self, rule, target, creds, do_raise=False, + exc=None, *args, **kwargs): + """Checks authorization of a rule against the target and credentials. + + :param rule: A string or BaseCheck instance specifying the rule + to evaluate. + :param target: As much information about the object being operated + on as possible, as a dictionary. + :param creds: As much information about the user performing the + action as possible, as a dictionary. + :param do_raise: Whether to raise an exception or not if check + fails. + :param exc: Class of the exception to raise if the check fails. + Any remaining arguments passed to check() (both + positional and keyword arguments) will be passed to + the exception class. If not specified, PolicyNotAuthorized + will be used. + + :return: Returns False if the policy does not allow the action and + exc is not provided; otherwise, returns a value that + evaluates to True. Note: for rules using the "case" + expression, this True value will be the specified string + from the expression. + """ + + # NOTE(flaper87): Not logging target or creds to avoid + # potential security issues. 
+ LOG.debug(_("Rule %s will be now enforced") % rule) + + self.load_rules() + + # Allow the rule to be a Check tree + if isinstance(rule, BaseCheck): + result = rule(target, creds, self) + elif not self.rules: + # No rules to reference means we're going to fail closed result = False + else: + try: + # Evaluate the rule + result = self.rules[rule](target, creds, self) + except KeyError: + LOG.debug(_("Rule [%s] doesn't exist") % rule) + # If the rule doesn't exist, fail closed + result = False - # If it is False, raise the exception if requested - if exc and result is False: - raise exc(*args, **kwargs) + # If it is False, raise the exception if requested + if do_raise and not result: + if exc: + raise exc(*args, **kwargs) - return result + raise PolicyNotAuthorized(rule) + + return result class BaseCheck(object): - """ - Abstract base class for Check classes. - """ + """Abstract base class for Check classes.""" __metaclass__ = abc.ABCMeta @abc.abstractmethod def __str__(self): - """ - Retrieve a string representation of the Check tree rooted at - this node. - """ + """String representation of the Check tree rooted at this node.""" pass @abc.abstractmethod - def __call__(self, target, cred): - """ - Perform the check. Returns False to reject the access or a + def __call__(self, target, cred, enforcer): + """Triggers if instance of the class is called. + + Performs the check. Returns False to reject the access or a true value (not necessary True) to accept the access. """ @@ -211,44 +296,39 @@ class BaseCheck(object): class FalseCheck(BaseCheck): - """ - A policy check that always returns False (disallow). - """ + """A policy check that always returns False (disallow).""" def __str__(self): """Return a string representation of this check.""" return "!" - def __call__(self, target, cred): + def __call__(self, target, cred, enforcer): """Check the policy.""" return False class TrueCheck(BaseCheck): - """ - A policy check that always returns True (allow). - """ + """A policy check that always returns True (allow).""" def __str__(self): """Return a string representation of this check.""" return "@" - def __call__(self, target, cred): + def __call__(self, target, cred, enforcer): """Check the policy.""" return True class Check(BaseCheck): - """ - A base class to allow for user-defined policy checks. - """ + """A base class to allow for user-defined policy checks.""" def __init__(self, kind, match): - """ + """Initiates Check instance. + :param kind: The kind of the check, i.e., the field before the ':'. :param match: The match of the check, i.e., the field after @@ -265,14 +345,13 @@ class Check(BaseCheck): class NotCheck(BaseCheck): - """ + """Implements the "not" logical operator. + A policy check that inverts the result of another policy check. - Implements the "not" operator. """ def __init__(self, rule): - """ - Initialize the 'not' check. + """Initialize the 'not' check. :param rule: The rule to negate. Must be a Check. """ @@ -284,24 +363,23 @@ class NotCheck(BaseCheck): return "not %s" % self.rule - def __call__(self, target, cred): - """ - Check the policy. Returns the logical inverse of the wrapped - check. + def __call__(self, target, cred, enforcer): + """Check the policy. + + Returns the logical inverse of the wrapped check. """ - return not self.rule(target, cred) + return not self.rule(target, cred, enforcer) class AndCheck(BaseCheck): - """ - A policy check that requires that a list of other checks all - return True. Implements the "and" operator. 
+ """Implements the "and" logical operator. + + A policy check that requires that a list of other checks all return True. """ def __init__(self, rules): - """ - Initialize the 'and' check. + """Initialize the 'and' check. :param rules: A list of rules that will be tested. """ @@ -313,10 +391,10 @@ class AndCheck(BaseCheck): return "(%s)" % ' and '.join(str(r) for r in self.rules) - def __call__(self, target, cred): - """ - Check the policy. Requires that all rules accept in order to - return True. + def __call__(self, target, cred, enforcer): + """Check the policy. + + Requires that all rules accept in order to return True. """ for rule in self.rules: @@ -326,7 +404,8 @@ class AndCheck(BaseCheck): return True def add_check(self, rule): - """ + """Adds rule to be tested. + Allows addition of another rule to the list of rules that will be tested. Returns the AndCheck object for convenience. """ @@ -336,14 +415,14 @@ class AndCheck(BaseCheck): class OrCheck(BaseCheck): - """ + """Implements the "or" operator. + A policy check that requires that at least one of a list of other - checks returns True. Implements the "or" operator. + checks returns True. """ def __init__(self, rules): - """ - Initialize the 'or' check. + """Initialize the 'or' check. :param rules: A list of rules that will be tested. """ @@ -355,10 +434,10 @@ class OrCheck(BaseCheck): return "(%s)" % ' or '.join(str(r) for r in self.rules) - def __call__(self, target, cred): - """ - Check the policy. Requires that at least one rule accept in - order to return True. + def __call__(self, target, cred, enforcer): + """Check the policy. + + Requires that at least one rule accept in order to return True. """ for rule in self.rules: @@ -368,7 +447,8 @@ class OrCheck(BaseCheck): return False def add_check(self, rule): - """ + """Adds rule to be tested. + Allows addition of another rule to the list of rules that will be tested. Returns the OrCheck object for convenience. """ @@ -378,9 +458,7 @@ class OrCheck(BaseCheck): def _parse_check(rule): - """ - Parse a single base check rule into an appropriate Check object. - """ + """Parse a single base check rule into an appropriate Check object.""" # Handle the special checks if rule == '!': @@ -391,7 +469,7 @@ def _parse_check(rule): try: kind, match = rule.split(':', 1) except Exception: - LOG.exception(_("Failed to understand rule %(rule)s") % locals()) + LOG.exception(_("Failed to understand rule %s") % rule) # If the rule is invalid, we'll fail closed return FalseCheck() @@ -406,9 +484,9 @@ def _parse_check(rule): def _parse_list_rule(rule): - """ - Provided for backwards compatibility. Translates the old - list-of-lists syntax into a tree of Check objects. + """Translates the old list-of-lists syntax into a tree of Check objects. + + Provided for backwards compatibility. """ # Empty rule defaults to True @@ -436,7 +514,7 @@ def _parse_list_rule(rule): or_list.append(AndCheck(and_list)) # If we have only one check, omit the "or" - if len(or_list) == 0: + if not or_list: return FalseCheck() elif len(or_list) == 1: return or_list[0] @@ -449,8 +527,7 @@ _tokenize_re = re.compile(r'\s+') def _parse_tokenize(rule): - """ - Tokenizer for the policy language. + """Tokenizer for the policy language. Most of the single-character tokens are specified in the _tokenize_re; however, parentheses need to be handled specially, @@ -499,16 +576,16 @@ def _parse_tokenize(rule): class ParseStateMeta(type): - """ - Metaclass for the ParseState class. Facilitates identifying - reduction methods. 
+ """Metaclass for the ParseState class. + + Facilitates identifying reduction methods. """ def __new__(mcs, name, bases, cls_dict): - """ - Create the class. Injects the 'reducers' list, a list of - tuples matching token sequences to the names of the - corresponding reduction methods. + """Create the class. + + Injects the 'reducers' list, a list of tuples matching token sequences + to the names of the corresponding reduction methods. """ reducers = [] @@ -525,10 +602,10 @@ class ParseStateMeta(type): def reducer(*tokens): - """ - Decorator for reduction methods. Arguments are a sequence of - tokens, in order, which should trigger running this reduction - method. + """Decorator for reduction methods. + + Arguments are a sequence of tokens, in order, which should trigger running + this reduction method. """ def decorator(func): @@ -545,10 +622,10 @@ def reducer(*tokens): class ParseState(object): - """ - Implement the core of parsing the policy language. Uses a greedy - reduction algorithm to reduce a sequence of tokens into a single - terminal, the value of which will be the root of the Check tree. + """Implement the core of parsing the policy language. + + Uses a greedy reduction algorithm to reduce a sequence of tokens into + a single terminal, the value of which will be the root of the Check tree. Note: error reporting is rather lacking. The best we can get with this parser formulation is an overall "parse failed" error. @@ -565,11 +642,11 @@ class ParseState(object): self.values = [] def reduce(self): - """ - Perform a greedy reduction of the token stream. If a reducer - method matches, it will be executed, then the reduce() method - will be called recursively to search for any more possible - reductions. + """Perform a greedy reduction of the token stream. + + If a reducer method matches, it will be executed, then the + reduce() method will be called recursively to search for any more + possible reductions. """ for reduction, methname in self.reducers: @@ -599,9 +676,9 @@ class ParseState(object): @property def result(self): - """ - Obtain the final result of the parse. Raises ValueError if - the parse failed to reduce to a single result. + """Obtain the final result of the parse. + + Raises ValueError if the parse failed to reduce to a single result. """ if len(self.values) != 1: @@ -618,35 +695,31 @@ class ParseState(object): @reducer('check', 'and', 'check') def _make_and_expr(self, check1, _and, check2): - """ - Create an 'and_expr' from two checks joined by the 'and' - operator. + """Create an 'and_expr'. + + Join two checks by the 'and' operator. """ return [('and_expr', AndCheck([check1, check2]))] @reducer('and_expr', 'and', 'check') def _extend_and_expr(self, and_expr, _and, check): - """ - Extend an 'and_expr' by adding one more check. - """ + """Extend an 'and_expr' by adding one more check.""" return [('and_expr', and_expr.add_check(check))] @reducer('check', 'or', 'check') def _make_or_expr(self, check1, _or, check2): - """ - Create an 'or_expr' from two checks joined by the 'or' - operator. + """Create an 'or_expr'. + + Join two checks by the 'or' operator. """ return [('or_expr', OrCheck([check1, check2]))] @reducer('or_expr', 'or', 'check') def _extend_or_expr(self, or_expr, _or, check): - """ - Extend an 'or_expr' by adding one more check. - """ + """Extend an 'or_expr' by adding one more check.""" return [('or_expr', or_expr.add_check(check))] @@ -658,7 +731,8 @@ class ParseState(object): def _parse_text_rule(rule): - """ + """Parses policy to the tree. 
+ Translates a policy written in the policy language into a tree of Check objects. """ @@ -676,16 +750,14 @@ def _parse_text_rule(rule): return state.result except ValueError: # Couldn't parse the rule - LOG.exception(_("Failed to understand rule %(rule)r") % locals()) + LOG.exception(_("Failed to understand rule %r") % rule) # Fail closed return FalseCheck() def parse_rule(rule): - """ - Parses a policy rule into a tree of Check objects. - """ + """Parses a policy rule into a tree of Check objects.""" # If the rule is a string, it's in the policy language if isinstance(rule, basestring): @@ -694,8 +766,7 @@ def parse_rule(rule): def register(name, func=None): - """ - Register a function or Check class as a policy check. + """Register a function or Check class as a policy check. :param name: Gives the name of the check type, e.g., 'rule', 'role', etc. If name is None, a default check type @@ -722,13 +793,11 @@ def register(name, func=None): @register("rule") class RuleCheck(Check): - def __call__(self, target, creds): - """ - Recursively checks credentials based on the defined rules. - """ + def __call__(self, target, creds, enforcer): + """Recursively checks credentials based on the defined rules.""" try: - return _rules[self.match](target, creds) + return enforcer.rules[self.match](target, creds, enforcer) except KeyError: # We don't have any matching rule; fail closed return False @@ -736,7 +805,7 @@ class RuleCheck(Check): @register("role") class RoleCheck(Check): - def __call__(self, target, creds): + def __call__(self, target, creds, enforcer): """Check that there is a matching role in the cred dict.""" return self.match.lower() in [x.lower() for x in creds['roles']] @@ -744,9 +813,8 @@ class RoleCheck(Check): @register('http') class HttpCheck(Check): - def __call__(self, target, creds): - """ - Check http: rules by calling to a remote server. + def __call__(self, target, creds, enforcer): + """Check http: rules by calling to a remote server. This example implementation simply verifies that the response is exactly 'True'. @@ -762,9 +830,8 @@ class HttpCheck(Check): @register(None) class GenericCheck(Check): - def __call__(self, target, creds): - """ - Check an individual match. + def __call__(self, target, creds, enforcer): + """Check an individual match. Matches look like: @@ -775,5 +842,5 @@ class GenericCheck(Check): # TODO(termie): do dict inspection via dot syntax match = self.match % target if self.kind in creds: - return match == unicode(creds[self.kind]) + return match == six.text_type(creds[self.kind]) return False diff --git a/kwapi/openstack/common/setup.py b/kwapi/openstack/common/setup.py deleted file mode 100644 index 80a0ece..0000000 --- a/kwapi/openstack/common/setup.py +++ /dev/null @@ -1,359 +0,0 @@ -# vim: tabstop=4 shiftwidth=4 softtabstop=4 - -# Copyright 2011 OpenStack LLC. -# Copyright 2012-2013 Hewlett-Packard Development Company, L.P. -# All Rights Reserved. -# -# Licensed under the Apache License, Version 2.0 (the "License"); you may -# not use this file except in compliance with the License. You may obtain -# a copy of the License at -# -# http://www.apache.org/licenses/LICENSE-2.0 -# -# Unless required by applicable law or agreed to in writing, software -# distributed under the License is distributed on an "AS IS" BASIS, WITHOUT -# WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. See the -# License for the specific language governing permissions and limitations -# under the License. 
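register() is the extension point for new check kinds, and the checks themselves now receive the enforcer as a third argument so that rule: references resolve against its rule set rather than a module-level table. A sketch of a custom check under the new signature (the 'zone' kind, its semantics and the example policy line are invented purely for illustration):

    from kwapi.openstack.common import policy


    @policy.register('zone')
    class ZoneCheck(policy.Check):
        def __call__(self, target, creds, enforcer):
            # self.kind is 'zone'; self.match is the text after the colon.
            return creds.get('zone') == self.match

    # Once registered, a policy.json entry such as
    #     "probe:read": "role:admin or zone:grid5000"
    # will instantiate ZoneCheck for the zone: fragment when parsed.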
- -""" -Utilities with minimum-depends for use in setup.py -""" - -import email -import os -import re -import subprocess -import sys - -from setuptools.command import sdist - - -def parse_mailmap(mailmap='.mailmap'): - mapping = {} - if os.path.exists(mailmap): - with open(mailmap, 'r') as fp: - for l in fp: - try: - canonical_email, alias = re.match( - r'[^#]*?(<.+>).*(<.+>).*', l).groups() - except AttributeError: - continue - mapping[alias] = canonical_email - return mapping - - -def _parse_git_mailmap(git_dir, mailmap='.mailmap'): - mailmap = os.path.join(os.path.dirname(git_dir), mailmap) - return parse_mailmap(mailmap) - - -def canonicalize_emails(changelog, mapping): - """Takes in a string and an email alias mapping and replaces all - instances of the aliases in the string with their real email. - """ - for alias, email_address in mapping.iteritems(): - changelog = changelog.replace(alias, email_address) - return changelog - - -# Get requirements from the first file that exists -def get_reqs_from_files(requirements_files): - for requirements_file in requirements_files: - if os.path.exists(requirements_file): - with open(requirements_file, 'r') as fil: - return fil.read().split('\n') - return [] - - -def parse_requirements(requirements_files=['requirements.txt', - 'tools/pip-requires']): - requirements = [] - for line in get_reqs_from_files(requirements_files): - # For the requirements list, we need to inject only the portion - # after egg= so that distutils knows the package it's looking for - # such as: - # -e git://github.com/openstack/nova/master#egg=nova - if re.match(r'\s*-e\s+', line): - requirements.append(re.sub(r'\s*-e\s+.*#egg=(.*)$', r'\1', - line)) - # such as: - # http://github.com/openstack/nova/zipball/master#egg=nova - elif re.match(r'\s*https?:', line): - requirements.append(re.sub(r'\s*https?:.*#egg=(.*)$', r'\1', - line)) - # -f lines are for index locations, and don't get used here - elif re.match(r'\s*-f\s+', line): - pass - # argparse is part of the standard library starting with 2.7 - # adding it to the requirements list screws distro installs - elif line == 'argparse' and sys.version_info >= (2, 7): - pass - else: - requirements.append(line) - - return requirements - - -def parse_dependency_links(requirements_files=['requirements.txt', - 'tools/pip-requires']): - dependency_links = [] - # dependency_links inject alternate locations to find packages listed - # in requirements - for line in get_reqs_from_files(requirements_files): - # skip comments and blank lines - if re.match(r'(\s*#)|(\s*$)', line): - continue - # lines with -e or -f need the whole line, minus the flag - if re.match(r'\s*-[ef]\s+', line): - dependency_links.append(re.sub(r'\s*-[ef]\s+', '', line)) - # lines that are only urls can go in unmolested - elif re.match(r'\s*https?:', line): - dependency_links.append(line) - return dependency_links - - -def _run_shell_command(cmd, throw_on_error=False): - if os.name == 'nt': - output = subprocess.Popen(["cmd.exe", "/C", cmd], - stdout=subprocess.PIPE, - stderr=subprocess.PIPE) - else: - output = subprocess.Popen(["/bin/sh", "-c", cmd], - stdout=subprocess.PIPE, - stderr=subprocess.PIPE) - out = output.communicate() - if output.returncode and throw_on_error: - raise Exception("%s returned %d" % cmd, output.returncode) - if len(out) == 0: - return None - if len(out[0].strip()) == 0: - return None - return out[0].strip() - - -def _get_git_directory(): - parent_dir = os.path.dirname(__file__) - while True: - git_dir = os.path.join(parent_dir, '.git') - 
if os.path.exists(git_dir): - return git_dir - parent_dir, child = os.path.split(parent_dir) - if not child: # reached to root dir - return None - - -def write_git_changelog(): - """Write a changelog based on the git changelog.""" - new_changelog = 'ChangeLog' - git_dir = _get_git_directory() - if not os.getenv('SKIP_WRITE_GIT_CHANGELOG'): - if git_dir: - git_log_cmd = 'git --git-dir=%s log --stat' % git_dir - changelog = _run_shell_command(git_log_cmd) - mailmap = _parse_git_mailmap(git_dir) - with open(new_changelog, "w") as changelog_file: - changelog_file.write(canonicalize_emails(changelog, mailmap)) - else: - open(new_changelog, 'w').close() - - -def generate_authors(): - """Create AUTHORS file using git commits.""" - jenkins_email = 'jenkins@review.(openstack|stackforge).org' - old_authors = 'AUTHORS.in' - new_authors = 'AUTHORS' - git_dir = _get_git_directory() - if not os.getenv('SKIP_GENERATE_AUTHORS'): - if git_dir: - # don't include jenkins email address in AUTHORS file - git_log_cmd = ("git --git-dir=" + git_dir + - " log --format='%aN <%aE>' | sort -u | " - "egrep -v '" + jenkins_email + "'") - changelog = _run_shell_command(git_log_cmd) - mailmap = _parse_git_mailmap(git_dir) - with open(new_authors, 'w') as new_authors_fh: - new_authors_fh.write(canonicalize_emails(changelog, mailmap)) - if os.path.exists(old_authors): - with open(old_authors, "r") as old_authors_fh: - new_authors_fh.write('\n' + old_authors_fh.read()) - else: - open(new_authors, 'w').close() - - -_rst_template = """%(heading)s -%(underline)s - -.. automodule:: %(module)s - :members: - :undoc-members: - :show-inheritance: -""" - - -def get_cmdclass(): - """Return dict of commands to run from setup.py.""" - - cmdclass = dict() - - def _find_modules(arg, dirname, files): - for filename in files: - if filename.endswith('.py') and filename != '__init__.py': - arg["%s.%s" % (dirname.replace('/', '.'), - filename[:-3])] = True - - class LocalSDist(sdist.sdist): - """Builds the ChangeLog and Authors files from VC first.""" - - def run(self): - write_git_changelog() - generate_authors() - # sdist.sdist is an old style class, can't use super() - sdist.sdist.run(self) - - cmdclass['sdist'] = LocalSDist - - # If Sphinx is installed on the box running setup.py, - # enable setup.py to build the documentation, otherwise, - # just ignore it - try: - from sphinx.setup_command import BuildDoc - - class LocalBuildDoc(BuildDoc): - - builders = ['html', 'man'] - - def generate_autoindex(self): - print "**Autodocumenting from %s" % os.path.abspath(os.curdir) - modules = {} - option_dict = self.distribution.get_option_dict('build_sphinx') - source_dir = os.path.join(option_dict['source_dir'][1], 'api') - if not os.path.exists(source_dir): - os.makedirs(source_dir) - for pkg in self.distribution.packages: - if '.' not in pkg: - os.path.walk(pkg, _find_modules, modules) - module_list = modules.keys() - module_list.sort() - autoindex_filename = os.path.join(source_dir, 'autoindex.rst') - with open(autoindex_filename, 'w') as autoindex: - autoindex.write(""".. 
toctree:: - :maxdepth: 1 - -""") - for module in module_list: - output_filename = os.path.join(source_dir, - "%s.rst" % module) - heading = "The :mod:`%s` Module" % module - underline = "=" * len(heading) - values = dict(module=module, heading=heading, - underline=underline) - - print "Generating %s" % output_filename - with open(output_filename, 'w') as output_file: - output_file.write(_rst_template % values) - autoindex.write(" %s.rst\n" % module) - - def run(self): - if not os.getenv('SPHINX_DEBUG'): - self.generate_autoindex() - - for builder in self.builders: - self.builder = builder - self.finalize_options() - self.project = self.distribution.get_name() - self.version = self.distribution.get_version() - self.release = self.distribution.get_version() - BuildDoc.run(self) - - class LocalBuildLatex(LocalBuildDoc): - builders = ['latex'] - - cmdclass['build_sphinx'] = LocalBuildDoc - cmdclass['build_sphinx_latex'] = LocalBuildLatex - except ImportError: - pass - - return cmdclass - - -def _get_revno(git_dir): - """Return the number of commits since the most recent tag. - - We use git-describe to find this out, but if there are no - tags then we fall back to counting commits since the beginning - of time. - """ - describe = _run_shell_command( - "git --git-dir=%s describe --always" % git_dir) - if "-" in describe: - return describe.rsplit("-", 2)[-2] - - # no tags found - revlist = _run_shell_command( - "git --git-dir=%s rev-list --abbrev-commit HEAD" % git_dir) - return len(revlist.splitlines()) - - -def _get_version_from_git(pre_version): - """Return a version which is equal to the tag that's on the current - revision if there is one, or tag plus number of additional revisions - if the current revision has no tag.""" - - git_dir = _get_git_directory() - if git_dir: - if pre_version: - try: - return _run_shell_command( - "git --git-dir=" + git_dir + " describe --exact-match", - throw_on_error=True).replace('-', '.') - except Exception: - sha = _run_shell_command( - "git --git-dir=" + git_dir + " log -n1 --pretty=format:%h") - return "%s.a%s.g%s" % (pre_version, _get_revno(git_dir), sha) - else: - return _run_shell_command( - "git --git-dir=" + git_dir + " describe --always").replace( - '-', '.') - return None - - -def _get_version_from_pkg_info(package_name): - """Get the version from PKG-INFO file if we can.""" - try: - pkg_info_file = open('PKG-INFO', 'r') - except (IOError, OSError): - return None - try: - pkg_info = email.message_from_file(pkg_info_file) - except email.MessageError: - return None - # Check to make sure we're in our own dir - if pkg_info.get('Name', None) != package_name: - return None - return pkg_info.get('Version', None) - - -def get_version(package_name, pre_version=None): - """Get the version of the project. First, try getting it from PKG-INFO, if - it exists. If it does, that means we're in a distribution tarball or that - install has happened. Otherwise, if there is no PKG-INFO file, pull the - version from git. - - We do not support setup.py version sanity in git archive tarballs, nor do - we support packagers directly sucking our git repo into theirs. We expect - that a source tarball be made from our git repo - or that if someone wants - to make a source tarball from a fork of our repo with additional tags in it - that they understand and desire the results of doing that. 
- """ - version = os.environ.get("OSLO_PACKAGE_VERSION", None) - if version: - return version - version = _get_version_from_pkg_info(package_name) - if version: - return version - version = _get_version_from_git(pre_version) - if version: - return version - raise Exception("Versioning for this project requires either an sdist" - " tarball, or access to an upstream git repository.") diff --git a/kwapi/openstack/common/timeutils.py b/kwapi/openstack/common/timeutils.py index e2c2740..bd60489 100644 --- a/kwapi/openstack/common/timeutils.py +++ b/kwapi/openstack/common/timeutils.py @@ -1,6 +1,6 @@ # vim: tabstop=4 shiftwidth=4 softtabstop=4 -# Copyright 2011 OpenStack LLC. +# Copyright 2011 OpenStack Foundation. # All Rights Reserved. # # Licensed under the Apache License, Version 2.0 (the "License"); you may @@ -23,24 +23,29 @@ import calendar import datetime import iso8601 +import six -TIME_FORMAT = "%Y-%m-%dT%H:%M:%S" -PERFECT_TIME_FORMAT = "%Y-%m-%dT%H:%M:%S.%f" +# ISO 8601 extended time format with microseconds +_ISO8601_TIME_FORMAT_SUBSECOND = '%Y-%m-%dT%H:%M:%S.%f' +_ISO8601_TIME_FORMAT = '%Y-%m-%dT%H:%M:%S' +PERFECT_TIME_FORMAT = _ISO8601_TIME_FORMAT_SUBSECOND -def isotime(at=None): - """Stringify time in ISO 8601 format""" +def isotime(at=None, subsecond=False): + """Stringify time in ISO 8601 format.""" if not at: at = utcnow() - str = at.strftime(TIME_FORMAT) + st = at.strftime(_ISO8601_TIME_FORMAT + if not subsecond + else _ISO8601_TIME_FORMAT_SUBSECOND) tz = at.tzinfo.tzname(None) if at.tzinfo else 'UTC' - str += ('Z' if tz == 'UTC' else tz) - return str + st += ('Z' if tz == 'UTC' else tz) + return st def parse_isotime(timestr): - """Parse time from ISO 8601 format""" + """Parse time from ISO 8601 format.""" try: return iso8601.parse_date(timestr) except iso8601.ParseError as e: @@ -62,7 +67,7 @@ def parse_strtime(timestr, fmt=PERFECT_TIME_FORMAT): def normalize_time(timestamp): - """Normalize time in arbitrary timezone to UTC naive object""" + """Normalize time in arbitrary timezone to UTC naive object.""" offset = timestamp.utcoffset() if offset is None: return timestamp @@ -71,14 +76,14 @@ def normalize_time(timestamp): def is_older_than(before, seconds): """Return True if before is older than seconds.""" - if isinstance(before, basestring): + if isinstance(before, six.string_types): before = parse_strtime(before).replace(tzinfo=None) return utcnow() - before > datetime.timedelta(seconds=seconds) def is_newer_than(after, seconds): """Return True if after is newer than seconds.""" - if isinstance(after, basestring): + if isinstance(after, six.string_types): after = parse_strtime(after).replace(tzinfo=None) return after - utcnow() > datetime.timedelta(seconds=seconds) @@ -99,7 +104,7 @@ def utcnow(): def iso8601_from_timestamp(timestamp): - """Returns a iso8601 formated date from timestamp""" + """Returns a iso8601 formated date from timestamp.""" return isotime(datetime.datetime.utcfromtimestamp(timestamp)) @@ -107,9 +112,9 @@ utcnow.override_time = None def set_time_override(override_time=datetime.datetime.utcnow()): - """ - Override utils.utcnow to return a constant time or a list thereof, - one at a time. + """Overrides utils.utcnow. + + Make it return a constant time or a list thereof, one at a time. """ utcnow.override_time = override_time @@ -137,7 +142,8 @@ def clear_time_override(): def marshall_now(now=None): """Make an rpc-safe datetime with microseconds. 
- Note: tzinfo is stripped, but not required for relative times.""" + Note: tzinfo is stripped, but not required for relative times. + """ if not now: now = utcnow() return dict(day=now.day, month=now.month, year=now.year, hour=now.hour, @@ -157,7 +163,8 @@ def unmarshall_time(tyme): def delta_seconds(before, after): - """ + """Return the difference between two timing objects. + Compute the difference in seconds between two date, time, or datetime objects (as a float, to microsecond resolution). """ @@ -170,8 +177,7 @@ def delta_seconds(before, after): def is_soon(dt, window): - """ - Determines if time is going to happen in the next window seconds. + """Determines if time is going to happen in the next window seconds. :params dt: the time :params window: minimum seconds to remain to consider the time not soon diff --git a/kwapi/plugins/api/app.py b/kwapi/plugins/api/app.py index 346e235..3e712dd 100644 --- a/kwapi/plugins/api/app.py +++ b/kwapi/plugins/api/app.py @@ -16,6 +16,8 @@ """Set up the API server application instance.""" +import sys + import flask from oslo.config import cfg @@ -30,6 +32,9 @@ app_opts = [ cfg.BoolOpt('acl_enabled', required=True, ), + cfg.IntOpt('api_port', + required=True, + ), ] cfg.CONF.register_opts(app_opts) @@ -59,3 +64,14 @@ def make_app(): acl.install(app, cfg.CONF) return app + + +def start(): + """Starts Kwapi API.""" + cfg.CONF(sys.argv[1:], + project='kwapi', + default_config_files=['/etc/kwapi/api.conf'] + ) + log.setup('kwapi') + root = make_app() + root.run(host='0.0.0.0', port=cfg.CONF.api_port) diff --git a/kwapi/plugins/rrd/app.py b/kwapi/plugins/rrd/app.py index eb30a23..3972e29 100644 --- a/kwapi/plugins/rrd/app.py +++ b/kwapi/plugins/rrd/app.py @@ -16,9 +16,11 @@ """Set up the RRD server application instance.""" +import sys import thread import flask +from oslo.config import cfg from kwapi.openstack.common import log import rrd @@ -26,6 +28,14 @@ import v1 LOG = log.getLogger(__name__) +app_opts = [ + cfg.IntOpt('rrd_port', + required=True, + ), +] + +cfg.CONF.register_opts(app_opts) + def make_app(): """Instantiates Flask app, attaches collector database. 
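The API and RRD plugins each gain a start() function that parses the command line, loads a default config file from /etc/kwapi/, sets up logging and serves the Flask app on the newly registered api_port/rrd_port options; setup.cfg later in this patch wires these up as the kwapi-api and kwapi-rrd console scripts. A sketch of launching the API plugin directly, assuming /etc/kwapi/api.conf defines the options the plugin requires (the values shown are illustrative):

    # /etc/kwapi/api.conf is expected to provide, among others:
    #     [DEFAULT]
    #     acl_enabled = false
    #     api_port = 5000
    from kwapi.plugins.api import app

    app.start()   # parses sys.argv, sets up logging, serves on 0.0.0.0:api_port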
""" @@ -40,3 +50,13 @@ def make_app(): flask.request.probes = rrd.probes flask.request.scales = rrd.scales return app + + +def start(): + """Starts Kwapi RRD.""" + cfg.CONF(sys.argv[1:], + project='kwapi', + default_config_files=['/etc/kwapi/rrd.conf']) + log.setup('kwapi') + root = make_app() + root.run(host='0.0.0.0', port=cfg.CONF.rrd_port) diff --git a/kwapi/policy.py b/kwapi/policy.py index 8068b00..79a113f 100644 --- a/kwapi/policy.py +++ b/kwapi/policy.py @@ -24,18 +24,6 @@ from oslo.config import cfg from kwapi.openstack.common import policy from kwapi import utils - -OPTS = [ - cfg.StrOpt('policy_file', - default='policy.json', - help='JSON file representing policy'), - cfg.StrOpt('policy_default_rule', - default='default', - help='Rule checked when requested rule is not found'), -] - -cfg.CONF.register_opts(OPTS) - _POLICY_PATH = None _POLICY_CACHE = {} diff --git a/openstack-common.conf b/openstack-common.conf index 4d5f1d1..0307484 100644 --- a/openstack-common.conf +++ b/openstack-common.conf @@ -1,3 +1,8 @@ [DEFAULT] -modules=gettextutils,jsonutils,local,log,notifier,policy,setup,timeutils +module=gettextutils +module=jsonutils +module=local +module=log +module=policy +module=timeutils base=kwapi diff --git a/pip-requires b/requirements.txt similarity index 61% rename from pip-requires rename to requirements.txt index aa90941..23ce986 100644 --- a/pip-requires +++ b/requirements.txt @@ -1,10 +1,13 @@ +d2to1>=0.2.10,<0.3 eventlet flask iso8601 +kombu oslo.config +pbr>=0.5,<0.6 pyserial pysnmp python-keystoneclient pyzmq -#python-rrdtool +python-rrdtool webob diff --git a/setup.cfg b/setup.cfg new file mode 100644 index 0000000..3fbabdf --- /dev/null +++ b/setup.cfg @@ -0,0 +1,39 @@ +[metadata] +name = kwapi +version = 2013.7 +summary = Energy framework for OpenStack +description-file = + README.rst +author = François Rossigneux +author-email = francois.rossigneux@inria.fr +home-page = https://launchpad.net/kwapi +classifier = + Development Status :: 4 - Beta + Environment :: OpenStack + Intended Audience :: Information Technology + Intended Audience :: System Administrators + License :: OSI Approved :: Apache Software License + Operating System :: POSIX :: Linux + Programming Language :: Python :: 2 + Programming Language :: Python :: 2.7 + Topic :: System :: Monitoring + +[global] +setup-hooks = + pbr.hooks.setup_hook + +[files] +packages = + kwapi + +[entry_points] +console_scripts = + kwapi-api = kwapi.plugins.api.app:start + kwapi-drivers = kwapi.drivers.driver_manager:start + kwapi-forwarder = kwapi.forwarder:start + kwapi-rrd = kwapi.plugins.rrd.app:start + +[build_sphinx] +all_files = 1 +build-dir = doc/build +source-dir = doc/source diff --git a/setup.py b/setup.py index 68feca9..f914160 100755 --- a/setup.py +++ b/setup.py @@ -17,52 +17,6 @@ import setuptools -from kwapi.openstack.common import setup as common_setup - -requires = common_setup.parse_requirements(['pip-requires']) -depend_links = common_setup.parse_dependency_links(['pip-requires']) -project = 'kwapi' -version = common_setup.get_version(project, '2013.3') - - setuptools.setup( - - name='kwapi', - version=version, - - description='Energy Efficiency Architecture', - - author='François Rossigneux', - author_email='francois.rossigneux@inria.fr', - - url='https://github.com/stackforge/kwapi', - - classifiers=[ - 'Development Status :: 3 - Alpha', - 'Framework :: Setuptools Plugin', - 'Environment :: OpenStack', - 'Intended Audience :: Information Technology', - 'Intended Audience :: System Administrators', - 
'License :: OSI Approved :: Apache Software License', - 'Operating System :: POSIX :: Linux', - 'Programming Language :: Python', - 'Programming Language :: Python :: 2', - 'Programming Language :: Python :: 2.7', - 'Topic :: System :: Monitoring', - ], - - packages=setuptools.find_packages(), - package_data={'kwapi.plugins.rrd': ['templates/*', 'static/*']}, - - scripts=['bin/kwapi-api', - 'bin/kwapi-forwarder', - 'bin/kwapi-drivers', - 'bin/kwapi-rrd'], - - data_files=[('etc/kwapi', ['etc/kwapi/api.conf', - 'etc/kwapi/drivers.conf', - 'etc/kwapi/rrd.conf'])], - - install_requires=requires, - dependency_links=depend_links, -) + setup_requires=['d2to1>=0.2.10,<0.3', 'pbr>=0.5,<0.6'], + d2to1=True) diff --git a/tox.ini b/tox.ini index fffe081..1d5f107 100644 --- a/tox.ini +++ b/tox.ini @@ -3,4 +3,4 @@ envlist = pep8 [testenv:pep8] deps = pep8==1.4 -commands = pep8 --show-source kwapi setup.py bin/kwapi-api bin/kwapi-api bin/kwapi-api +commands = pep8 --show-source kwapi setup.py
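With the packaging moved to pbr/d2to1, the metadata, entry points and version all come from setup.cfg rather than the deleted kwapi.openstack.common.setup helpers, and setup.py shrinks to the setup_requires stanza above. If runtime code needs the project version it can read the installed distribution metadata instead; a sketch (assumes kwapi has been installed):

    import pkg_resources

    version = pkg_resources.get_distribution('kwapi').version   # '2013.7' per setup.cfg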