Remove repository content and add new URL

Change-Id: I68eeb6c393810563a3c90bfea87590b9f1c1e452
This commit is contained in:
Julien Danjou 2017-04-25 17:07:34 +02:00
parent ad75e1e5aa
commit b64069d598
60 changed files with 10 additions and 5221 deletions

View File

@ -1,6 +0,0 @@
[run]
branch = True
source = gnocchiclient
[report]
ignore_errors = True

58
.gitignore vendored
View File

@ -1,58 +0,0 @@
*.py[cod]
# C extensions
*.so
# Packages
*.egg
*.egg-info
dist
build
.eggs
eggs
parts
bin
var
sdist
develop-eggs
.installed.cfg
lib
lib64
# Installer logs
pip-log.txt
# Unit test / coverage reports
.coverage
.tox
nosetests.xml
.testrepository
.venv
# Translations
*.mo
# Mr Developer
.mr.developer.cfg
.project
.pydevproject
# Complexity
output/*.html
output/*/index.html
# Sphinx
doc/build
# pbr generates these
AUTHORS
ChangeLog
# Editors
*~
.*.swp
.*sw?
# generated docs
doc/source/api
doc/source/gnocchi.rst

View File

@ -1,4 +0,0 @@
[gerrit]
host=review.openstack.org
port=29418
project=openstack/python-gnocchiclient.git

View File

@ -1,2 +0,0 @@
<sileht@sileht.net> <sileht@redhat.com>
<sileht@sileht.net> <sileht@sileh.net>

View File

@ -1,7 +0,0 @@
[DEFAULT]
test_command=OS_STDOUT_CAPTURE=${OS_STDOUT_CAPTURE:-1} \
OS_STDERR_CAPTURE=${OS_STDERR_CAPTURE:-1} \
OS_TEST_TIMEOUT=${OS_TEST_TIMEOUT:-60} \
${PYTHON:-python} -m subunit.run discover -t ./ ${OS_TEST_PATH:-./gnocchiclient/tests} $LISTOPT $IDOPTION
test_id_option=--load-list $IDFILE
test_list_option=--list

View File

@ -1,17 +0,0 @@
language: python
dist: xenial
sudo: required
cache:
- pip
python:
- 2.7
- 3.5
- 3.6
before_install:
# Always redownload tarball
- find ~/.cache/pip -name '*.dev*' -delete
- sudo apt-get -qq update
install:
# The install requirements in travis virtualenv that will be cached
- pip install tox-travis .[test]
script: tox

176
LICENSE
View File

@ -1,176 +0,0 @@
Apache License
Version 2.0, January 2004
http://www.apache.org/licenses/
TERMS AND CONDITIONS FOR USE, REPRODUCTION, AND DISTRIBUTION
1. Definitions.
"License" shall mean the terms and conditions for use, reproduction,
and distribution as defined by Sections 1 through 9 of this document.
"Licensor" shall mean the copyright owner or entity authorized by
the copyright owner that is granting the License.
"Legal Entity" shall mean the union of the acting entity and all
other entities that control, are controlled by, or are under common
control with that entity. For the purposes of this definition,
"control" means (i) the power, direct or indirect, to cause the
direction or management of such entity, whether by contract or
otherwise, or (ii) ownership of fifty percent (50%) or more of the
outstanding shares, or (iii) beneficial ownership of such entity.
"You" (or "Your") shall mean an individual or Legal Entity
exercising permissions granted by this License.
"Source" form shall mean the preferred form for making modifications,
including but not limited to software source code, documentation
source, and configuration files.
"Object" form shall mean any form resulting from mechanical
transformation or translation of a Source form, including but
not limited to compiled object code, generated documentation,
and conversions to other media types.
"Work" shall mean the work of authorship, whether in Source or
Object form, made available under the License, as indicated by a
copyright notice that is included in or attached to the work
(an example is provided in the Appendix below).
"Derivative Works" shall mean any work, whether in Source or Object
form, that is based on (or derived from) the Work and for which the
editorial revisions, annotations, elaborations, or other modifications
represent, as a whole, an original work of authorship. For the purposes
of this License, Derivative Works shall not include works that remain
separable from, or merely link (or bind by name) to the interfaces of,
the Work and Derivative Works thereof.
"Contribution" shall mean any work of authorship, including
the original version of the Work and any modifications or additions
to that Work or Derivative Works thereof, that is intentionally
submitted to Licensor for inclusion in the Work by the copyright owner
or by an individual or Legal Entity authorized to submit on behalf of
the copyright owner. For the purposes of this definition, "submitted"
means any form of electronic, verbal, or written communication sent
to the Licensor or its representatives, including but not limited to
communication on electronic mailing lists, source code control systems,
and issue tracking systems that are managed by, or on behalf of, the
Licensor for the purpose of discussing and improving the Work, but
excluding communication that is conspicuously marked or otherwise
designated in writing by the copyright owner as "Not a Contribution."
"Contributor" shall mean Licensor and any individual or Legal Entity
on behalf of whom a Contribution has been received by Licensor and
subsequently incorporated within the Work.
2. Grant of Copyright License. Subject to the terms and conditions of
this License, each Contributor hereby grants to You a perpetual,
worldwide, non-exclusive, no-charge, royalty-free, irrevocable
copyright license to reproduce, prepare Derivative Works of,
publicly display, publicly perform, sublicense, and distribute the
Work and such Derivative Works in Source or Object form.
3. Grant of Patent License. Subject to the terms and conditions of
this License, each Contributor hereby grants to You a perpetual,
worldwide, non-exclusive, no-charge, royalty-free, irrevocable
(except as stated in this section) patent license to make, have made,
use, offer to sell, sell, import, and otherwise transfer the Work,
where such license applies only to those patent claims licensable
by such Contributor that are necessarily infringed by their
Contribution(s) alone or by combination of their Contribution(s)
with the Work to which such Contribution(s) was submitted. If You
institute patent litigation against any entity (including a
cross-claim or counterclaim in a lawsuit) alleging that the Work
or a Contribution incorporated within the Work constitutes direct
or contributory patent infringement, then any patent licenses
granted to You under this License for that Work shall terminate
as of the date such litigation is filed.
4. Redistribution. You may reproduce and distribute copies of the
Work or Derivative Works thereof in any medium, with or without
modifications, and in Source or Object form, provided that You
meet the following conditions:
(a) You must give any other recipients of the Work or
Derivative Works a copy of this License; and
(b) You must cause any modified files to carry prominent notices
stating that You changed the files; and
(c) You must retain, in the Source form of any Derivative Works
that You distribute, all copyright, patent, trademark, and
attribution notices from the Source form of the Work,
excluding those notices that do not pertain to any part of
the Derivative Works; and
(d) If the Work includes a "NOTICE" text file as part of its
distribution, then any Derivative Works that You distribute must
include a readable copy of the attribution notices contained
within such NOTICE file, excluding those notices that do not
pertain to any part of the Derivative Works, in at least one
of the following places: within a NOTICE text file distributed
as part of the Derivative Works; within the Source form or
documentation, if provided along with the Derivative Works; or,
within a display generated by the Derivative Works, if and
wherever such third-party notices normally appear. The contents
of the NOTICE file are for informational purposes only and
do not modify the License. You may add Your own attribution
notices within Derivative Works that You distribute, alongside
or as an addendum to the NOTICE text from the Work, provided
that such additional attribution notices cannot be construed
as modifying the License.
You may add Your own copyright statement to Your modifications and
may provide additional or different license terms and conditions
for use, reproduction, or distribution of Your modifications, or
for any such Derivative Works as a whole, provided Your use,
reproduction, and distribution of the Work otherwise complies with
the conditions stated in this License.
5. Submission of Contributions. Unless You explicitly state otherwise,
any Contribution intentionally submitted for inclusion in the Work
by You to the Licensor shall be under the terms and conditions of
this License, without any additional terms or conditions.
Notwithstanding the above, nothing herein shall supersede or modify
the terms of any separate license agreement you may have executed
with Licensor regarding such Contributions.
6. Trademarks. This License does not grant permission to use the trade
names, trademarks, service marks, or product names of the Licensor,
except as required for reasonable and customary use in describing the
origin of the Work and reproducing the content of the NOTICE file.
7. Disclaimer of Warranty. Unless required by applicable law or
agreed to in writing, Licensor provides the Work (and each
Contributor provides its Contributions) on an "AS IS" BASIS,
WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or
implied, including, without limitation, any warranties or conditions
of TITLE, NON-INFRINGEMENT, MERCHANTABILITY, or FITNESS FOR A
PARTICULAR PURPOSE. You are solely responsible for determining the
appropriateness of using or redistributing the Work and assume any
risks associated with Your exercise of permissions under this License.
8. Limitation of Liability. In no event and under no legal theory,
whether in tort (including negligence), contract, or otherwise,
unless required by applicable law (such as deliberate and grossly
negligent acts) or agreed to in writing, shall any Contributor be
liable to You for damages, including any direct, indirect, special,
incidental, or consequential damages of any character arising as a
result of this License or out of the use or inability to use the
Work (including but not limited to damages for loss of goodwill,
work stoppage, computer failure or malfunction, or any and all
other commercial damages or losses), even if such Contributor
has been advised of the possibility of such damages.
9. Accepting Warranty or Additional Liability. While redistributing
the Work or Derivative Works thereof, You may choose to offer,
and charge a fee for, acceptance of support, warranty, indemnity,
or other liability obligations and/or rights consistent with this
License. However, in accepting such obligations, You may act only
on Your own behalf and on Your sole responsibility, not on behalf
of any other Contributor, and only if You agree to indemnify,
defend, and hold each Contributor harmless for any liability
incurred by, or claims asserted against, such Contributor by reason
of your accepting any such warranty or additional liability.

10
README Normal file
View File

@ -0,0 +1,10 @@
This project has been moved to https://github.com/gnocchixyz/python-gnocchiclient
The contents of this repository are still available in the Git
source code management system. To see the contents of this
repository before it reached its end of life, please check out the
previous commit with "git checkout HEAD^1".
For any further questions, please email
openstack-dev@lists.openstack.org or join #openstack-dev or #gnocchi on
Freenode.

View File

@ -1,14 +0,0 @@
=============
gnocchiclient
=============
Python bindings to the Gnocchi API
This is a client for the Gnocchi API. There's :doc:`a Python API <api>` (the
:mod:`gnocchiclient` module), and a :doc:`command-line script <shell>`
(installed as :program:`gnocchi`). Each implements the entire Gnocchi API.
* Free software: Apache license
* Documentation: http://gnocchi.xyz/gnocchiclient
* Source: https://github.com/gnocchixyz/python-gnocchiclient
* Bugs: https://github.com/gnocchixyz/python-gnocchiclient/issues

View File

@ -1,45 +0,0 @@
The :mod:`gnocchiclient` Python API
===================================
.. module:: gnocchiclient
:synopsis: A client for the Gnocchi API.
.. currentmodule:: gnocchiclient
Usage
-----
To use gnocchiclient in a project::
>>> from gnocchiclient import auth
>>> from gnocchiclient.v1 import client
>>>
>>> auth_plugin = auth.GnocchiBasicPlugin(user="admin",
>>> endpoint="http://localhost:8041")
>>> gnocchi = client.Client(session_options={'auth': auth_plugin})
>>> gnocchi.resource.list("generic")
With authentication from a keystoneauth1 plugins::
>>> from keystoneauth1 import loading
>>> from oslo_config import cfg
>>> from gnocchiclient import auth
>>> from gnocchiclient.v1 import client
>>>
>>> conf = cfg.ConfigOpts()
>>> ...
>>> auth_plugin = loading.load_auth_from_conf_options(conf, "gnocchi_credentials")
>>> gnocchi = client.Client(session_options={'auth': auth_plugin})
>>> gnocchi.resource.list("generic")
Reference
---------
For more information, see the reference:
.. toctree::
:maxdepth: 2
api/autoindex

View File

@ -1,84 +0,0 @@
# -*- coding: utf-8 -*-
# Licensed under the Apache License, Version 2.0 (the "License");
# you may not use this file except in compliance with the License.
# You may obtain a copy of the License at
#
# http://www.apache.org/licenses/LICENSE-2.0
#
# Unless required by applicable law or agreed to in writing, software
# distributed under the License is distributed on an "AS IS" BASIS,
# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or
# implied.
# See the License for the specific language governing permissions and
# limitations under the License.

# Sphinx configuration for building the gnocchiclient documentation.
import os
import sys

# Make the repository root and this directory importable so the
# `gnocchiclient` package and its local Sphinx extension can be found.
BASE_DIR = os.path.dirname(os.path.abspath(__file__))
ROOT = os.path.abspath(os.path.join(BASE_DIR, "..", ".."))
sys.path.insert(0, ROOT)
sys.path.insert(0, BASE_DIR)
# -- General configuration ----------------------------------------------------
# Add any Sphinx extension module names here, as strings. They can be
# extensions coming with Sphinx (named 'sphinx.ext.*') or your custom ones.
extensions = [
    'gnocchiclient.gendoc',
    'sphinx.ext.autodoc',
    #'sphinx.ext.intersphinx'
]
# autodoc generation is a bit aggressive and a nuisance when doing heavy
# text edit cycles.
# execute "export SPHINX_DEBUG=1" in your terminal to disable
# The suffix of source filenames.
source_suffix = '.rst'
# The master toctree document.
master_doc = 'index'
# General information about the project.
project = u'gnocchiclient'
copyright = u'2015, OpenStack Foundation'
# If true, '()' will be appended to :func: etc. cross-reference text.
add_function_parentheses = True
# If true, the current module name will be prepended to all description
# unit titles (such as .. function::).
add_module_names = True
# The name of the Pygments (syntax highlighting) style to use.
pygments_style = 'sphinx'
# -- Options for HTML output --------------------------------------------------
# The theme to use for HTML and HTML Help pages. Major themes that come with
# Sphinx are currently 'default' and 'sphinxdoc'.
# html_theme_path = ["."]
# html_theme = '_theme'
# html_static_path = ['static']
html_theme = 'sphinx_rtd_theme'
# NOTE(review): mid-file import kept as-is; conf.py is executed (not
# imported) by Sphinx, so this works, though a top-of-file import would
# be more conventional.
import sphinx_rtd_theme
html_theme_path = [sphinx_rtd_theme.get_html_theme_path()]
# Output file base name for HTML help builder.
htmlhelp_basename = '%sdoc' % project
# Grouping the document tree into LaTeX files. List of tuples
# (source start file, target name, title, author, documentclass
# [howto/manual]).
latex_documents = [
    ('index',
     '%s.tex' % project,
     u'%s Documentation' % project,
     u'OpenStack Foundation', 'manual'),
]
# Example configuration for intersphinx: refer to the Python standard library.
#intersphinx_mapping = {'http://docs.python.org/': None}

View File

@ -1,38 +0,0 @@
.. gnocchiclient documentation master file, created by
sphinx-quickstart on Tue Jul 9 22:26:36 2013.
You can adapt this file completely to your liking, but it should at least
contain the root `toctree` directive.
Python bindings and command line tool to the Gnocchi API
========================================================
This is a client for `Gnocchi`_. There's :doc:`a Python API <api>` (the
:mod:`gnocchiclient` module), and a :doc:`command-line script <shell>`
(installed as :program:`gnocchi`). Each implements the entire Gnocchi API.
.. seealso::
You may want to read the `Gnocchi documentation`__ to get an idea of the
concepts. By understanding the concepts this library and client should make
more sense.
__ http://gnocchi.xyz
.. _Gnocchi: http://gnocchi.xyz
Contents:
.. toctree::
:maxdepth: 2
installation
shell
api
Indices and tables
==================
* :ref:`genindex`
* :ref:`modindex`
* :ref:`search`

View File

@ -1,7 +0,0 @@
============
Installation
============
At the command line::
$ pip install gnocchiclient

View File

@ -1,79 +0,0 @@
The :program:`gnocchi` shell utility
=========================================
.. program:: gnocchi
.. highlight:: bash
The :program:`gnocchi` shell utility interacts with Gnocchi from the command
line. It supports the entirety of the Gnocchi API.
Authentication method
+++++++++++++++++++++
You'll need to provide the authentication method and your credentials to
:program:`gnocchi`.
No authentication
~~~~~~~~~~~~~~~~~
If you're using Gnocchi with no authentication, export the following variables
in your environment::
export OS_AUTH_TYPE=gnocchi-noauth
export GNOCCHI_USER_ID=<youruserid>
export GNOCCHI_PROJECT_ID=<yourprojectid>
export GNOCCHI_ENDPOINT=http://urlofgnocchi
.. note::
OS_AUTH_TYPE is used globally by all clients supporting Keystone. Provide
:option:`--os-auth-plugin` gnocchi-noauth to the client instead if other
clients are used in session.
Basic authentication
~~~~~~~~~~~~~~~~~~~~
If you're using Gnocchi with basic authentication, export the following
variables in your environment::
export OS_AUTH_TYPE=gnocchi-basic
export GNOCCHI_USER=<youruserid>
export GNOCCHI_ENDPOINT=http://urlofgnocchi
.. note::
OS_AUTH_TYPE is used globally by all clients supporting Keystone. Provide
:option:`--os-auth-plugin` gnocchi-basic to the client instead if other
clients are used in session.
OpenStack Keystone authentication
~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~
If you're using Gnocchi with Keystone authentication, export the following
variables in your environment with the appropriate values::
export OS_AUTH_TYPE=password
export OS_USERNAME=user
export OS_PASSWORD=pass
export OS_TENANT_NAME=myproject
export OS_AUTH_URL=http://auth.example.com:5000/v2.0
The command line tool will attempt to reauthenticate using your provided
credentials for every request. You can override this behavior by manually
supplying an auth token using :option:`--endpoint` and
:option:`--os-auth-token`. You can alternatively set these environment
variables::
export GNOCCHI_ENDPOINT=http://gnocchi.example.org:8041
export OS_AUTH_PLUGIN=token
export OS_AUTH_TOKEN=3bcc3d3a03f44e3d8377f9247b0ad155
For more details, check the `keystoneauth documentation`_.
.. _`keystoneauth documentation`: https://docs.openstack.org/developer/keystoneauth/
Commands descriptions
+++++++++++++++++++++
.. include:: gnocchi.rst

View File

@ -1,19 +0,0 @@
# -*- coding: utf-8 -*-
# Licensed under the Apache License, Version 2.0 (the "License"); you may
# not use this file except in compliance with the License. You may obtain
# a copy of the License at
#
# http://www.apache.org/licenses/LICENSE-2.0
#
# Unless required by applicable law or agreed to in writing, software
# distributed under the License is distributed on an "AS IS" BASIS, WITHOUT
# WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. See the
# License for the specific language governing permissions and limitations
# under the License.
import pbr.version

# Package version string, derived by pbr from git tags / sdist metadata
# at build time.
__version__ = pbr.version.VersionInfo(
    'gnocchiclient').version_string()

View File

@ -1,117 +0,0 @@
#
# Licensed under the Apache License, Version 2.0 (the "License"); you may
# not use this file except in compliance with the License. You may obtain
# a copy of the License at
#
# http://www.apache.org/licenses/LICENSE-2.0
#
# Unless required by applicable law or agreed to in writing, software
# distributed under the License is distributed on an "AS IS" BASIS, WITHOUT
# WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. See the
# License for the specific language governing permissions and limitations
# under the License.
import base64
import os
from keystoneauth1 import loading
from keystoneauth1 import plugin
class GnocchiNoAuthPlugin(plugin.BaseAuthPlugin):
    """No-authentication plugin for Gnocchi.

    A keystoneauth plugin that performs no real authentication: it simply
    forwards the caller-supplied user, project and roles in the
    'x-user-id', 'x-project-id' and 'x-roles' request headers.
    """

    def __init__(self, user_id, project_id, roles, endpoint):
        self._endpoint = endpoint
        self._roles = roles
        self._project_id = project_id
        self._user_id = user_id

    def get_headers(self, session, **kwargs):
        # Headers Gnocchi reads when it runs in "noauth" mode.
        headers = {
            'x-user-id': self._user_id,
            'x-project-id': self._project_id,
            'x-roles': self._roles,
        }
        return headers

    def get_user_id(self, session, **kwargs):
        return self._user_id

    def get_project_id(self, session, **kwargs):
        return self._project_id

    def get_endpoint(self, session, **kwargs):
        return self._endpoint
class GnocchiOpt(loading.Opt):
    """keystoneauth option that also reads GNOCCHI_* environment variables."""

    @property
    def argparse_args(self):
        return ['--%s' % opt.name for opt in self._all_opts]

    @property
    def argparse_default(self):
        # Return the first GNOCCHI_<NAME> environment variable holding a
        # non-empty value; otherwise fall back to the static default.
        for opt in self._all_opts:
            env_name = 'GNOCCHI_%s' % opt.name.replace('-', '_').upper()
            value = os.environ.get(env_name)
            if value:
                return value
        return self.default
class GnocchiNoAuthLoader(loading.BaseLoader):
    """Loader exposing the CLI/environment options of GnocchiNoAuthPlugin."""

    plugin_class = GnocchiNoAuthPlugin

    def get_options(self):
        options = super(GnocchiNoAuthLoader, self).get_options()
        extra = [
            GnocchiOpt('user-id', help='User ID', required=True,
                       metavar="<gnocchi user id>"),
            GnocchiOpt('project-id', help='Project ID', required=True,
                       metavar="<gnocchi project id>"),
            GnocchiOpt('roles', help='Roles', default="admin",
                       metavar="<gnocchi roles>"),
            GnocchiOpt('endpoint', help='Gnocchi endpoint',
                       deprecated=[GnocchiOpt('gnocchi-endpoint')],
                       dest="endpoint", required=True,
                       metavar="<gnocchi endpoint>"),
        ]
        options.extend(extra)
        return options
class GnocchiBasicPlugin(plugin.BaseAuthPlugin):
    """Basic authentication plugin for Gnocchi."""

    def __init__(self, user, endpoint):
        self._user = user.encode('utf-8')
        self._endpoint = endpoint

    def get_headers(self, session, **kwargs):
        # "user:" with an empty password, base64-encoded as in HTTP Basic
        # authentication (RFC 7617).
        credentials = base64.b64encode(self._user + b":")
        return {'Authorization': (b"basic " + credentials).decode('ascii')}

    def get_endpoint(self, session, **kwargs):
        return self._endpoint
class GnocchiBasicLoader(loading.BaseLoader):
    """Loader exposing the CLI/environment options of GnocchiBasicPlugin."""

    plugin_class = GnocchiBasicPlugin

    def get_options(self):
        options = super(GnocchiBasicLoader, self).get_options()
        extra = [
            GnocchiOpt('user', help='User', required=True,
                       default="admin",
                       metavar="<gnocchi user>"),
            GnocchiOpt('endpoint', help='Gnocchi endpoint',
                       dest="endpoint", required=True,
                       default="http://localhost:8041",
                       metavar="<gnocchi endpoint>"),
        ]
        options.extend(extra)
        return options

View File

@ -1,341 +0,0 @@
# -*- coding: utf-8 -*-
# Licensed under the Apache License, Version 2.0 (the "License"); you may
# not use this file except in compliance with the License. You may obtain
# a copy of the License at
#
# http://www.apache.org/licenses/LICENSE-2.0
#
# Unless required by applicable law or agreed to in writing, software
# distributed under the License is distributed on an "AS IS" BASIS, WITHOUT
# WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. See the
# License for the specific language governing permissions and limitations
# under the License.
import argparse
import datetime
import functools
import logging
import math
import random
import time
import types
from cliff import show
import futurist
from oslo_utils import timeutils
import six.moves
from gnocchiclient.v1 import metric_cli
LOG = logging.getLogger(__name__)
def _pickle_method(m):
if m.im_self is None:
return getattr, (m.im_class, m.im_func.func_name)
else:
return getattr, (m.im_self, m.im_func.func_name)
# Register the reducer so bound methods survive pickling into worker
# processes (required by BenchmarkPool's ProcessPoolExecutor).
six.moves.copyreg.pickle(types.MethodType, _pickle_method)
def grouper(iterable, n, fillvalue=None):
    """Collect data into chunks of at most ``n`` items.

    grouper('ABCDEFG', 3) --> ABC DEF G
    grouper('ABCDEFG', 3, 'x') --> ABC DEF Gxx

    The previous implementation built ``zip(*[iter(iterable)] * n)``, which
    silently DROPPED the final partial chunk (losing measures whenever the
    total count was not a multiple of the batch size) and ignored the
    ``fillvalue`` parameter entirely.  Now the tail chunk is yielded, and,
    if ``fillvalue`` is not None, padded to exactly ``n`` items.
    """
    chunk = []
    for item in iterable:
        chunk.append(item)
        if len(chunk) == n:
            yield tuple(chunk)
            chunk = []
    if chunk:
        if fillvalue is not None:
            chunk.extend([fillvalue] * (n - len(chunk)))
        yield tuple(chunk)
def _positive_non_zero_int(argument_value):
if argument_value is None:
return None
try:
value = int(argument_value)
except ValueError:
msg = "%s must be an integer" % argument_value
raise argparse.ArgumentTypeError(msg)
if value <= 0:
msg = "%s must be greater than 0" % argument_value
raise argparse.ArgumentTypeError(msg)
return value
def measure_job(fn, *args, **kwargs):
    """Run ``fn(*args, **kwargs)`` and return ``(result, elapsed_seconds)``.

    Kept as a module-level function rather than a BenchmarkPool method
    because BenchmarkPool instances cannot be pickled into the worker
    processes.
    """
    stopwatch = timeutils.StopWatch().start()
    result = fn(*args, **kwargs)
    return result, stopwatch.elapsed()
class BenchmarkPool(futurist.ProcessPoolExecutor):
    # Process pool that timestamps submitted jobs and aggregates
    # throughput/latency statistics for the benchmark CLI commands.

    def submit_job(self, times, fn, *args, **kwargs):
        # Submit `times` identical calls of fn and start the wall clock.
        self.sw = timeutils.StopWatch()
        self.sw.start()
        self.times = times
        return [self.submit(measure_job, fn, *args, **kwargs)
                for i in six.moves.range(times)]

    def map_job(self, fn, iterable, **kwargs):
        # Submit one call of fn per item of iterable, counting items as
        # they are consumed (iterable may be a generator of unknown length).
        self.sw = timeutils.StopWatch()
        r = []
        self.times = 0
        self.sw.start()
        for item in iterable:
            r.append(self.submit(measure_job, fn, item, **kwargs))
            self.times += 1
        return r

    def _log_progress(self, verb):
        # Periodic progress line: "<done>/<total>, total: Xs, rate: Y verb/s".
        runtime = self.sw.elapsed()
        done = self.statistics.executed
        rate = done / runtime if runtime != 0 else 0
        LOG.info(
            "%d/%d, "
            "total: %.2f seconds, "
            "rate: %.2f %s/second"
            % (done, self.times, runtime, rate, verb))

    def wait_job(self, verb, futures):
        # Poll until every submitted job has executed, shut the pool down,
        # then gather results and build a statistics dict keyed by `verb`.
        while self.statistics.executed != self.times:
            self._log_progress(verb)
            time.sleep(0.2)
        runtime = self.sw.elapsed()
        self._log_progress(verb)
        self.shutdown(wait=True)
        results = []
        latencies = []
        for f in futures:
            try:
                result, latency = f.result()
                results.append(result)
                latencies.append(latency)
            except Exception as e:
                LOG.error("Error with %s metric: %s" % (verb, e))
        latencies = sorted(latencies)
        # NOTE(review): if every job failed, `latencies` is empty and the
        # min()/max()/mean computations below raise — confirm this
        # all-failures behavior is acceptable before reusing elsewhere.
        return results, runtime, {
            'client workers': self._max_workers,
            verb + ' runtime': "%.2f seconds" % runtime,
            verb + ' executed': self.statistics.executed,
            verb + ' speed': (
                "%.2f %s/s" % (self.statistics.executed / runtime, verb)
            ),
            verb + ' failures': self.statistics.failures,
            verb + ' failures rate': (
                "%.2f %%" % (
                    100
                    * self.statistics.failures
                    / float(self.statistics.executed)
                )
            ),
            verb + ' latency min': min(latencies),
            verb + ' latency max': max(latencies),
            verb + ' latency mean': sum(latencies) / len(latencies),
            verb + ' latency median': self._percentile(latencies, 0.5),
            verb + ' latency 95%\'ile': self._percentile(latencies, 0.95),
            verb + ' latency 99%\'ile': self._percentile(latencies, 0.99),
            verb + ' latency 99.9%\'ile': self._percentile(latencies, 0.999),
        }

    @staticmethod
    def _percentile(sorted_list, percent):
        # NOTE(sileht): we don't want to depend on numpy
        # Percentile with linear interpolation between the two bracketing
        # values; `sorted_list` must already be sorted ascending.
        if not sorted_list:
            return None
        k = (len(sorted_list) - 1) * percent
        f = math.floor(k)
        c = math.ceil(k)
        if f == c:
            return sorted_list[int(k)]
        d0 = sorted_list[int(f)] * (c - k)
        d1 = sorted_list[int(c)] * (k - f)
        return d0 + d1
class CliBenchmarkBase(show.ShowOne):
    """Shared base for benchmark commands; adds the --workers option."""

    def get_parser(self, prog_name):
        parser = super(CliBenchmarkBase, self).get_parser(prog_name)
        parser.add_argument(
            "--workers", "-w",
            default=None,
            type=_positive_non_zero_int,
            help="Number of workers to use")
        return parser
class CliBenchmarkMetricShow(CliBenchmarkBase,
                             metric_cli.CliMetricWithResourceID):
    """Do benchmark testing of metric show"""

    def get_parser(self, prog_name):
        parser = super(CliBenchmarkMetricShow, self).get_parser(prog_name)
        parser.add_argument("metric", nargs='+',
                            help="ID or name of the metrics")
        parser.add_argument("--count", "-n",
                            required=True,
                            type=_positive_non_zero_int,
                            help="Number of metrics to get")
        return parser

    def take_action(self, parsed_args):
        # Fetch the listed metrics `count` times each, in parallel, and
        # report the aggregated statistics.
        pool = BenchmarkPool(parsed_args.workers)
        LOG.info("Getting metrics")
        jobs = pool.map_job(self.app.client.metric.get,
                            parsed_args.metric * parsed_args.count,
                            resource_id=parsed_args.resource_id)
        _, _, stats = pool.wait_job("show", jobs)
        return self.dict2columns(stats)
class CliBenchmarkMetricCreate(CliBenchmarkBase,
                               metric_cli.CliMetricCreateBase):
    """Do benchmark testing of metric creation"""

    def get_parser(self, prog_name):
        parser = super(CliBenchmarkMetricCreate, self).get_parser(prog_name)
        parser.add_argument("--count", "-n",
                            required=True,
                            type=_positive_non_zero_int,
                            help="Number of metrics to create")
        parser.add_argument("--keep", "-k",
                            action='store_true',
                            help="Keep created metrics")
        return parser

    def _take_action(self, metric, parsed_args):
        # Create `count` copies of the same metric definition in parallel.
        pool = BenchmarkPool(parsed_args.workers)
        LOG.info("Creating metrics")
        futures = pool.submit_job(parsed_args.count,
                                  self.app.client.metric.create,
                                  metric, refetch_metric=False)
        created_metrics, runtime, stats = pool.wait_job("create", futures)
        # Unless --keep was given, delete everything we created and merge
        # the deletion statistics into the report (a fresh pool is needed
        # because wait_job shuts the previous one down).
        if not parsed_args.keep:
            LOG.info("Deleting metrics")
            pool = BenchmarkPool(parsed_args.workers)
            futures = pool.map_job(self.app.client.metric.delete,
                                   [m['id'] for m in created_metrics])
            _, runtime, dstats = pool.wait_job("delete", futures)
            stats.update(dstats)
        return self.dict2columns(stats)
class CliBenchmarkMeasuresAdd(CliBenchmarkBase,
                              metric_cli.CliMeasuresAddBase):
    """Do benchmark testing of adding measurements"""

    def get_parser(self, prog_name):
        parser = super(CliBenchmarkMeasuresAdd, self).get_parser(prog_name)
        parser.add_argument("--count", "-n",
                            required=True,
                            type=_positive_non_zero_int,
                            help="Number of total measures to send")
        parser.add_argument("--batch", "-b",
                            default=1,
                            type=_positive_non_zero_int,
                            help="Number of measures to send in each batch")
        parser.add_argument("--timestamp-start", "-s",
                            default=(
                                timeutils.utcnow(True)
                                - datetime.timedelta(days=365)),
                            type=timeutils.parse_isotime,
                            help="First timestamp to use")
        parser.add_argument("--timestamp-end", "-e",
                            default=timeutils.utcnow(True),
                            type=timeutils.parse_isotime,
                            help="Last timestamp to use")
        parser.add_argument("--wait",
                            default=False,
                            action='store_true',
                            help="Wait for all measures to be processed")
        return parser

    def take_action(self, parsed_args):
        # Generate `count` random measures spread over the requested time
        # range, push them in batches, and report throughput statistics.
        pool = BenchmarkPool(parsed_args.workers)
        LOG.info("Sending measures")
        if parsed_args.timestamp_end <= parsed_args.timestamp_start:
            raise ValueError("End timestamp must be after start timestamp")
        # If batch size is bigger than the number of measures to send, we
        # reduce it to make sure we send something.
        if parsed_args.batch > parsed_args.count:
            parsed_args.batch = parsed_args.count
        # NOTE(review): strftime("%s") is a platform-specific (glibc)
        # epoch-seconds conversion — confirm portability if this is ever
        # run outside Linux.
        start = int(parsed_args.timestamp_start.strftime("%s"))
        end = int(parsed_args.timestamp_end.strftime("%s"))
        count = parsed_args.count
        # One measure per timestamp step: the range must span at least
        # `count` distinct seconds.
        if (end - start) < count:
            raise ValueError(
                "The specified time range is not large enough "
                "for the number of points")
        # Random integer values paired with evenly spaced timestamps.
        random_values = (random.randint(- 2 ** 32, 2 ** 32)
                         for _ in six.moves.range(count))
        all_measures = ({"timestamp": ts, "value": v}
                        for ts, v
                        in six.moves.zip(
                            six.moves.range(start,
                                            end,
                                            (end - start) // count),
                            random_values))
        measures = grouper(all_measures, parsed_args.batch)
        futures = pool.map_job(functools.partial(
            self.app.client.metric.add_measures,
            parsed_args.metric), measures, resource_id=parsed_args.resource_id)
        _, runtime, stats = pool.wait_job("push", futures)
        stats['measures per request'] = parsed_args.batch
        stats['measures push speed'] = (
            "%.2f push/s" % (
                parsed_args.batch * pool.statistics.executed / runtime
            )
        )
        if parsed_args.wait:
            # Poll the server once per second until its backlog of
            # unprocessed measures drains to zero, and record how long
            # that extra wait took.
            with timeutils.StopWatch() as sw:
                while True:
                    status = self.app.client.status.get()
                    remaining = int(status['storage']['summary']['measures'])
                    if remaining == 0:
                        stats['extra wait to process measures'] = (
                            "%s seconds" % sw.elapsed()
                        )
                        break
                    else:
                        LOG.info(
                            "Remaining measures to be processed: %d"
                            % remaining)
                        time.sleep(1)
        return self.dict2columns(stats)
class CliBenchmarkMeasuresShow(CliBenchmarkBase,
                               metric_cli.CliMeasuresShow):
    """Do benchmark testing of measurements show"""

    def get_parser(self, prog_name):
        parser = super(CliBenchmarkMeasuresShow, self).get_parser(prog_name)
        parser.add_argument("--count", "-n",
                            required=True,
                            type=_positive_non_zero_int,
                            help="Number of total measures to send")
        return parser

    def take_action(self, parsed_args):
        # Fetch the same measures `count` times in parallel and report the
        # aggregated statistics.
        pool = BenchmarkPool(parsed_args.workers)
        LOG.info("Getting measures")
        jobs = pool.submit_job(parsed_args.count,
                               self.app.client.metric.get_measures,
                               metric=parsed_args.metric,
                               resource_id=parsed_args.resource_id,
                               aggregation=parsed_args.aggregation,
                               start=parsed_args.start,
                               stop=parsed_args.stop)
        _, _, stats = pool.wait_job("show", jobs)
        return self.dict2columns(stats)

View File

@ -1,39 +0,0 @@
# Licensed under the Apache License, Version 2.0 (the "License"); you may
# not use this file except in compliance with the License. You may obtain
# a copy of the License at
#
# http://www.apache.org/licenses/LICENSE-2.0
#
# Unless required by applicable law or agreed to in writing, software
# distributed under the License is distributed on an "AS IS" BASIS, WITHOUT
# WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. See the
# License for the specific language governing permissions and limitations
# under the License.
from keystoneauth1 import adapter
from oslo_utils import importutils
from gnocchiclient import exceptions
def Client(version, *args, **kwargs):
    """Instantiate the versioned Gnocchi client.

    :param version: API version string, e.g. "1"; selects the
        gnocchiclient.v<version>.client module.
    """
    module_path = 'gnocchiclient.v%s.client' % version
    versioned_module = importutils.import_module(module_path)
    return getattr(versioned_module, 'Client')(*args, **kwargs)
class SessionClient(adapter.Adapter):
    """keystoneauth adapter translating HTTP errors to gnocchiclient ones."""

    def request(self, url, method, **kwargs):
        kwargs.setdefault('headers', kwargs.get('headers', {}))
        # NOTE(sileht): The standard call raises errors from
        # keystoneauth, where we need to raise the gnocchiclient errors.
        should_raise = kwargs.pop('raise_exc', True)
        response = super(SessionClient, self).request(url,
                                                      method,
                                                      raise_exc=False,
                                                      **kwargs)
        if should_raise and response.status_code >= 400:
            raise exceptions.from_response(response, method)
        return response

View File

@ -1,238 +0,0 @@
#
# Licensed under the Apache License, Version 2.0 (the "License"); you may
# not use this file except in compliance with the License. You may obtain
# a copy of the License at
#
# http://www.apache.org/licenses/LICENSE-2.0
#
# Unless required by applicable law or agreed to in writing, software
# distributed under the License is distributed on an "AS IS" BASIS, WITHOUT
# WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. See the
# License for the specific language governing permissions and limitations
# under the License.
import re
class ClientException(Exception):
    """The base exception class for all exceptions this library raises."""

    # Fallback text used when no message is supplied at construction time.
    message = 'Unknown Error'

    def __init__(self, code=None, message=None, request_id=None,
                 url=None, method=None):
        self.code = code
        self.message = message if message else self.__class__.message
        self.request_id = request_id
        self.url = url
        self.method = method

    def __str__(self):
        pieces = ["%s" % self.message]
        if self.code:
            pieces.append("(HTTP %s)" % self.code)
        if self.request_id:
            pieces.append("(Request-ID: %s)" % self.request_id)
        return " ".join(pieces)
class RetryAfterException(ClientException):
    """The base exception for ClientExceptions that use Retry-After header."""

    def __init__(self, *args, **kwargs):
        raw = kwargs.pop('retry_after', None)
        try:
            self.retry_after = int(raw)
        except (TypeError, ValueError):
            # Missing or unparsable header: no retry hint available.
            self.retry_after = 0
        super(RetryAfterException, self).__init__(*args, **kwargs)
class MutipleMeaningException(object):
    """A mixin for exceptions that can be refined by reading the details.

    Subclasses carry a compiled regex in ``match`` that from_response()
    applies to the server-provided error description to pick a more
    specific exception class.
    """
class BadRequest(ClientException):
    """HTTP 400 - Bad request: you sent some malformed data."""
    http_status = 400  # status code keyed on by _code_map
    message = "Bad request"
class Unauthorized(ClientException):
    """HTTP 401 - Unauthorized: bad credentials."""
    http_status = 401  # status code keyed on by _code_map
    message = "Unauthorized"
class Forbidden(ClientException):
    """HTTP 403 - Forbidden:

    your credentials don't give you access to this resource.
    """
    http_status = 403  # status code keyed on by _code_map
    message = "Forbidden"
class NotFound(ClientException):
    """HTTP 404 - Not found"""
    http_status = 404  # status code keyed on by _code_map
    message = "Not found"
class MetricNotFound(NotFound, MutipleMeaningException):
    """Refined 404 raised when the server says a metric is missing."""
    message = "Metric not found"
    # Applied by from_response() to the server's error description.
    match = re.compile("Metric .* does not exist")
class ResourceNotFound(NotFound, MutipleMeaningException):
    """Refined 404 raised when the server says a resource is missing."""
    message = "Resource not found"
    # Applied by from_response() to the server's error description.
    match = re.compile("Resource .* does not exist")
class ResourceTypeNotFound(NotFound, MutipleMeaningException):
    """Refined 404 raised when the server says a resource type is missing."""
    message = "Resource type not found"
    # Applied by from_response() to the server's error description.
    match = re.compile("Resource type .* does not exist")
class ArchivePolicyNotFound(NotFound, MutipleMeaningException):
    """Refined 404 raised when the server says an archive policy is missing."""
    message = "Archive policy not found"
    # Applied by from_response() to the server's error description.
    match = re.compile("Archive policy .* does not exist")
class ArchivePolicyRuleNotFound(NotFound, MutipleMeaningException):
    """Refined 404 raised when an archive policy rule is missing."""
    message = "Archive policy rule not found"
    # Applied by from_response() to the server's error description.
    match = re.compile("Archive policy rule .* does not exist")
class MethodNotAllowed(ClientException):
    """HTTP 405 - Method Not Allowed"""
    http_status = 405  # status code keyed on by _code_map
    message = "Method Not Allowed"
class NotAcceptable(ClientException):
    """HTTP 406 - Not Acceptable"""
    http_status = 406  # status code keyed on by _code_map
    message = "Not Acceptable"
class Conflict(ClientException):
    """HTTP 409 - Conflict"""
    http_status = 409  # status code keyed on by _code_map
    message = "Conflict"
class NamedMetricAlreadyExists(Conflict, MutipleMeaningException):
    """Refined 409 raised when a named metric already exists."""
    message = "Named metric already exists"
    # Applied by from_response() to the server's error description.
    match = re.compile("Named metric .* already exist")
class ResourceAlreadyExists(Conflict, MutipleMeaningException):
    """Refined 409 raised when a resource already exists."""
    message = "Resource already exists"
    # Applied by from_response() to the server's error description.
    match = re.compile("Resource .* already exists")
class ArchivePolicyAlreadyExists(Conflict, MutipleMeaningException):
    """Refined 409 raised when an archive policy already exists."""
    message = "Archive policy already exists"
    # Applied by from_response() to the server's error description.
    match = re.compile("Archive policy .* already exists")
class ArchivePolicyRuleAlreadyExists(Conflict, MutipleMeaningException):
    """Refined 409 raised when an archive policy rule already exists."""
    message = "Archive policy rule already exists"
    # Applied by from_response() to the server's error description.
    match = re.compile("Archive policy rule .* already exists")
class OverLimit(RetryAfterException):
    """HTTP 413 - Over limit:

    you're over the API limits for this time period.
    """
    http_status = 413  # status code keyed on by _code_map
    message = "Over limit"
class RateLimit(RetryAfterException):
    """HTTP 429 - Rate limit:

    you've sent too many requests for this time period.
    """
    http_status = 429  # status code keyed on by _code_map
    message = "Rate limit"
class NotImplemented(ClientException):
    """HTTP 501 - Not Implemented:

    the server does not support this operation.
    """
    # NOTE(review): the class name shadows the ``NotImplemented`` builtin
    # within this module; kept as-is for backward compatibility.
    http_status = 501  # status code keyed on by _code_map
    message = "Not Implemented"
# Every concrete per-status exception class handled by from_response().
_error_classes = [BadRequest, Unauthorized, Forbidden, NotFound,
                  MethodNotAllowed, NotAcceptable, Conflict, OverLimit,
                  RateLimit, NotImplemented]
# For some statuses a more specific class can be chosen by matching the
# server's error description (see MutipleMeaningException).
_error_classes_enhanced = {
    NotFound: [MetricNotFound, ResourceTypeNotFound, ResourceNotFound,
               ArchivePolicyRuleNotFound, ArchivePolicyNotFound],
    Conflict: [NamedMetricAlreadyExists, ResourceAlreadyExists,
               ArchivePolicyAlreadyExists,
               ArchivePolicyRuleAlreadyExists]
}
# http_status -> (base class, [candidate refined classes])
_code_map = dict(
    (c.http_status, (c, _error_classes_enhanced.get(c, [])))
    for c in _error_classes)
def from_response(response, method=None):
    """Return an instance of one of the ClientException on an requests response.

    Usage::

        resp, body = requests.request(...)
        if resp.status_code != 200:
            raise from_response(resp)

    :param response: a ``requests`` response object with an error status
    :param method: HTTP method used for the request, stored on the exception
    """
    if response.status_code:
        # Base class for this status plus the more specific subclasses
        # that may be selected from the error description.
        cls, enhanced_classes = _code_map.get(response.status_code,
                                              (ClientException, []))
        req_id = response.headers.get("x-openstack-request-id")
        # Drop any "; charset=..." parameter from the media type.
        content_type = response.headers.get("Content-Type", "").split(";")[0]
        kwargs = {
            'code': response.status_code,
            'method': method,
            'url': response.url,
            'request_id': req_id,
        }
        if "retry-after" in response.headers:
            kwargs['retry_after'] = response.headers.get('retry-after')
        if content_type == "application/json":
            try:
                body = response.json()
            except ValueError:
                # Declared JSON but unparsable: leave the message unset.
                pass
            else:
                if 'description' in body:
                    # Gnocchi json
                    desc = body.get('description')
                    if desc:
                        # Refine the exception class from the description.
                        for enhanced_cls in enhanced_classes:
                            if enhanced_cls.match.match(desc):
                                cls = enhanced_cls
                                break
                        kwargs['message'] = desc
                elif isinstance(body, dict) and isinstance(body.get("error"),
                                                           dict):
                    # Keystone json
                    kwargs['message'] = body["error"]["message"]
                else:
                    kwargs['message'] = response.text
        elif content_type.startswith("text/"):
            kwargs['message'] = response.text
        # BUG FIX: the previous ``if not kwargs['message']`` raised KeyError
        # whenever no message had been extracted (e.g. unknown content
        # type); use .get()/.pop() so the key is only removed when present.
        if not kwargs.get('message'):
            kwargs.pop('message', None)
        return cls(**kwargs)

View File

@ -1,36 +0,0 @@
# -*- encoding: utf-8 -*-
#
# Licensed under the Apache License, Version 2.0 (the "License"); you may
# not use this file except in compliance with the License. You may obtain
# a copy of the License at
#
# http://www.apache.org/licenses/LICENSE-2.0
#
# Unless required by applicable law or agreed to in writing, software
# distributed under the License is distributed on an "AS IS" BASIS, WITHOUT
# WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. See the
# License for the specific language governing permissions and limitations
# under the License.
from __future__ import absolute_import
from os_doc_tools import commands
# HACK(jd) Not sure why but Sphinx setup this multiple times, so we just avoid
# doing several times the requests by using this global variable :(
_RUN = False  # flipped to True once setup() has generated the docs
def setup(app):
    """Sphinx extension hook: generate the gnocchi CLI doc page once."""
    global _RUN
    if _RUN:
        return
    commands.document_single_project("gnocchi", "doc/source", False)
    rst_path = "doc/source/gnocchi.rst"
    with open(rst_path, "r") as src:
        content = src.read().splitlines(True)
    # Find the boilerplate intro line; everything up to and including it
    # is stripped from the generated page.
    for cut, text in enumerate(content):
        if "This chapter documents" in text:
            break
    with open(rst_path, "w") as dst:
        dst.writelines(content[cut + 1:])
    _RUN = True

View File

@ -1,56 +0,0 @@
# Copyright 2014 OpenStack Foundation
#
# Licensed under the Apache License, Version 2.0 (the "License"); you may
# not use this file except in compliance with the License. You may obtain
# a copy of the License at
#
# http://www.apache.org/licenses/LICENSE-2.0
#
# Unless required by applicable law or agreed to in writing, software
# distributed under the License is distributed on an "AS IS" BASIS, WITHOUT
# WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. See the
# License for the specific language governing permissions and limitations
# under the License.
from osc_lib import utils
# Metrics API version used when the user does not specify one.
DEFAULT_METRICS_API_VERSION = '1'
# Attribute name osc-lib reads from the parsed options object.
API_VERSION_OPTION = 'os_metrics_api_version'
API_NAME = "metric"
# Supported API versions mapped to their client implementation path.
API_VERSIONS = {
    "1": "gnocchiclient.v1.client.Client",
}
def make_client(instance):
    """Returns a metrics service client."""
    raw_version = instance._api_version[API_NAME]
    try:
        version = int(raw_version)
    except ValueError:
        # Versions such as "1.1" only parse as floats.
        version = float(raw_version)
    client_class = utils.get_client_class(API_NAME, version, API_VERSIONS)
    # NOTE(sileht): ensure setup of the session is done
    instance.setup_auth()
    return client_class(session=instance.session,
                        interface=instance.interface,
                        region_name=instance.region_name)
def build_option_parser(parser):
    """Hook to add global options."""
    env_default = utils.env('OS_METRICS_API_VERSION',
                            default=DEFAULT_METRICS_API_VERSION)
    parser.add_argument(
        '--os-metrics-api-version',
        metavar='<metrics-api-version>',
        default=env_default,
        help=('Metrics API version, default=' +
              DEFAULT_METRICS_API_VERSION +
              ' (Env: OS_METRICS_API_VERSION)'))
    return parser

View File

@ -1,239 +0,0 @@
# Copyright 2012 OpenStack Foundation
# All Rights Reserved.
#
# Licensed under the Apache License, Version 2.0 (the "License"); you may
# not use this file except in compliance with the License. You may obtain
# a copy of the License at
#
# http://www.apache.org/licenses/LICENSE-2.0
#
# Unless required by applicable law or agreed to in writing, software
# distributed under the License is distributed on an "AS IS" BASIS, WITHOUT
# WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. See the
# License for the specific language governing permissions and limitations
# under the License.
import logging
import os
import sys
import warnings
from cliff import app
from cliff import commandmanager
from keystoneauth1 import adapter
from keystoneauth1 import exceptions
from keystoneauth1 import loading
from gnocchiclient import auth
from gnocchiclient import benchmark
from gnocchiclient import client
from gnocchiclient.v1 import archive_policy_cli
from gnocchiclient.v1 import archive_policy_rule_cli as ap_rule_cli
from gnocchiclient.v1 import capabilities_cli
from gnocchiclient.v1 import metric_cli
from gnocchiclient.v1 import resource_cli
from gnocchiclient.v1 import resource_type_cli
from gnocchiclient.v1 import status_cli
from gnocchiclient.version import __version__
class GnocchiCommandManager(commandmanager.CommandManager):
    """cliff command manager backed by a static command table."""

    # Maps the command name (as typed on the command line) to the cliff
    # command class implementing it.
    SHELL_COMMANDS = {
        "status": status_cli.CliStatusShow,
        "resource list": resource_cli.CliResourceList,
        "resource show": resource_cli.CliResourceShow,
        "resource history": resource_cli.CliResourceHistory,
        "resource search": resource_cli.CliResourceSearch,
        "resource create": resource_cli.CliResourceCreate,
        "resource update": resource_cli.CliResourceUpdate,
        "resource delete": resource_cli.CliResourceDelete,
        "resource batch delete": resource_cli.CliResourceBatchDelete,
        "resource-type list": resource_type_cli.CliResourceTypeList,
        "resource-type create": resource_type_cli.CliResourceTypeCreate,
        "resource-type update": resource_type_cli.CliResourceTypeUpdate,
        "resource-type show": resource_type_cli.CliResourceTypeShow,
        "resource-type delete": resource_type_cli.CliResourceTypeDelete,
        "archive-policy list": archive_policy_cli.CliArchivePolicyList,
        "archive-policy show": archive_policy_cli.CliArchivePolicyShow,
        "archive-policy create": archive_policy_cli.CliArchivePolicyCreate,
        "archive-policy update": archive_policy_cli.CliArchivePolicyUpdate,
        "archive-policy delete": archive_policy_cli.CliArchivePolicyDelete,
        "archive-policy-rule list": ap_rule_cli.CliArchivePolicyRuleList,
        "archive-policy-rule show": ap_rule_cli.CliArchivePolicyRuleShow,
        "archive-policy-rule create": ap_rule_cli.CliArchivePolicyRuleCreate,
        "archive-policy-rule delete": ap_rule_cli.CliArchivePolicyRuleDelete,
        "metric list": metric_cli.CliMetricList,
        "metric show": metric_cli.CliMetricShow,
        "metric create": metric_cli.CliMetricCreate,
        "metric delete": metric_cli.CliMetricDelete,
        "measures show": metric_cli.CliMeasuresShow,
        "measures add": metric_cli.CliMeasuresAdd,
        "measures batch-metrics": metric_cli.CliMetricsMeasuresBatch,
        "measures batch-resources-metrics":
            metric_cli.CliResourcesMetricsMeasuresBatch,
        "measures aggregation": metric_cli.CliMeasuresAggregation,
        "capabilities list": capabilities_cli.CliCapabilitiesList,
        "benchmark metric create": benchmark.CliBenchmarkMetricCreate,
        "benchmark metric show": benchmark.CliBenchmarkMetricShow,
        "benchmark measures add": benchmark.CliBenchmarkMeasuresAdd,
        "benchmark measures show": benchmark.CliBenchmarkMeasuresShow,
    }

    def load_commands(self, namespace):
        # ``namespace`` (the entry-point namespace) is ignored: commands
        # come from the static table above rather than entry points.
        for name, command_class in self.SHELL_COMMANDS.items():
            self.add_command(name, command_class)
class GnocchiShell(app.App):
    """cliff application implementing the ``gnocchi`` command line tool."""

    def __init__(self):
        super(GnocchiShell, self).__init__(
            description='Gnocchi command line client',
            # FIXME(sileht): get version from pbr
            version=__version__,
            command_manager=GnocchiCommandManager(None),
            deferred_help=True,
        )
        # Lazily-created API client; see the ``client`` property.
        self._client = None

    def build_option_parser(self, description, version):
        """Return an argparse option parser for this application.

        Subclasses may override this method to extend
        the parser with more global options.

        :param description: full description of the application
        :paramtype description: str
        :param version: version number for the application
        :paramtype version: str
        """
        parser = super(GnocchiShell, self).build_option_parser(
            description,
            version,
            argparse_kwargs={'allow_abbrev': False})
        parser.add_argument(
            '--gnocchi-api-version',
            default=os.environ.get('GNOCCHI_API_VERSION', '1'),
            help='Defaults to env[GNOCCHI_API_VERSION] or 1.')
        loading.register_session_argparse_arguments(parser=parser)
        plugin = loading.register_auth_argparse_arguments(
            parser=parser, argv=sys.argv, default="gnocchi-basic")
        if not isinstance(plugin, (auth.GnocchiNoAuthLoader,
                                   auth.GnocchiBasicLoader)):
            # Keystone-style auth plugin: also expose the generic adapter
            # selection options for the "metric" service.
            adapter.register_adapter_argparse_arguments(
                parser=parser, service_type="metric")
            adapter.register_service_adapter_argparse_arguments(
                parser=parser, service_type="metric")
        parser.add_argument(
            '--endpoint',
            default=os.environ.get('GNOCCHI_ENDPOINT'),
            help='Gnocchi endpoint (Env: GNOCCHI_ENDPOINT). '
                 'Deprecated, use --os-endpoint-override and '
                 'OS_ENDPOINT_OVERRIDE instead')
        return parser

    @property
    def client(self):
        # NOTE(sileht): we lazy load the client to not
        # load/connect auth stuffs
        if self._client is None:
            auth_plugin = loading.load_auth_from_argparse_arguments(
                self.options)
            session = loading.load_session_from_argparse_arguments(
                self.options, auth=auth_plugin)
            if isinstance(auth_plugin, (auth.GnocchiNoAuthPlugin,
                                        auth.GnocchiBasicPlugin)):
                # Normal endpoint
                kwargs = dict(
                    version=self.options.gnocchi_api_version,
                    session=session,
                )
            else:
                # OpenStack style endpoint: metric-specific options take
                # precedence over the generic ones, with --endpoint as the
                # deprecated last resort for the endpoint override.
                kwargs = dict(
                    version=(self.options.os_metric_api_version or
                             self.options.os_api_version or
                             self.options.gnocchi_api_version),
                    session=session,
                    adapter_options=dict(
                        service_type=(self.options.os_metric_service_type or
                                      self.options.os_service_type),
                        service_name=(self.options.os_metric_service_name or
                                      self.options.os_service_name),
                        interface=(self.options.os_metric_interface or
                                   self.options.os_interface),
                        region_name=self.options.os_region_name,
                        endpoint_override=(
                            self.options.os_metric_endpoint_override or
                            self.options.os_endpoint_override or
                            self.options.endpoint),
                    )
                )
            self._client = client.Client(**kwargs)
        return self._client

    def clean_up(self, cmd, result, err):
        # Print the server-provided error detail, when one is available,
        # in addition to cliff's own error reporting.
        if err and isinstance(err, exceptions.HttpError):
            try:
                error = err.response.json()
            except Exception:
                # Body is not JSON; nothing extra to show.
                pass
            else:
                if 'description' in error:
                    # Gnocchi format
                    print(error['description'])
                elif 'error' in error and 'message' in error['error']:
                    # Keystone format
                    print(error['error']['message'])

    def configure_logging(self):
        if self.options.debug:
            # --debug forces verbose_level 3
            # Set this here so cliff.app.configure_logging() can work
            self.options.verbose_level = 3
        super(GnocchiShell, self).configure_logging()
        root_logger = logging.getLogger('')
        # Set logging to the requested level
        if self.options.verbose_level == 0:
            # --quiet
            root_logger.setLevel(logging.ERROR)
            warnings.simplefilter("ignore")
        elif self.options.verbose_level == 1:
            # This is the default case, no --debug, --verbose or --quiet
            root_logger.setLevel(logging.WARNING)
            warnings.simplefilter("ignore")
        elif self.options.verbose_level == 2:
            # One --verbose
            root_logger.setLevel(logging.INFO)
            warnings.simplefilter("once")
        elif self.options.verbose_level >= 3:
            # Two or more --verbose
            root_logger.setLevel(logging.DEBUG)
        # Hide some useless message
        requests_log = logging.getLogger("requests")
        cliff_log = logging.getLogger('cliff')
        stevedore_log = logging.getLogger('stevedore')
        iso8601_log = logging.getLogger("iso8601")
        cliff_log.setLevel(logging.ERROR)
        stevedore_log.setLevel(logging.ERROR)
        iso8601_log.setLevel(logging.ERROR)
        if self.options.debug:
            requests_log.setLevel(logging.DEBUG)
        else:
            requests_log.setLevel(logging.ERROR)
def main(args=None):
    """Console-script entry point: run the Gnocchi shell."""
    cli_args = sys.argv[1:] if args is None else args
    return GnocchiShell().run(cli_args)

View File

@ -1,115 +0,0 @@
# Licensed under the Apache License, Version 2.0 (the "License"); you may
# not use this file except in compliance with the License. You may obtain
# a copy of the License at
#
# http://www.apache.org/licenses/LICENSE-2.0
#
# Unless required by applicable law or agreed to in writing, software
# distributed under the License is distributed on an "AS IS" BASIS, WITHOUT
# WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. See the
# License for the specific language governing permissions and limitations
# under the License.
import os
import shlex
import six
import subprocess
import time
from tempest.lib.cli import base
from tempest.lib import exceptions
class GnocchiClient(object):
    """Gnocchi Client for tempest-lib

    This client doesn't use any authentication system
    """

    def __init__(self):
        # Directory containing the "gnocchi" executable under test and the
        # endpoint of the test server, both injected via the environment
        # by the functional-test runner (pifpaf).
        self.cli_dir = os.environ.get('GNOCCHI_CLIENT_EXEC_DIR')
        self.endpoint = os.environ.get('PIFPAF_GNOCCHI_HTTP_URL')

    def gnocchi(self, action, flags='', params='',
                fail_ok=False, merge_stderr=False, input=None):
        """Execute the ``gnocchi`` binary and return its stdout.

        :param action: first positional CLI argument (e.g. "metric")
        :param flags: extra global flags, appended after the credentials
        :param params: sub-command and its arguments
        :param fail_ok: when False, a non-zero exit raises CommandFailed
        :param merge_stderr: when True, stderr is folded into stdout
        :param input: optional data piped to the process' stdin
        """
        creds = ("--os-auth-plugin gnocchi-basic "
                 "--user admin "
                 "--endpoint %s") % self.endpoint
        flags = creds + ' ' + flags
        # FIXME(sileht): base.execute is broken in py3 in tempest-lib
        # see: https://review.openstack.org/#/c/218870/
        # return base.execute("gnocchi", action, flags, params, fail_ok,
        #                     merge_stderr, self.cli_dir)
        cmd = "gnocchi"
        # from fixed tempestlib
        cmd = ' '.join([os.path.join(self.cli_dir, cmd),
                        flags, action, params])
        if six.PY2:
            cmd = cmd.encode('utf-8')
        cmd = shlex.split(cmd)
        result = ''
        result_err = ''
        stdin = None if input is None else subprocess.PIPE
        stdout = subprocess.PIPE
        stderr = subprocess.STDOUT if merge_stderr else subprocess.PIPE
        proc = subprocess.Popen(cmd, stdin=stdin, stdout=stdout, stderr=stderr)
        result, result_err = proc.communicate(input=input)
        if not fail_ok and proc.returncode != 0:
            raise exceptions.CommandFailed(proc.returncode,
                                           cmd,
                                           result,
                                           result_err)
        if six.PY2:
            return result
        else:
            # Python 3: Popen returned bytes; decode with the fs encoding.
            return os.fsdecode(result)
class ClientTestBase(base.ClientTestBase):
    """Base class for gnocchiclient tests.

    Establishes the gnocchi client and retrieves the essential environment
    information.
    """

    def _get_clients(self):
        # Called by the tempest base class to build the CLI wrapper.
        return GnocchiClient()

    def retry_gnocchi(self, retry, *args, **kwargs):
        """Run a gnocchi command until it produces non-blank output.

        Retries up to ``retry`` times, sleeping one second after each
        empty result, and returns the last (possibly empty) output.
        """
        result = ""
        while not result.strip() and retry > 0:
            result = self.gnocchi(*args, **kwargs)
            if not result:
                time.sleep(1)
            retry -= 1
        return result

    def gnocchi(self, *args, **kwargs):
        # Convenience proxy to the GnocchiClient built by _get_clients().
        return self.clients.gnocchi(*args, **kwargs)

    def details_multiple(self, output_lines, with_label=False):
        """Return list of dicts with item details from cli output tables.

        If with_label is True, key '__label' is added to each items dict.
        For more about 'label' see OutputParser.tables().

        NOTE(sileht): come from tempest-lib just because cliff use
        Field instead of Property as first columun header.
        """
        items = []
        tables_ = self.parser.tables(output_lines)
        for table_ in tables_:
            if ('Field' not in table_['headers']
                    or 'Value' not in table_['headers']):
                raise exceptions.InvalidStructure()
            item = {}
            for value in table_['values']:
                item[value[0]] = value[1]
            if with_label:
                item['__label'] = table_['label']
            items.append(item)
        return items

View File

@ -1,89 +0,0 @@
# Licensed under the Apache License, Version 2.0 (the "License"); you may
# not use this file except in compliance with the License. You may obtain
# a copy of the License at
#
# http://www.apache.org/licenses/LICENSE-2.0
#
# Unless required by applicable law or agreed to in writing, software
# distributed under the License is distributed on an "AS IS" BASIS, WITHOUT
# WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. See the
# License for the specific language governing permissions and limitations
# under the License.
from oslo_utils import uuidutils
from gnocchiclient.tests.functional import base
class ArchivePolicyClientTest(base.ClientTestBase):
    """End-to-end CRUD scenario for the "archive-policy" CLI commands."""

    def test_archive_policy_scenario(self):
        # CREATE
        apname = uuidutils.generate_uuid()
        result = self.gnocchi(
            u'archive-policy', params=u"create %s"
            u" --back-window 0"
            u" -d granularity:1s,points:86400" % apname)
        policy = self.details_multiple(result)[0]
        self.assertEqual(apname, policy["name"])

        # CREATE FAIL: policy names are unique, duplicates are a 409.
        result = self.gnocchi(
            u'archive-policy', params=u"create %s"
            u" --back-window 0"
            u" -d granularity:1s,points:86400" % apname,
            fail_ok=True, merge_stderr=True)
        self.assertFirstLineStartsWith(
            result.split('\n'),
            "Archive policy %s already exists (HTTP 409)" % apname)

        # GET
        result = self.gnocchi(
            'archive-policy', params="show %s" % apname)
        policy = self.details_multiple(result)[0]
        self.assertEqual(apname, policy["name"])

        # LIST: the list and show representations must agree.
        result = self.gnocchi(
            'archive-policy', params="list")
        policies = self.parser.listing(result)
        policy_from_list = [p for p in policies
                            if p['name'] == apname][0]
        for field in ["back_window", "definition", "aggregation_methods"]:
            self.assertEqual(policy[field], policy_from_list[field])

        # UPDATE
        result = self.gnocchi(
            'archive-policy', params='update %s'
            ' -d granularity:1s,points:60' % apname)
        policy = self.details_multiple(result)[0]
        self.assertEqual(apname, policy["name"])

        # UPDATE FAIL: changing the granularity interval is rejected.
        result = self.gnocchi(
            'archive-policy', params='update %s'
            ' -d granularity:5s,points:86400' % apname,
            fail_ok=True, merge_stderr=True)
        self.assertFirstLineStartsWith(
            result.split('\n'),
            "Archive policy %s does not support change: 1.0 granularity "
            "interval was changed (HTTP 400)" % apname)

        # DELETE
        result = self.gnocchi('archive-policy',
                              params="delete %s" % apname)
        self.assertEqual("", result)

        # GET FAIL: the policy is gone.
        result = self.gnocchi('archive-policy',
                              params="show %s" % apname,
                              fail_ok=True, merge_stderr=True)
        self.assertFirstLineStartsWith(
            result.split('\n'),
            "Archive policy %s does not exist (HTTP 404)" % apname)

        # DELETE FAIL: deleting it a second time is a 404.
        result = self.gnocchi('archive-policy',
                              params="delete %s" % apname,
                              fail_ok=True, merge_stderr=True)
        self.assertFirstLineStartsWith(
            result.split('\n'),
            "Archive policy %s does not exist (HTTP 404)" % apname)

View File

@ -1,75 +0,0 @@
# Licensed under the Apache License, Version 2.0 (the "License"); you may
# not use this file except in compliance with the License. You may obtain
# a copy of the License at
#
# http://www.apache.org/licenses/LICENSE-2.0
#
# Unless required by applicable law or agreed to in writing, software
# distributed under the License is distributed on an "AS IS" BASIS, WITHOUT
# WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. See the
# License for the specific language governing permissions and limitations
# under the License.
from oslo_utils import uuidutils
from gnocchiclient.tests.functional import base
class ArchivePolicyRuleClientTest(base.ClientTestBase):
    """End-to-end CRUD scenario for the "archive-policy-rule" commands."""

    def test_archive_policy_rule_scenario(self):
        apname = uuidutils.generate_uuid()
        # Create an archive policy the rule can reference.
        self.gnocchi(
            u'archive-policy', params=u"create %s"
            u" -d granularity:1s,points:86400" % apname)

        # CREATE
        result = self.gnocchi(
            u'archive-policy-rule', params=u"create test"
            u" --archive-policy-name %s"
            u" --metric-pattern 'disk.io.*'" % apname)
        policy_rule = self.details_multiple(result)[0]
        self.assertEqual('test', policy_rule["name"])

        # CREATE FAIL: rule names are unique.
        # NOTE(review): this retry uses '--archive-policy-name high' rather
        # than apname; presumably irrelevant because the duplicate rule
        # name is rejected first -- confirm.
        result = self.gnocchi(
            u'archive-policy-rule', params=u"create test"
            u" --archive-policy-name high"
            u" --metric-pattern 'disk.io.*'",
            fail_ok=True, merge_stderr=True)
        self.assertFirstLineStartsWith(
            result.split('\n'),
            "Archive policy rule test already exists (HTTP 409)")

        # GET
        result = self.gnocchi(
            'archive-policy-rule', params="show test")
        policy_rule = self.details_multiple(result)[0]
        self.assertEqual("test", policy_rule["name"])

        # LIST: the list and show representations must agree.
        result = self.gnocchi('archive-policy-rule', params="list")
        rules = self.parser.listing(result)
        rule_from_list = [p for p in rules
                          if p['name'] == 'test'][0]
        for field in ["metric_pattern", "archive_policy_name"]:
            self.assertEqual(policy_rule[field], rule_from_list[field])

        # DELETE
        result = self.gnocchi('archive-policy-rule',
                              params="delete test")
        self.assertEqual("", result)

        # GET FAIL: the rule is gone.
        result = self.gnocchi('archive-policy-rule',
                              params="show test",
                              fail_ok=True, merge_stderr=True)
        self.assertFirstLineStartsWith(
            result.split('\n'),
            "Archive policy rule test does not exist (HTTP 404)")

        # DELETE FAIL: deleting it a second time is a 404.
        result = self.gnocchi('archive-policy-rule',
                              params="delete test",
                              fail_ok=True, merge_stderr=True)
        self.assertFirstLineStartsWith(
            result.split('\n'),
            "Archive policy rule test does not exist (HTTP 404)")

View File

@ -1,110 +0,0 @@
# Licensed under the Apache License, Version 2.0 (the "License"); you may
# not use this file except in compliance with the License. You may obtain
# a copy of the License at
#
# http://www.apache.org/licenses/LICENSE-2.0
#
# Unless required by applicable law or agreed to in writing, software
# distributed under the License is distributed on an "AS IS" BASIS, WITHOUT
# WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. See the
# License for the specific language governing permissions and limitations
# under the License.
from oslo_utils import uuidutils
from gnocchiclient.tests.functional import base
class BenchmarkMetricTest(base.ClientTestBase):
    """Functional checks for the "benchmark ..." CLI commands."""

    def test_benchmark_metric_create_wrong_workers(self):
        # A count of zero must be rejected before anything runs.
        result = self.gnocchi(
            u'benchmark', params=u"metric create -n 0",
            fail_ok=True, merge_stderr=True)
        self.assertIn("0 must be greater than 0", result)

    def test_benchmark_metric_create(self):
        apname = uuidutils.generate_uuid()
        # PREPARE AN ARCHIVE POLICY
        self.gnocchi("archive-policy", params="create %s "
                     "--back-window 0 -d granularity:1s,points:86400" % apname)

        result = self.gnocchi(
            u'benchmark', params=u"metric create -n 10 -a %s" % apname)
        result = self.details_multiple(result)[0]
        self.assertEqual(10, int(result['create executed']))
        self.assertLessEqual(int(result['create failures']), 10)
        self.assertLessEqual(int(result['delete executed']),
                             int(result['create executed']))

        # NOTE(review): -k appears to keep the created metrics, hence no
        # delete statistics are reported -- confirm against the benchmark
        # command implementation.
        result = self.gnocchi(
            u'benchmark', params=u"metric create -k -n 10 -a %s" % apname)
        result = self.details_multiple(result)[0]
        self.assertEqual(10, int(result['create executed']))
        self.assertLessEqual(int(result['create failures']), 10)
        self.assertNotIn('delete executed', result)

    def test_benchmark_metric_get(self):
        apname = uuidutils.generate_uuid()
        # PREPARE AN ARCHIVE POLICY
        self.gnocchi("archive-policy", params="create %s "
                     "--back-window 0 -d granularity:1s,points:86400" % apname)
        result = self.gnocchi(
            u'metric', params=u"create -a %s" % apname)
        metric = self.details_multiple(result)[0]

        result = self.gnocchi(
            u'benchmark', params=u"metric show -n 10 %s" % metric['id'])
        result = self.details_multiple(result)[0]
        self.assertEqual(10, int(result['show executed']))
        self.assertLessEqual(int(result['show failures']), 10)

    def test_benchmark_measures_add(self):
        apname = uuidutils.generate_uuid()
        # PREPARE AN ARCHIVE POLICY
        self.gnocchi("archive-policy", params="create %s "
                     "--back-window 0 -d granularity:1s,points:86400" % apname)
        result = self.gnocchi(
            u'metric', params=u"create -a %s" % apname)
        metric = self.details_multiple(result)[0]

        # NOTE(review): -n 10 -b 4 is expected to yield 2 executed pushes;
        # presumably only full batches are sent -- confirm.
        result = self.gnocchi(
            u'benchmark', params=u"measures add -n 10 -b 4 %s" % metric['id'])
        result = self.details_multiple(result)[0]
        self.assertEqual(2, int(result['push executed']))
        self.assertLessEqual(int(result['push failures']), 2)

        result = self.gnocchi(
            u'benchmark',
            params=u"measures add -s 2010-01-01 -n 10 -b 4 %s"
            % metric['id'])
        result = self.details_multiple(result)[0]
        self.assertEqual(2, int(result['push executed']))
        self.assertLessEqual(int(result['push failures']), 2)

        # --wait additionally reports the time spent waiting for the
        # backlog of measures to be processed.
        result = self.gnocchi(
            u'benchmark',
            params=u"measures add --wait -s 2010-01-01 -n 10 -b 4 %s"
            % metric['id'])
        result = self.details_multiple(result)[0]
        self.assertEqual(2, int(result['push executed']))
        self.assertLessEqual(int(result['push failures']), 2)
        self.assertIn("extra wait to process measures", result)

    def test_benchmark_measures_show(self):
        apname = uuidutils.generate_uuid()
        # PREPARE AN ARCHIVE POLICY
        self.gnocchi("archive-policy", params="create %s "
                     "--back-window 0 -d granularity:1s,points:86400" % apname)
        result = self.gnocchi(
            u'metric', params=u"create -a %s" % apname)
        metric = self.details_multiple(result)[0]

        result = self.gnocchi(
            u'benchmark',
            params=u"measures show -n 2 %s"
            % metric['id'])
        result = self.details_multiple(result)[0]
        self.assertEqual(2, int(result['show executed']))
        self.assertLessEqual(int(result['show failures']), 2)

View File

@ -1,22 +0,0 @@
# Licensed under the Apache License, Version 2.0 (the "License"); you may
# not use this file except in compliance with the License. You may obtain
# a copy of the License at
#
# http://www.apache.org/licenses/LICENSE-2.0
#
# Unless required by applicable law or agreed to in writing, software
# distributed under the License is distributed on an "AS IS" BASIS, WITHOUT
# WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. See the
# License for the specific language governing permissions and limitations
# under the License.
from gnocchiclient.tests.functional import base
class CapabilitiesClientTest(base.ClientTestBase):
    """Functional check for the "capabilities list" command."""

    def test_capabilities_scenario(self):
        # GET
        result = self.gnocchi('capabilities', params="list")
        caps = self.parser.listing(result)[0]
        self.assertIsNotNone(caps)
        # First listed capability row is the aggregation methods.
        self.assertEqual('aggregation_methods', caps['Field'])

View File

@ -1,433 +0,0 @@
# Licensed under the Apache License, Version 2.0 (the "License"); you may
# not use this file except in compliance with the License. You may obtain
# a copy of the License at
#
# http://www.apache.org/licenses/LICENSE-2.0
#
# Unless required by applicable law or agreed to in writing, software
# distributed under the License is distributed on an "AS IS" BASIS, WITHOUT
# WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. See the
# License for the specific language governing permissions and limitations
# under the License.
import json
import os
import tempfile
from oslo_utils import uuidutils
from gnocchiclient.tests.functional import base
class MetricClientTest(base.ClientTestBase):
    """Functional CLI tests for ``gnocchi metric`` and ``gnocchi measures``.

    Every test shells out to the gnocchi CLI (``self.gnocchi``) and parses
    its rendered table output, so assertions compare formatted strings
    (e.g. 'None', '12.0') rather than native Python values.
    """

    def test_delete_several_metrics(self):
        """A single "metric delete" call removes every id passed to it."""
        apname = uuidutils.generate_uuid()
        # PREPARE AN ARCHIVE POLICY
        self.gnocchi("archive-policy", params="create %s "
                     "--back-window 0 -d granularity:1s,points:86400" % apname)

        # Create 2 metrics
        result = self.gnocchi(
            u'metric', params=u"create"
            u" --archive-policy-name %s" % apname)
        metric1 = self.details_multiple(result)[0]

        result = self.gnocchi(
            u'metric', params=u"create"
            u" --archive-policy-name %s" % apname)
        metric2 = self.details_multiple(result)[0]

        # DELETE both ids with one CLI invocation
        result = self.gnocchi('metric', params="delete %s %s"
                              % (metric1["id"], metric2["id"]))
        self.assertEqual("", result)

        # GET FAIL: both metrics must now report HTTP 404
        result = self.gnocchi('metric', params="show %s" % metric1["id"],
                              fail_ok=True, merge_stderr=True)
        self.assertFirstLineStartsWith(result.split('\n'),
                                       "Metric %s does not exist (HTTP 404)" %
                                       metric1["id"])
        result = self.gnocchi('metric', params="show %s" % metric2["id"],
                              fail_ok=True, merge_stderr=True)
        self.assertFirstLineStartsWith(result.split('\n'),
                                       "Metric %s does not exist (HTTP 404)" %
                                       metric2["id"])

    def test_metric_scenario(self):
        """Full lifecycle of a metric addressed by UUID.

        Covers create (with/without unit), show, measures add/show
        (with refresh, retry and resample), aggregation, batch input
        (file and stdin), list (with sort/marker/limit) and delete.
        """
        # PREPARE AN ARCHIVE POLICY
        self.gnocchi("archive-policy", params="create metric-test "
                     "--back-window 0 -d granularity:1s,points:86400")

        # CREATE WITH NAME AND WITHOUT UNIT
        result = self.gnocchi(
            u'metric', params=u"create"
            u" --archive-policy-name metric-test some-name")
        metric = self.details_multiple(result)[0]
        self.assertIsNotNone(metric["id"])
        self.assertEqual("admin", metric["creator"])
        self.assertEqual("", metric["created_by_project_id"])
        self.assertEqual("admin", metric["created_by_user_id"])
        self.assertEqual('some-name', metric["name"])
        # unit/resource are rendered as the string 'None' by the CLI
        self.assertEqual('None', metric["unit"])
        self.assertEqual('None', metric["resource/id"])
        self.assertIn("metric-test", metric["archive_policy/name"])

        # CREATE WITH UNIT
        result = self.gnocchi(
            u'metric', params=u"create another-name"
            u" --archive-policy-name metric-test"
            u" --unit some-unit")
        metric = self.details_multiple(result)[0]
        self.assertIsNotNone(metric["id"])
        self.assertEqual("admin", metric["creator"])
        self.assertEqual("", metric["created_by_project_id"])
        self.assertEqual("admin", metric["created_by_user_id"])
        self.assertEqual('another-name', metric["name"])
        self.assertEqual('some-unit', metric["unit"])
        self.assertEqual('None', metric["resource/id"])
        self.assertIn("metric-test", metric["archive_policy/name"])

        # GET
        result = self.gnocchi('metric', params="show %s" % metric["id"])
        metric_get = self.details_multiple(result)[0]
        self.assertEqual(metric, metric_get)

        # MEASURES ADD
        result = self.gnocchi('measures',
                              params=("add %s "
                                      "-m '2015-03-06T14:33:57@43.11' "
                                      "--measure '2015-03-06T14:34:12@12' "
                                      ) % metric["id"])
        self.assertEqual("", result)

        # MEASURES GET with refresh (forces server-side processing first)
        result = self.gnocchi('measures',
                              params=("show %s "
                                      "--aggregation mean "
                                      "--granularity 1 "
                                      "--start 2015-03-06T14:32:00 "
                                      "--stop 2015-03-06T14:36:00 "
                                      "--refresh") % metric["id"])
        measures = self.parser.listing(result)
        self.assertEqual([{'granularity': '1.0',
                           'timestamp': '2015-03-06T14:33:57+00:00',
                           'value': '43.11'},
                          {'granularity': '1.0',
                           'timestamp': '2015-03-06T14:34:12+00:00',
                           'value': '12.0'}], measures)

        # MEASURES GET (retried up to 5 times while measures are processed)
        result = self.retry_gnocchi(
            5, 'measures', params=("show %s "
                                   "--aggregation mean "
                                   "--granularity 1 "
                                   "--start 2015-03-06T14:32:00 "
                                   "--stop 2015-03-06T14:36:00"
                                   ) % metric["id"])
        measures = self.parser.listing(result)
        self.assertEqual([{'granularity': '1.0',
                           'timestamp': '2015-03-06T14:33:57+00:00',
                           'value': '43.11'},
                          {'granularity': '1.0',
                           'timestamp': '2015-03-06T14:34:12+00:00',
                           'value': '12.0'}], measures)

        # MEASURES GET RESAMPLE: both points collapse into one 3600s bucket
        result = self.retry_gnocchi(
            5, 'measures', params=("show %s "
                                   "--aggregation mean "
                                   "--granularity 1 --resample 3600 "
                                   "--start 2015-03-06T14:32:00 "
                                   "--stop 2015-03-06T14:36:00"
                                   ) % metric["id"])
        measures = self.parser.listing(result)
        # mean of 43.11 and 12 is 27.555
        self.assertEqual([{'granularity': '3600.0',
                           'timestamp': '2015-03-06T14:00:00+00:00',
                           'value': '27.555'}], measures)

        # MEASURES AGGREGATION
        result = self.gnocchi(
            'measures', params=("aggregation "
                                "--metric %s "
                                "--aggregation mean "
                                "--reaggregation sum "
                                "--granularity 1 "
                                "--start 2015-03-06T14:32:00 "
                                "--stop 2015-03-06T14:36:00"
                                ) % metric["id"])
        measures = self.parser.listing(result)
        self.assertEqual([{'granularity': '1.0',
                           'timestamp': '2015-03-06T14:33:57+00:00',
                           'value': '43.11'},
                          {'granularity': '1.0',
                           'timestamp': '2015-03-06T14:34:12+00:00',
                           'value': '12.0'}], measures)

        # BATCHING: same JSON payload accepted from a file and from stdin
        measures = json.dumps({
            metric['id']: [{'timestamp': '2015-03-06T14:34:12',
                            'value': 12}]})
        tmpfile = tempfile.NamedTemporaryFile(delete=False)
        self.addCleanup(os.remove, tmpfile.name)
        with tmpfile as f:
            f.write(measures.encode('utf8'))
        self.gnocchi('measures', params=("batch-metrics %s" % tmpfile.name))
        self.gnocchi('measures', params="batch-metrics -",
                     input=measures.encode('utf8'))

        # LIST
        result = self.gnocchi('metric', params="list")
        metrics = self.parser.listing(result)
        metric_from_list = [p for p in metrics
                            if p['id'] == metric['id']][0]
        for field in ["id", "archive_policy/name", "name"]:
            # FIXME(sileht): add "resource_id" or "resource"
            # when LP#1497171 is fixed
            self.assertEqual(metric[field], metric_from_list[field], field)

        # LIST + limit: paginate past our metric, expect exactly one result
        result = self.gnocchi('metric',
                              params=("list "
                                      "--sort name:asc "
                                      "--marker %s "
                                      "--limit 1") % metric['id'])
        metrics = self.parser.listing(result)
        metric_from_list = metrics[0]
        self.assertEqual(1, len(metrics))
        self.assertTrue(metric['name'] < metric_from_list['name'])

        # DELETE
        result = self.gnocchi('metric', params="delete %s" % metric["id"])
        self.assertEqual("", result)

        # GET FAIL
        result = self.gnocchi('metric', params="show %s" % metric["id"],
                              fail_ok=True, merge_stderr=True)
        self.assertFirstLineStartsWith(
            result.split('\n'),
            "Metric %s does not exist (HTTP 404)" % metric["id"])

        # DELETE FAIL: deleting twice must 404 as well
        result = self.gnocchi('metric', params="delete %s" % metric["id"],
                              fail_ok=True, merge_stderr=True)
        self.assertFirstLineStartsWith(
            result.split('\n'),
            "Metric %s does not exist (HTTP 404)" % metric["id"])

    def test_metric_by_name_scenario(self):
        """Full lifecycle of a metric addressed by resource + name.

        Same shape as test_metric_scenario but every command resolves the
        metric via ``-r <resource> <name>`` instead of its UUID, and the
        aggregation commands select the resource through a search query.
        """
        # PREPARE REQUIREMENT
        self.gnocchi("archive-policy", params="create metric-test2 "
                     "--back-window 0 -d granularity:1s,points:86400")
        self.gnocchi("resource", params="create metric-res")

        # CREATE
        result = self.gnocchi(
            u'metric', params=u"create"
            u" --archive-policy-name metric-test2 -r metric-res metric-name"
            u" --unit some-unit")
        metric = self.details_multiple(result)[0]
        self.assertIsNotNone(metric["id"])
        self.assertEqual("", metric['created_by_project_id'])
        self.assertEqual("admin", metric['created_by_user_id'])
        self.assertEqual("admin", metric['creator'])
        self.assertEqual('metric-name', metric["name"])
        self.assertEqual('some-unit', metric["unit"])
        self.assertNotEqual('None', metric["resource/id"])
        self.assertIn("metric-test", metric["archive_policy/name"])

        # CREATE FAIL: a named metric must be unique per resource
        result = self.gnocchi(
            u'metric', params=u"create"
            u" --archive-policy-name metric-test2 -r metric-res metric-name",
            fail_ok=True, merge_stderr=True)
        self.assertFirstLineStartsWith(
            result.split('\n'),
            "Named metric metric-name already exists (HTTP 409)")

        # GET
        result = self.gnocchi('metric',
                              params="show -r metric-res metric-name")
        metric_get = self.details_multiple(result)[0]
        self.assertEqual(metric, metric_get)

        # MEASURES ADD
        result = self.gnocchi('measures',
                              params=("add metric-name -r metric-res "
                                      "-m '2015-03-06T14:33:57@43.11' "
                                      "--measure '2015-03-06T14:34:12@12'"))
        self.assertEqual("", result)

        # MEASURES AGGREGATION with refresh
        result = self.gnocchi(
            'measures', params=("aggregation "
                                "--query \"id='metric-res'\" "
                                "--resource-type \"generic\" "
                                "-m metric-name "
                                "--aggregation mean "
                                "--needed-overlap 0 "
                                "--start 2015-03-06T14:32:00 "
                                "--stop 2015-03-06T14:36:00 "
                                "--refresh"))
        measures = self.parser.listing(result)
        self.assertEqual([{'granularity': '1.0',
                           'timestamp': '2015-03-06T14:33:57+00:00',
                           'value': '43.11'},
                          {'granularity': '1.0',
                           'timestamp': '2015-03-06T14:34:12+00:00',
                           'value': '12.0'}], measures)

        # MEASURES AGGREGATION
        result = self.gnocchi(
            'measures', params=("aggregation "
                                "--query \"id='metric-res'\" "
                                "--resource-type \"generic\" "
                                "-m metric-name "
                                "--aggregation mean "
                                "--needed-overlap 0 "
                                "--start 2015-03-06T14:32:00 "
                                "--stop 2015-03-06T14:36:00"))
        measures = self.parser.listing(result)
        self.assertEqual([{'granularity': '1.0',
                           'timestamp': '2015-03-06T14:33:57+00:00',
                           'value': '43.11'},
                          {'granularity': '1.0',
                           'timestamp': '2015-03-06T14:34:12+00:00',
                           'value': '12.0'}], measures)

        # MEASURES AGGREGATION WITH FILL
        result = self.gnocchi(
            'measures', params=("aggregation "
                                "--query \"id='metric-res'\" "
                                "--resource-type \"generic\" "
                                "-m metric-name --fill 0 "
                                "--granularity 1 "
                                "--start 2015-03-06T14:32:00 "
                                "--stop 2015-03-06T14:36:00"))
        measures = self.parser.listing(result)
        self.assertEqual([{'granularity': '1.0',
                           'timestamp': '2015-03-06T14:33:57+00:00',
                           'value': '43.11'},
                          {'granularity': '1.0',
                           'timestamp': '2015-03-06T14:34:12+00:00',
                           'value': '12.0'}], measures)

        # MEASURES AGGREGATION RESAMPLE
        result = self.gnocchi(
            'measures', params=("aggregation "
                                "--query \"id='metric-res'\" "
                                "--resource-type \"generic\" "
                                "-m metric-name --granularity 1 "
                                "--aggregation mean --resample=3600 "
                                "--needed-overlap 0 "
                                "--start 2015-03-06T14:32:00 "
                                "--stop 2015-03-06T14:36:00"))
        measures = self.parser.listing(result)
        self.assertEqual([{'granularity': '3600.0',
                           'timestamp': '2015-03-06T14:00:00+00:00',
                           'value': '27.555'}], measures)

        # MEASURES AGGREGATION GROUPBY
        result = self.gnocchi(
            'measures', params=("aggregation "
                                "--groupby project_id "
                                "--groupby user_id "
                                "--query \"id='metric-res'\" "
                                "--resource-type \"generic\" "
                                "-m metric-name "
                                "--aggregation mean "
                                "--needed-overlap 0 "
                                "--start 2015-03-06T14:32:00 "
                                "--stop 2015-03-06T14:36:00"))
        measures = self.parser.listing(result)
        self.assertEqual([{'group': 'project_id: None, user_id: None',
                           'granularity': '1.0',
                           'timestamp': '2015-03-06T14:33:57+00:00',
                           'value': '43.11'},
                          {'group': 'project_id: None, user_id: None',
                           'granularity': '1.0',
                           'timestamp': '2015-03-06T14:34:12+00:00',
                           'value': '12.0'}], measures)

        # MEASURES GET
        result = self.gnocchi('measures',
                              params=("show metric-name -r metric-res "
                                      "--aggregation mean "
                                      "--start 2015-03-06T14:32:00 "
                                      "--stop 2015-03-06T14:36:00"))
        measures = self.parser.listing(result)
        self.assertEqual([{'granularity': '1.0',
                           'timestamp': '2015-03-06T14:33:57+00:00',
                           'value': '43.11'},
                          {'granularity': '1.0',
                           'timestamp': '2015-03-06T14:34:12+00:00',
                           'value': '12.0'}], measures)

        # BATCHING: payload keyed by resource name then metric name
        measures = json.dumps({'metric-res': {'metric-name': [{
            'timestamp': '2015-03-06T14:34:12', 'value': 12
        }]}})
        tmpfile = tempfile.NamedTemporaryFile(delete=False)
        self.addCleanup(os.remove, tmpfile.name)
        with tmpfile as f:
            f.write(measures.encode('utf8'))
        self.gnocchi('measures', params=("batch-resources-metrics %s" %
                                         tmpfile.name))
        self.gnocchi('measures', params="batch-resources-metrics -",
                     input=measures.encode('utf8'))

        # BATCHING --create-metrics: unknown metric names are created
        measures = json.dumps({'metric-res': {'unknown-metric-name': [{
            'timestamp': '2015-03-06T14:34:12', 'value': 12
        }]}})
        self.gnocchi('measures',
                     params="batch-resources-metrics --create-metrics -",
                     input=measures.encode('utf8'),)

        # LIST
        result = self.gnocchi('metric', params="list")
        metrics = self.parser.listing(result)
        metric_from_list = [p for p in metrics
                            if p['archive_policy/name'] == 'metric-test2'][0]
        for field in ["archive_policy/name", "name"]:
            # FIXME(sileht): add "resource_id" or "resource"
            # when LP#1497171 is fixed
            self.assertEqual(metric[field], metric_from_list[field])

        # DELETE
        result = self.gnocchi('metric',
                              params="delete -r metric-res metric-name")
        self.assertEqual("", result)

        # GET FAIL
        result = self.gnocchi('metric',
                              params="show -r metric-res metric-name",
                              fail_ok=True, merge_stderr=True)
        self.assertFirstLineStartsWith(
            result.split('\n'),
            "Metric metric-name does not exist (HTTP 404)")

        # DELETE FAIL
        result = self.gnocchi('metric',
                              params="delete -r metric-res metric-name",
                              fail_ok=True, merge_stderr=True)
        self.assertFirstLineStartsWith(
            result.split('\n'),
            "Metric metric-name does not exist (HTTP 404)")

        # GET RESOURCE ID (needed for the error message below)
        result = self.gnocchi(
            'resource', params="show -t generic metric-res")
        resource_id = self.details_multiple(result)[0]["id"]

        # DELETE RESOURCE
        result = self.gnocchi('resource', params="delete metric-res")
        self.assertEqual("", result)

        # GET FAIL WITH RESOURCE ERROR: the resource itself 404s now
        result = self.gnocchi('metric',
                              params="show metric-name -r metric-res",
                              fail_ok=True, merge_stderr=True)
        self.assertFirstLineStartsWith(
            result.split('\n'),
            "Resource %s does not exist (HTTP 404)" % resource_id)

View File

@ -1,198 +0,0 @@
# Licensed under the Apache License, Version 2.0 (the "License"); you may
# not use this file except in compliance with the License. You may obtain
# a copy of the License at
#
# http://www.apache.org/licenses/LICENSE-2.0
#
# Unless required by applicable law or agreed to in writing, software
# distributed under the License is distributed on an "AS IS" BASIS, WITHOUT
# WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. See the
# License for the specific language governing permissions and limitations
# under the License.
from oslo_utils import uuidutils
from gnocchiclient.tests.functional import base
class ResourceClientTest(base.ClientTestBase):
    """Functional CLI tests for the ``gnocchi resource`` command."""

    # Resource ids used across the scenario; RESOURCE_ID2 is a
    # non-UUID name to exercise original_resource_id handling.
    RESOURCE_ID = uuidutils.generate_uuid()
    RESOURCE_ID2 = "foo"
    PROJECT_ID = uuidutils.generate_uuid()

    def test_help(self):
        """The resource subcommands all expose help output."""
        self.gnocchi("help", params="resource list")
        self.gnocchi("help", params="resource history")
        self.gnocchi("help", params="resource search")

    def test_resource_scenario(self):
        """Full lifecycle: create, update, show, history, list, search,
        batch delete, and the corresponding failure cases."""
        apname = uuidutils.generate_uuid()
        # Create an archive policy
        self.gnocchi(
            u'archive-policy', params=u"create %s"
            u" -d granularity:1s,points:86400" % apname)

        # CREATE
        result = self.gnocchi(
            u'resource', params=u"create %s --type generic" %
            self.RESOURCE_ID)
        resource = self.details_multiple(result)[0]
        self.assertEqual(self.RESOURCE_ID, resource["id"])
        self.assertEqual('None', resource["project_id"])
        self.assertNotEqual('None', resource["started_at"])

        # CREATE FAIL: same id again conflicts
        result = self.gnocchi('resource',
                              params="create generic -a id:%s" %
                              self.RESOURCE_ID,
                              fail_ok=True, merge_stderr=True)
        self.assertFirstLineStartsWith(
            result.split('\n'),
            "Resource %s already exists (HTTP 409)" % self.RESOURCE_ID)

        # UPDATE: set project_id and attach a new metric "temperature"
        result = self.gnocchi(
            'resource', params=("update -t generic %s -a project_id:%s "
                                "-n temperature:%s" %
                                (self.RESOURCE_ID, self.PROJECT_ID, apname)))
        resource_updated = self.details_multiple(result)[0]
        self.assertEqual(self.RESOURCE_ID, resource_updated["id"])
        self.assertEqual(self.PROJECT_ID, resource_updated["project_id"])
        self.assertEqual(resource["started_at"],
                         resource_updated["started_at"])
        self.assertIn("temperature", resource_updated["metrics"])

        # GET
        result = self.gnocchi(
            'resource', params="show -t generic %s" % self.RESOURCE_ID)
        resource_got = self.details_multiple(result)[0]
        self.assertEqual(self.RESOURCE_ID, resource_got["id"])
        self.assertEqual(self.PROJECT_ID, resource_got["project_id"])
        self.assertEqual(resource["started_at"], resource_got["started_at"])
        self.assertIn("temperature", resource_got["metrics"])

        # HISTORY: one revision before the update, one after
        result = self.gnocchi(
            'resource', params="history --type generic %s" % self.RESOURCE_ID)
        resource_history = self.parser.listing(result)
        self.assertEqual(2, len(resource_history))
        self.assertEqual(self.RESOURCE_ID, resource_history[0]["id"])
        self.assertEqual(self.RESOURCE_ID, resource_history[1]["id"])
        self.assertEqual("None", resource_history[0]["project_id"])
        self.assertEqual(self.PROJECT_ID, resource_history[1]["project_id"])

        # LIST
        result = self.gnocchi('resource', params="list -t generic")
        self.assertIn(self.RESOURCE_ID,
                      [r['id'] for r in self.parser.listing(result)])
        resource_list = [r for r in self.parser.listing(result)
                         if r['id'] == self.RESOURCE_ID][0]
        self.assertEqual(self.RESOURCE_ID, resource_list["id"])
        self.assertEqual(self.PROJECT_ID, resource_list["project_id"])
        self.assertEqual(resource["started_at"], resource_list["started_at"])

        # Search
        result = self.gnocchi('resource',
                              params=("search --type generic "
                                      "'project_id=%s'"
                                      ) % self.PROJECT_ID)
        resource_list = self.parser.listing(result)[0]
        self.assertEqual(self.RESOURCE_ID, resource_list["id"])
        self.assertEqual(self.PROJECT_ID, resource_list["project_id"])
        self.assertEqual(resource["started_at"], resource_list["started_at"])

        # UPDATE with Delete metric; deleting it twice must fail
        result = self.gnocchi(
            'resource', params=("update -t generic %s -a project_id:%s "
                                "-d temperature" %
                                (self.RESOURCE_ID, self.PROJECT_ID)))
        resource_updated = self.details_multiple(result)[0]
        self.assertNotIn("temperature", resource_updated["metrics"])
        result = self.gnocchi(
            'resource', params=("update %s -d temperature" % self.RESOURCE_ID),
            fail_ok=True, merge_stderr=True)
        self.assertFirstLineStartsWith(
            result.split('\n'),
            "Metric name temperature not found")

        # CREATE 2: non-UUID id is stored as original_resource_id
        result = self.gnocchi(
            'resource', params=("create %s -t generic "
                                "-a project_id:%s"
                                ) % (self.RESOURCE_ID2, self.PROJECT_ID))
        resource2 = self.details_multiple(result)[0]
        self.assertEqual(self.RESOURCE_ID2,
                         resource2["original_resource_id"])
        self.assertEqual(self.PROJECT_ID, resource2["project_id"])
        self.assertNotEqual('None', resource2["started_at"])

        # Search + limit + short: paginate past the first resource
        result = self.gnocchi('resource',
                              params=("search "
                                      "-t generic "
                                      "'project_id=%s' "
                                      "--sort started_at:asc "
                                      "--marker %s "
                                      "--limit 1"
                                      ) % (self.PROJECT_ID, self.RESOURCE_ID))
        resource_limit = self.parser.listing(result)[0]
        self.assertEqual(self.RESOURCE_ID2,
                         resource_limit["original_resource_id"])
        self.assertEqual(self.PROJECT_ID, resource_limit["project_id"])
        self.assertEqual(resource2["started_at"], resource_limit["started_at"])

        # DELETE
        result = self.gnocchi('resource',
                              params="delete %s" % self.RESOURCE_ID)
        self.assertEqual("", result)
        result = self.gnocchi('resource',
                              params="delete %s" % self.RESOURCE_ID2)
        self.assertEqual("", result)

        # GET FAIL
        result = self.gnocchi('resource',
                              params="show --type generic %s" %
                              self.RESOURCE_ID,
                              fail_ok=True, merge_stderr=True)
        self.assertFirstLineStartsWith(
            result.split('\n'),
            "Resource %s does not exist (HTTP 404)" % self.RESOURCE_ID)

        # DELETE FAIL
        result = self.gnocchi('resource',
                              params="delete %s" % self.RESOURCE_ID,
                              fail_ok=True, merge_stderr=True)
        self.assertFirstLineStartsWith(
            result.split('\n'),
            "Resource %s does not exist (HTTP 404)" % self.RESOURCE_ID)

        # Create and Batch Delete: first pass deletes 2, second deletes 0
        result1 = self.gnocchi(
            u'resource', params=u"create %s --type generic" %
            self.RESOURCE_ID)
        result2 = self.gnocchi(
            u'resource', params=u"create %s --type generic" %
            self.RESOURCE_ID2)
        resource1 = self.details_multiple(result1)[0]
        resource2 = self.details_multiple(result2)[0]
        self.assertEqual(self.RESOURCE_ID, resource1['id'])
        self.assertEqual(self.RESOURCE_ID2, resource2['original_resource_id'])
        result3 = self.gnocchi(
            'resource batch delete ',
            params=("'id in [%s, %s]' "
                    "-t generic") % (resource1["id"], resource2["id"]))
        resource3 = self.details_multiple(result3)[0]
        self.assertEqual(2, int(resource3["deleted"]))
        result4 = self.gnocchi(
            'resource batch delete ',
            params=("'id in [%s, %s]' "
                    "-t generic") % (resource1["id"], resource2["id"]))
        resource4 = self.details_multiple(result4)[0]
        self.assertEqual(0, int(resource4["deleted"]))

        # LIST EMPTY
        result = self.gnocchi('resource', params="list -t generic")
        resource_ids = [r['id'] for r in self.parser.listing(result)]
        self.assertNotIn(self.RESOURCE_ID, resource_ids)
        self.assertNotIn(self.RESOURCE_ID2, resource_ids)

View File

@ -1,103 +0,0 @@
# Licensed under the Apache License, Version 2.0 (the "License"); you may
# not use this file except in compliance with the License. You may obtain
# a copy of the License at
#
# http://www.apache.org/licenses/LICENSE-2.0
#
# Unless required by applicable law or agreed to in writing, software
# distributed under the License is distributed on an "AS IS" BASIS, WITHOUT
# WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. See the
# License for the specific language governing permissions and limitations
# under the License.
from oslo_utils import uuidutils
from gnocchiclient.tests.functional import base
class ResourceTypeClientTest(base.ClientTestBase):
    """Functional CLI tests for the ``gnocchi resource-type`` command."""

    RESOURCE_TYPE = uuidutils.generate_uuid()
    RESOURCE_ID = uuidutils.generate_uuid()

    def test_help(self):
        """"help resource list" is available."""
        self.gnocchi("help", params="resource list")

    def test_resource_type_scenario(self):
        """Lifecycle of a custom resource type: list, create with typed
        attributes, show, update (drop/add attribute), use it for a
        resource, then delete and verify 404s."""
        # LIST: only the built-in "generic" type exists initially
        result = self.gnocchi('resource-type', params="list")
        r = self.parser.listing(result)
        self.assertEqual([{'attributes': '', 'name': 'generic'}], r)

        # CREATE: "foo" required string (max 16), "bar" optional number
        result = self.gnocchi(
            u'resource-type',
            params=u"create -a foo:string:1:max_length=16 "
            "-a bar:number:no:max=32 %s" % self.RESOURCE_TYPE)
        resource = self.details_multiple(result)[0]
        self.assertEqual(self.RESOURCE_TYPE, resource["name"])
        self.assertEqual(
            "max_length=16, min_length=0, required=True, type=string",
            resource["attributes/foo"])

        # SHOW
        result = self.gnocchi(
            u'resource-type', params=u"show %s" % self.RESOURCE_TYPE)
        resource = self.details_multiple(result)[0]
        self.assertEqual(self.RESOURCE_TYPE, resource["name"])
        self.assertEqual(
            "max_length=16, min_length=0, required=True, type=string",
            resource["attributes/foo"])

        # PATCH: remove "foo", add optional number "new"
        result = self.gnocchi(
            u'resource-type',
            params=u"update -r foo "
            "-a new:number:no:max=16 %s" % self.RESOURCE_TYPE)
        resource = self.details_multiple(result)[0]
        self.assertEqual(self.RESOURCE_TYPE, resource["name"])
        self.assertNotIn("attributes/foo", resource)
        self.assertEqual(
            "max=16, min=None, required=False, type=number",
            resource["attributes/new"])

        # SHOW again reflects the patched schema
        result = self.gnocchi(
            u'resource-type', params=u"show %s" % self.RESOURCE_TYPE)
        resource = self.details_multiple(result)[0]
        self.assertEqual(self.RESOURCE_TYPE, resource["name"])
        self.assertNotIn("attributes/foo", resource)
        self.assertEqual(
            "max=16, min=None, required=False, type=number",
            resource["attributes/new"])

        # Create a resource for this type (numbers render as floats)
        result = self.gnocchi(
            u'resource', params=(u"create %s -t %s -a new:5") %
            (self.RESOURCE_ID, self.RESOURCE_TYPE))
        resource = self.details_multiple(result)[0]
        self.assertEqual(self.RESOURCE_ID, resource["id"])
        self.assertEqual('5.0', resource["new"])

        # Delete the resource (the type cannot be removed while in use)
        self.gnocchi('resource', params="delete %s" % self.RESOURCE_ID)

        # DELETE
        result = self.gnocchi('resource-type',
                              params="delete %s" % self.RESOURCE_TYPE)
        self.assertEqual("", result)

        # DELETE AGAIN: must 404
        result = self.gnocchi('resource-type',
                              params="delete %s" % self.RESOURCE_TYPE,
                              fail_ok=True, merge_stderr=True)
        self.assertFirstLineStartsWith(
            result.split('\n'),
            "Resource type %s does not exist (HTTP 404)" % self.RESOURCE_TYPE)

        # SHOW AGAIN: must 404
        result = self.gnocchi(u'resource-type',
                              params=u"show %s" % self.RESOURCE_TYPE,
                              fail_ok=True, merge_stderr=True)
        self.assertFirstLineStartsWith(
            result.split('\n'),
            "Resource type %s does not exist (HTTP 404)" % self.RESOURCE_TYPE)

View File

@ -1,20 +0,0 @@
# Licensed under the Apache License, Version 2.0 (the "License"); you may
# not use this file except in compliance with the License. You may obtain
# a copy of the License at
#
# http://www.apache.org/licenses/LICENSE-2.0
#
# Unless required by applicable law or agreed to in writing, software
# distributed under the License is distributed on an "AS IS" BASIS, WITHOUT
# WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. See the
# License for the specific language governing permissions and limitations
# under the License.
from gnocchiclient.tests.functional import base
class MetricClientTest(base.ClientTestBase):
    """Functional test for the ``gnocchi status`` command."""

    def test_status_scenario(self):
        """The status report renders exactly two fields."""
        output = self.gnocchi("status")
        status_fields = self.details_multiple(output)[0]
        self.assertEqual(2, len(status_fields))

View File

@ -1,24 +0,0 @@
# -*- encoding: utf-8 -*-
#
# Licensed under the Apache License, Version 2.0 (the "License"); you may
# not use this file except in compliance with the License. You may obtain
# a copy of the License at
#
# http://www.apache.org/licenses/LICENSE-2.0
#
# Unless required by applicable law or agreed to in writing, software
# distributed under the License is distributed on an "AS IS" BASIS, WITHOUT
# WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. See the
# License for the specific language governing permissions and limitations
# under the License.
from oslotest import base
from gnocchiclient import auth
class GnocchiBasicPluginTest(base.BaseTestCase):
    """Unit test for the HTTP basic-auth keystoneauth plugin."""

    def test_get_headers(self):
        plugin = auth.GnocchiBasicPlugin("foobar", "http://localhost")
        headers = plugin.get_headers(None)
        # "foobar:" base64-encoded is "Zm9vYmFyOg=="
        self.assertEqual({'Authorization': 'basic Zm9vYmFyOg=='}, headers)

View File

@ -1,65 +0,0 @@
# -*- encoding: utf-8 -*-
#
# Licensed under the Apache License, Version 2.0 (the "License"); you may
# not use this file except in compliance with the License. You may obtain
# a copy of the License at
#
# http://www.apache.org/licenses/LICENSE-2.0
#
# Unless required by applicable law or agreed to in writing, software
# distributed under the License is distributed on an "AS IS" BASIS, WITHOUT
# WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. See the
# License for the specific language governing permissions and limitations
# under the License.
import json
from oslotest import base
from requests import models
from gnocchiclient import exceptions
class ExceptionsTest(base.BaseTestCase):
    """Unit tests for mapping HTTP responses to gnocchiclient exceptions."""

    @staticmethod
    def _json_response(status_code, payload):
        """Build a requests.Response with the given status and JSON body."""
        response = models.Response()
        response.status_code = status_code
        response.headers['Content-Type'] = "application/json"
        response._content = json.dumps(payload).encode('utf-8')
        return response

    def test_from_response_404(self):
        response = self._json_response(
            404,
            {"description": "Archive policy rule foobar does not exist"})
        exc = exceptions.from_response(response)
        self.assertIsInstance(exc, exceptions.ArchivePolicyRuleNotFound)

    def test_resource_type_before_resource(self):
        # "Resource type ..." must not be mistaken for a plain resource 404.
        response = self._json_response(
            404, {"description": "Resource type foobar does not exist"})
        exc = exceptions.from_response(response)
        self.assertIsInstance(exc, exceptions.ResourceTypeNotFound)

    def test_from_response_keystone_401(self):
        # Keystone wraps its message in an {"error": {...}} envelope.
        response = self._json_response(
            401, {"error": {
                "message": "The request you have made requires authentication.",
                "code": 401, "title": "Unauthorized"}})
        exc = exceptions.from_response(response)
        self.assertIsInstance(exc, exceptions.Unauthorized)
        self.assertEqual("The request you have made requires authentication.",
                         exc.message)

    def test_from_response_unknown_middleware(self):
        # An unrecognized payload falls back to the raw body as message.
        response = self._json_response(400, {"unknown": "random message"})
        exc = exceptions.from_response(response)
        self.assertIsInstance(exc, exceptions.ClientException)
        self.assertEqual('{"unknown": "random message"}', exc.message)

View File

@ -1,113 +0,0 @@
# -*- encoding: utf-8 -*-
#
# Licensed under the Apache License, Version 2.0 (the "License"); you may
# not use this file except in compliance with the License. You may obtain
# a copy of the License at
#
# http://www.apache.org/licenses/LICENSE-2.0
#
# Unless required by applicable law or agreed to in writing, software
# distributed under the License is distributed on an "AS IS" BASIS, WITHOUT
# WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. See the
# License for the specific language governing permissions and limitations
# under the License.
from oslotest import base
from gnocchiclient import utils
class SearchQueryBuilderTest(base.BaseTestCase):
    """Unit tests for utils.search_query_builder and utils.dict_to_querystring."""

    def _do_test(self, expr, expected):
        """Parse ``expr`` and compare the resulting filter dict to ``expected``."""
        req = utils.search_query_builder(expr)
        self.assertEqual(expected, req)

    def test_search_query_builder(self):
        self._do_test('foo=7EED6CC3-EDC8-48C9-8EF6-8A36B9ACC91C',
                      {"=": {"foo": "7EED6CC3-EDC8-48C9-8EF6-8A36B9ACC91C"}})
        self._do_test('foo=7EED6CC3EDC848C98EF68A36B9ACC91C',
                      {"=": {"foo": "7EED6CC3EDC848C98EF68A36B9ACC91C"}})
        self._do_test('foo=bar', {"=": {"foo": "bar"}})
        self._do_test('foo!=1', {"!=": {"foo": 1.0}})
        self._do_test('foo=True', {"=": {"foo": True}})
        self._do_test('foo=null', {"=": {"foo": None}})
        self._do_test('foo="null"', {"=": {"foo": "null"}})
        self._do_test('foo in ["null", "foo"]',
                      {"in": {"foo": ["null", "foo"]}})
        # Fix: the unicode "≠" operator is kept verbatim as the dict key;
        # it had been lost (empty string) in the expected value.
        self._do_test(u'foo="quote" and bar≠1',
                      {"and": [{u"≠": {"bar": 1}},
                               {"=": {"foo": "quote"}}]})
        self._do_test('foo="quote" or bar like "%%foo"',
                      {"or": [{"like": {"bar": "%%foo"}},
                              {"=": {"foo": "quote"}}]})
        self._do_test('not (foo="quote" or bar like "%%foo" or foo="what!" '
                      'or bar="who?")',
                      {"not": {"or": [
                          {"=": {"bar": "who?"}},
                          {"=": {"foo": "what!"}},
                          {"like": {"bar": "%%foo"}},
                          {"=": {"foo": "quote"}},
                      ]}})
        self._do_test('(foo="quote" or bar like "%%foo" or not foo="what!" '
                      'or bar="who?") and cat="meme"',
                      {"and": [
                          {"=": {"cat": "meme"}},
                          {"or": [
                              {"=": {"bar": "who?"}},
                              {"not": {"=": {"foo": "what!"}}},
                              {"like": {"bar": "%%foo"}},
                              {"=": {"foo": "quote"}},
                          ]}
                      ]})
        # "and" binds tighter than "or"
        self._do_test('foo="quote" or bar like "%%foo" or foo="what!" '
                      'or bar="who?" and cat="meme"',
                      {"or": [
                          {"and": [
                              {"=": {"cat": "meme"}},
                              {"=": {"bar": "who?"}},
                          ]},
                          {"=": {"foo": "what!"}},
                          {"like": {"bar": "%%foo"}},
                          {"=": {"foo": "quote"}},
                      ]})
        self._do_test('foo="quote" or bar like "%%foo" and foo="what!" '
                      'or bar="who?" or cat="meme"',
                      {"or": [
                          {"=": {"cat": "meme"}},
                          {"=": {"bar": "who?"}},
                          {"and": [
                              {"=": {"foo": "what!"}},
                              {"like": {"bar": "%%foo"}},
                          ]},
                          {"=": {"foo": "quote"}},
                      ]})

    def test_dict_to_querystring(self):
        """Values are percent-encoded; dict ordering may vary, so accept both."""
        expected = ["start=2016-02-10T13%3A54%3A53%2B00%3A00"
                    "&stop=2016-02-10T13%3A56%3A42%2B02%3A00",
                    "stop=2016-02-10T13%3A56%3A42%2B02%3A00"
                    "&start=2016-02-10T13%3A54%3A53%2B00%3A00"]
        self.assertIn(utils.dict_to_querystring(
            {"start": "2016-02-10T13:54:53+00:00",
             "stop": "2016-02-10T13:56:42+02:00"}),
            expected)
        # List values are repeated as multiple key=value pairs.
        self.assertEqual(
            "groupby=foo&groupby=bar",
            utils.dict_to_querystring({
                "groupby": ["foo", "bar"]
            }),
        )
        self.assertEqual(
            "groupby=foo&groupby=bar&overlap=0",
            utils.dict_to_querystring({
                "groupby": ["foo", "bar"],
                "overlap": 0,
            }),
        )

View File

@ -1,225 +0,0 @@
# -*- encoding: utf-8 -*-
#
# Licensed under the Apache License, Version 2.0 (the "License"); you may
# not use this file except in compliance with the License. You may obtain
# a copy of the License at
#
# http://www.apache.org/licenses/LICENSE-2.0
#
# Unless required by applicable law or agreed to in writing, software
# distributed under the License is distributed on an "AS IS" BASIS, WITHOUT
# WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. See the
# License for the specific language governing permissions and limitations
# under the License.
import pyparsing as pp
import six
from six.moves.urllib import parse as urllib_parse
# Operators of the mini search-query language. The unicode forms
# (≠, ≥, ≤, ∧, ∨) are accepted as aliases of their ASCII equivalents.
# Fix: the unicode operator literals were lost (empty strings) in this
# copy; empty alternatives would make the operator regex match "".
uninary_operators = ("not", )
binary_operator = (u">=", u"<=", u"!=", u">", u"<", u"=", u"==", u"eq", u"ne",
                   u"lt", u"gt", u"ge", u"le", u"in", u"like", u"≠", u"≥",
                   u"≤")
multiple_operators = (u"and", u"or", u"∧", u"∨")

# Terminal tokens of the grammar.
operator = pp.Regex(u"|".join(binary_operator))
null = pp.Regex("None|none|null").setParseAction(pp.replaceWith(None))
boolean = "False|True|false|true"
boolean = pp.Regex(boolean).setParseAction(lambda t: t[0].lower() == "true")
hex_string = lambda n: pp.Word(pp.hexnums, exact=n)
# UUIDs with or without dashes (e.g. resource/metric ids).
uuid_string = pp.Combine(hex_string(8) +
                         (pp.Optional("-") + hex_string(4)) * 3 +
                         pp.Optional("-") + hex_string(12))
number = r"[+-]?\d+(:?\.\d*)?(:?[eE][+-]?\d+)?"
number = pp.Regex(number).setParseAction(lambda t: float(t[0]))
identifier = pp.Word(pp.alphas, pp.alphanums + "_")
quoted_string = pp.QuotedString('"') | pp.QuotedString("'")

# A comparison term is any literal, an identifier, or a bracketed list
# (used with the "in" operator); defined forward for the recursion.
comparison_term = pp.Forward()
in_list = pp.Group(pp.Suppress('[') +
                   pp.Optional(pp.delimitedList(comparison_term)) +
                   pp.Suppress(']'))("list")
comparison_term << (null | boolean | uuid_string | identifier | number |
                    quoted_string | in_list)
condition = pp.Group(comparison_term + operator + comparison_term)

# Boolean expression over conditions; precedence: not > and/∧ > or/∨.
expr = pp.infixNotation(condition, [
    ("not", 1, pp.opAssoc.RIGHT, ),
    ("and", 2, pp.opAssoc.LEFT, ),
    (u"∧", 2, pp.opAssoc.LEFT, ),
    ("or", 2, pp.opAssoc.LEFT, ),
    (u"∨", 2, pp.opAssoc.LEFT, ),
])
def _parsed_query2dict(parsed_query):
    """Convert a pyparsing result tree into the Gnocchi search-API dict.

    Tokens are popped from the END of the ParseResults, so the
    expression is consumed right-to-left; ``result`` accumulates the
    right-hand side already processed.
    """
    result = None
    while parsed_query:
        part = parsed_query.pop()
        if part in binary_operator:
            # the next pop is the attribute name; what we built so far
            # is the comparison value: {"<=": {"attr": value}}
            result = {part: {parsed_query.pop(): result}}
        elif part in multiple_operators:
            # and/or: append further operands to an existing list when
            # the same operator repeats, otherwise start a new one
            if result.get(part):
                result[part].append(
                    _parsed_query2dict(parsed_query.pop()))
            else:
                result = {part: [result]}
        elif part in uninary_operators:
            result = {part: result}
        elif isinstance(part, pp.ParseResults):
            kind = part.getName()
            if kind == "list":
                # bracketed value list, e.g. for the "in" operator
                res = part.asList()
            else:
                # nested (grouped) sub-expression
                res = _parsed_query2dict(part)
            if result is None:
                result = res
            elif isinstance(result, dict):
                # attach the sub-expression as an operand of the
                # boolean operator built just before it
                list(result.values())[0].append(res)
        else:
            # plain literal (string/number/bool/None)
            result = part
    return result
class MalformedQuery(Exception):
    """Raised when a search query string cannot be parsed."""

    def __init__(self, reason):
        message = "Malformed Query: %s" % reason
        super(MalformedQuery, self).__init__(message)
def add_query_argument(cmd, parser):
    """Attach a search-query positional argument to an argparse parser.

    The raw string is converted by ``search_query_builder``, so the
    command receives the already-built query dictionary.

    :param cmd: name of the positional argument to add
    :param parser: argparse/cliff parser to extend
    """
    # NOTE: the "∨" alias had been dropped from the help text by an
    # encoding error; restored so the documented operator list matches
    # the grammar.
    return parser.add_argument(
        cmd,
        help=u"A query to filter resource. "
        u"The syntax is a combination of attribute, operator and value. "
        u"For example: id=90d58eea-70d7-4294-a49a-170dcdf44c3c would filter "
        u"resource with a certain id. More complex queries can be built, "
        u"e.g.: not (flavor_id!=\"1\" and memory>=24). "
        u"Use \"\" to force data to be interpreted as string. "
        u"Supported operators are: not, and, ∧, or, ∨, >=, <=, !=, >, <, =, "
        u"==, eq, ne, lt, gt, ge, le, in, like, ≠, ≥, ≤, like, in.",
        type=search_query_builder)
def search_query_builder(query):
    """Parse a filter expression into the Gnocchi search-API structure.

    :param query: filter expression, e.g. 'id=5 and memory>=24'
    :type query: str
    :raises MalformedQuery: if the string does not match the grammar
    :return: nested dict understood by the Gnocchi search endpoint
    """
    try:
        parsed_query = expr.parseString(query, parseAll=True)[0]
    except pp.ParseException as e:
        raise MalformedQuery(six.text_type(e))
    return _parsed_query2dict(parsed_query)
def list2cols(cols, objs):
    """Project each mapping in objs onto the given columns.

    :return: a (cols, rows) pair suitable for cliff's Lister, each row
             being a tuple of the values of ``cols`` in order
    """
    rows = []
    for obj in objs:
        rows.append(tuple(obj[col] for col in cols))
    return cols, rows
def format_string_list(l):
    """Render an iterable of strings as one comma-separated string."""
    return ", ".join(item for item in l)
def format_dict_list(l):
    """Render a list of dicts as '- k: v, k: v' bullet lines."""
    lines = []
    for elem in l:
        pairs = ", ".join("%s: %s" % (k, v) for k, v in elem.items())
        lines.append("- " + pairs)
    return "\n".join(lines)
def format_dict_dict(value):
    """Render a dict of dicts as '- name: k: v , k: v' bullet lines."""
    lines = []
    for name, elem in value.items():
        # note the historical " , " (space-comma-space) separator
        pairs = " , ".join("%s: %s" % (k, v) for k, v in elem.items())
        lines.append("- %s: " % name + pairs)
    return "\n".join(lines)
def format_move_dict_to_root(obj, field):
    """Flatten obj[field] into obj as 'field/attr' keys, in place.

    The nested ``field`` entry is removed afterwards.
    """
    nested = obj.pop(field)
    for attr, attr_value in nested.items():
        obj["%s/%s" % (field, attr)] = attr_value
def format_resource_type(rt):
    """Format a resource-type dict in place for table display.

    Attributes are flattened to "attributes/<name>" keys, then each
    attribute's property dict is rendered as a "k=v, k=v" string.
    """
    format_move_dict_to_root(rt, "attributes")
    for key in rt:
        if key.startswith("attributes"):
            # each attribute value is assumed to be a dict of properties
            # (e.g. type, required) -- rendered sorted for stable output
            rt[key] = ", ".join(
                "%s=%s" % (k, v) for k, v in sorted(rt[key].items()))
def format_archive_policy(ap):
    """Format an archive policy dict in place for table display."""
    # 'definition' is a list of dicts, 'aggregation_methods' a list of
    # strings; both are rendered as human-readable strings
    ap['definition'] = format_dict_list(ap['definition'])
    ap['aggregation_methods'] = format_string_list(ap['aggregation_methods'])
def format_resource_for_metric(metric):
    """Flatten a metric's embedded resource into 'resource/...' keys.

    Modifies the metric dict in place.
    """
    # NOTE(sileht): Gnocchi < 2.0 does not embed the resource at all
    if 'resource' not in metric:
        return
    if not metric['resource']:
        # resource key present but empty/None: keep an explicit column
        metric['resource/id'] = None
        del metric['resource']
    else:
        format_move_dict_to_root(metric, "resource")
def dict_from_parsed_args(parsed_args, attrs):
    """Collect the non-None listed attributes of parsed_args as a dict.

    :param parsed_args: argparse Namespace (must have every attr)
    :param attrs: attribute names to extract
    """
    return {attr: getattr(parsed_args, attr)
            for attr in attrs
            if getattr(parsed_args, attr) is not None}
def dict_to_querystring(objs):
    """Serialize a dict of scalars/lists into a URL query string.

    None values are skipped; keys are emitted in sorted order and each
    value is percent-quoted.
    """
    parts = []
    for key, values in sorted(objs.items()):
        if values is None:
            continue
        if not isinstance(values, (list, tuple)):
            values = [values]
        quoted = [urllib_parse.quote(six.text_type(v)) for v in values]
        parts.append("&".join("%s=%s" % (key, v) for v in quoted))
    return "&".join(parts)
def get_pagination_options(parsed_args):
    """Collect pagination-related options from parsed CLI arguments.

    'details' and 'history' are only included when the command actually
    declared those flags.
    """
    options = {
        "sorts": parsed_args.sort,
        "limit": parsed_args.limit,
        "marker": parsed_args.marker,
    }
    for opt in ("details", "history"):
        if hasattr(parsed_args, opt):
            options[opt] = getattr(parsed_args, opt)
    return options
def build_pagination_options(details=False, history=False,
                             limit=None, marker=None, sorts=None):
    """Build the query-string fragment for pagination parameters.

    :return: '&'-joined options such as 'limit=10&sort=...' (may be '')
    """
    options = []
    if details:
        options.append("details=true")
    if history:
        options.append("history=true")
    if limit:
        options.append("limit=%d" % limit)
    if marker:
        options.append("marker=%s" % urllib_parse.quote(marker))
    options.extend("sort=%s" % urllib_parse.quote(s)
                   for s in sorts or [])
    return "&".join(options)
def get_client(obj):
    """Return the Gnocchi client attached to a cliff command object.

    Commands loaded through python-openstackclient fetch the client
    from the OSC client manager; the standalone gnocchi shell stores it
    directly on the app.
    """
    app = obj.app
    if hasattr(app, 'client_manager'):
        # NOTE(sileht): cliff objects loaded by OSC
        return app.client_manager.metric
    # TODO(sileht): Remove this when OSC is able
    # to install the gnocchi client binary itself
    return app.client

View File

@ -1,68 +0,0 @@
#
# Licensed under the Apache License, Version 2.0 (the "License"); you may
# not use this file except in compliance with the License. You may obtain
# a copy of the License at
#
# http://www.apache.org/licenses/LICENSE-2.0
#
# Unless required by applicable law or agreed to in writing, software
# distributed under the License is distributed on an "AS IS" BASIS, WITHOUT
# WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. See the
# License for the specific language governing permissions and limitations
# under the License.
from oslo_serialization import jsonutils
from gnocchiclient.v1 import base
class ArchivePolicyManager(base.Manager):
    """CRUD operations on Gnocchi archive policies."""

    url = "v1/archive_policy/"

    def list(self):
        """List archive policies."""
        return self._get(self.url).json()

    def get(self, name):
        """Get an archive policy.

        :param name: Name of the archive policy
        :type name: str
        """
        return self._get(self.url + name).json()

    def create(self, archive_policy):
        """Create an archive policy.

        :param archive_policy: the archive policy
        :type archive_policy: dict
        """
        return self._post(
            self.url, headers={'Content-Type': "application/json"},
            data=jsonutils.dumps(archive_policy)).json()

    def update(self, name, archive_policy):
        """Update an archive policy.

        :param name: the name of archive policy
        :type name: str
        :param archive_policy: the archive policy
        :type archive_policy: dict
        """
        # NOTE: self.url already ends with '/'; the previous
        # "self.url + '/' + name" produced "v1/archive_policy//<name>",
        # a double-slash URL inconsistent with get() and delete().
        return self._patch(
            self.url + name,
            headers={'Content-Type': "application/json"},
            data=jsonutils.dumps(archive_policy)).json()

    def delete(self, name):
        """Delete an archive policy.

        :param name: Name of the archive policy
        :type name: str
        """
        self._delete(self.url + name)

View File

@ -1,127 +0,0 @@
#
# Licensed under the Apache License, Version 2.0 (the "License"); you may
# not use this file except in compliance with the License. You may obtain
# a copy of the License at
#
# http://www.apache.org/licenses/LICENSE-2.0
#
# Unless required by applicable law or agreed to in writing, software
# distributed under the License is distributed on an "AS IS" BASIS, WITHOUT
# WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. See the
# License for the specific language governing permissions and limitations
# under the License.
from cliff import command
from cliff import lister
from cliff import show
from gnocchiclient import utils
class CliArchivePolicyList(lister.Lister):
    """List archive policies"""

    # columns displayed by the lister, in order
    COLS = ('name',
            'back_window', 'definition', 'aggregation_methods')

    def take_action(self, parsed_args):
        policies = utils.get_client(self).archive_policy.list()
        # pretty-print nested fields only for the human-readable table
        # formatter; machine formats keep the raw structures
        if parsed_args.formatter == 'table':
            for ap in policies:
                utils.format_archive_policy(ap)
        return utils.list2cols(self.COLS, policies)
class CliArchivePolicyShow(show.ShowOne):
    """Show an archive policy"""

    def get_parser(self, prog_name):
        parser = super(CliArchivePolicyShow, self).get_parser(prog_name)
        parser.add_argument("name",
                            help="Name of the archive policy")
        return parser

    def take_action(self, parsed_args):
        ap = utils.get_client(self).archive_policy.get(
            name=parsed_args.name)
        # flatten nested fields only for the table formatter
        if parsed_args.formatter == 'table':
            utils.format_archive_policy(ap)
        return self.dict2columns(ap)
def archive_policy_definition(string):
    """Parse an archive-policy definition CLI value.

    Expected format: comma-separated attribute:value pairs, e.g.
    "granularity:1s,points:60".  At least two attributes are required.

    :param string: raw command-line value
    :raises ValueError: on unknown attribute, missing/empty value, or
                        fewer than two attributes
    :return: dict mapping attribute names to their string values
    """
    parts = string.split(",")
    defs = {}
    for part in parts:
        attr, __, value = part.partition(":")
        # str.partition() never returns None -- a missing ':' yields an
        # empty string, which the previous "value is None" check let
        # through silently.  Reject empty values explicitly.
        if (attr not in ['granularity', 'points', 'timespan']
                or not value):
            raise ValueError
        defs[attr] = value
    if len(defs) < 2:
        raise ValueError
    return defs
class CliArchivePolicyWriteBase(show.ShowOne):
    # Shared parser for commands that write an archive policy
    # (create/update): positional name plus repeatable -d definitions.

    def get_parser(self, prog_name):
        parser = super(CliArchivePolicyWriteBase, self).get_parser(prog_name)
        parser.add_argument("name", help="name of the archive policy")
        # each -d is parsed by archive_policy_definition into a dict
        parser.add_argument("-d", "--definition", action='append',
                            required=True, type=archive_policy_definition,
                            metavar="<DEFINITION>",
                            help=("two attributes (separated by ',') of an "
                                  "archive policy definition with its name "
                                  "and value separated with a ':'"))
        return parser
class CliArchivePolicyCreate(CliArchivePolicyWriteBase):
    """Create an archive policy"""

    def get_parser(self, prog_name):
        parser = super(CliArchivePolicyCreate, self).get_parser(prog_name)
        parser.add_argument("-b", "--back-window", dest="back_window",
                            type=int,
                            help="back window of the archive policy")
        # repeatable; collected into a list of method names
        parser.add_argument("-m", "--aggregation-method",
                            action="append",
                            dest="aggregation_methods",
                            help="aggregation method of the archive policy")
        return parser

    def take_action(self, parsed_args):
        # only attributes the user actually supplied are sent
        archive_policy = utils.dict_from_parsed_args(
            parsed_args, ['name', 'back_window', 'aggregation_methods',
                          'definition'])
        ap = utils.get_client(self).archive_policy.create(
            archive_policy=archive_policy)
        if parsed_args.formatter == 'table':
            utils.format_archive_policy(ap)
        return self.dict2columns(ap)
class CliArchivePolicyUpdate(CliArchivePolicyWriteBase):
    """Update an archive policy"""

    def take_action(self, parsed_args):
        archive_policy = utils.dict_from_parsed_args(
            parsed_args, ['definition'])
        # Use utils.get_client() like every other command in this
        # module: self.app.client only exists in the standalone shell,
        # so the previous form broke when loaded through the OSC
        # client manager.
        ap = utils.get_client(self).archive_policy.update(
            name=parsed_args.name, archive_policy=archive_policy)
        if parsed_args.formatter == 'table':
            utils.format_archive_policy(ap)
        return self.dict2columns(ap)
class CliArchivePolicyDelete(command.Command):
    """Delete an archive policy"""

    def get_parser(self, prog_name):
        parser = super(CliArchivePolicyDelete, self).get_parser(prog_name)
        parser.add_argument("name",
                            help="Name of the archive policy")
        return parser

    def take_action(self, parsed_args):
        # no output on success; server errors propagate as exceptions
        utils.get_client(self).archive_policy.delete(name=parsed_args.name)

View File

@ -1,50 +0,0 @@
#
# Licensed under the Apache License, Version 2.0 (the "License"); you may
# not use this file except in compliance with the License. You may obtain
# a copy of the License at
#
# http://www.apache.org/licenses/LICENSE-2.0
#
# Unless required by applicable law or agreed to in writing, software
# distributed under the License is distributed on an "AS IS" BASIS, WITHOUT
# WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. See the
# License for the specific language governing permissions and limitations
# under the License.
from oslo_serialization import jsonutils
from gnocchiclient.v1 import base
class ArchivePolicyRuleManager(base.Manager):
    # REST manager for archive policy rules: metric-name-pattern to
    # archive-policy associations.
    url = "v1/archive_policy_rule/"

    def list(self):
        """List archive policy rules"""
        return self._get(self.url).json()

    def get(self, name):
        """Get an archive policy rule

        :param name: Name of the archive policy rule
        :type name: str
        """
        return self._get(self.url + name).json()

    def create(self, archive_policy_rule):
        """Create an archive policy rule

        :param archive_policy_rule: the archive policy rule
        :type archive_policy_rule: dict
        """
        return self._post(
            self.url, headers={'Content-Type': "application/json"},
            data=jsonutils.dumps(archive_policy_rule)).json()

    def delete(self, name):
        """Delete an archive policy rule

        :param name: Name of the archive policy rule
        :type name: str
        """
        self._delete(self.url + name)

View File

@ -1,79 +0,0 @@
#
# Licensed under the Apache License, Version 2.0 (the "License"); you may
# not use this file except in compliance with the License. You may obtain
# a copy of the License at
#
# http://www.apache.org/licenses/LICENSE-2.0
#
# Unless required by applicable law or agreed to in writing, software
# distributed under the License is distributed on an "AS IS" BASIS, WITHOUT
# WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. See the
# License for the specific language governing permissions and limitations
# under the License.
from cliff import command
from cliff import lister
from cliff import show
from gnocchiclient import utils
class CliArchivePolicyRuleList(lister.Lister):
    """List archive policy rules"""

    # columns displayed by the lister, in order
    COLS = ('name', 'archive_policy_name', 'metric_pattern')

    def take_action(self, parsed_args):
        ap_rules = utils.get_client(self).archive_policy_rule.list()
        return utils.list2cols(self.COLS, ap_rules)
class CliArchivePolicyRuleShow(show.ShowOne):
    """Show an archive policy rule"""

    def get_parser(self, prog_name):
        parser = super(CliArchivePolicyRuleShow, self).get_parser(prog_name)
        parser.add_argument("name",
                            help="Name of the archive policy rule")
        return parser

    def take_action(self, parsed_args):
        ap_rule = utils.get_client(self).archive_policy_rule.get(
            name=parsed_args.name)
        return self.dict2columns(ap_rule)
class CliArchivePolicyRuleCreate(show.ShowOne):
    """Create an archive policy rule"""

    def get_parser(self, prog_name):
        parser = super(CliArchivePolicyRuleCreate, self).get_parser(prog_name)
        parser.add_argument("name",
                            help="Rule name")
        parser.add_argument("-a", "--archive-policy-name",
                            dest="archive_policy_name",
                            required=True,
                            help="Archive policy name")
        # pattern matched against metric names, e.g. "disk.io.*"
        parser.add_argument("-m", "--metric-pattern",
                            dest="metric_pattern", required=True,
                            help="Wildcard of metric name to match")
        return parser

    def take_action(self, parsed_args):
        rule = utils.dict_from_parsed_args(
            parsed_args, ["name", "metric_pattern", "archive_policy_name"])
        policy = utils.get_client(self).archive_policy_rule.create(rule)
        return self.dict2columns(policy)
class CliArchivePolicyRuleDelete(command.Command):
    """Delete an archive policy rule"""

    def get_parser(self, prog_name):
        parser = super(CliArchivePolicyRuleDelete, self).get_parser(prog_name)
        parser.add_argument("name",
                            help="Name of the archive policy rule")
        return parser

    def take_action(self, parsed_args):
        # no output on success; server errors propagate as exceptions
        utils.get_client(self).archive_policy_rule.delete(parsed_args.name)

View File

@ -1,53 +0,0 @@
# Copyright 2012-2015 OpenStack Foundation
# All Rights Reserved.
#
# Licensed under the Apache License, Version 2.0 (the "License"); you may
# not use this file except in compliance with the License. You may obtain
# a copy of the License at
#
# http://www.apache.org/licenses/LICENSE-2.0
#
# Unless required by applicable law or agreed to in writing, software
# distributed under the License is distributed on an "AS IS" BASIS, WITHOUT
# WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. See the
# License for the specific language governing permissions and limitations
# under the License.
import six
class Manager(object):
    """Base class for v1 API resource managers.

    Wraps the low-level session client and injects default headers
    into every request without overriding caller-supplied ones.
    """

    DEFAULT_HEADERS = {
        "Accept": "application/json, */*",
    }

    def __init__(self, client):
        self.client = client

    def _set_default_headers(self, kwargs):
        # Merge DEFAULT_HEADERS into kwargs['headers'], keeping any
        # header the caller already set.
        headers = kwargs.setdefault('headers', {})
        for key, value in six.iteritems(self.DEFAULT_HEADERS):
            headers.setdefault(key, value)
        return kwargs

    def _get(self, *args, **kwargs):
        return self.client.api.get(*args, **self._set_default_headers(kwargs))

    def _post(self, *args, **kwargs):
        return self.client.api.post(*args, **self._set_default_headers(kwargs))

    def _put(self, *args, **kwargs):
        return self.client.api.put(*args, **self._set_default_headers(kwargs))

    def _patch(self, *args, **kwargs):
        return self.client.api.patch(
            *args, **self._set_default_headers(kwargs))

    def _delete(self, *args, **kwargs):
        return self.client.api.delete(
            *args, **self._set_default_headers(kwargs))

View File

@ -1,24 +0,0 @@
#
# Licensed under the Apache License, Version 2.0 (the "License"); you may
# not use this file except in compliance with the License. You may obtain
# a copy of the License at
#
# http://www.apache.org/licenses/LICENSE-2.0
#
# Unless required by applicable law or agreed to in writing, software
# distributed under the License is distributed on an "AS IS" BASIS, WITHOUT
# WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. See the
# License for the specific language governing permissions and limitations
# under the License.
from gnocchiclient.v1 import base
class CapabilitiesManager(base.Manager):
    # Endpoint exposing the features supported by the Gnocchi server.
    cap_url = "v1/capabilities/"

    def list(self):
        """List capabilities

        :return: decoded JSON body of the capabilities endpoint
        """
        return self._get(self.cap_url).json()

View File

@ -1,24 +0,0 @@
#
# Licensed under the Apache License, Version 2.0 (the "License"); you may
# not use this file except in compliance with the License. You may obtain
# a copy of the License at
#
# http://www.apache.org/licenses/LICENSE-2.0
#
# Unless required by applicable law or agreed to in writing, software
# distributed under the License is distributed on an "AS IS" BASIS, WITHOUT
# WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. See the
# License for the specific language governing permissions and limitations
# under the License.
from cliff import show
from gnocchiclient import utils
class CliCapabilitiesList(show.ShowOne):
    """List capabilities"""

    def take_action(self, parsed_args):
        # capabilities come back as a single dict, hence ShowOne
        caps = utils.get_client(self).capabilities.list()
        return self.dict2columns(caps)

View File

@ -1,80 +0,0 @@
# Copyright 2012 OpenStack Foundation
# All Rights Reserved.
#
# Licensed under the Apache License, Version 2.0 (the "License"); you may
# not use this file except in compliance with the License. You may obtain
# a copy of the License at
#
# http://www.apache.org/licenses/LICENSE-2.0
#
# Unless required by applicable law or agreed to in writing, software
# distributed under the License is distributed on an "AS IS" BASIS, WITHOUT
# WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. See the
# License for the specific language governing permissions and limitations
# under the License.
import debtcollector
from debtcollector import removals
import keystoneauth1.session
from gnocchiclient import client
from gnocchiclient.v1 import archive_policy
from gnocchiclient.v1 import archive_policy_rule
from gnocchiclient.v1 import capabilities
from gnocchiclient.v1 import metric
from gnocchiclient.v1 import resource
from gnocchiclient.v1 import resource_type
from gnocchiclient.v1 import status
class Client(object):
    """Client for the Gnocchi v1 API.

    :param session: keystoneauth1 session
    :type session: :py:class:`keystoneauth1.session.Session` (optional)
    :param adapter_options: options to pass to
                            :py:class:`keystoneauth1.adapter.Adapter`
    :type adapter_options: dict (optional)
    :param session_options: options to pass to
                            :py:class:`keystoneauth1.session.Session`
    :type session_options: dict (optional)
    """

    @removals.removed_kwarg('service_type',
                            message="Please use 'adapter_options="
                            "dict(service_type=...)' instead")
    def __init__(self, session=None, service_type=None,
                 adapter_options=None, session_options=None,
                 **kwargs):
        """Initialize a new client for the Gnocchi v1 API."""
        session_options = session_options or {}
        adapter_options = adapter_options or {}
        # default service type used for endpoint discovery
        adapter_options.setdefault('service_type', "metric")
        # NOTE(sileht): Backward compat stuff -- extra kwargs used to be
        # forwarded to the adapter directly; warn and keep doing so
        if kwargs:
            for key in kwargs:
                debtcollector.deprecate(
                    "Using the '%s' argument is deprecated" % key,
                    message="Please use 'adapter_options=dict(%s=...)' "
                    "instead" % key)
            adapter_options.update(kwargs)
        # deprecated positional service_type wins over the default
        if service_type is not None:
            adapter_options['service_type'] = service_type
        if session is None:
            session = keystoneauth1.session.Session(**session_options)
        else:
            # session_options only make sense when we build the session
            if session_options:
                raise ValueError("session and session_options are exclusive")
        self.api = client.SessionClient(session, **adapter_options)
        # one manager per API family, all sharing this client
        self.resource = resource.ResourceManager(self)
        self.resource_type = resource_type.ResourceTypeManager(self)
        self.archive_policy = archive_policy.ArchivePolicyManager(self)
        self.archive_policy_rule = (
            archive_policy_rule.ArchivePolicyRuleManager(self))
        self.metric = metric.MetricManager(self)
        self.capabilities = capabilities.CapabilitiesManager(self)
        self.status = status.StatusManager(self)

View File

@ -1,267 +0,0 @@
#
# Licensed under the Apache License, Version 2.0 (the "License"); you may
# not use this file except in compliance with the License. You may obtain
# a copy of the License at
#
# http://www.apache.org/licenses/LICENSE-2.0
#
# Unless required by applicable law or agreed to in writing, software
# distributed under the License is distributed on an "AS IS" BASIS, WITHOUT
# WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. See the
# License for the specific language governing permissions and limitations
# under the License.
import datetime
import uuid
from oslo_serialization import jsonutils
from gnocchiclient import utils
from gnocchiclient.v1 import base
class MetricManager(base.Manager):
    """REST manager for metrics, their measures and aggregations."""

    metric_url = "v1/metric/"
    # named metrics live under their resource
    resource_url = "v1/resource/generic/%s/metric/"
    metric_batch_url = "v1/batch/metrics/measures"
    resources_batch_url = "v1/batch/resources/metrics/measures"

    def list(self, limit=None, marker=None, sorts=None):
        """List metrics

        :param limit: maximum number of resources to return
        :type limit: int
        :param marker: the last item of the previous page; we return the next
                       results after this value.
        :type marker: str
        :param sorts: list of resource attributes to order by. (example
                      ["user_id:desc-nullslast", "project_id:asc"]
        :type sorts: list of str
        """
        qs = utils.build_pagination_options(False, False, limit, marker,
                                            sorts)
        return self._get("%s?%s" % (self.metric_url[:-1], qs)).json()

    @staticmethod
    def _ensure_metric_is_uuid(metric, attribute="resource_id"):
        # Without a resource_id a metric can only be addressed by UUID;
        # reject plain names early with a helpful error.
        try:
            uuid.UUID(metric)
        except ValueError:
            raise TypeError("%s is required to get a metric by name" %
                            attribute)

    def get(self, metric, resource_id=None):
        """Get a metric

        :param metric: ID or Name of the metric
        :type metric: str
        :param resource_id: ID of the resource (required
                            to get a metric by name)
        :type resource_id: str
        """
        if resource_id is None:
            self._ensure_metric_is_uuid(metric)
            url = self.metric_url + metric
        else:
            url = (self.resource_url % resource_id) + metric
        return self._get(url).json()

    # FIXME(jd): remove refetch_metric when LP#1497171 is fixed
    def create(self, metric, refetch_metric=True):
        """Create a metric

        :param metric: The metric
        :type metric: dict
        """
        resource_id = metric.get('resource_id')
        if resource_id is None:
            metric = self._post(
                self.metric_url, headers={'Content-Type': "application/json"},
                data=jsonutils.dumps(metric)).json()
            # FIXME(sileht): create and get have a
            # different output: LP#1497171
            if refetch_metric:
                return self.get(metric["id"])
            return metric
        metric_name = metric.get('name')
        if metric_name is None:
            raise TypeError("metric_name is required if resource_id is set")
        # the named-metric endpoint expects {name: metric-definition}
        # and must not receive resource_id in the body
        del metric['resource_id']
        metric = {metric_name: metric}
        metric = self._post(
            self.resource_url % resource_id,
            headers={'Content-Type': "application/json"},
            data=jsonutils.dumps(metric))
        return self.get(metric_name, resource_id)

    def delete(self, metric, resource_id=None):
        """Delete a metric

        :param metric: ID or Name of the metric
        :type metric: str
        :param resource_id: ID of the resource (required
                            to delete a metric by name)
        :type resource_id: str
        """
        if resource_id is None:
            self._ensure_metric_is_uuid(metric)
            url = self.metric_url + metric
        else:
            url = self.resource_url % resource_id + metric
        self._delete(url)

    def add_measures(self, metric, measures, resource_id=None):
        """Add measurements to a metric

        :param metric: ID or Name of the metric
        :type metric: str
        :param resource_id: ID of the resource (required
                            to get a metric by name)
        :type resource_id: str
        :param measures: measurements
        :type measures: list of dict(timestamp=timestamp, value=float)
        """
        if resource_id is None:
            self._ensure_metric_is_uuid(metric)
            url = self.metric_url + metric + "/measures"
        else:
            url = self.resource_url % resource_id + metric + "/measures"
        return self._post(
            url, headers={'Content-Type': "application/json"},
            data=jsonutils.dumps(measures))

    def batch_metrics_measures(self, measures):
        """Add measurements to several metrics in one request

        :param measures: measurements
        :type dict(metric_id: list of dict(timestamp=timestamp, value=float))
        """
        return self._post(
            self.metric_batch_url,
            headers={'Content-Type': "application/json"},
            data=jsonutils.dumps(measures))

    def batch_resources_metrics_measures(self, measures, create_metrics=False):
        """Add measurements to named metrics of resources

        :param measures: measurements
        :type dict(resource_id: dict(metric_name:
            list of dict(timestamp=timestamp, value=float)))
        """
        return self._post(
            self.resources_batch_url,
            headers={'Content-Type': "application/json"},
            data=jsonutils.dumps(measures),
            params=dict(create_metrics=create_metrics))

    def get_measures(self, metric, start=None, stop=None, aggregation=None,
                     granularity=None, resource_id=None, refresh=False,
                     resample=None, **kwargs):
        """Get measurements of a metric

        :param metric: ID or Name of the metric
        :type metric: str
        :param start: beginning of the period
        :type start: timestamp
        :param stop: end of the period
        :type stop: timestamp
        :param aggregation: aggregation to retrieve
        :type aggregation: str
        :param granularity: granularity to retrieve (in seconds)
        :type granularity: int
        :param resource_id: ID of the resource (required
                            to get a metric by name)
        :type resource_id: str
        :param refresh: force aggregation of all known measures
        :type refresh: bool
        :param resample: resample measures to new granularity
        :type resample: float

        All other arguments are dedicated to custom aggregation
        method passed as-is to the Gnocchi.
        """
        # the API expects ISO 8601 strings for time bounds
        if isinstance(start, datetime.datetime):
            start = start.isoformat()
        if isinstance(stop, datetime.datetime):
            stop = stop.isoformat()
        params = dict(start=start, stop=stop, aggregation=aggregation,
                      granularity=granularity, refresh=refresh,
                      resample=resample)
        params.update(kwargs)
        if resource_id is None:
            self._ensure_metric_is_uuid(metric)
            url = self.metric_url + metric + "/measures"
        else:
            url = self.resource_url % resource_id + metric + "/measures"
        return self._get(url, params=params).json()

    def aggregation(self, metrics, query=None,
                    start=None, stop=None, aggregation=None,
                    reaggregation=None, granularity=None,
                    needed_overlap=None, resource_type="generic",
                    groupby=None, refresh=False, resample=None, fill=None):
        """Get measurements of aggregated metrics

        :param metrics: IDs of metric or metric name
        :type metrics: list or str
        :param query: The query dictionary
        :type query: dict
        :param start: beginning of the period
        :type start: timestamp
        :param stop: end of the period
        :type stop: timestamp
        :param aggregation: granularity aggregation function to retrieve
        :type aggregation: str
        :param reaggregation: groupby aggregation function to retrieve
        :type reaggregation: str
        :param granularity: granularity to retrieve (in seconds)
        :type granularity: int
        :param needed_overlap: percent of datapoints in each metrics required
        :type needed_overlap: float
        :param resource_type: type of resource for the query
        :type resource_type: str
        :param groupby: list of attribute to group by
        :type groupby: list
        :param refresh: force aggregation of all known measures
        :type refresh: bool
        :param resample: resample measures to new granularity
        :type resample: float
        :param fill: value to use when backfilling missing datapoints
        :type fill: float or 'null'

        See Gnocchi REST API documentation for the format
        of *query dictionary*
        http://docs.openstack.org/developer/gnocchi/rest.html#searching-for-resources
        """
        if isinstance(start, datetime.datetime):
            start = start.isoformat()
        if isinstance(stop, datetime.datetime):
            stop = stop.isoformat()
        params = dict(start=start, stop=stop, aggregation=aggregation,
                      reaggregation=reaggregation, granularity=granularity,
                      needed_overlap=needed_overlap, groupby=groupby,
                      refresh=refresh, resample=resample, fill=fill)
        if query is None:
            # aggregate explicit metric IDs
            for metric in metrics:
                self._ensure_metric_is_uuid(metric)
            params['metric'] = metrics
            return self._get("v1/aggregation/metric",
                             params=params).json()
        else:
            # aggregate the named metric of every resource matching the
            # search query; here `metrics` is a single metric name
            return self._post(
                "v1/aggregation/resource/%s/metric/%s?%s" % (
                    resource_type, metrics,
                    utils.dict_to_querystring(params)),
                headers={'Content-Type': "application/json"},
                data=jsonutils.dumps(query)).json()

View File

@ -1,345 +0,0 @@
#
# Licensed under the Apache License, Version 2.0 (the "License"); you may
# not use this file except in compliance with the License. You may obtain
# a copy of the License at
#
# http://www.apache.org/licenses/LICENSE-2.0
#
# Unless required by applicable law or agreed to in writing, software
# distributed under the License is distributed on an "AS IS" BASIS, WITHOUT
# WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. See the
# License for the specific language governing permissions and limitations
# under the License.
import json
import logging
import sys
from cliff import command
from cliff import lister
from cliff import show
from gnocchiclient import utils
LOG_DEP = logging.getLogger('deprecated')
class CliMetricWithResourceID(command.Command):
    # Base command adding the --resource-id option shared by every
    # metric command that can address a metric by name.

    def get_parser(self, prog_name):
        parser = super(CliMetricWithResourceID, self).get_parser(prog_name)
        parser.add_argument("--resource-id", "-r",
                            help="ID of the resource")
        return parser
class CliMetricList(lister.Lister):
    """List metrics"""

    # columns displayed by the lister, in order
    COLS = ('id', 'archive_policy/name', 'name', 'unit', 'resource_id')

    def get_parser(self, prog_name):
        parser = super(CliMetricList, self).get_parser(prog_name)
        parser.add_argument("--limit", type=int, metavar="<LIMIT>",
                            help="Number of metrics to return "
                            "(Default is server default)")
        parser.add_argument("--marker", metavar="<MARKER>",
                            help="Last item of the previous listing. "
                            "Return the next results after this value")
        parser.add_argument("--sort", action="append", metavar="<SORT>",
                            help="Sort of metric attribute "
                            "(example: user_id:desc-nullslast")
        return parser

    def take_action(self, parsed_args):
        metrics = utils.get_client(self).metric.list(
            **utils.get_pagination_options(parsed_args))
        # flatten the embedded archive policy so it shows as
        # 'archive_policy/name' in the COLS above
        for metric in metrics:
            utils.format_archive_policy(metric["archive_policy"])
            utils.format_move_dict_to_root(metric, "archive_policy")
        return utils.list2cols(self.COLS, metrics)
class DeprecatedCliMetricList(CliMetricList):
    """Deprecated: List metrics"""

    def take_action(self, parsed_args):
        # alias kept for the old command name: warn, then delegate
        LOG_DEP.warning('This command has been deprecated. '
                        'Please use "metric list" instead.')
        return super(DeprecatedCliMetricList, self).take_action(parsed_args)
class CliMetricShow(CliMetricWithResourceID, show.ShowOne):
    """Show a metric"""

    def get_parser(self, prog_name):
        parser = super(CliMetricShow, self).get_parser(prog_name)
        parser.add_argument("metric",
                            help="ID or name of the metric")
        return parser

    def take_action(self, parsed_args):
        metric = utils.get_client(self).metric.get(
            metric=parsed_args.metric,
            resource_id=parsed_args.resource_id)
        # flatten nested archive policy and resource for display
        utils.format_archive_policy(metric["archive_policy"])
        utils.format_move_dict_to_root(metric, "archive_policy")
        utils.format_resource_for_metric(metric)
        return self.dict2columns(metric)
class DeprecatedCliMetricShow(CliMetricShow):
    """Deprecated: Show a metric"""

    def take_action(self, parsed_args):
        # alias kept for the old command name: warn, then delegate
        LOG_DEP.warning('This command has been deprecated. '
                        'Please use "metric show" instead.')
        return super(DeprecatedCliMetricShow, self).take_action(parsed_args)
class CliMetricCreateBase(show.ShowOne, CliMetricWithResourceID):
    # Base for metric-creating commands: collects the common
    # archive-policy/resource options, then hands the partially-built
    # metric dict to the subclass's _take_action().

    def get_parser(self, prog_name):
        parser = super(CliMetricCreateBase, self).get_parser(prog_name)
        parser.add_argument("--archive-policy-name", "-a",
                            dest="archive_policy_name",
                            help="name of the archive policy")
        return parser

    def take_action(self, parsed_args):
        metric = utils.dict_from_parsed_args(parsed_args,
                                             ["archive_policy_name",
                                              "resource_id"])
        return self._take_action(metric, parsed_args)
class CliMetricCreate(CliMetricCreateBase):
    """Create a metric"""

    def get_parser(self, prog_name):
        parser = super(CliMetricCreate, self).get_parser(prog_name)
        # name is optional: unnamed metrics are addressed by UUID only
        parser.add_argument("name", nargs='?',
                            metavar="METRIC_NAME",
                            help="Name of the metric")
        parser.add_argument("--unit", "-u",
                            help="unit of the metric")
        return parser

    def _take_action(self, metric, parsed_args):
        if parsed_args.name:
            metric['name'] = parsed_args.name
        if parsed_args.unit:
            metric['unit'] = parsed_args.unit
        metric = utils.get_client(self).metric.create(metric)
        # flatten nested archive policy and resource for display
        utils.format_archive_policy(metric["archive_policy"])
        utils.format_move_dict_to_root(metric, "archive_policy")
        utils.format_resource_for_metric(metric)
        return self.dict2columns(metric)
class DeprecatedCliMetricCreate(CliMetricCreate):
    """Deprecated: Create a metric"""

    def take_action(self, parsed_args):
        # alias kept for the old command name: warn, then delegate
        LOG_DEP.warning('This command has been deprecated. '
                        'Please use "metric create" instead.')
        return super(DeprecatedCliMetricCreate, self).take_action(parsed_args)
class CliMetricDelete(CliMetricWithResourceID):
    """Delete a metric"""

    def get_parser(self, prog_name):
        parser = super(CliMetricDelete, self).get_parser(prog_name)
        # several metrics can be deleted in one invocation
        parser.add_argument("metric", nargs='+',
                            help="IDs or names of the metric")
        return parser

    def take_action(self, parsed_args):
        for metric in parsed_args.metric:
            utils.get_client(self).metric.delete(
                metric=metric, resource_id=parsed_args.resource_id)
class DeprecatedCliMetricDelete(CliMetricDelete):
    """Deprecated: Delete a metric"""

    def take_action(self, parsed_args):
        # Warn, then delegate to the replacement "metric delete" command.
        LOG_DEP.warning('This command has been deprecated. '
                        'Please use "metric delete" instead.')
        return super(DeprecatedCliMetricDelete, self).take_action(parsed_args)
class CliMeasuresShow(CliMetricWithResourceID, lister.Lister):
    """Get measurements of a metric"""

    # Output columns of the listing.
    COLS = ('timestamp', 'granularity', 'value')

    def get_parser(self, prog_name):
        parser = super(CliMeasuresShow, self).get_parser(prog_name)
        parser.add_argument("metric",
                            help="ID or name of the metric")
        parser.add_argument("--aggregation",
                            help="aggregation to retrieve")
        parser.add_argument("--start",
                            help="beginning of the period")
        parser.add_argument("--stop",
                            help="end of the period")
        parser.add_argument("--granularity",
                            help="granularity to retrieve")
        parser.add_argument("--refresh", action="store_true",
                            help="force aggregation of all known measures")
        parser.add_argument("--resample",
                            help=("granularity to resample time-series to "
                                  "(in seconds)"))
        return parser

    def take_action(self, parsed_args):
        # All CLI options are forwarded verbatim to the client call.
        measures = utils.get_client(self).metric.get_measures(
            metric=parsed_args.metric,
            resource_id=parsed_args.resource_id,
            aggregation=parsed_args.aggregation,
            start=parsed_args.start,
            stop=parsed_args.stop,
            granularity=parsed_args.granularity,
            refresh=parsed_args.refresh,
            resample=parsed_args.resample
        )
        return self.COLS, measures
class CliMeasuresAddBase(CliMetricWithResourceID):
    """Base class adding the positional metric argument."""

    def get_parser(self, prog_name):
        parser = super(CliMeasuresAddBase, self).get_parser(prog_name)
        parser.add_argument("metric", help="ID or name of the metric")
        return parser
class CliMeasuresAdd(CliMeasuresAddBase):
    """Add measurements to a metric"""

    def measure(self, measure):
        """Parse a "<timestamp>@<value>" string into a measure dict.

        rpartition() splits on the *last* '@'; an input without any '@'
        yields an empty timestamp -- presumably rejected server-side,
        TODO confirm.
        """
        timestamp, __, value = measure.rpartition("@")
        return {'timestamp': timestamp, 'value': float(value)}

    def get_parser(self, prog_name):
        parser = super(CliMeasuresAdd, self).get_parser(prog_name)
        # argparse applies self.measure to each -m occurrence.
        parser.add_argument("-m", "--measure", action='append',
                            required=True, type=self.measure,
                            help=("timestamp and value of a measure "
                                  "separated with a '@'"))
        return parser

    def take_action(self, parsed_args):
        utils.get_client(self).metric.add_measures(
            metric=parsed_args.metric,
            resource_id=parsed_args.resource_id,
            measures=parsed_args.measure,
        )
class CliMeasuresBatch(command.Command):
    """Base command for batch-posting measures read from a file or stdin."""

    def stdin_or_file(self, value):
        """Return an open text stream: stdin when *value* is "-", else the
        named file (opened read-only)."""
        if value == "-":
            return sys.stdin
        return open(value, 'r')

    def get_parser(self, prog_name):
        parser = super(CliMeasuresBatch, self).get_parser(prog_name)
        # FIX: the original help text opened a parenthesis that was never
        # closed; the closing ")" has been added.
        parser.add_argument("file", type=self.stdin_or_file,
                            help=("File containing measurements to batch or "
                                  "- for stdin (see Gnocchi REST API docs for "
                                  "the format)"))
        return parser
class CliMetricsMeasuresBatch(CliMeasuresBatch):
    """Batch measures for several metrics from a JSON file or stdin."""

    def take_action(self, parsed_args):
        # parsed_args.file is an already-open stream (see parent class).
        with parsed_args.file as f:
            utils.get_client(self).metric.batch_metrics_measures(json.load(f))
class CliResourcesMetricsMeasuresBatch(CliMeasuresBatch):
    """Batch measures for named metrics of several resources."""

    def get_parser(self, prog_name):
        parser = super(CliResourcesMetricsMeasuresBatch, self).get_parser(
            prog_name)
        # FIX: removed the stray trailing comma after add_argument() which
        # turned the statement into a discarded one-element tuple.
        parser.add_argument("--create-metrics", action='store_true',
                            help="Create unknown metrics")
        return parser

    def take_action(self, parsed_args):
        # parsed_args.file is an already-open stream (see parent class).
        with parsed_args.file as f:
            utils.get_client(self).metric.batch_resources_metrics_measures(
                json.load(f), create_metrics=parsed_args.create_metrics)
class CliMeasuresAggregation(lister.Lister):
    """Get measurements of aggregated metrics"""

    # Output columns of the (ungrouped) listing.
    COLS = ('timestamp', 'granularity', 'value')

    def get_parser(self, prog_name):
        parser = super(CliMeasuresAggregation, self).get_parser(prog_name)
        parser.add_argument("-m", "--metric", nargs='+', required=True,
                            help="metrics IDs or metric name")
        parser.add_argument("--aggregation", help="granularity aggregation "
                            "function to retrieve")
        parser.add_argument("--reaggregation",
                            help="groupby aggregation function to retrieve")
        parser.add_argument("--start",
                            help="beginning of the period")
        parser.add_argument("--stop",
                            help="end of the period")
        parser.add_argument("--granularity",
                            help="granularity to retrieve")
        parser.add_argument("--needed-overlap", type=float,
                            help=("percent of datapoints in each "
                                  "metrics required"))
        utils.add_query_argument("--query", parser)
        # FIX: removed the stray trailing commas after the next two
        # add_argument() calls, which turned the statements into discarded
        # one-element tuples.
        parser.add_argument("--resource-type", default="generic",
                            help="Resource type to query")
        parser.add_argument("--groupby",
                            action='append',
                            help="Attribute to use to group resources")
        parser.add_argument("--refresh", action="store_true",
                            help="force aggregation of all known measures")
        parser.add_argument("--resample",
                            help=("granularity to resample time-series to "
                                  "(in seconds)"))
        parser.add_argument("--fill",
                            help=("Value to use when backfilling timestamps "
                                  "with missing values in a subset of series. "
                                  "Value should be a float or 'null'."))
        return parser

    def take_action(self, parsed_args):
        metrics = parsed_args.metric
        if parsed_args.query:
            # When a resource query is given, exactly one metric *name* is
            # accepted and passed through as a plain string.
            if len(parsed_args.metric) != 1:
                raise ValueError("One metric is required if query is provided")
            metrics = parsed_args.metric[0]
        measures = utils.get_client(self).metric.aggregation(
            metrics=metrics,
            query=parsed_args.query,
            aggregation=parsed_args.aggregation,
            reaggregation=parsed_args.reaggregation,
            start=parsed_args.start,
            stop=parsed_args.stop,
            granularity=parsed_args.granularity,
            needed_overlap=parsed_args.needed_overlap,
            resource_type=parsed_args.resource_type,
            groupby=parsed_args.groupby,
            refresh=parsed_args.refresh,
            resample=parsed_args.resample, fill=parsed_args.fill
        )
        if parsed_args.groupby:
            # Grouped results: flatten each group into rows prefixed with a
            # "key: value, ..." group label column.
            ms = []
            for g in measures:
                group_name = ", ".join("%s: %s" % (k, g['group'][k])
                                       for k in sorted(g['group']))
                for m in g['measures']:
                    i = [group_name]
                    i.extend(m)
                    ms.append(i)
            return ('group',) + self.COLS, ms
        return self.COLS, measures

View File

@ -1,164 +0,0 @@
#
# Licensed under the Apache License, Version 2.0 (the "License"); you may
# not use this file except in compliance with the License. You may obtain
# a copy of the License at
#
# http://www.apache.org/licenses/LICENSE-2.0
#
# Unless required by applicable law or agreed to in writing, software
# distributed under the License is distributed on an "AS IS" BASIS, WITHOUT
# WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. See the
# License for the specific language governing permissions and limitations
# under the License.
from oslo_serialization import jsonutils
from gnocchiclient import utils
from gnocchiclient.v1 import base
class ResourceManager(base.Manager):
    """REST client for the Gnocchi v1 resource endpoints."""

    url = "v1/resource/"

    def list(self, resource_type="generic", details=False, history=False,
             limit=None, marker=None, sorts=None):
        """List resources

        :param resource_type: Type of the resource
        :type resource_type: str
        :param details: Show all attributes of resources
        :type details: bool
        :param history: Show the history of resources
        :type history: bool
        :param limit: maximum number of resources to return
        :type limit: int
        :param marker: the last item of the previous page; we return the next
                       results after this value.
        :type marker: str
        :param sorts: list of resource attributes to order by. (example
                      ["user_id:desc-nullslast", "project_id:asc"])
        :type sorts: list of str
        """
        qs = utils.build_pagination_options(details, history, limit, marker,
                                            sorts)
        url = "%s%s?%s" % (self.url, resource_type, qs)
        return self._get(url).json()

    def get(self, resource_type, resource_id, history=False):
        """Get a resource

        :param resource_type: Type of the resource
        :type resource_type: str
        :param resource_id: ID of the resource
        :type resource_id: str
        :param history: Show the history of the resource
        :type history: bool
        """
        # The "/history" suffix selects the historical view.
        history = "/history" if history else ""
        url = self.url + "%s/%s%s" % (resource_type, resource_id, history)
        return self._get(url).json()

    def history(self, resource_type, resource_id, details=False,
                limit=None, marker=None, sorts=None):
        """Get the history of a resource

        :param resource_type: Type of the resource
        :type resource_type: str
        :param resource_id: ID of the resource
        :type resource_id: str
        :param details: Show all attributes of resources
        :type details: bool
        :param limit: maximum number of resources to return
        :type limit: int
        :param marker: the last item of the previous page; we return the next
                       results after this value.
        :type marker: str
        :param sorts: list of resource attributes to order by. (example
                      ["user_id:desc-nullslast", "project_id:asc"])
        :type sorts: list of str
        """
        qs = utils.build_pagination_options(details, False, limit, marker,
                                            sorts)
        url = "%s%s/%s/history?%s" % (self.url, resource_type, resource_id, qs)
        return self._get(url).json()

    def create(self, resource_type, resource):
        """Create a resource

        :param resource_type: Type of the resource
        :type resource_type: str
        :param resource: Attribute of the resource
        :type resource: dict
        """
        return self._post(
            self.url + resource_type,
            headers={'Content-Type': "application/json"},
            data=jsonutils.dumps(resource)).json()

    def update(self, resource_type, resource_id, resource):
        """Update a resource

        :param resource_type: Type of the resource
        :type resource_type: str
        :param resource_id: ID of the resource
        :type resource_id: str
        :param resource: Attribute of the resource
        :type resource: dict
        """
        return self._patch(
            self.url + resource_type + "/" + resource_id,
            headers={'Content-Type': "application/json"},
            data=jsonutils.dumps(resource)).json()

    def delete(self, resource_id):
        """Delete a resource

        Always deletes through the "generic" type endpoint.

        :param resource_id: ID of the resource
        :type resource_id: str
        """
        self._delete(self.url + "generic/" + resource_id)

    def batch_delete(self, query, resource_type="generic"):
        """Delete a batch of resources based on attribute values

        :param query: query dictionary selecting the resources to delete
        :type query: dict
        :param resource_type: Type of the resource
        :type resource_type: str
        """
        return self._delete(
            self.url + resource_type + "/",
            headers={'Content-Type': "application/json"},
            data=jsonutils.dumps(query)).json()

    def search(self, resource_type="generic", query=None, details=False,
               history=False, limit=None, marker=None, sorts=None):
        """List resources

        :param resource_type: Type of the resource
        :type resource_type: str
        :param query: The query dictionary
        :type query: dict
        :param details: Show all attributes of resources
        :type details: bool
        :param history: Show the history of resources
        :type history: bool
        :param limit: maximum number of resources to return
        :type limit: int
        :param marker: the last item of the previous page; we return the next
                       results after this value.
        :type marker: str
        :param sorts: list of resource attributes to order by. (example
                      ["user_id:desc-nullslast", "project_id:asc"])
        :type sorts: list of str

        See Gnocchi REST API documentation for the format
        of *query dictionary*
        http://docs.openstack.org/developer/gnocchi/rest.html#searching-for-resources
        """
        query = query or {}
        qs = utils.build_pagination_options(details, history, limit, marker,
                                            sorts)
        # Note: search lives under v1/search/, not under self.url.
        url = "v1/search/resource/%s?%s" % (resource_type, qs)
        return self._post(
            url, headers={'Content-Type': "application/json"},
            data=jsonutils.dumps(query)).json()

View File

@ -1,264 +0,0 @@
#
# Licensed under the Apache License, Version 2.0 (the "License"); you may
# not use this file except in compliance with the License. You may obtain
# a copy of the License at
#
# http://www.apache.org/licenses/LICENSE-2.0
#
# Unless required by applicable law or agreed to in writing, software
# distributed under the License is distributed on an "AS IS" BASIS, WITHOUT
# WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. See the
# License for the specific language governing permissions and limitations
# under the License.
from cliff import command
from cliff import lister
from cliff import show
from oslo_utils import strutils
from gnocchiclient import exceptions
from gnocchiclient import utils
class CliResourceList(lister.Lister):
    """List resources"""

    # Default output columns; extra attributes are appended dynamically.
    COLS = ('id', 'type',
            'project_id', 'user_id',
            'original_resource_id',
            'started_at', 'ended_at',
            'revision_start', 'revision_end')

    def get_parser(self, prog_name, history=True):
        parser = super(CliResourceList, self).get_parser(prog_name)
        # FIX: removed stray trailing commas after two add_argument() calls
        # (they made the statements discarded one-element tuples) and closed
        # the unbalanced parenthesis in the --sort help text.
        parser.add_argument("--details", action='store_true',
                            help="Show all attributes of generic resources")
        if history:
            parser.add_argument("--history", action='store_true',
                                help="Show history of the resources")
        parser.add_argument("--limit", type=int, metavar="<LIMIT>",
                            help="Number of resources to return "
                            "(Default is server default)")
        parser.add_argument("--marker", metavar="<MARKER>",
                            help="Last item of the previous listing. "
                            "Return the next results after this value")
        parser.add_argument("--sort", action="append", metavar="<SORT>",
                            help="Sort of resource attribute "
                            "(example: user_id:desc-nullslast)")
        parser.add_argument("--type", "-t", dest="resource_type",
                            default="generic", help="Type of resource")
        return parser

    def _list2cols(self, resources):
        """Return a formatted list of resources."""
        if not resources:
            return self.COLS, []
        # Extend the default columns with any extra keys found on the first
        # resource (e.g. when --details is requested).
        cols = list(self.COLS)
        for k in resources[0]:
            if k not in cols:
                cols.append(k)
        if 'creator' in cols:
            # 'creator' supersedes the legacy created_by_* columns; assumes
            # both legacy keys accompany it -- TODO confirm with the server.
            cols.remove('created_by_user_id')
            cols.remove('created_by_project_id')
        return utils.list2cols(cols, resources)

    def take_action(self, parsed_args):
        resources = utils.get_client(self).resource.list(
            resource_type=parsed_args.resource_type,
            **utils.get_pagination_options(parsed_args))
        # Do not dump metrics because it makes the list way too long
        for r in resources:
            del r['metrics']
        return self._list2cols(resources)
class CliResourceHistory(CliResourceList):
    """Show the history of a resource"""

    def get_parser(self, prog_name):
        # history=False: suppress the parent's --history flag, which is
        # meaningless for this command.
        parser = super(CliResourceHistory, self).get_parser(prog_name,
                                                            history=False)
        parser.add_argument("resource_id",
                            help="ID of a resource")
        return parser

    def take_action(self, parsed_args):
        resources = utils.get_client(self).resource.history(
            resource_type=parsed_args.resource_type,
            resource_id=parsed_args.resource_id,
            **utils.get_pagination_options(parsed_args))
        if parsed_args.formatter == 'table':
            # Flatten each revision's metrics dict for table display.
            return self._list2cols(list(map(normalize_metrics, resources)))
        return self._list2cols(resources)
class CliResourceSearch(CliResourceList):
    """Search resources with specified query rules"""

    def get_parser(self, prog_name):
        parser = super(CliResourceSearch, self).get_parser(prog_name)
        utils.add_query_argument("query", parser)
        return parser

    def take_action(self, parsed_args):
        resources = utils.get_client(self).resource.search(
            resource_type=parsed_args.resource_type,
            query=parsed_args.query,
            **utils.get_pagination_options(parsed_args))
        # Do not dump metrics because it makes the list way too long
        for r in resources:
            del r['metrics']
        return self._list2cols(resources)
def normalize_metrics(res):
    """Flatten a resource's ``metrics`` mapping into a sorted,
    newline-separated "name: id" string (mutates and returns *res*)."""
    rendered = ("%s: %s" % item for item in res['metrics'].items())
    res['metrics'] = "\n".join(sorted(rendered))
    return res
class CliResourceShow(show.ShowOne):
    """Show a resource"""

    def get_parser(self, prog_name):
        parser = super(CliResourceShow, self).get_parser(prog_name)
        parser.add_argument("--type", "-t", dest="resource_type",
                            default="generic", help="Type of resource")
        parser.add_argument("resource_id",
                            help="ID of a resource")
        return parser

    def take_action(self, parsed_args):
        res = utils.get_client(self).resource.get(
            resource_type=parsed_args.resource_type,
            resource_id=parsed_args.resource_id)
        if parsed_args.formatter == 'table':
            # Flatten the metrics dict for table display (mutates res).
            normalize_metrics(res)
        return self.dict2columns(res)
class CliResourceCreate(show.ShowOne):
    """Create a resource"""

    def get_parser(self, prog_name):
        parser = super(CliResourceCreate, self).get_parser(prog_name)
        # FIX: removed stray trailing commas after two add_argument() calls
        # which turned the statements into discarded one-element tuples.
        parser.add_argument("--type", "-t", dest="resource_type",
                            default="generic", help="Type of resource")
        parser.add_argument("resource_id",
                            help="ID of the resource")
        parser.add_argument("-a", "--attribute", action='append',
                            default=[],
                            help=("name and value of an attribute "
                                  "separated with a ':'"))
        parser.add_argument("-m", "--add-metric", action='append',
                            default=[],
                            help="name:id of a metric to add")
        parser.add_argument(
            "-n", "--create-metric", action='append', default=[],
            help="name:archive_policy_name of a metric to create")
        return parser

    def _resource_from_args(self, parsed_args, update=False):
        """Build the resource dict to send to the API.

        :param update: True when called from CliResourceUpdate; keeps the
                       existing metrics as the base and honours
                       --delete-metric, and skips setting the resource id.
        """
        # Fetch the resource-type definition so attribute values can be
        # coerced to their declared types (number/bool).
        rt_attrs = utils.get_client(self).resource_type.get(
            name=parsed_args.resource_type)['attributes']
        resource = {}
        if not update:
            resource['id'] = parsed_args.resource_id
        if parsed_args.attribute:
            for attr in parsed_args.attribute:
                attr, __, value = attr.partition(":")
                attr_type = rt_attrs.get(attr, {}).get('type')
                if attr_type == "number":
                    value = float(value)
                elif attr_type == "bool":
                    value = strutils.bool_from_string(value)
                resource[attr] = value
        if (parsed_args.add_metric
                or parsed_args.create_metric
                or (update and parsed_args.delete_metric)):
            if update:
                # Start from the resource's current metrics so the update
                # only adds/removes the requested entries.
                r = utils.get_client(self).resource.get(
                    parsed_args.resource_type,
                    parsed_args.resource_id)
                default = r['metrics']
                for metric_name in parsed_args.delete_metric:
                    try:
                        del default[metric_name]
                    except KeyError:
                        raise exceptions.MetricNotFound(
                            message="Metric name %s not found" % metric_name)
            else:
                default = {}
            resource['metrics'] = default
            for metric in parsed_args.add_metric:
                name, _, value = metric.partition(":")
                resource['metrics'][name] = value
            for metric in parsed_args.create_metric:
                name, _, value = metric.partition(":")
                # FIX: the original compared with `value is ""`, an identity
                # check that relies on CPython string interning (and warns on
                # Python 3.8+); use a truthiness check instead.
                if not value:
                    # No archive policy given: let the server pick one.
                    resource['metrics'][name] = {}
                else:
                    resource['metrics'][name] = {'archive_policy_name': value}
        return resource

    def take_action(self, parsed_args):
        resource = self._resource_from_args(parsed_args)
        res = utils.get_client(self).resource.create(
            resource_type=parsed_args.resource_type, resource=resource)
        if parsed_args.formatter == 'table':
            # Flatten the metrics dict for table display (mutates res).
            normalize_metrics(res)
        return self.dict2columns(res)
class CliResourceUpdate(CliResourceCreate):
    """Update a resource"""

    def get_parser(self, prog_name):
        parser = super(CliResourceUpdate, self).get_parser(prog_name)
        # FIX: removed the stray trailing comma after add_argument() which
        # turned the statement into a discarded one-element tuple.
        parser.add_argument("-d", "--delete-metric", action='append',
                            default=[],
                            help="Name of a metric to delete")
        return parser

    def take_action(self, parsed_args):
        # update=True keeps existing metrics and honours --delete-metric.
        resource = self._resource_from_args(parsed_args, update=True)
        res = utils.get_client(self).resource.update(
            resource_type=parsed_args.resource_type,
            resource_id=parsed_args.resource_id,
            resource=resource)
        if parsed_args.formatter == 'table':
            # Flatten the metrics dict for table display (mutates res).
            normalize_metrics(res)
        return self.dict2columns(res)
class CliResourceDelete(command.Command):
    """Delete a resource"""

    def get_parser(self, prog_name):
        parser = super(CliResourceDelete, self).get_parser(prog_name)
        parser.add_argument("resource_id",
                            help="ID of the resource")
        return parser

    def take_action(self, parsed_args):
        # Deletion goes through the generic endpoint; no --type needed.
        utils.get_client(self).resource.delete(parsed_args.resource_id)
class CliResourceBatchDelete(show.ShowOne):
    """Delete a batch of resources based on attribute values"""

    def get_parser(self, prog_name):
        parser = super(CliResourceBatchDelete, self).get_parser(prog_name)
        parser.add_argument("--type", "-t", dest="resource_type",
                            default="generic", help="Type of resource")
        utils.add_query_argument("query", parser)
        return parser

    def take_action(self, parsed_args):
        # The server's response (e.g. deletion summary) is shown as columns.
        res = utils.get_client(self).resource.batch_delete(
            resource_type=parsed_args.resource_type,
            query=parsed_args.query)
        return self.dict2columns(res)

View File

@ -1,65 +0,0 @@
#
# Licensed under the Apache License, Version 2.0 (the "License"); you may
# not use this file except in compliance with the License. You may obtain
# a copy of the License at
#
# http://www.apache.org/licenses/LICENSE-2.0
#
# Unless required by applicable law or agreed to in writing, software
# distributed under the License is distributed on an "AS IS" BASIS, WITHOUT
# WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. See the
# License for the specific language governing permissions and limitations
# under the License.
from oslo_serialization import jsonutils
from gnocchiclient.v1 import base
class ResourceTypeManager(base.Manager):
    """REST client for the Gnocchi v1 resource-type endpoints."""

    url = "v1/resource_type/"

    def list(self):
        """List resource types."""
        return self._get(self.url).json()

    def create(self, resource_type):
        """Create a resource type

        :param resource_type: resource type
        :type resource_type: dict
        """
        return self._post(
            self.url,
            headers={'Content-Type': "application/json"},
            data=jsonutils.dumps(resource_type)).json()

    def get(self, name):
        """Get a resource type

        :param name: name of the resource type
        :type name: str
        """
        return self._get(self.url + name,
                         headers={'Content-Type': "application/json"}).json()

    def delete(self, name):
        """Delete a resource type

        :param name: name of the resource type
        :type name: str
        """
        self._delete(self.url + name)

    def update(self, name, operations):
        """Update a resource type

        :param name: name of the resource type
        :type name: str
        :param operations: operations in RFC6902 format
        :type operations: list
        """
        return self._patch(
            self.url + name,
            headers={'Content-Type': "application/json-patch+json"},
            data=jsonutils.dumps(operations)).json()

View File

@ -1,134 +0,0 @@
#
# Licensed under the Apache License, Version 2.0 (the "License"); you may
# not use this file except in compliance with the License. You may obtain
# a copy of the License at
#
# http://www.apache.org/licenses/LICENSE-2.0
#
# Unless required by applicable law or agreed to in writing, software
# distributed under the License is distributed on an "AS IS" BASIS, WITHOUT
# WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. See the
# License for the specific language governing permissions and limitations
# under the License.
from cliff import command
from cliff import lister
from cliff import show
from oslo_utils import strutils
from gnocchiclient import utils
class CliResourceTypeList(lister.Lister):
    """List resource types"""

    # Output columns of the listing.
    COLS = ('name', 'attributes')

    def take_action(self, parsed_args):
        resource_types = utils.get_client(self).resource_type.list()
        for resource_type in resource_types:
            # Render the nested attribute-definition dicts as text.
            resource_type['attributes'] = utils.format_dict_dict(
                resource_type['attributes'])
        return utils.list2cols(self.COLS, resource_types)
class CliResourceTypeCreate(show.ShowOne):
    """Create a resource type"""

    def get_parser(self, prog_name):
        parser = super(CliResourceTypeCreate, self).get_parser(prog_name)
        parser.add_argument("name", help="name of the resource type")
        parser.add_argument("-a", "--attribute", action='append',
                            type=self._resource_attribute,
                            default=[],
                            help=(u"attribute definition, "
                                  u"attribute_name:"
                                  u"attribute_type:"
                                  u"attribute_is_required:"
                                  u"attribute_type_option_name="
                                  u"attribute_type_option_value:\u2026 "
                                  u"For example: "
                                  u"display_name:string:true:max_length=255"))
        return parser

    @classmethod
    def _resource_attribute(cls, value):
        """Parse one --attribute spec into a (name, attrs) pair.

        Format: name[:type[:required[:opt=val[:opt=val...]]]].
        Option values are coerced to int, then float, else kept as strings.
        """
        config = value.split(":")
        name = config.pop(0)
        attrs = {}
        if config:
            attrs["type"] = config.pop(0)
        if config:
            attrs["required"] = strutils.bool_from_string(config.pop(0),
                                                          strict=True)
        while config:
            param, _, value = config.pop(0).partition("=")
            try:
                attrs[param] = int(value)
            except ValueError:
                try:
                    attrs[param] = float(value)
                except ValueError:
                    attrs[param] = value
        return (name, attrs)

    def take_action(self, parsed_args):
        resource_type = {'name': parsed_args.name}
        if parsed_args.attribute:
            resource_type['attributes'] = dict(parsed_args.attribute)
        res = utils.get_client(self).resource_type.create(
            resource_type=resource_type)
        utils.format_resource_type(res)
        return self.dict2columns(res)
class CliResourceTypeUpdate(CliResourceTypeCreate):
    """Update a resource type"""

    def get_parser(self, prog_name):
        parser = super(CliResourceTypeUpdate, self).get_parser(prog_name)
        parser.add_argument("-r", "--remove-attribute", action='append',
                            default=[],
                            help=u"attribute name")
        return parser

    def take_action(self, parsed_args):
        # Translate the CLI options into an RFC 6902 JSON-patch list.
        operations = []
        if parsed_args.attribute:
            for name, attrs in parsed_args.attribute:
                operations.append({'op': 'add',
                                   'path': '/attributes/%s' % name,
                                   'value': attrs})
        if parsed_args.remove_attribute:
            for name in parsed_args.remove_attribute:
                operations.append({'op': 'remove',
                                   'path': '/attributes/%s' % name})
        res = utils.get_client(self).resource_type.update(
            parsed_args.name, operations)
        utils.format_resource_type(res)
        return self.dict2columns(res)
class CliResourceTypeShow(show.ShowOne):
    """Show a resource type"""

    def get_parser(self, prog_name):
        parser = super(CliResourceTypeShow, self).get_parser(prog_name)
        parser.add_argument("name", help="name of the resource type")
        return parser

    def take_action(self, parsed_args):
        res = utils.get_client(self).resource_type.get(name=parsed_args.name)
        # Render nested attribute definitions for display.
        utils.format_resource_type(res)
        return self.dict2columns(res)
class CliResourceTypeDelete(command.Command):
    """Delete a resource type"""

    def get_parser(self, prog_name):
        parser = super(CliResourceTypeDelete, self).get_parser(prog_name)
        parser.add_argument("name", help="name of the resource type")
        return parser

    def take_action(self, parsed_args):
        utils.get_client(self).resource_type.delete(parsed_args.name)

View File

@ -1,21 +0,0 @@
#
# Licensed under the Apache License, Version 2.0 (the "License"); you may
# not use this file except in compliance with the License. You may obtain
# a copy of the License at
#
# http://www.apache.org/licenses/LICENSE-2.0
#
# Unless required by applicable law or agreed to in writing, software
# distributed under the License is distributed on an "AS IS" BASIS, WITHOUT
# WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. See the
# License for the specific language governing permissions and limitations
# under the License.
from gnocchiclient.v1 import base
class StatusManager(base.Manager):
    """REST client for the Gnocchi v1 status endpoint."""

    url = "v1/status"

    def get(self, details=False):
        """Get Gnocchi status.

        :param details: ask the server for detailed status
        :type details: bool
        """
        # The boolean is interpolated as "True"/"False" in the query string.
        return self._get(self.url + '?details=%s' % details).json()

View File

@ -1,30 +0,0 @@
#
# Licensed under the Apache License, Version 2.0 (the "License"); you may
# not use this file except in compliance with the License. You may obtain
# a copy of the License at
#
# http://www.apache.org/licenses/LICENSE-2.0
#
# Unless required by applicable law or agreed to in writing, software
# distributed under the License is distributed on an "AS IS" BASIS, WITHOUT
# WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. See the
# License for the specific language governing permissions and limitations
# under the License.
from cliff import show
from gnocchiclient import utils
class CliStatusShow(show.ShowOne):
    """Show the status of measurements processing"""

    def take_action(self, parsed_args):
        status = utils.get_client(self).status.get()
        # Expose only the two summary counters of the status payload.
        return self.dict2columns({
            "storage/total number of measures to process":
            status['storage']['summary']['measures'],
            "storage/number of metric having measures to process":
            status['storage']['summary']['metrics'],
        })

View File

@ -1,18 +0,0 @@
#
# Licensed under the Apache License, Version 2.0 (the "License"); you may
# not use this file except in compliance with the License. You may obtain
# a copy of the License at
#
# http://www.apache.org/licenses/LICENSE-2.0
#
# Unless required by applicable law or agreed to in writing, software
# distributed under the License is distributed on an "AS IS" BASIS, WITHOUT
# WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. See the
# License for the specific language governing permissions and limitations
# under the License.
#
import pbr.version
__version__ = pbr.version.VersionInfo('gnocchiclient').version_string()

View File

@ -1,12 +0,0 @@
# The order of packages is significant, because pip processes them in the order
# of appearance. Changing the order has an impact on the overall integration
# process, which may cause wedges in the gate later.
pbr>=1.4
cliff>1.16.0 # Apache-2.0
osc-lib>=0.3.0 # Apache-2.0
oslo.serialization>=1.4.0 # Apache-2.0
oslo.utils>=2.0.0 # Apache-2.0
keystoneauth1>=2.0.0
six
futurist

108
setup.cfg
View File

@ -1,108 +0,0 @@
[metadata]
name = gnocchiclient
summary = Python client library for Gnocchi
description-file =
README.rst
author = Gnocchi
home-page = http://gnocchi.xyz/gnocchiclient
classifier =
Intended Audience :: Information Technology
Intended Audience :: System Administrators
License :: OSI Approved :: Apache Software License
Operating System :: POSIX :: Linux
Programming Language :: Python
Programming Language :: Python :: 2
Programming Language :: Python :: 2.7
Programming Language :: Python :: 3
Programming Language :: Python :: 3.5
[files]
packages =
gnocchiclient
[entry_points]
console_scripts =
gnocchi = gnocchiclient.shell:main
keystoneauth1.plugin =
gnocchi-noauth = gnocchiclient.auth:GnocchiNoAuthLoader
gnocchi-basic = gnocchiclient.auth:GnocchiBasicLoader
openstack.cli.extension =
metric = gnocchiclient.osc
openstack.metric.v1 =
# FIXME(sileht): don't duplicate entry with the one in shell.py
metric_status = gnocchiclient.v1.status_cli:CliStatusShow
metric_resource_list = gnocchiclient.v1.resource_cli:CliResourceList
metric_resource_show = gnocchiclient.v1.resource_cli:CliResourceShow
metric_resource_history = gnocchiclient.v1.resource_cli:CliResourceHistory
metric_resource_search = gnocchiclient.v1.resource_cli:CliResourceSearch
metric_resource_create = gnocchiclient.v1.resource_cli:CliResourceCreate
metric_resource_update = gnocchiclient.v1.resource_cli:CliResourceUpdate
metric_resource_delete = gnocchiclient.v1.resource_cli:CliResourceDelete
metric_resource_batch_delete = gnocchiclient.v1.resource_cli:CliResourceBatchDelete
metric_resource-type_list = gnocchiclient.v1.resource_type_cli:CliResourceTypeList
metric_resource-type_create = gnocchiclient.v1.resource_type_cli:CliResourceTypeCreate
metric_resource-type_show = gnocchiclient.v1.resource_type_cli:CliResourceTypeShow
metric_resource-type_update = gnocchiclient.v1.resource_type_cli:CliResourceTypeUpdate
metric_resource-type_delete = gnocchiclient.v1.resource_type_cli:CliResourceTypeDelete
metric_archive-policy_list = gnocchiclient.v1.archive_policy_cli:CliArchivePolicyList
metric_archive-policy_show = gnocchiclient.v1.archive_policy_cli:CliArchivePolicyShow
metric_archive-policy_create = gnocchiclient.v1.archive_policy_cli:CliArchivePolicyCreate
metric_archive-policy_update = gnocchiclient.v1.archive_policy_cli:CliArchivePolicyUpdate
metric_archive-policy_delete = gnocchiclient.v1.archive_policy_cli:CliArchivePolicyDelete
metric_archive-policy-rule_list = gnocchiclient.v1.archive_policy_rule_cli:CliArchivePolicyRuleList
metric_archive-policy-rule_show = gnocchiclient.v1.archive_policy_rule_cli:CliArchivePolicyRuleShow
metric_archive-policy-rule_create = gnocchiclient.v1.archive_policy_rule_cli:CliArchivePolicyRuleCreate
metric_archive-policy-rule_delete = gnocchiclient.v1.archive_policy_rule_cli:CliArchivePolicyRuleDelete
# FIXME(rabel): Deprecate metric_metric entry points
metric_metric_list = gnocchiclient.v1.metric_cli:DeprecatedCliMetricList
metric_metric_show = gnocchiclient.v1.metric_cli:DeprecatedCliMetricShow
metric_metric_create = gnocchiclient.v1.metric_cli:DeprecatedCliMetricCreate
metric_metric_delete = gnocchiclient.v1.metric_cli:DeprecatedCliMetricDelete
metric_list = gnocchiclient.v1.metric_cli:CliMetricList
metric_show = gnocchiclient.v1.metric_cli:CliMetricShow
metric_create = gnocchiclient.v1.metric_cli:CliMetricCreate
metric_delete = gnocchiclient.v1.metric_cli:CliMetricDelete
metric_measures_show = gnocchiclient.v1.metric_cli:CliMeasuresShow
metric_measures_add = gnocchiclient.v1.metric_cli:CliMeasuresAdd
metric_measures_batch-metrics = gnocchiclient.v1.metric_cli:CliMetricsMeasuresBatch
metric_measures_batch-resources-metrics = gnocchiclient.v1.metric_cli:CliResourcesMetricsMeasuresBatch
metric_measures aggregation = gnocchiclient.v1.metric_cli:CliMeasuresAggregation
metric_capabilities list = gnocchiclient.v1.capabilities_cli:CliCapabilitiesList
metric_benchmark metric create = gnocchiclient.benchmark:CliBenchmarkMetricCreate
metric_benchmark metric show = gnocchiclient.benchmark:CliBenchmarkMetricShow
metric_benchmark measures add = gnocchiclient.benchmark:CliBenchmarkMeasuresAdd
metric_benchmark measures show = gnocchiclient.benchmark:CliBenchmarkMeasuresShow
[extras]
test =
coverage>=3.6
python-subunit>=0.0.18
oslotest>=1.10.0 # Apache-2.0
tempest>=10
testrepository>=0.0.18
testtools>=1.4.0
doc =
sphinx!=1.2.0,!=1.3b1,>=1.1.2
sphinx_rtd_theme
openstack-doc-tools>=1.0.1
[build_sphinx]
source-dir = doc/source
build-dir = doc/build
all_files = 1
[upload_sphinx]
upload-dir = doc/build/html
[pbr]
autodoc_index_modules = true
autodoc_exclude_modules =
gnocchiclient.tests.*
[wheel]
universal = 1

View File

@ -1,29 +0,0 @@
# Copyright (c) 2013 Hewlett-Packard Development Company, L.P.
#
# Licensed under the Apache License, Version 2.0 (the "License");
# you may not use this file except in compliance with the License.
# You may obtain a copy of the License at
#
# http://www.apache.org/licenses/LICENSE-2.0
#
# Unless required by applicable law or agreed to in writing, software
# distributed under the License is distributed on an "AS IS" BASIS,
# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or
# implied.
# See the License for the specific language governing permissions and
# limitations under the License.
# THIS FILE IS MANAGED BY THE GLOBAL REQUIREMENTS REPO - DO NOT EDIT
# Boilerplate pbr-based setup script: all packaging metadata (name, version,
# entry points, extras, ...) is read from setup.cfg by pbr at build time.
import setuptools

# In python < 2.7.4, a lazy loading of package `pbr` will break
# setuptools if some other modules registered functions in `atexit`.
# solution from: http://bugs.python.org/issue15881#msg170215
try:
    import multiprocessing  # noqa
except ImportError:
    pass

# pbr=True hands control to pbr, which derives version/AUTHORS/ChangeLog
# from git metadata and everything else from setup.cfg.
setuptools.setup(
    setup_requires=['pbr'],
    pbr=True)

45
tox.ini
View File

@ -1,45 +0,0 @@
# tox configuration for python-gnocchiclient.
[tox]
minversion = 1.6
# Default environments run by a bare `tox` invocation.
envlist = py35,py36,py27,pypy,pep8,docs-gnocchi.xyz
# Install via usedevelop below; skip building an sdist first.
skipsdist = True

# Defaults inherited by every test environment.
[testenv]
usedevelop = True
setenv =
    VIRTUAL_ENV={envdir}
    GNOCCHI_CLIENT_EXEC_DIR={envdir}/bin
# Forward Gnocchi and testr/subunit knobs from the caller's environment.
passenv = GNOCCHI_* OS_TEST_TIMEOUT OS_STDOUT_CAPTURE OS_STDERR_CAPTURE OS_LOG_CAPTURE
# Test deps: this client's [extras] test group, a Gnocchi server tarball
# for functional testing, and pifpaf to spawn it.
deps = .[test]
    http://tarballs.openstack.org/gnocchi/gnocchi-master.tar.gz#egg=gnocchi[postgresql,file]
    pifpaf
# NOTE(tonyb): This project has chosen to *NOT* consume upper-constraints.txt
# pifpaf starts a throw-away Gnocchi instance, then runs the test suite
# against it.
commands = pifpaf run gnocchi -- python setup.py test --slowest --testr-args='{posargs}'

# Static analysis: flake8 on the code, doc8 on the documentation sources.
[testenv:pep8]
deps = hacking<0.13,>=0.12
    doc8>=0.8.0
commands = flake8
    doc8 --ignore-path doc/source/gnocchi.rst --ignore-path-errors doc/source/shell.rst;D000 doc/source

# Generic venv for running arbitrary commands with all deps installed.
[testenv:venv]
deps = .[test,doc]
commands = {posargs}

# Coverage run of the test suite.
[testenv:cover]
commands = python setup.py test --coverage --testr-args='{posargs}'

# Build the documentation published at gnocchi.xyz.
[testenv:docs-gnocchi.xyz]
deps = .[test,doc]
commands =
    python setup.py build_sphinx

# Interactive debugging of a single test under a live Gnocchi.
[testenv:debug]
commands = pifpaf --debug run gnocchi -- oslo_debug_helper {posargs}

[flake8]
show-source = True
ignore =
exclude=.venv,.git,.tox,dist,doc,*lib/python*,*egg,build

# tox-travis mapping: which environments run on Travis CI's Python 3.6 job.
[travis]
python = 3.6: py36, pep8, docs-gnocchi.xyz