Remove Ceilometer API

This removes the deprecated Ceilometer API.

Change-Id: I752b36b3dfe8f935b68c4d3d59ccb5b8b60c582f
Julien Danjou 2017-10-16 11:31:51 +02:00
parent 4b42065069
commit d881dd5228
150 changed files with 23 additions and 15299 deletions

.gitignore
View File

@@ -10,13 +10,9 @@ ChangeLog
cover/*
dist/*
doc/build
doc/source/api/
doc/source/_static/
etc/ceilometer/ceilometer.conf
subunit.log
# Files created by releasenotes build
releasenotes/build
# Files created by api-ref build
api-ref/build

View File

@@ -18,39 +18,6 @@
- openstack-infra/devstack-gate
- openstack/ceilometer
- job:
name: ceilometer-dsvm-tempest-plugin-mongodb
parent: legacy-dsvm-base
run: playbooks/legacy/ceilometer-dsvm-tempest-plugin-mongodb/run
post-run: playbooks/legacy/ceilometer-dsvm-tempest-plugin-mongodb/post
timeout: 7800
required-projects:
- openstack-infra/devstack-gate
- openstack/ceilometer
- openstack/tempest
- job:
name: ceilometer-dsvm-tempest-plugin-mongodb-identity-v3-only
parent: legacy-dsvm-base
run: playbooks/legacy/ceilometer-dsvm-tempest-plugin-mongodb-identity-v3-only/run
post-run: playbooks/legacy/ceilometer-dsvm-tempest-plugin-mongodb-identity-v3-only/post
timeout: 7800
required-projects:
- openstack-infra/devstack-gate
- openstack/ceilometer
- openstack/tempest
- job:
name: ceilometer-dsvm-tempest-plugin-mysql
parent: legacy-dsvm-base
run: playbooks/legacy/ceilometer-dsvm-tempest-plugin-mysql/run
post-run: playbooks/legacy/ceilometer-dsvm-tempest-plugin-mysql/post
timeout: 7800
required-projects:
- openstack-infra/devstack-gate
- openstack/ceilometer
- openstack/tempest
- job:
name: ceilometer-tox-py27-mongodb
parent: legacy-base
@@ -113,8 +80,6 @@
branches: ^stable/newton$
- ceilometer-dsvm-functional-mysql:
branches: ^stable/newton$
- ceilometer-dsvm-tempest-plugin-mongodb
- ceilometer-dsvm-tempest-plugin-mysql
- ceilometer-tox-py27-mongodb:
branches: ^(?!stable/newton)
- ceilometer-tox-py27-mysql:
@@ -127,15 +92,12 @@
- ^(test-|)requirements.txt$
- ^setup.cfg$
- telemetry-dsvm-integration-ceilometer
- ceilometer-dsvm-tempest-plugin-mongodb-identity-v3-only
gate:
jobs:
- ceilometer-dsvm-functional-mongodb:
branches: ^stable/newton$
- ceilometer-dsvm-functional-mysql:
branches: ^stable/newton$
- ceilometer-dsvm-tempest-plugin-mongodb
- ceilometer-dsvm-tempest-plugin-mysql
- ceilometer-tox-py27-mongodb:
branches: ^(?!stable/newton)
- ceilometer-tox-py27-mysql:
@@ -148,4 +110,3 @@
- ^(test-|)requirements.txt$
- ^setup.cfg$
- telemetry-dsvm-integration-ceilometer
- ceilometer-dsvm-tempest-plugin-mongodb-identity-v3-only

View File

@@ -11,9 +11,6 @@ Ceilometer is distributed under the terms of the Apache
License, Version 2.0. The full terms and conditions of this
license are detailed in the LICENSE file.
For more information about Ceilometer APIs, see
https://developer.openstack.org/api-ref-telemetry-v2.html
Release notes are available at
https://releases.openstack.org/teams/telemetry.html

View File

@@ -1,336 +0,0 @@
.. -*- rst -*-
======
Alarms
======
Lists, creates, gets details for, updates, and deletes alarms.
Show alarm details
==================
.. rest_method:: GET /v2/alarms/{alarm_id}
Shows details for an alarm, by alarm ID.
Normal response codes: 200
Error response codes:
Request
-------
.. rest_parameters:: parameters.yaml
- alarm_id: alarm_id_path
Response Parameters
-------------------
.. rest_parameters:: parameters.yaml
- alarm: alarm_response
- alarm_actions: alarm_actions
- alarm_id: alarm_id
- combination_rule: alarm_combination_rule
- description: alarm_description
- enabled: alarm_enabled
- insufficient_data_actions: alarm_insufficient_data_actions
- timestamp: alarm_timestamp
- name: alarm_name
- ok_actions: alarm_ok_actions
- project_id: alarm_project_id
- state_timestamp: alarm_state_timestamp
- threshold_rule: alarm_threshold_rule
- repeat_actions: alarm_repeat_actions
- state: alarm_state
- type: alarm_type
- user_id: user_id
Response Example
----------------
.. literalinclude:: ../samples/alarm-show-response.json
:language: javascript
Update alarm
============
.. rest_method:: PUT /v2/alarms/{alarm_id}
Updates an alarm.
Normal response codes: 200
Error response codes:
Request
-------
.. rest_parameters:: parameters.yaml
- alarm_id: alarm_id_path
- alarm: alarm_request
Response Parameters
-------------------
.. rest_parameters:: parameters.yaml
- alarm: alarm_response
- alarm_actions: alarm_actions
- alarm_id: alarm_id
- combination_rule: alarm_combination_rule
- description: alarm_description
- enabled: alarm_enabled
- insufficient_data_actions: alarm_insufficient_data_actions
- timestamp: alarm_timestamp
- name: alarm_name
- ok_actions: alarm_ok_actions
- project_id: alarm_project_id
- state_timestamp: alarm_state_timestamp
- threshold_rule: alarm_threshold_rule
- repeat_actions: alarm_repeat_actions
- state: alarm_state
- type: alarm_type
- user_id: user_id
Response Example
----------------
.. literalinclude:: ../samples/alarm-show-response.json
:language: javascript
Delete alarm
============
.. rest_method:: DELETE /v2/alarms/{alarm_id}
Deletes an alarm, by alarm ID.
Normal response codes: 204
Request
-------
.. rest_parameters:: parameters.yaml
- alarm_id: alarm_id_path
Update alarm state
==================
.. rest_method:: PUT /v2/alarms/{alarm_id}/state
Sets the state of an alarm.
Normal response codes: 200
Error response codes:
Request
-------
.. rest_parameters:: parameters.yaml
- alarm_id: alarm_id_path
- state: alarm_state
Response Example
----------------
.. literalinclude::
:language: javascript
Show alarm state
================
.. rest_method:: GET /v2/alarms/{alarm_id}/state
Shows the state for an alarm, by alarm ID.
Normal response codes: 200
Error response codes:
Request
-------
.. rest_parameters:: parameters.yaml
- alarm_id: alarm_id_path
Response Example
----------------
.. literalinclude::
:language: javascript
List alarms
===========
.. rest_method:: GET /v2/alarms
Lists alarms, based on a query.
Normal response codes: 200
Error response codes:
Request
-------
.. rest_parameters:: parameters.yaml
- q: q
Response Parameters
-------------------
.. rest_parameters:: parameters.yaml
- alarm_actions: alarm_actions
- ok_actions: ok_actions
- description: description
- timestamp: timestamp
- enabled: enabled
- combination_rule: combination_rule
- state_timestamp: state_timestamp
- threshold_rule: threshold_rule
- alarm_id: alarm_id
- state: state
- insufficient_data_actions: alarm_insufficient_data_actions
- repeat_actions: repeat_actions
- user_id: user_id
- project_id: project_id
- type: type
- name: name
Response Example
----------------
.. literalinclude:: ../samples/alarms-list-response.json
:language: javascript
Create alarm
============
.. rest_method:: POST /v2/alarms
Creates an alarm.
Normal response codes: 200
Error response codes:
Request
-------
.. rest_parameters:: parameters.yaml
- data: data
Response Parameters
-------------------
.. rest_parameters:: parameters.yaml
- alarm: alarm_response
- alarm_actions: alarm_actions
- alarm_id: alarm_id
- combination_rule: alarm_combination_rule
- description: alarm_description
- enabled: alarm_enabled
- insufficient_data_actions: alarm_insufficient_data_actions
- timestamp: alarm_timestamp
- name: alarm_name
- ok_actions: alarm_ok_actions
- project_id: alarm_project_id
- state_timestamp: alarm_state_timestamp
- threshold_rule: alarm_threshold_rule
- repeat_actions: alarm_repeat_actions
- state: alarm_state
- type: alarm_type
- user_id: user_id
Response Example
----------------
.. literalinclude:: ../samples/alarm-show-response.json
:language: javascript
Show alarm history
==================
.. rest_method:: GET /v2/alarms/{alarm_id}/history
Assembles and shows the history for an alarm, by alarm ID.
Normal response codes: 200
Error response codes:
Request
-------
.. rest_parameters:: parameters.yaml
- alarm_id: alarm_id_path
- q: q
Response Example
----------------
.. literalinclude::
:language: javascript
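These endpoints can be exercised with any HTTP client. A minimal sketch using the Python ``requests`` library, assuming a placeholder endpoint ``http://localhost:8777`` and a pre-obtained Keystone token (both values are illustrative, not fixed by this reference):

.. code-block:: python

    import requests

    BASE = "http://localhost:8777/v2"  # placeholder API endpoint
    HEADERS = {"X-Auth-Token": "<keystone-token>"}  # placeholder token
    alarm_id = "<alarm-uuid>"  # placeholder alarm ID

    # Show details for one alarm.
    alarm = requests.get(f"{BASE}/alarms/{alarm_id}", headers=HEADERS).json()
    print(alarm["name"], alarm["state"])

    # Set the alarm state; the request body is the bare JSON string "ok".
    resp = requests.put(f"{BASE}/alarms/{alarm_id}/state",
                        headers=HEADERS, json="ok")
    resp.raise_for_status()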

View File

@@ -1,92 +0,0 @@
.. -*- rst -*-
============
Capabilities
============
Gets information for API and storage capabilities.
The Telemetry service enables you to store samples, events, and
alarm definitions in supported database back ends. The
``capabilities`` resource enables you to list the capabilities that
a database supports.
The ``capabilities`` resource returns a flattened dictionary of
capability properties, each with an associated boolean value. A
value of ``true`` indicates that the corresponding capability is
available in the back end.
You can optionally configure separate database back ends for
samples, events, and alarms definitions. The ``capabilities``
response shows a value of ``true`` to indicate that the definitions
database for samples, events, or alarms is ready to use in a
production environment.
List capabilities
=================
.. rest_method:: GET /v2/capabilities
A representation of the API and storage capabilities. Usually, the storage driver imposes constraints.
Normal response codes: 200
Error response codes:
Request
-------
.. rest_parameters:: parameters.yaml
Response Parameters
-------------------
.. rest_parameters:: parameters.yaml
- statistics:query:complex: statistics:query:complex
- alarms:history:query:simple: alarms:history:query:simple
- meters:query:metadata: meters:query:metadata
- alarms:query:simple: alarms:query:simple
- resources:query:simple: resources:query:simple
- api: api
- statistics:aggregation:selectable:quartile: statistics:aggregation:selectable:quartile
- statistics:query:simple: statistics:query:simple
- statistics:aggregation:selectable:count: statistics:aggregation:selectable:count
- statistics:aggregation:selectable:min: statistics:aggregation:selectable:min
- statistics:aggregation:selectable:sum: statistics:aggregation:selectable:sum
- storage: storage
- alarm_storage: alarm_storage
- statistics:aggregation:selectable:avg: statistics:aggregation:selectable:avg
- meters:query:complex: meters:query:complex
- statistics:groupby: statistics:groupby
- alarms:history:query:complex: alarms:history:query:complex
- meters:query:simple: meters:query:simple
- samples:query:metadata: samples:query:metadata
- statistics:query:metadata: statistics:query:metadata
- storage:production_ready: storage:production_ready
- samples:query:simple: samples:query:simple
- resources:query:metadata: resources:query:metadata
- statistics:aggregation:selectable:max: statistics:aggregation:selectable:max
- samples:query:complex: samples:query:complex
- statistics:aggregation:standard: statistics:aggregation:standard
- events:query:simple: events:query:simple
- statistics:aggregation:selectable:stddev: statistics:aggregation:selectable:stddev
- alarms:query:complex: alarms:query:complex
- statistics:aggregation:selectable:cardinality: statistics:aggregation:selectable:cardinality
- event_storage: event_storage
- resources:query:complex: resources:query:complex
Response Example
----------------
.. literalinclude:: ../samples/capabilities-list-response.json
:language: javascript
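Because the flattened keys map directly to booleans, a client can guard optional features on them. A minimal sketch with ``requests``, assuming the same placeholder endpoint and token as above:

.. code-block:: python

    import requests

    caps = requests.get(
        "http://localhost:8777/v2/capabilities",  # placeholder endpoint
        headers={"X-Auth-Token": "<keystone-token>"},  # placeholder token
    ).json()

    # Attempt a complex sample query only if the back end supports it.
    if caps["api"].get("samples:query:complex", False):
        print("complex sample queries are available")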

View File

@@ -1,273 +0,0 @@
# -*- coding: utf-8 -*-
#
# Licensed under the Apache License, Version 2.0 (the "License"); you may
# not use this file except in compliance with the License. You may obtain
# a copy of the License at
#
# http://www.apache.org/licenses/LICENSE-2.0
#
# Unless required by applicable law or agreed to in writing, software
# distributed under the License is distributed on an "AS IS" BASIS, WITHOUT
# WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. See the
# License for the specific language governing permissions and limitations
# under the License.
#
# ceilometer documentation build configuration file, created by
# sphinx-quickstart on Sat May 1 15:17:47 2010.
#
# This file is execfile()d with the current directory set to
# its containing dir.
#
# Note that not all possible configuration values are present in this
# autogenerated file.
#
# All configuration values have a default; values that are commented out
# serve to show the default.
import os
import subprocess
import sys
import warnings
import openstackdocstheme
html_theme = 'openstackdocs'
html_theme_path = [openstackdocstheme.get_html_theme_path()]
html_theme_options = {
"sidebar_mode": "toc",
}
extensions = [
'os_api_ref',
]
# If extensions (or modules to document with autodoc) are in another directory,
# add these directories to sys.path here. If the directory is relative to the
# documentation root, use os.path.abspath to make it absolute, like shown here.
sys.path.insert(0, os.path.abspath('../../'))
sys.path.insert(0, os.path.abspath('../'))
sys.path.insert(0, os.path.abspath('./'))
# -- General configuration ----------------------------------------------------
# Add any Sphinx extension module names here, as strings. They can be
# extensions coming with Sphinx (named 'sphinx.ext.*') or your custom ones.
# The suffix of source filenames.
source_suffix = '.rst'
# The encoding of source files.
#
# source_encoding = 'utf-8'
# The master toctree document.
master_doc = 'index'
# General information about the project.
project = u'Telemetry API Reference'
copyright = u'2010-present, OpenStack Foundation'
# The version info for the project you're documenting, acts as replacement for
# |version| and |release|, also used in various other places throughout the
# built documents.
#
from ceilometer.version import version_info as ceilometer_version
# The full version, including alpha/beta/rc tags.
release = ceilometer_version.version_string_with_vcs()
# The short X.Y version.
version = ceilometer_version.canonical_version_string()
# Config logABug feature
giturl = (
u'https://git.openstack.org/cgit/openstack/ceilometer/tree/api-ref/source')
# source tree
# html_context allows us to pass arbitrary values into the html template
html_context = {'bug_tag': 'api-ref',
'giturl': giturl,
'bug_project': 'ceilometer'}
# The language for content autogenerated by Sphinx. Refer to documentation
# for a list of supported languages.
#
# language = None
# There are two options for replacing |today|: either, you set today to some
# non-false value, then it is used:
# today = ''
# Else, today_fmt is used as the format for a strftime call.
# today_fmt = '%B %d, %Y'
# The reST default role (used for this markup: `text`) to use
# for all documents.
# default_role = None
# If true, '()' will be appended to :func: etc. cross-reference text.
# add_function_parentheses = True
# If true, the current module name will be prepended to all description
# unit titles (such as .. function::).
add_module_names = False
# If true, sectionauthor and moduleauthor directives will be shown in the
# output. They are ignored by default.
show_authors = False
# The name of the Pygments (syntax highlighting) style to use.
pygments_style = 'sphinx'
# -- Options for man page output ----------------------------------------------
# Grouping the document tree for man pages.
# List of tuples 'sourcefile', 'target', u'title', u'Authors name', 'manual'
# -- Options for HTML output --------------------------------------------------
# The theme to use for HTML and HTML Help pages. Major themes that come with
# Sphinx are currently 'default' and 'sphinxdoc'.
# html_theme_path = ["."]
# html_theme = '_theme'
# Theme options are theme-specific and customize the look and feel of a theme
# further. For a list of options available for each theme, see the
# documentation.
# html_theme_options = {}
# Add any paths that contain custom themes here, relative to this directory.
# html_theme_path = []
# The name for this set of Sphinx documents. If None, it defaults to
# "<project> v<release> documentation".
# html_title = None
# A shorter title for the navigation bar. Default is the same as html_title.
# html_short_title = None
# The name of an image file (relative to this directory) to place at the top
# of the sidebar.
# html_logo = None
# The name of an image file (within the static path) to use as favicon of the
# docs. This file should be a Windows icon file (.ico) being 16x16 or 32x32
# pixels large.
# html_favicon = None
# Add any paths that contain custom static files (such as style sheets) here,
# relative to this directory. They are copied after the builtin static files,
# so a file named "default.css" will overwrite the builtin "default.css".
# html_static_path = ['_static']
# If not '', a 'Last updated on:' timestamp is inserted at every page bottom,
# using the given strftime format.
# html_last_updated_fmt = '%b %d, %Y'
git_cmd = ["git", "log", "--pretty=format:'%ad, commit %h'", "--date=local",
"-n1"]
try:
html_last_updated_fmt = subprocess.check_output(git_cmd).decode('utf-8')
except Exception:
warnings.warn('Cannot get last updated time from git repository. '
'Not setting "html_last_updated_fmt".')
# If true, SmartyPants will be used to convert quotes and dashes to
# typographically correct entities.
# html_use_smartypants = True
# Custom sidebar templates, maps document names to template names.
# html_sidebars = {}
# Additional templates that should be rendered to pages, maps page names to
# template names.
# html_additional_pages = {}
# If false, no module index is generated.
# html_use_modindex = True
# If false, no index is generated.
# html_use_index = True
# If true, the index is split into individual pages for each letter.
# html_split_index = False
# If true, links to the reST sources are added to the pages.
# html_show_sourcelink = True
# If true, an OpenSearch description file will be output, and all pages will
# contain a <link> tag referring to it. The value of this option must be the
# base URL from which the finished HTML is served.
# html_use_opensearch = ''
# If nonempty, this is the file name suffix for HTML files (e.g. ".xhtml").
# html_file_suffix = ''
# Output file base name for HTML help builder.
htmlhelp_basename = 'ceilometerdoc'
# -- Options for LaTeX output -------------------------------------------------
# The paper size ('letter' or 'a4').
# latex_paper_size = 'letter'
# The font size ('10pt', '11pt' or '12pt').
# latex_font_size = '10pt'
# Grouping the document tree into LaTeX files. List of tuples
# (source start file, target name, title, author, documentclass
# [howto/manual]).
latex_documents = [
('index', 'CeilometerReleaseNotes.tex',
u'Ceilometer Release Notes Documentation',
u'Ceilometer Developers', 'manual'),
]
# The name of an image file (relative to this directory) to place at the top of
# the title page.
# latex_logo = None
# For "manual" documents, if this is true, then toplevel headings are parts,
# not chapters.
# latex_use_parts = False
# Additional stuff for the LaTeX preamble.
# latex_preamble = ''
# Documents to append as an appendix to all manuals.
# latex_appendices = []
# If false, no module index is generated.
# latex_use_modindex = True
# (source start file, name, description, authors, manual section).
man_pages = [
('index', 'ceilometerreleasenotes',
u'Ceilometer Release Notes Documentation', [u'Ceilometer Developers'], 1)
]
# If true, show URL addresses after external links.
# man_show_urls = False
# -- Options for Texinfo output -------------------------------------------
# Grouping the document tree into Texinfo files. List of tuples
# (source start file, target name, title, author,
# dir menu entry, description, category)
texinfo_documents = [
('index', 'CeilometerReleaseNotes',
u'Ceilometer Release Notes Documentation',
u'Ceilometer Developers', 'CeilometerReleaseNotes',
'One line description of project.',
'Miscellaneous'),
]
# Documents to append as an appendix to all manuals.
# texinfo_appendices = []
# If false, no module index is generated.
# texinfo_domain_indices = True
# How to display URL addresses: 'footnote', 'no', or 'inline'.
# texinfo_show_urls = 'footnote'
# If true, do not generate a @detailmenu in the "Top" node's menu.
# texinfo_no_detailmenu = False

View File

@@ -1,93 +0,0 @@
.. -*- rst -*-
======
Events
======
Lists all events and shows details for an event.
Show event details
==================
.. rest_method:: GET /v2/events/{message_id}
Shows details for an event.
Normal response codes: 200
Error response codes:
Request
-------
.. rest_parameters:: parameters.yaml
- message_id: message_id_path
Response Parameters
-------------------
.. rest_parameters:: parameters.yaml
- events: events
- raw: event_raw
- generated: event_generated
- event_type: event_type
- message_id: message_id
Response Example
----------------
.. literalinclude:: ../samples/event-show-response.json
:language: javascript
List events
===========
.. rest_method:: GET /v2/events
Lists all events.
Normal response codes: 200
Error response codes:
Request
-------
.. rest_parameters:: parameters.yaml
- q: q
- limit: limit
Response Parameters
-------------------
.. rest_parameters:: parameters.yaml
- events: events
- raw: event_raw
- generated: generated
- event_type: event_type
- message_id: message_id
Response Example
----------------
.. literalinclude:: ../samples/events-list-response.json
:language: javascript
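A minimal sketch of the list call, combining the ``q`` filter and ``limit`` parameters documented above (endpoint and token are placeholders):

.. code-block:: python

    import requests

    events = requests.get(
        "http://localhost:8777/v2/events",  # placeholder endpoint
        headers={"X-Auth-Token": "<keystone-token>"},  # placeholder token
        params={"q.field": "event_type", "q.value": "http.request",
                "limit": 100},
    ).json()
    for event in events:
        print(event["generated"], event["event_type"], event["message_id"])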

View File

@@ -1,8 +0,0 @@
=========================
Ceilometer Release Notes
=========================
.. toctree::
:maxdepth: 1

View File

@@ -1,386 +0,0 @@
.. -*- rst -*-
======
Meters
======
Lists all meters, adds samples to meters, and lists samples for
meters. For list operations, if you do not explicitly set the
``limit`` query parameter, a default limit is applied. The default
limit is the ``default_api_return_limit`` configuration option
value.
Also, computes and lists statistics for samples in a time range.
You can use the ``aggregate`` query parameter in the ``statistics``
URI to explicitly select the ``stddev``, ``cardinality``, or any
other standard function. For example:
::
GET /v2/meters/METER_NAME/statistics?aggregate.func=NAME
&
aggregate.param=VALUE
The ``aggregate.param`` parameter value is optional for all
functions except the ``cardinality`` function.
The API silently ignores any duplicate aggregate function and
parameter pairs.
The API accepts and storage drivers support duplicate functions
with different parameter values. In this example, the
``cardinality`` function is accepted twice with two different
parameter values:
::
GET /v2/meters/METER_NAME/statistics?aggregate.func=cardinality
&
aggregate.param=resource_id
&
aggregate.func=cardinality
&
aggregate.param=project_id
**Examples:**
Use the ``stddev`` function to request the standard deviation of
CPU utilization:
::
GET /v2/meters/cpu_util/statistics?aggregate.func=stddev
The response looks like this:
.. code-block:: json
[
{
"aggregate": {
"stddev": 0.6858829
},
"duration_start": "2014-01-30T11:13:23",
"duration_end": "2014-01-31T16:07:13",
"duration": 104030,
"period": 0,
"period_start": "2014-01-30T11:13:23",
"period_end": "2014-01-31T16:07:13",
"groupby": null,
"unit": "%"
}
]
Use the ``cardinality`` function with the project ID to return the
number of distinct tenants with images:
::
GET /v2/meters/image/statistics?aggregate.func=cardinality
&
aggregate.param=project_id
The following, more complex, example determines:
- The number of distinct instances (``cardinality``)
- The total number of instance samples (``count``) for a tenant in
15-minute intervals (``period`` and ``groupby`` options)
::
GET /v2/meters/instance/statistics?aggregate.func=cardinality
&
aggregate.param=resource_id
&
aggregate.func=count
&
groupby=project_id
&
period=900
The response looks like this:
.. code-block:: json
[
{
"count": 19,
"aggregate": {
"count": 19,
"cardinality/resource_id": 3
},
"duration": 328.47803,
"duration_start": "2014-01-31T10:00:41.823919",
"duration_end": "2014-01-31T10:06:10.301948",
"period": 900,
"period_start": "2014-01-31T10:00:00",
"period_end": "2014-01-31T10:15:00",
"groupby": {
"project_id": "061a5c91811e4044b7dc86c6136c4f99"
},
"unit": "instance"
},
{
"count": 22,
"aggregate": {
"count": 22,
"cardinality/resource_id": 4
},
"duration": 808.00385,
"duration_start": "2014-01-31T10:15:15",
"duration_end": "2014-01-31T10:28:43.003840",
"period": 900,
"period_start": "2014-01-31T10:15:00",
"period_end": "2014-01-31T10:30:00",
"groupby": {
"project_id": "061a5c91811e4044b7dc86c6136c4f99"
},
"unit": "instance"
},
{
"count": 2,
"aggregate": {
"count": 2,
"cardinality/resource_id": 2
},
"duration": 0,
"duration_start": "2014-01-31T10:35:15",
"duration_end": "2014-01-31T10:35:15",
"period": 900,
"period_start": "2014-01-31T10:30:00",
"period_end": "2014-01-31T10:45:00",
"groupby": {
"project_id": "061a5c91811e4044b7dc86c6136c4f99"
},
"unit": "instance"
}
]
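In a client, the repeated ``aggregate.func`` and ``aggregate.param`` pairs are plain duplicate query parameters. A minimal sketch with ``requests``, which preserves duplicate keys when given a list of pairs (endpoint and token are placeholders):

.. code-block:: python

    import requests

    params = [
        ("aggregate.func", "cardinality"),
        ("aggregate.param", "resource_id"),
        ("aggregate.func", "count"),
        ("groupby", "project_id"),
        ("period", "900"),
    ]
    stats = requests.get(
        "http://localhost:8777/v2/meters/instance/statistics",  # placeholder
        headers={"X-Auth-Token": "<keystone-token>"},  # placeholder token
        params=params,  # a list of pairs keeps the duplicate keys intact
    ).json()
    for bucket in stats:
        print(bucket["period_start"], bucket["groupby"], bucket["aggregate"])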
Show meter statistics
=====================
.. rest_method:: GET /v2/meters/{meter_name}/statistics
Computes and lists statistics for samples in a time range.
Normal response codes: 200
Error response codes:
Request
-------
.. rest_parameters:: parameters.yaml
- meter_name: meter_name
- q: q
- groupby: groupby
- period: period
- aggregate: aggregate
- limit: limit
Response Parameters
-------------------
.. rest_parameters:: parameters.yaml
- count: count
- duration_start: duration_start
- min: min
- max: max
- duration_end: duration_end
- period: period
- sum: sum
- duration: duration
- period_end: period_end
- aggregate: aggregate
- period_start: period_start
- avg: avg
- groupby: groupby
- unit: unit
Response Example
----------------
.. literalinclude:: ../samples/statistics-list-response.json
:language: javascript
List meters
===========
.. rest_method:: GET /v2/meters
Lists meters, based on the data recorded so far.
Normal response codes: 200
Error response codes:
Request
-------
.. rest_parameters:: parameters.yaml
- q: q
- limit: limit
Response Parameters
-------------------
.. rest_parameters:: parameters.yaml
- user_id: user_id
- name: name
- resource_id: resource_id
- source: source
- meter_id: meter_id
- project_id: project_id
- type: type
- unit: unit
Response Example
----------------
.. literalinclude:: ../samples/meters-list-response.json
:language: javascript
List samples for meter
======================
.. rest_method:: GET /v2/meters/{meter_name}
Lists samples for a meter, by meter name.
Normal response codes: 200
Error response codes:
Request
-------
.. rest_parameters:: parameters.yaml
- meter_name: meter_name
- q: q
- limit: limit
Response Parameters
-------------------
.. rest_parameters:: parameters.yaml
- user_id: user_id
- resource_id: resource_id
- timestamp: timestamp
- meter: meter
- volume: volume
- source: source
- recorded_at: recorded_at
- project_id: project_id
- type: type
- id: id
- unit: unit
- metadata: metadata
Response Example
----------------
.. literalinclude:: ../samples/samples-list-response.json
:language: javascript
Add samples to meter
====================
.. rest_method:: POST /v2/meters/{meter_name}
Adds samples to a meter, by meter name.
If you attempt to add a sample that is not supported, this call
returns the ``409`` response code.
Normal response codes: 200
Error response codes: 409
Request
-------
.. rest_parameters:: parameters.yaml
- user_id: user_id
- resource_id: resource_id
- timestamp: timestamp
- meter: meter
- volume: volume
- source: source
- recorded_at: recorded_at
- project_id: project_id
- type: type
- id: id
- unit: unit
- metadata: metadata
- meter_name: meter_name
- direct: direct
- samples: samples
Request Example
---------------
.. literalinclude:: ../samples/sample-create-request.json
:language: javascript
Response Parameters
-------------------
.. rest_parameters:: parameters.yaml
- user_id: user_id
- resource_id: resource_id
- timestamp: timestamp
- meter: meter
- volume: volume
- source: source
- recorded_at: recorded_at
- project_id: project_id
- type: type
- id: id
- unit: unit
- metadata: metadata
Response Example
----------------
.. literalinclude:: ../samples/sample-show-response.json
:language: javascript
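A minimal sketch of the POST, assuming field names that follow the request parameters listed above (endpoint, token, and IDs are placeholders, and the exact accepted body shape may vary by deployment):

.. code-block:: python

    import requests

    sample = {
        "meter": "instance",
        "type": "gauge",
        "unit": "instance",
        "volume": 1.0,
        "resource_id": "<resource-uuid>",  # placeholder
        "project_id": "<project-uuid>",  # placeholder
        "user_id": "<user-uuid>",  # placeholder
    }
    resp = requests.post(
        "http://localhost:8777/v2/meters/instance",  # placeholder endpoint
        headers={"X-Auth-Token": "<keystone-token>"},  # placeholder token
        json=[sample],  # the body is a list of samples
    )
    if resp.status_code == 409:
        print("sample not supported for this meter")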

View File

@@ -1,768 +0,0 @@
# variables in header
{}
# variables in path
alarm_id_path:
description: |
The UUID of the alarm.
in: path
required: false
type: string
message_id_path:
description: |
The UUID of the message.
in: path
required: false
type: string
meter_name:
description: |
The name of the meter.
in: path
required: false
type: string
resource_id_path:
description: |
The UUID of the resource.
in: path
required: false
type: string
sample_id:
description: |
The UUID of the sample.
in: path
required: false
type: string
# variables in query
aggregate:
description: |
A list of selectable aggregation functions to apply.
For example:
::
GET /v2/meters/METER_NAME/statistics?aggregate.func=cardinality
&
aggregate.param=resource_id
&
aggregate.func=cardinality
&
aggregate.param=project_id
in: query
required: false
type: object
direct:
description: |
Indicates whether the samples are POSTed
directly to storage. Set ``?direct=True`` to POST the samples
directly to storage.
in: query
required: false
type: string
groupby:
description: |
Fields for group by aggregation.
in: query
required: false
type: object
limit:
description: |
Limits the maximum number of samples that the response returns.
For example:
::
GET /v2/events?limit=1000
in: query
required: false
type: integer
limit_1:
description: |
Requests a page size: at most this many items
are returned. Use the ``limit`` parameter to make an initial
limited request, and use the ID of the last-seen item from the
response as the ``marker`` parameter value in a subsequent
limited request.
in: query
required: false
type: integer
meter_links:
description: |
Set ``?meter_links=1`` to return a self link and
related meter links.
in: query
required: false
type: integer
period:
description: |
The period, in seconds, for which you want
statistics.
in: query
required: false
type: integer
q:
description: |
Filters the response by one or more arguments.
For example: ``?q.field=Foo & q.value=my_text``.
in: query
required: false
type: array
q_1:
description: |
Filters the response by one or more event arguments.
For example:
::
GET /v2/events?q.field=Foo
&
q.value=my_text
in: query
required: false
type: array
samples:
description: |
A list of samples.
in: query
required: false
type: array
# variables in body
alarm_actions:
description: |
The list of actions that the alarm performs.
in: body
required: true
type: array
alarm_combination_rule:
description: |
The rules for the combination alarm type.
in: body
required: true
type: string
alarm_description:
description: |
Describes the alarm.
in: body
required: true
type: string
alarm_enabled:
description: |
If ``true``, evaluation and actioning are enabled
for the alarm.
in: body
required: true
type: boolean
alarm_id:
description: |
The UUID of the alarm.
in: body
required: true
type: string
alarm_insufficient_data_actions:
description: |
The list of actions that the alarm performs when
the alarm state is ``insufficient_data``.
in: body
required: true
type: array
alarm_name:
description: |
The name of the alarm.
in: body
required: true
type: string
alarm_ok_actions:
description: |
The list of actions that the alarm performs when
the alarm state is ``ok``.
in: body
required: true
type: array
alarm_repeat_actions:
description: |
If ``true``, the alarm notifications are
repeated. Otherwise, the alarm actions run only when the alarm
state changes.
in: body
required: true
type: boolean
alarm_request:
description: |
An alarm within the request body.
in: body
required: false
type: string
alarm_state:
description: |
The state of the alarm.
in: body
required: true
type: string
alarm_state_timestamp:
description: |
The date and time of the alarm state.
in: body
required: true
type: string
alarm_storage:
description: |
Defines the capabilities of the storage back
end that persists alarm definitions. A value of ``true`` indicates
that the corresponding capability is available.
in: body
required: true
type: object
alarm_threshold_rule:
description: |
The rules for the threshold alarm type.
in: body
required: true
type: string
alarm_timestamp:
description: |
The date and time of the alarm.
in: body
required: true
type: string
alarm_type:
description: |
The type of the alarm, which is either
``threshold`` or ``combination``.
in: body
required: true
type: string
alarms:history:query:complex:
description: |
If ``true``, the complex query capability for
alarm history is available for the configured database back end.
in: body
required: true
type: boolean
alarms:history:query:simple:
description: |
If ``true``, the simple query capability for
alarm history is available for the configured database back end.
in: body
required: true
type: boolean
alarms:query:complex:
description: |
If ``true``, the complex query capability for
alarm definitions is available for the configured database back
end.
in: body
required: true
type: boolean
alarms:query:simple:
description: |
If ``true``, the simple query capability for
alarm definitions is available for the configured database back
end.
in: body
required: true
type: boolean
api:
description: |
A set of key and value pairs that contain the API
capabilities for the configured storage driver.
in: body
required: true
type: object
avg:
description: |
The average of all volume values in the data.
in: body
required: true
type: number
combination_rule:
description: |
The rules for the combination alarm type.
in: body
required: true
type: string
count:
description: |
The number of samples seen.
in: body
required: true
type: integer
description:
description: |
Describes the alarm.
in: body
required: true
type: string
duration:
description: |
The number of seconds between the oldest and
newest date and time stamp.
in: body
required: true
type: number
duration_end:
description: |
The date and time in UTC format of the query end
time.
in: body
required: true
type: string
duration_start:
description: |
The date and time in UTC format of the query
start time.
in: body
required: true
type: string
event_generated:
description: |
The date and time when the event occurred.
in: body
required: true
type: string
event_raw:
description: |
A dictionary object that stores event messages
for future evaluation.
in: body
required: true
type: object
event_storage:
description: |
The set of capabilities for the storage back
end that persists events. A value of ``true`` indicates that the
corresponding capability is available.
in: body
required: true
type: object
event_type:
description: |
The dotted string that represents the event.
in: body
required: true
type: string
events:
description: |
A list of objects. Each object contains key and
value pairs that describe the event.
in: body
required: true
type: array
events:query:simple:
description: |
If ``true``, the simple query capability for
events is available for the configured database back end.
in: body
required: true
type: boolean
id:
description: |
The UUID of the sample.
in: body
required: true
type: string
links:
description: |
A list that contains a self link and associated
meter links.
in: body
required: true
type: array
max:
description: |
The maximum volume seen in the data.
in: body
required: true
type: number
message_id:
description: |
The UUID of the message.
in: body
required: true
type: string
metadata:
description: |
An arbitrary set of one or more metadata key and
value pairs that are associated with the sample.
in: body
required: true
type: object
metadata_1:
description: |
A set of one or more arbitrary metadata key and
value pairs that are associated with the resource.
in: body
required: true
type: object
meter:
description: |
The meter name.
in: body
required: true
type: string
meter_id:
description: |
The UUID of the meter.
in: body
required: true
type: string
meters:query:complex:
description: |
If ``true``, the complex query capability for
meters is available for the configured database back end.
in: body
required: true
type: boolean
meters:query:metadata:
description: |
If ``true``, the simple query capability for the
metadata of meters is available for the configured database back
end.
in: body
required: true
type: boolean
meters:query:simple:
description: |
If ``true``, the simple query capability for
meters is available for the configured database back end.
in: body
required: true
type: boolean
min:
description: |
The minimum volume seen in the data.
in: body
required: true
type: number
name:
description: |
The name of the alarm.
in: body
required: true
type: string
name_1:
description: |
The meter name.
in: body
required: true
type: string
period_end:
description: |
The period end date and time in UTC format.
in: body
required: true
type: string
period_start:
description: |
The period start date and time in UTC format.
in: body
required: true
type: string
project_id:
description: |
The UUID of the project or tenant that owns the
resource.
in: body
required: true
type: string
project_id_1:
description: |
The UUID of the project.
in: body
required: true
type: string
project_id_2:
description: |
The UUID of the owning project or tenant.
in: body
required: true
type: string
recorded_at:
description: |
The date and time when the sample was recorded.
in: body
required: true
type: string
measurement_resource_id:
description: |
The UUID of the resource for which the
measurements are taken.
in: body
required: true
type: string
resource:
description: |
The resource.
in: body
required: true
type: object
resource_id:
description: |
The UUID of the resource.
in: body
required: true
type: string
resources:
description: |
List of the resources.
in: body
required: true
type: array
resources:query:complex:
description: |
If ``true``, the complex query capability for
resources is available for the configured database back end.
in: body
required: true
type: boolean
resources:query:metadata:
description: |
If ``true``, the simple query capability for the
metadata of resources is available for the configured database
back end.
in: body
required: true
type: boolean
resources:query:simple:
description: |
If ``true``, the simple query capability for
resources is available for the configured database back end.
in: body
required: true
type: boolean
samples:query:complex:
description: |
If ``true``, the complex query capability for
samples is available for the configured database back end.
in: body
required: true
type: boolean
samples:query:metadata:
description: |
If ``true``, the simple query capability for the
metadata of samples is available for the configured database back
end.
in: body
required: true
type: boolean
samples:query:simple:
description: |
If ``true``, the simple query capability for
samples is available for the configured database back end.
in: body
required: true
type: boolean
source:
description: |
The name of the source that identifies where the
sample comes from.
in: body
required: true
type: string
source_1:
description: |
The name of the source from which the meter came.
in: body
required: true
type: string
source_2:
description: |
The name of the source from which the resource
came.
in: body
required: true
type: string
state:
description: |
The state of the alarm.
in: body
required: true
type: string
statistics:aggregation:selectable:avg:
description: |
If ``true``, the ``avg`` capability is available
for the configured database back end. Use the ``avg`` capability
to get average values for samples.
in: body
required: true
type: boolean
statistics:aggregation:selectable:cardinality:
description: |
If ``true``, the ``cardinality`` capability is
available for the configured database back end. Use the
``cardinality`` capability to get cardinality for samples.
in: body
required: true
type: boolean
statistics:aggregation:selectable:count:
description: |
If ``true``, the ``count`` capability is
available for the configured database back end. Use the ``count``
capability to calculate the number of samples for a query.
in: body
required: true
type: boolean
statistics:aggregation:selectable:max:
description: |
If ``true``, the ``max`` capability is available
for the configured database back end. Use the ``max`` capability
to calculate the maximum value for a query.
in: body
required: true
type: boolean
statistics:aggregation:selectable:min:
description: |
If ``true``, the ``min`` capability is available
for the configured database back end. Use the ``min`` capability
to calculate the minimum value for a query.
in: body
required: true
type: boolean
statistics:aggregation:selectable:quartile:
description: |
If ``true``, the ``quartile`` capability is
available for the configured database back end. Use the
``quartile`` capability to calculate the quartile of sample
volumes for a query.
in: body
required: true
type: boolean
statistics:aggregation:selectable:stddev:
description: |
If ``true``, the ``stddev`` capability is
available for the configured database back end. Use the ``stddev``
capability to calculate the standard deviation of sample volumes
for a query.
in: body
required: true
type: boolean
statistics:aggregation:selectable:sum:
description: |
If ``true``, the ``sum`` capability is available
for the configured database back end. Use the ``sum`` capability
to calculate the sum of sample volumes for a query.
in: body
required: true
type: boolean
statistics:aggregation:standard:
description: |
If ``true``, the ``standard`` set of aggregation
capability is available for the configured database back end.
in: body
required: true
type: boolean
statistics:groupby:
description: |
If ``true``, the ``groupby`` capability is
available for calculating statistics for the configured database
back end.
in: body
required: true
type: boolean
statistics:query:complex:
description: |
If ``true``, the complex query capability for
statistics is available for the configured database back end.
in: body
required: true
type: boolean
statistics:query:metadata:
description: |
If ``true``, the simple query capability for the
sample metadata that is used to calculate statistics is available
for the configured database back end.
in: body
required: true
type: boolean
statistics:query:simple:
description: |
If ``true``, the simple query capability for
statistics is available for the configured database back end.
in: body
required: true
type: boolean
storage:
description: |
The set of capabilities for the storage back
end that persists samples. A value of ``true`` indicates that the
corresponding capability is available.
in: body
required: true
type: object
storage:production_ready:
description: |
If ``true``, the database back end is ready to
use in a production environment.
in: body
required: true
type: boolean
sum:
description: |
The total of all of the volume values seen in the
data.
in: body
required: true
type: number
timestamp:
description: |
The date and time in UTC format when the
measurement was made.
in: body
required: true
type: string
timestamp_1:
description: |
The date and time of the alarm.
in: body
required: true
type: string
type:
description: |
The meter type.
in: body
required: true
type: string
type_2:
description: |
The meter type. The value is one of ``gauge``,
``delta``, or ``cumulative``.
in: body
required: true
type: string
unit:
description: |
The unit of measure for the ``volume`` value.
in: body
required: true
type: string
unit_1:
description: |
The unit of measure.
in: body
required: true
type: string
unit_2:
description: |
The unit type of the data set.
in: body
required: true
type: string
user_id:
description: |
The UUID of the user who either created or last
updated the resource.
in: body
required: true
type: string
user_id_1:
description: |
The UUID of the user.
in: body
required: true
type: string
volume:
description: |
The actual measured value.
in: body
required: true
type: number

View File

@@ -1,95 +0,0 @@
.. -*- rst -*-
=========
Resources
=========
Lists all resources and gets information for a resource.
List resources
==============
.. rest_method:: GET /v2/resources
Lists definitions for all resources.
Normal response codes: 200
Error response codes:
Request
-------
.. rest_parameters:: parameters.yaml
- q: q
- meter_links: meter_links
Response Parameters
-------------------
.. rest_parameters:: parameters.yaml
- resources: resources
- user_id: user_id
- links: links
- resource_id: resource_id
- source: source
- project_id: project_id
- metadata: metadata
Response Example
----------------
.. literalinclude:: ../samples/resources-list-response.json
:language: javascript
Show resource details
=====================
.. rest_method:: GET /v2/resources/{resource_id}
Shows details for a resource, by resource ID.
Normal response codes: 200
Error response codes:
Request
-------
.. rest_parameters:: parameters.yaml
- resource_id: resource_id_path
Response Parameters
-------------------
.. rest_parameters:: parameters.yaml
- resource: resource
- user_id: user_id
- links: links
- resource_id: resource_id
- source: source
- project_id: project_id
- metadata: metadata
Response Example
----------------
.. literalinclude:: ../samples/resource-show-response.json
:language: javascript
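The ``links`` entries in a resource can be followed directly. A minimal sketch that lists resources with meter links enabled and follows the first non-self link (endpoint and token are placeholders):

.. code-block:: python

    import requests

    HEADERS = {"X-Auth-Token": "<keystone-token>"}  # placeholder token
    resources = requests.get(
        "http://localhost:8777/v2/resources",  # placeholder endpoint
        headers=HEADERS,
        params={"meter_links": 1},
    ).json()

    for link in resources[0]["links"]:
        if link["rel"] != "self":  # skip the self link, follow a meter link
            samples = requests.get(link["href"], headers=HEADERS).json()
            print(link["rel"], len(samples))
            break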

View File

@@ -1,111 +0,0 @@
.. -*- rst -*-
=======
Samples
=======
Lists all samples and gets information for a sample.
For list operations, if you do not explicitly set the ``limit``
query parameter, a default limit is applied. The default limit is
the ``default_api_return_limit`` configuration option value.
Show sample details
===================
.. rest_method:: GET /v2/samples/{sample_id}
Shows details for a sample, by sample ID.
Normal response codes: 200
Error response codes:
Request
-------
.. rest_parameters:: parameters.yaml
- sample_id: sample_id
Response Parameters
-------------------
.. rest_parameters:: parameters.yaml
- user_id: user_id
- resource_id: resource_id
- timestamp: timestamp
- meter: meter
- volume: volume
- source: source
- recorded_at: recorded_at
- project_id: project_id
- type: type
- id: id
- unit: unit
- metadata: metadata
Response Example
----------------
.. literalinclude:: ../samples/sample-show-response.json
:language: javascript
List samples
============
.. rest_method:: GET /v2/samples
Lists all known samples, based on the data recorded so far.
Normal response codes: 200
Error response codes:
Request
-------
.. rest_parameters:: parameters.yaml
- q: q
- limit: limit
Response Parameters
-------------------
.. rest_parameters:: parameters.yaml
- user_id: user_id
- resource_id: resource_id
- timestamp: timestamp
- meter: meter
- volume: volume
- source: source
- recorded_at: recorded_at
- project_id: project_id
- type: type
- id: id
- unit: unit
- metadata: metadata
Response Example
----------------
.. literalinclude:: ../samples/samples-list-response.json
:language: javascript

View File

@@ -1,24 +0,0 @@
{
"alarm_actions": [
"http://site:8000/alarm"
],
"alarm_id": null,
"combination_rule": null,
"description": "An alarm",
"enabled": true,
"insufficient_data_actions": [
"http://site:8000/nodata"
],
"name": "SwiftObjectAlarm",
"ok_actions": [
"http://site:8000/ok"
],
"project_id": "c96c887c216949acbdfbd8b494863567",
"repeat_actions": false,
"state": "ok",
"state_timestamp": "2013-11-21T12:33:08.486228",
"threshold_rule": null,
"timestamp": "2013-11-21T12:33:08.486221",
"type": "threshold",
"user_id": "c96c887c216949acbdfbd8b494863567"
}

View File

@@ -1,25 +0,0 @@
<?xml version="1.0" encoding="UTF-8"?>
<value>
<alarm_actions>
<item>http://site:8000/alarm</item>
</alarm_actions>
<alarm_id nil="true" />
<combination_rule nil="true" />
<description>An alarm</description>
<enabled>true</enabled>
<insufficient_data_actions>
<item>http://site:8000/nodata</item>
</insufficient_data_actions>
<name>SwiftObjectAlarm</name>
<ok_actions>
<item>http://site:8000/ok</item>
</ok_actions>
<project_id>c96c887c216949acbdfbd8b494863567</project_id>
<repeat_actions>false</repeat_actions>
<state>ok</state>
<state_timestamp>2013-11-21T12:33:08.486228</state_timestamp>
<threshold_rule nil="true" />
<timestamp>2013-11-21T12:33:08.486221</timestamp>
<type>threshold</type>
<user_id>c96c887c216949acbdfbd8b494863567</user_id>
</value>

View File

@@ -1,26 +0,0 @@
[
{
"alarm_actions": [
"http://site:8000/alarm"
],
"alarm_id": null,
"combination_rule": null,
"description": "An alarm",
"enabled": true,
"insufficient_data_actions": [
"http://site:8000/nodata"
],
"name": "SwiftObjectAlarm",
"ok_actions": [
"http://site:8000/ok"
],
"project_id": "c96c887c216949acbdfbd8b494863567",
"repeat_actions": false,
"state": "ok",
"state_timestamp": "2013-11-21T12:33:08.486228",
"threshold_rule": null,
"timestamp": "2013-11-21T12:33:08.486221",
"type": "threshold",
"user_id": "c96c887c216949acbdfbd8b494863567"
}
]

View File

@@ -1,27 +0,0 @@
<?xml version="1.0" encoding="UTF-8"?>
<values>
<value>
<alarm_actions>
<item>http://site:8000/alarm</item>
</alarm_actions>
<alarm_id nil="true" />
<combination_rule nil="true" />
<description>An alarm</description>
<enabled>true</enabled>
<insufficient_data_actions>
<item>http://site:8000/nodata</item>
</insufficient_data_actions>
<name>SwiftObjectAlarm</name>
<ok_actions>
<item>http://site:8000/ok</item>
</ok_actions>
<project_id>c96c887c216949acbdfbd8b494863567</project_id>
<repeat_actions>false</repeat_actions>
<state>ok</state>
<state_timestamp>2013-11-21T12:33:08.486228</state_timestamp>
<threshold_rule nil="true" />
<timestamp>2013-11-21T12:33:08.486221</timestamp>
<type>threshold</type>
<user_id>c96c887c216949acbdfbd8b494863567</user_id>
</value>
</values>

View File

@@ -1,40 +0,0 @@
{
"alarm_storage": {
"storage:production_ready": true
},
"api": {
"alarms:history:query:complex": true,
"alarms:history:query:simple": true,
"alarms:query:complex": true,
"alarms:query:simple": true,
"events:query:simple": true,
"meters:query:complex": false,
"meters:query:metadata": true,
"meters:query:simple": true,
"resources:query:complex": false,
"resources:query:metadata": true,
"resources:query:simple": true,
"samples:query:complex": true,
"samples:query:metadata": true,
"samples:query:simple": true,
"statistics:aggregation:selectable:avg": true,
"statistics:aggregation:selectable:cardinality": true,
"statistics:aggregation:selectable:count": true,
"statistics:aggregation:selectable:max": true,
"statistics:aggregation:selectable:min": true,
"statistics:aggregation:selectable:quartile": false,
"statistics:aggregation:selectable:stddev": true,
"statistics:aggregation:selectable:sum": true,
"statistics:aggregation:standard": true,
"statistics:groupby": true,
"statistics:query:complex": false,
"statistics:query:metadata": true,
"statistics:query:simple": true
},
"event_storage": {
"storage:production_ready": true
},
"storage": {
"storage:production_ready": true
}
}

View File

@@ -1,131 +0,0 @@
<?xml version="1.0" encoding="UTF-8"?>
<value>
<api>
<item>
<key>statistics:query:complex</key>
<value>false</value>
</item>
<item>
<key>alarms:history:query:simple</key>
<value>true</value>
</item>
<item>
<key>meters:query:metadata</key>
<value>true</value>
</item>
<item>
<key>alarms:query:simple</key>
<value>true</value>
</item>
<item>
<key>resources:query:simple</key>
<value>true</value>
</item>
<item>
<key>statistics:aggregation:selectable:quartile</key>
<value>false</value>
</item>
<item>
<key>statistics:query:simple</key>
<value>true</value>
</item>
<item>
<key>statistics:aggregation:selectable:count</key>
<value>true</value>
</item>
<item>
<key>statistics:aggregation:selectable:min</key>
<value>true</value>
</item>
<item>
<key>statistics:aggregation:selectable:sum</key>
<value>true</value>
</item>
<item>
<key>alarms:query:complex</key>
<value>true</value>
</item>
<item>
<key>meters:query:complex</key>
<value>false</value>
</item>
<item>
<key>statistics:groupby</key>
<value>true</value>
</item>
<item>
<key>alarms:history:query:complex</key>
<value>true</value>
</item>
<item>
<key>meters:query:simple</key>
<value>true</value>
</item>
<item>
<key>samples:query:metadata</key>
<value>true</value>
</item>
<item>
<key>statistics:query:metadata</key>
<value>true</value>
</item>
<item>
<key>samples:query:simple</key>
<value>true</value>
</item>
<item>
<key>resources:query:metadata</key>
<value>true</value>
</item>
<item>
<key>statistics:aggregation:selectable:max</key>
<value>true</value>
</item>
<item>
<key>samples:query:complex</key>
<value>true</value>
</item>
<item>
<key>statistics:aggregation:standard</key>
<value>true</value>
</item>
<item>
<key>events:query:simple</key>
<value>true</value>
</item>
<item>
<key>statistics:aggregation:selectable:stddev</key>
<value>true</value>
</item>
<item>
<key>statistics:aggregation:selectable:avg</key>
<value>true</value>
</item>
<item>
<key>statistics:aggregation:selectable:cardinality</key>
<value>true</value>
</item>
<item>
<key>resources:query:complex</key>
<value>false</value>
</item>
</api>
<storage>
<item>
<key>storage:production_ready</key>
<value>true</value>
</item>
</storage>
<alarm_storage>
<item>
<key>storage:production_ready</key>
<value>true</value>
</item>
</alarm_storage>
<event_storage>
<item>
<key>storage:production_ready</key>
<value>true</value>
</item>
</event_storage>
</value>

View File

@@ -1,18 +0,0 @@
{
"raw": {},
"traits": [
{
"type": "string",
"name": "action",
"value": "read"
},
{
"type": "string",
"name": "eventTime",
"value": "2015-10-28T20:26:58.545477+0000"
}
],
"generated": "2015-10-28T20:26:58.546933",
"message_id": "bae43de6-e9fa-44ad-8c15-40a852584444",
"event_type": "http.request"
}

View File

@@ -1,20 +0,0 @@
[
{
"raw": {},
"traits": [
{
"type": "string",
"name": "action",
"value": "read"
},
{
"type": "string",
"name": "eventTime",
"value": "2015-10-28T20:26:58.545477+0000"
}
],
"generated": "2015-10-28T20:26:58.546933",
"message_id": "bae43de6-e9fa-44ad-8c15-40a852584444",
"event_type": "http.request"
}
]

View File

@@ -1,12 +0,0 @@
[
{
"meter_id": "YmQ5NDMxYzEtOGQ2OS00YWQzLTgwM2EtOGQ0YTZiODlmZDM2K2luc3RhbmNl",
"name": "instance",
"project_id": "35b17138-b364-4e6a-a131-8f3099c5be68",
"resource_id": "bd9431c1-8d69-4ad3-803a-8d4a6b89fd36",
"source": "openstack",
"type": "gauge",
"unit": "instance",
"user_id": "efd87807-12d2-4b38-9c70-5f5c2ac427ff"
}
]

View File

@@ -1,13 +0,0 @@
<?xml version="1.0" encoding="UTF-8"?>
<values>
<value>
<name>instance</name>
<type>gauge</type>
<unit>instance</unit>
<resource_id>bd9431c1-8d69-4ad3-803a-8d4a6b89fd36</resource_id>
<project_id>35b17138-b364-4e6a-a131-8f3099c5be68</project_id>
<user_id>efd87807-12d2-4b38-9c70-5f5c2ac427ff</user_id>
<source>openstack</source>
<meter_id>YmQ5NDMxYzEtOGQ2OS00YWQzLTgwM2EtOGQ0YTZiODlmZDM2K2luc3RhbmNl</meter_id>
</value>
</values>

View File

@@ -1,20 +0,0 @@
{
"links": [
{
"href": "http://localhost:8777/v2/resources/bd9431c1-8d69-4ad3-803a-8d4a6b89fd36",
"rel": "self"
},
{
"href": "http://localhost:8777/v2/meters/volume?q.field=resource_id&q.value=bd9431c1-8d69-4ad3-803a-8d4a6b89fd36",
"rel": "volume"
}
],
"metadata": {
"name1": "value1",
"name2": "value2"
},
"project_id": "35b17138-b364-4e6a-a131-8f3099c5be68",
"resource_id": "bd9431c1-8d69-4ad3-803a-8d4a6b89fd36",
"source": "openstack",
"user_id": "efd87807-12d2-4b38-9c70-5f5c2ac427ff"
}

View File

@@ -1,27 +0,0 @@
<?xml version="1.0" encoding="UTF-8"?>
<value>
<resource_id>bd9431c1-8d69-4ad3-803a-8d4a6b89fd36</resource_id>
<project_id>35b17138-b364-4e6a-a131-8f3099c5be68</project_id>
<user_id>efd87807-12d2-4b38-9c70-5f5c2ac427ff</user_id>
<metadata>
<item>
<key>name2</key>
<value>value2</value>
</item>
<item>
<key>name1</key>
<value>value1</value>
</item>
</metadata>
<links>
<item>
<href>http://localhost:8777/v2/resources/bd9431c1-8d69-4ad3-803a-8d4a6b89fd36</href>
<rel>self</rel>
</item>
<item>
<href>http://localhost:8777/v2/meters/volume?q.field=resource_id&amp;q.value=bd9431c1-8d69-4ad3-803a-8d4a6b89fd36</href>
<rel>volume</rel>
</item>
</links>
<source>openstack</source>
</value>

View File

@@ -1,22 +0,0 @@
[
{
"links": [
{
"href": "http://localhost:8777/v2/resources/bd9431c1-8d69-4ad3-803a-8d4a6b89fd36",
"rel": "self"
},
{
"href": "http://localhost:8777/v2/meters/volume?q.field=resource_id&q.value=bd9431c1-8d69-4ad3-803a-8d4a6b89fd36",
"rel": "volume"
}
],
"metadata": {
"name1": "value1",
"name2": "value2"
},
"project_id": "35b17138-b364-4e6a-a131-8f3099c5be68",
"resource_id": "bd9431c1-8d69-4ad3-803a-8d4a6b89fd36",
"source": "openstack",
"user_id": "efd87807-12d2-4b38-9c70-5f5c2ac427ff"
}
]

View File

@@ -1,29 +0,0 @@
<?xml version="1.0" encoding="UTF-8"?>
<values>
<value>
<resource_id>bd9431c1-8d69-4ad3-803a-8d4a6b89fd36</resource_id>
<project_id>35b17138-b364-4e6a-a131-8f3099c5be68</project_id>
<user_id>efd87807-12d2-4b38-9c70-5f5c2ac427ff</user_id>
<metadata>
<item>
<key>name2</key>
<value>value2</value>
</item>
<item>
<key>name1</key>
<value>value1</value>
</item>
</metadata>
<links>
<item>
<href>http://localhost:8777/v2/resources/bd9431c1-8d69-4ad3-803a-8d4a6b89fd36</href>
<rel>self</rel>
</item>
<item>
<href>http://localhost:8777/v2/meters/volume?q.field=resource_id&amp;q.value=bd9431c1-8d69-4ad3-803a-8d4a6b89fd36</href>
<rel>volume</rel>
</item>
</links>
<source>openstack</source>
</value>
</values>

View File

@@ -1,17 +0,0 @@
{
"id": "8db08c68-bc70-11e4-a8c4-fa163e1d1a9b",
"metadata": {
"name1": "value1",
"name2": "value2"
},
"meter": "instance",
"project_id": "35b17138-b364-4e6a-a131-8f3099c5be68",
"recorded_at": "2015-02-24T22:00:32.747930",
"resource_id": "bd9431c1-8d69-4ad3-803a-8d4a6b89fd36",
"source": "openstack",
"timestamp": "2015-02-24T22:00:32.747930",
"type": "gauge",
"unit": "instance",
"user_id": "efd87807-12d2-4b38-9c70-5f5c2ac427ff",
"volume": 1.0
}

View File

@ -1,23 +0,0 @@
<value>
<id>8db08c68-bc70-11e4-a8c4-fa163e1d1a9b</id>
<meter>instance</meter>
<type>gauge</type>
<unit>instance</unit>
<volume>1.0</volume>
<user_id>efd87807-12d2-4b38-9c70-5f5c2ac427ff</user_id>
<project_id>35b17138-b364-4e6a-a131-8f3099c5be68</project_id>
<resource_id>bd9431c1-8d69-4ad3-803a-8d4a6b89fd36</resource_id>
<source>openstack</source>
<timestamp>2015-02-24T22:00:32.747930</timestamp>
<recorded_at>2015-02-24T22:00:32.747930</recorded_at>
<metadata>
<item>
<key>name2</key>
<value>value2</value>
</item>
<item>
<key>name1</key>
<value>value1</value>
</item>
</metadata>
</value>

View File

@ -1,17 +0,0 @@
{
"id": "9b23b398-6139-11e5-97e9-bc764e045bf6",
"metadata": {
"name1": "value1",
"name2": "value2"
},
"meter": "instance",
"project_id": "35b17138-b364-4e6a-a131-8f3099c5be68",
"recorded_at": "2015-09-22T14:52:54.850725",
"resource_id": "bd9431c1-8d69-4ad3-803a-8d4a6b89fd36",
"source": "openstack",
"timestamp": "2015-09-22T14:52:54.850718",
"type": "gauge",
"unit": "instance",
"user_id": "efd87807-12d2-4b38-9c70-5f5c2ac427ff",
"volume": 1
}

View File

@ -1,24 +0,0 @@
<?xml version="1.0" encoding="UTF-8"?>
<value>
<id>9b23b398-6139-11e5-97e9-bc764e045bf6</id>
<meter>instance</meter>
<type>gauge</type>
<unit>instance</unit>
<volume>1.0</volume>
<user_id>efd87807-12d2-4b38-9c70-5f5c2ac427ff</user_id>
<project_id>35b17138-b364-4e6a-a131-8f3099c5be68</project_id>
<resource_id>bd9431c1-8d69-4ad3-803a-8d4a6b89fd36</resource_id>
<source>openstack</source>
<timestamp>2015-09-22T14:52:54.850718</timestamp>
<recorded_at>2015-09-22T14:52:54.850725</recorded_at>
<metadata>
<item>
<key>name2</key>
<value>value2</value>
</item>
<item>
<key>name1</key>
<value>value1</value>
</item>
</metadata>
</value>

View File

@ -1,19 +0,0 @@
[
{
"id": "9b23b398-6139-11e5-97e9-bc764e045bf6",
"metadata": {
"name1": "value1",
"name2": "value2"
},
"meter": "instance",
"project_id": "35b17138-b364-4e6a-a131-8f3099c5be68",
"recorded_at": "2015-09-22T14:52:54.850725",
"resource_id": "bd9431c1-8d69-4ad3-803a-8d4a6b89fd36",
"source": "openstack",
"timestamp": "2015-09-22T14:52:54.850718",
"type": "gauge",
"unit": "instance",
"user_id": "efd87807-12d2-4b38-9c70-5f5c2ac427ff",
"volume": 1
}
]

View File

@ -1,26 +0,0 @@
<?xml version="1.0" encoding="UTF-8"?>
<values>
<value>
<id>9b23b398-6139-11e5-97e9-bc764e045bf6</id>
<meter>instance</meter>
<type>gauge</type>
<unit>instance</unit>
<volume>1.0</volume>
<user_id>efd87807-12d2-4b38-9c70-5f5c2ac427ff</user_id>
<project_id>35b17138-b364-4e6a-a131-8f3099c5be68</project_id>
<resource_id>bd9431c1-8d69-4ad3-803a-8d4a6b89fd36</resource_id>
<source>openstack</source>
<timestamp>2015-09-22T14:52:54.850718</timestamp>
<recorded_at>2015-09-22T14:52:54.850725</recorded_at>
<metadata>
<item>
<key>name2</key>
<value>value2</value>
</item>
<item>
<key>name1</key>
<value>value1</value>
</item>
</metadata>
</value>
</values>

View File

@ -1,16 +0,0 @@
[
{
"avg": 4.5,
"count": 10,
"duration": 300,
"duration_end": "2013-01-04T16:47:00",
"duration_start": "2013-01-04T16:42:00",
"max": 9,
"min": 1,
"period": 7200,
"period_end": "2013-01-04T18:00:00",
"period_start": "2013-01-04T16:00:00",
"sum": 45,
"unit": "GiB"
}
]

View File

@ -1,17 +0,0 @@
<?xml version="1.0" encoding="UTF-8"?>
<values>
<value>
<avg>4.5</avg>
<count>10</count>
<duration>300.0</duration>
<duration_end>2013-01-04T16:47:00</duration_end>
<duration_start>2013-01-04T16:42:00</duration_start>
<max>9.0</max>
<min>1.0</min>
<period>7200</period>
<period_end>2013-01-04T18:00:00</period_end>
<period_start>2013-01-04T16:00:00</period_start>
<sum>45.0</sum>
<unit>GiB</unit>
</value>
</values>
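
The statistics payloads above came from the v2 statistics endpoint, exposed as a custom "statistics" action on the meter controller (removed further down in this commit). A minimal sketch of the kind of request that produced such a payload, assuming a local endpoint on the default port 8777, a placeholder token, and an arbitrary meter name:

import requests

CEILOMETER = "http://localhost:8777"   # assumed local endpoint
TOKEN = "<keystone-token>"             # placeholder value

resp = requests.get(
    CEILOMETER + "/v2/meters/volume.size/statistics",
    headers={"X-Auth-Token": TOKEN},
    params={"q.field": "resource_id",
            "q.op": "eq",
            "q.value": "bd9431c1-8d69-4ad3-803a-8d4a6b89fd36",
            "period": 7200},
)
for stat in resp.json():
    print(stat["period_start"], stat["avg"], stat["count"])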

View File

@ -14,7 +14,4 @@
class NotImplementedError(NotImplementedError):
# FIXME(jd) This is used by WSME to return a correct HTTP code. We should
# not expose it here but wrap our methods in the API to convert it to a
# proper HTTP error.
code = 501
pass

View File

@ -1,112 +0,0 @@
#
# Copyright 2012 New Dream Network, LLC (DreamHost)
# Copyright 2015-2016 Red Hat, Inc.
#
# Licensed under the Apache License, Version 2.0 (the "License"); you may
# not use this file except in compliance with the License. You may obtain
# a copy of the License at
#
# http://www.apache.org/licenses/LICENSE-2.0
#
# Unless required by applicable law or agreed to in writing, software
# distributed under the License is distributed on an "AS IS" BASIS, WITHOUT
# WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. See the
# License for the specific language governing permissions and limitations
# under the License.
import os
import uuid
from oslo_config import cfg
from oslo_log import log
from paste import deploy
import pecan
from ceilometer.api import hooks
from ceilometer.api import middleware
LOG = log.getLogger(__name__)
OPTS = [
cfg.StrOpt('api_paste_config',
default="api_paste.ini",
help="Configuration file for WSGI definition of API."
),
]
API_OPTS = [
cfg.IntOpt('default_api_return_limit',
min=1,
default=100,
help='Default maximum number of items returned by API request.'
),
]
def setup_app(pecan_config=None, conf=None):
if conf is None:
raise RuntimeError("No configuration passed")
# FIXME: Replace DBHook with a hooks.TransactionHook
app_hooks = [hooks.ConfigHook(conf),
hooks.DBHook(conf),
hooks.NotifierHook(conf),
hooks.TranslationHook()]
pecan_config = pecan_config or {
"app": {
'root': 'ceilometer.api.controllers.root.RootController',
'modules': ['ceilometer.api'],
}
}
pecan.configuration.set_config(dict(pecan_config), overwrite=True)
app = pecan.make_app(
pecan_config['app']['root'],
hooks=app_hooks,
wrap_app=middleware.ParsableErrorMiddleware,
guess_content_type_from_ext=False
)
return app
# NOTE(sileht): pastedeploy uses ConfigParser to handle
# global_conf. Since Python 3's ConfigParser only allows strings
# as config values, not objects, we pass an object created before
# paste loads the app by storing it in a global variable. Each
# loaded app then stores its configuration under a unique key to
# be concurrency safe.
global APPCONFIGS
APPCONFIGS = {}
def load_app(conf):
global APPCONFIGS
# Build the WSGI app
cfg_file = None
cfg_path = conf.api_paste_config
if not os.path.isabs(cfg_path):
cfg_file = conf.find_file(cfg_path)
elif os.path.exists(cfg_path):
cfg_file = cfg_path
if not cfg_file:
raise cfg.ConfigFilesNotFoundError([conf.api_paste_config])
configkey = str(uuid.uuid4())
APPCONFIGS[configkey] = conf
LOG.info("Full WSGI config used: %s", cfg_file)
LOG.warning("Note: Ceilometer API is deprecated; use APIs from Aodh"
" (alarms), Gnocchi (metrics) and/or Panko (events).")
return deploy.loadapp("config:" + cfg_file,
global_conf={'configkey': configkey})
def app_factory(global_config, **local_conf):
global APPCONFIGS
conf = APPCONFIGS.get(global_config.get('configkey'))
return setup_app(conf=conf)
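
The NOTE above is worth spelling out: paste's global_conf can only carry strings, so the oslo.config object is parked in a module-level dict and only its string key travels through paste. A minimal sketch of the pattern, independent of paste itself:

import uuid

APPCONFIGS = {}  # module-level registry shared by all loaded apps

def register(conf):
    # Store the (non-string) config object under a fresh string key
    # that can safely travel through paste's global_conf.
    configkey = str(uuid.uuid4())
    APPCONFIGS[configkey] = conf
    return configkey

def app_factory(global_config):
    # Recover the original object from the string key.
    return APPCONFIGS.get(global_config.get('configkey'))

key = register({'api_paste_config': 'api_paste.ini'})
assert app_factory({'configkey': key}) is APPCONFIGS[key]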

View File

@ -1,25 +0,0 @@
# -*- mode: python -*-
#
# Copyright 2013 New Dream Network, LLC (DreamHost)
#
# Licensed under the Apache License, Version 2.0 (the "License"); you may
# not use this file except in compliance with the License. You may obtain
# a copy of the License at
#
# http://www.apache.org/licenses/LICENSE-2.0
#
# Unless required by applicable law or agreed to in writing, software
# distributed under the License is distributed on an "AS IS" BASIS, WITHOUT
# WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. See the
# License for the specific language governing permissions and limitations
# under the License.
"""Use this file for deploying the API under mod_wsgi.
See http://pecan.readthedocs.org/en/latest/deployment.html for details.
"""
from ceilometer import service
from ceilometer.api import app
# Initialize the oslo configuration library and logging
conf = service.prepare_service([])
application = app.load_app(conf)

View File

@ -1,56 +0,0 @@
#
# Copyright 2012 New Dream Network, LLC (DreamHost)
#
# Licensed under the Apache License, Version 2.0 (the "License"); you may
# not use this file except in compliance with the License. You may obtain
# a copy of the License at
#
# http://www.apache.org/licenses/LICENSE-2.0
#
# Unless required by applicable law or agreed to in writing, software
# distributed under the License is distributed on an "AS IS" BASIS, WITHOUT
# WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. See the
# License for the specific language governing permissions and limitations
# under the License.
import pecan
from ceilometer.api.controllers.v2 import root as v2
MEDIA_TYPE_JSON = 'application/vnd.openstack.telemetry-%s+json'
MEDIA_TYPE_XML = 'application/vnd.openstack.telemetry-%s+xml'
class RootController(object):
def __init__(self):
self.v2 = v2.V2Controller()
@pecan.expose('json')
def index(self):
base_url = pecan.request.application_url
available = [{'tag': 'v2', 'date': '2013-02-13T00:00:00Z', }]
collected = [version_descriptor(base_url, v['tag'], v['date'])
for v in available]
versions = {'versions': {'values': collected}}
return versions
def version_descriptor(base_url, version, released_on):
url = version_url(base_url, version)
return {
'id': version,
'links': [
{'href': url, 'rel': 'self', },
{'href': 'http://docs.openstack.org/',
'rel': 'describedby', 'type': 'text/html', }],
'media-types': [
{'base': 'application/json', 'type': MEDIA_TYPE_JSON % version, },
{'base': 'application/xml', 'type': MEDIA_TYPE_XML % version, }],
'status': 'stable',
'updated': released_on,
}
def version_url(base_url, version_number):
return '%s/%s' % (base_url, version_number)
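
For reference, running version_descriptor standalone shows the version document that GET / used to return; the base URL below is an assumption for illustration:

import json

MEDIA_TYPE_JSON = 'application/vnd.openstack.telemetry-%s+json'
MEDIA_TYPE_XML = 'application/vnd.openstack.telemetry-%s+xml'

def version_descriptor(base_url, version, released_on):
    url = '%s/%s' % (base_url, version)
    return {
        'id': version,
        'links': [{'href': url, 'rel': 'self'},
                  {'href': 'http://docs.openstack.org/',
                   'rel': 'describedby', 'type': 'text/html'}],
        'media-types': [
            {'base': 'application/json', 'type': MEDIA_TYPE_JSON % version},
            {'base': 'application/xml', 'type': MEDIA_TYPE_XML % version}],
        'status': 'stable',
        'updated': released_on,
    }

print(json.dumps({'versions': {'values': [
    version_descriptor('http://localhost:8777', 'v2',
                       '2013-02-13T00:00:00Z')]}}, indent=2))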

View File

@ -1,222 +0,0 @@
#
# Copyright 2012 New Dream Network, LLC (DreamHost)
# Copyright 2013 IBM Corp.
# Copyright 2013 eNovance <licensing@enovance.com>
# Copyright Ericsson AB 2013. All rights reserved
# Copyright 2014 Hewlett-Packard Company
# Copyright 2015 Huawei Technologies Co., Ltd.
#
# Licensed under the Apache License, Version 2.0 (the "License"); you may
# not use this file except in compliance with the License. You may obtain
# a copy of the License at
#
# http://www.apache.org/licenses/LICENSE-2.0
#
# Unless required by applicable law or agreed to in writing, software
# distributed under the License is distributed on an "AS IS" BASIS, WITHOUT
# WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. See the
# License for the specific language governing permissions and limitations
# under the License.
import ast
import datetime
import functools
import inspect
import json
from oslo_utils import strutils
from oslo_utils import timeutils
import pecan
import six
import wsme
from wsme import types as wtypes
from ceilometer.i18n import _
operation_kind = ('lt', 'le', 'eq', 'ne', 'ge', 'gt')
operation_kind_enum = wtypes.Enum(str, *operation_kind)
class ClientSideError(wsme.exc.ClientSideError):
def __init__(self, error, status_code=400):
pecan.response.translatable_error = error
super(ClientSideError, self).__init__(error, status_code)
class EntityNotFound(ClientSideError):
def __init__(self, entity, id):
super(EntityNotFound, self).__init__(
_("%(entity)s %(id)s Not Found") % {'entity': entity,
'id': id},
status_code=404)
class ProjectNotAuthorized(ClientSideError):
def __init__(self, id, aspect='project'):
params = dict(aspect=aspect, id=id)
super(ProjectNotAuthorized, self).__init__(
_("Not Authorized to access %(aspect)s %(id)s") % params,
status_code=401)
class Base(wtypes.DynamicBase):
@classmethod
def from_db_model(cls, m):
return cls(**(m.as_dict()))
@classmethod
def from_db_and_links(cls, m, links):
return cls(links=links, **(m.as_dict()))
def as_dict(self, db_model):
valid_keys = inspect.getargspec(db_model.__init__)[0]
if 'self' in valid_keys:
valid_keys.remove('self')
return self.as_dict_from_keys(valid_keys)
def as_dict_from_keys(self, keys):
return dict((k, getattr(self, k))
for k in keys
if hasattr(self, k) and
getattr(self, k) != wsme.Unset)
class Link(Base):
"""A link representation."""
href = wtypes.text
"The url of a link"
rel = wtypes.text
"The name of a link"
@classmethod
def sample(cls):
return cls(href=('http://localhost:8777/v2/meters/volume?'
'q.field=resource_id&'
'q.value=bd9431c1-8d69-4ad3-803a-8d4a6b89fd36'),
rel='volume'
)
class Query(Base):
"""Query filter."""
# The data types supported by the query.
_supported_types = ['integer', 'float', 'string', 'boolean', 'datetime']
# Functions to convert the data field to the correct type.
_type_converters = {'integer': int,
'float': float,
'boolean': functools.partial(
strutils.bool_from_string, strict=True),
'string': six.text_type,
'datetime': timeutils.parse_isotime}
_op = None # provide a default
def get_op(self):
return self._op or 'eq'
def set_op(self, value):
self._op = value
field = wsme.wsattr(wtypes.text, mandatory=True)
"The name of the field to test"
# op = wsme.wsattr(operation_kind, default='eq')
# this ^ doesn't seem to work.
op = wsme.wsproperty(operation_kind_enum, get_op, set_op)
"The comparison operator. Defaults to 'eq'."
value = wsme.wsattr(wtypes.text, mandatory=True)
"The value to compare against the stored data"
type = wtypes.text
"The data type of value to compare against the stored data"
def __repr__(self):
# for logging calls
return '<Query %r %s %r %s>' % (self.field,
self.op,
self.value,
self.type)
@classmethod
def sample(cls):
return cls(field='resource_id',
op='eq',
value='bd9431c1-8d69-4ad3-803a-8d4a6b89fd36',
type='string'
)
def as_dict(self):
return self.as_dict_from_keys(['field', 'op', 'type', 'value'])
def _get_value_as_type(self, forced_type=None):
"""Convert metadata value to the specified data type.
This method is called during metadata query to help convert the
querying metadata to the data type specified by user. If there is no
data type given, the metadata will be parsed by ast.literal_eval to
try to do a smart converting.
NOTE (flwang) Using "_" as prefix to avoid an InvocationError raised
from wsmeext/sphinxext.py. It's OK to call it outside the Query class.
Because the "public" side of that class is actually the outside of the
API, and the "private" side is the API implementation. The method is
only used in the API implementation, so it's OK.
:returns: metadata value converted with the specified data type.
"""
type = forced_type or self.type
try:
converted_value = self.value
if not type:
try:
converted_value = ast.literal_eval(self.value)
except (ValueError, SyntaxError):
# Unable to convert the metadata value automatically,
# so let it default to self.value
pass
else:
if type not in self._supported_types:
# Types must be explicitly declared so the
# correct type converter may be used. Subclasses
# of Query may define _supported_types and
# _type_converters to define their own types.
raise TypeError()
converted_value = self._type_converters[type](self.value)
if isinstance(converted_value, datetime.datetime):
converted_value = timeutils.normalize_time(converted_value)
except ValueError:
msg = (_('Unable to convert the value %(value)s'
' to the expected data type %(type)s.') %
{'value': self.value, 'type': type})
raise ClientSideError(msg)
except TypeError:
msg = (_('The data type %(type)s is not supported. The supported'
' data type list is: %(supported)s') %
{'type': type, 'supported': self._supported_types})
raise ClientSideError(msg)
except Exception:
msg = (_('Unexpected exception converting %(value)s to'
' the expected data type %(type)s.') %
{'value': self.value, 'type': type})
raise ClientSideError(msg)
return converted_value
class JsonType(wtypes.UserType):
"""A simple JSON type."""
basetype = wtypes.text
name = 'json'
@staticmethod
def validate(value):
# check that value can be serialised
json.dumps(value)
return value
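
The conversion rule in _get_value_as_type is the subtle part of this file: with no explicit type the value is guessed via ast.literal_eval, otherwise a per-type converter runs. A standalone sketch of just that rule (simplified: no boolean/datetime converters and no wsme error wrapping):

import ast

converters = {'integer': int, 'float': float, 'string': str}

def value_as_type(value, forced_type=None):
    if not forced_type:
        try:
            # Smart guess: '42' -> 42, '0.5' -> 0.5, 'True' -> True.
            return ast.literal_eval(value)
        except (ValueError, SyntaxError):
            return value  # plain strings stay strings
    return converters[forced_type](value)

assert value_as_type('42') == 42
assert value_as_type('42', 'string') == '42'
assert value_as_type('cpu_util') == 'cpu_util'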

View File

@ -1,90 +0,0 @@
#
# Copyright 2012 New Dream Network, LLC (DreamHost)
# Copyright 2013 IBM Corp.
# Copyright 2013 eNovance <licensing@enovance.com>
# Copyright Ericsson AB 2013. All rights reserved
# Copyright 2014 Hewlett-Packard Company
# Copyright 2015 Huawei Technologies Co., Ltd.
#
# Licensed under the Apache License, Version 2.0 (the "License"); you may
# not use this file except in compliance with the License. You may obtain
# a copy of the License at
#
# http://www.apache.org/licenses/LICENSE-2.0
#
# Unless required by applicable law or agreed to in writing, software
# distributed under the License is distributed on an "AS IS" BASIS, WITHOUT
# WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. See the
# License for the specific language governing permissions and limitations
# under the License.
import pecan
from pecan import rest
from wsme import types as wtypes
import wsmeext.pecan as wsme_pecan
from ceilometer.api.controllers.v2 import base
from ceilometer import utils
def _flatten_capabilities(capabilities):
return dict((k, v) for k, v in utils.recursive_keypairs(capabilities))
class Capabilities(base.Base):
"""A representation of the API and storage capabilities.
Usually constrained by restrictions imposed by the storage driver.
"""
api = {wtypes.text: bool}
"A flattened dictionary of API capabilities"
storage = {wtypes.text: bool}
"A flattened dictionary of storage capabilities"
@classmethod
def sample(cls):
return cls(
api=_flatten_capabilities({
'meters': {'query': {'simple': True,
'metadata': True}},
'resources': {'query': {'simple': True,
'metadata': True}},
'samples': {'query': {'simple': True,
'metadata': True,
'complex': True}},
'statistics': {'groupby': True,
'query': {'simple': True,
'metadata': True},
'aggregation': {'standard': True,
'selectable': {
'max': True,
'min': True,
'sum': True,
'avg': True,
'count': True,
'stddev': True,
'cardinality': True,
'quartile': False}}},
}),
storage=_flatten_capabilities(
{'storage': {'production_ready': True}}),
)
class CapabilitiesController(rest.RestController):
"""Manages capabilities queries."""
@wsme_pecan.wsexpose(Capabilities)
def get(self):
"""Returns a flattened dictionary of API capabilities.
Capabilities supported by the currently configured storage driver.
"""
# variation in API capabilities is effectively determined by
# the lack of strict feature parity across storage drivers
conn = pecan.request.storage_conn
driver_capabilities = conn.get_capabilities().copy()
driver_perf = conn.get_storage_capabilities()
return Capabilities(api=_flatten_capabilities(driver_capabilities),
storage=_flatten_capabilities(driver_perf))
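
The capabilities document is served flattened; recursive_keypairs (from ceilometer.utils, not part of this diff) turns the nested dict into separator-joined keys. A simplified stand-in showing the transformation (the ':' separator is an assumption here):

def recursive_keypairs(d, separator=':'):
    # Simplified stand-in for ceilometer.utils.recursive_keypairs.
    for name, value in sorted(d.items()):
        if isinstance(value, dict):
            for subname, subvalue in recursive_keypairs(value, separator):
                yield '%s%s%s' % (name, separator, subname), subvalue
        else:
            yield name, value

caps = {'statistics': {'groupby': True,
                       'query': {'simple': True, 'metadata': True}}}
print(dict(recursive_keypairs(caps)))
# {'statistics:groupby': True, 'statistics:query:metadata': True,
#  'statistics:query:simple': True}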

View File

@ -1,505 +0,0 @@
#
# Copyright 2012 New Dream Network, LLC (DreamHost)
# Copyright 2013 IBM Corp.
# Copyright 2013 eNovance <licensing@enovance.com>
# Copyright Ericsson AB 2013. All rights reserved
# Copyright 2014 Hewlett-Packard Company
# Copyright 2015 Huawei Technologies Co., Ltd.
#
# Licensed under the Apache License, Version 2.0 (the "License"); you may
# not use this file except in compliance with the License. You may obtain
# a copy of the License at
#
# http://www.apache.org/licenses/LICENSE-2.0
#
# Unless required by applicable law or agreed to in writing, software
# distributed under the License is distributed on an "AS IS" BASIS, WITHOUT
# WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. See the
# License for the specific language governing permissions and limitations
# under the License.
import base64
import datetime
from oslo_log import log
from oslo_utils import strutils
from oslo_utils import timeutils
import pecan
from pecan import rest
import six
import wsme
from wsme import types as wtypes
import wsmeext.pecan as wsme_pecan
from ceilometer.api.controllers.v2 import base
from ceilometer.api.controllers.v2 import utils as v2_utils
from ceilometer.api import rbac
from ceilometer.i18n import _
from ceilometer.publisher import utils as publisher_utils
from ceilometer import sample
from ceilometer import storage
from ceilometer.storage import base as storage_base
from ceilometer import utils
LOG = log.getLogger(__name__)
class OldSample(base.Base):
"""A single measurement for a given meter and resource.
This class is deprecated in favor of Sample.
"""
source = wtypes.text
"The ID of the source that identifies where the sample comes from"
counter_name = wsme.wsattr(wtypes.text, mandatory=True)
"The name of the meter"
# FIXME(dhellmann): Make this meter_name?
counter_type = wsme.wsattr(wtypes.text, mandatory=True)
"The type of the meter (see :ref:`measurements`)"
# FIXME(dhellmann): Make this meter_type?
counter_unit = wsme.wsattr(wtypes.text, mandatory=True)
"The unit of measure for the value in counter_volume"
# FIXME(dhellmann): Make this meter_unit?
counter_volume = wsme.wsattr(float, mandatory=True)
"The actual measured value"
user_id = wtypes.text
"The ID of the user who last triggered an update to the resource"
project_id = wtypes.text
"The ID of the project or tenant that owns the resource"
resource_id = wsme.wsattr(wtypes.text, mandatory=True)
"The ID of the :class:`Resource` for which the measurements are taken"
timestamp = datetime.datetime
"UTC date and time when the measurement was made"
recorded_at = datetime.datetime
"When the sample has been recorded."
resource_metadata = {wtypes.text: wtypes.text}
"Arbitrary metadata associated with the resource"
message_id = wtypes.text
"A unique identifier for the sample"
def __init__(self, counter_volume=None, resource_metadata=None,
timestamp=None, **kwds):
resource_metadata = resource_metadata or {}
if counter_volume is not None:
counter_volume = float(counter_volume)
resource_metadata = v2_utils.flatten_metadata(resource_metadata)
# this is to make it easier for clients to pass a timestamp in
if timestamp and isinstance(timestamp, six.string_types):
timestamp = timeutils.parse_isotime(timestamp)
super(OldSample, self).__init__(counter_volume=counter_volume,
resource_metadata=resource_metadata,
timestamp=timestamp, **kwds)
if self.resource_metadata in (wtypes.Unset, None):
self.resource_metadata = {}
@classmethod
def sample(cls):
return cls(source='openstack',
counter_name='instance',
counter_type='gauge',
counter_unit='instance',
counter_volume=1,
resource_id='bd9431c1-8d69-4ad3-803a-8d4a6b89fd36',
project_id='35b17138-b364-4e6a-a131-8f3099c5be68',
user_id='efd87807-12d2-4b38-9c70-5f5c2ac427ff',
recorded_at=datetime.datetime(2015, 1, 1, 12, 0, 0, 0),
timestamp=datetime.datetime(2015, 1, 1, 12, 0, 0, 0),
resource_metadata={'name1': 'value1',
'name2': 'value2'},
message_id='5460acce-4fd6-480d-ab18-9735ec7b1996',
)
class Statistics(base.Base):
"""Computed statistics for a query."""
groupby = {wtypes.text: wtypes.text}
"Dictionary of field names for group, if groupby statistics are requested"
unit = wtypes.text
"The unit type of the data set"
min = float
"The minimum volume seen in the data"
max = float
"The maximum volume seen in the data"
avg = float
"The average of all of the volume values seen in the data"
sum = float
"The total of all of the volume values seen in the data"
count = int
"The number of samples seen"
aggregate = {wtypes.text: float}
"The selectable aggregate value(s)"
duration = float
"The difference, in seconds, between the oldest and newest timestamp"
duration_start = datetime.datetime
"UTC date and time of the earliest timestamp, or the query start time"
duration_end = datetime.datetime
"UTC date and time of the oldest timestamp, or the query end time"
period = int
"The difference, in seconds, between the period start and end"
period_start = datetime.datetime
"UTC date and time of the period start"
period_end = datetime.datetime
"UTC date and time of the period end"
def __init__(self, start_timestamp=None, end_timestamp=None, **kwds):
super(Statistics, self).__init__(**kwds)
self._update_duration(start_timestamp, end_timestamp)
def _update_duration(self, start_timestamp, end_timestamp):
# "Clamp" the timestamps we return to the original time
# range, excluding the offset.
if (start_timestamp and
self.duration_start and
self.duration_start < start_timestamp):
self.duration_start = start_timestamp
LOG.debug('clamping min timestamp to range')
if (end_timestamp and
self.duration_end and
self.duration_end > end_timestamp):
self.duration_end = end_timestamp
LOG.debug('clamping max timestamp to range')
# If we got valid timestamps back, compute a duration in seconds.
#
# If the min > max after clamping then we know the
# timestamps on the samples fell outside of the time
# range we care about for the query, so treat them as
# "invalid."
#
# If the timestamps are invalid, return None as a
# sentinel indicating that there is something "funny"
# about the range.
if (self.duration_start and
self.duration_end and
self.duration_start <= self.duration_end):
self.duration = timeutils.delta_seconds(self.duration_start,
self.duration_end)
else:
self.duration_start = self.duration_end = self.duration = None
@classmethod
def sample(cls):
return cls(unit='GiB',
min=1,
max=9,
avg=4.5,
sum=45,
count=10,
duration_start=datetime.datetime(2013, 1, 4, 16, 42),
duration_end=datetime.datetime(2013, 1, 4, 16, 47),
period=7200,
period_start=datetime.datetime(2013, 1, 4, 16, 00),
period_end=datetime.datetime(2013, 1, 4, 18, 00),
)
class Aggregate(base.Base):
func = wsme.wsattr(wtypes.text, mandatory=True)
"The aggregation function name"
param = wsme.wsattr(wtypes.text, default=None)
"The paramter to the aggregation function"
def __init__(self, **kwargs):
super(Aggregate, self).__init__(**kwargs)
@staticmethod
def validate(aggregate):
valid_agg = (storage_base.Connection.CAPABILITIES.get('statistics', {})
.get('aggregation', {}).get('selectable', {}).keys())
if aggregate.func not in valid_agg:
msg = _('Invalid aggregation function: %s') % aggregate.func
raise base.ClientSideError(msg)
return aggregate
@classmethod
def sample(cls):
return cls(func='cardinality',
param='resource_id')
def _validate_groupby_fields(groupby_fields):
"""Checks that the list of groupby fields from request is valid.
If all fields are valid, returns fields with duplicates removed.
"""
# NOTE(terriyu): Currently, metadata fields are supported in our
# group by statistics implementation only for mongodb
valid_fields = set(['user_id', 'resource_id', 'project_id', 'source',
'resource_metadata.instance_type'])
invalid_fields = set(groupby_fields) - valid_fields
if invalid_fields:
raise wsme.exc.UnknownArgument(invalid_fields,
"Invalid groupby fields")
# Remove duplicate fields
# NOTE(terriyu): This assumes that we don't care about the order of the
# group by fields.
return list(set(groupby_fields))
class MeterController(rest.RestController):
"""Manages operations on a single meter."""
_custom_actions = {
'statistics': ['GET'],
}
def __init__(self, meter_name):
pecan.request.context['meter_name'] = meter_name
self.meter_name = meter_name
@wsme_pecan.wsexpose([OldSample], [base.Query], int)
def get_all(self, q=None, limit=None):
"""Return samples for the meter.
:param q: Filter rules for the data to be returned.
:param limit: Maximum number of samples to return.
"""
rbac.enforce('get_samples', pecan.request)
q = q or []
limit = v2_utils.enforce_limit(limit)
kwargs = v2_utils.query_to_kwargs(q, storage.SampleFilter.__init__)
kwargs['meter'] = self.meter_name
f = storage.SampleFilter(**kwargs)
return [OldSample.from_db_model(e)
for e in pecan.request.storage_conn.get_samples(f, limit=limit)
]
@wsme_pecan.wsexpose([OldSample], str, body=[OldSample], status_code=201)
def post(self, direct='', samples=None):
"""Post a list of new Samples to Telemetry.
:param direct: a flag indicating whether the samples will be posted
directly to storage or not.
:param samples: a list of samples within the request body.
"""
rbac.enforce('create_samples', pecan.request)
direct = strutils.bool_from_string(direct)
if not samples:
msg = _('Samples should be included in request body')
raise base.ClientSideError(msg)
now = timeutils.utcnow()
auth_project = rbac.get_limited_to_project(pecan.request.headers)
def_source = pecan.request.cfg.sample_source
def_project_id = pecan.request.headers.get('X-Project-Id')
def_user_id = pecan.request.headers.get('X-User-Id')
published_samples = []
for s in samples:
if self.meter_name != s.counter_name:
raise wsme.exc.InvalidInput('counter_name', s.counter_name,
'should be %s' % self.meter_name)
if s.message_id:
raise wsme.exc.InvalidInput('message_id', s.message_id,
'The message_id must not be set')
if s.counter_type not in sample.TYPES:
raise wsme.exc.InvalidInput('counter_type', s.counter_type,
'The counter type must be: ' +
', '.join(sample.TYPES))
s.user_id = (s.user_id or def_user_id)
s.project_id = (s.project_id or def_project_id)
s.source = '%s:%s' % (s.project_id, (s.source or def_source))
s.timestamp = (s.timestamp or now)
if auth_project and auth_project != s.project_id:
# non admin user trying to cross post to another project_id
auth_msg = 'can not post samples to other projects'
raise wsme.exc.InvalidInput('project_id', s.project_id,
auth_msg)
published_sample = sample.Sample(
name=s.counter_name,
type=s.counter_type,
unit=s.counter_unit,
volume=s.counter_volume,
user_id=s.user_id,
project_id=s.project_id,
resource_id=s.resource_id,
timestamp=s.timestamp.isoformat(),
resource_metadata=utils.restore_nesting(s.resource_metadata,
separator='.'),
source=s.source)
s.message_id = published_sample.id
sample_dict = publisher_utils.meter_message_from_counter(
published_sample,
pecan.request.cfg.publisher.telemetry_secret)
if direct:
ts = timeutils.parse_isotime(sample_dict['timestamp'])
sample_dict['timestamp'] = timeutils.normalize_time(ts)
pecan.request.storage_conn.record_metering_data(sample_dict)
else:
published_samples.append(sample_dict)
if not direct:
pecan.request.notifier.sample(
{'user': def_user_id,
'tenant': def_project_id,
'is_admin': True},
'telemetry.api',
{'samples': published_samples})
return samples
@wsme_pecan.wsexpose([Statistics],
[base.Query], [six.text_type], int, [Aggregate])
def statistics(self, q=None, groupby=None, period=None, aggregate=None):
"""Computes the statistics of the samples in the time range given.
:param q: Filter rules for the data to be returned.
:param groupby: Fields for group by aggregation
:param period: Returned result will be an array of statistics for a
period long of that number of seconds.
:param aggregate: The selectable aggregation functions to be applied.
"""
rbac.enforce('compute_statistics', pecan.request)
q = q or []
groupby = groupby or []
aggregate = aggregate or []
if period and period < 0:
raise base.ClientSideError(_("Period must be positive."))
kwargs = v2_utils.query_to_kwargs(q, storage.SampleFilter.__init__)
kwargs['meter'] = self.meter_name
f = storage.SampleFilter(**kwargs)
g = _validate_groupby_fields(groupby)
aggregate = utils.uniq(aggregate, ['func', 'param'])
# Find the original timestamp in the query to use for clamping
# the duration returned in the statistics.
start = end = None
for i in q:
if i.field == 'timestamp' and i.op in ('lt', 'le'):
end = timeutils.parse_isotime(i.value).replace(
tzinfo=None)
elif i.field == 'timestamp' and i.op in ('gt', 'ge'):
start = timeutils.parse_isotime(i.value).replace(
tzinfo=None)
try:
computed = pecan.request.storage_conn.get_meter_statistics(
f, period, g, aggregate)
return [Statistics(start_timestamp=start,
end_timestamp=end,
**c.as_dict())
for c in computed]
except OverflowError as e:
params = dict(period=period, err=e)
raise base.ClientSideError(
_("Invalid period %(period)s: %(err)s") % params)
class Meter(base.Base):
"""One category of measurements."""
name = wtypes.text
"The unique name for the meter"
type = wtypes.Enum(str, *sample.TYPES)
"The meter type (see :ref:`measurements`)"
unit = wtypes.text
"The unit of measure"
resource_id = wtypes.text
"The ID of the :class:`Resource` for which the measurements are taken"
project_id = wtypes.text
"The ID of the project or tenant that owns the resource"
user_id = wtypes.text
"The ID of the user who last triggered an update to the resource"
source = wtypes.text
"The ID of the source that identifies where the meter comes from"
meter_id = wtypes.text
"The unique identifier for the meter"
def __init__(self, **kwargs):
meter_id = '%s+%s' % (kwargs['resource_id'], kwargs['name'])
# meter_id is of type Unicode, but base64.b64encode() only accepts
# bytes, so encode it first. See bug #1333177
meter_id = base64.b64encode(meter_id.encode('utf-8'))
kwargs['meter_id'] = meter_id
super(Meter, self).__init__(**kwargs)
@classmethod
def sample(cls):
return cls(name='instance',
type='gauge',
unit='instance',
resource_id='bd9431c1-8d69-4ad3-803a-8d4a6b89fd36',
project_id='35b17138-b364-4e6a-a131-8f3099c5be68',
user_id='efd87807-12d2-4b38-9c70-5f5c2ac427ff',
source='openstack',
)
class MetersController(rest.RestController):
"""Works on meters."""
@pecan.expose()
def _lookup(self, meter_name, *remainder):
return MeterController(meter_name), remainder
@wsme_pecan.wsexpose([Meter], [base.Query], int, str)
def get_all(self, q=None, limit=None, unique=''):
"""Return all known meters, based on the data recorded so far.
:param q: Filter rules for the meters to be returned.
:param limit: Maximum number of meters to return.
:param unique: flag to indicate unique meters to be returned.
"""
rbac.enforce('get_meters', pecan.request)
q = q or []
# Timestamp field is not supported for Meter queries
limit = v2_utils.enforce_limit(limit)
kwargs = v2_utils.query_to_kwargs(
q, pecan.request.storage_conn.get_meters,
['limit'], allow_timestamps=False)
return [Meter.from_db_model(m)
for m in pecan.request.storage_conn.get_meters(
limit=limit, unique=strutils.bool_from_string(unique),
**kwargs)]
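
The Meter constructor above derives meter_id by base64-encoding '<resource_id>+<name>'; the opaque meter_id strings in the JSON samples earlier in this commit are exactly that. A round trip:

import base64

resource_id = 'bd9431c1-8d69-4ad3-803a-8d4a6b89fd36'
name = 'instance'

meter_id = base64.b64encode(('%s+%s' % (resource_id, name)).encode('utf-8'))
print(meter_id.decode('ascii'))
# YmQ5NDMxYzEtOGQ2OS00YWQzLTgwM2EtOGQ0YTZiODlmZDM2K2luc3RhbmNl

assert base64.b64decode(meter_id).decode('utf-8') == resource_id + '+' + name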

View File

@ -1,359 +0,0 @@
#
# Copyright 2012 New Dream Network, LLC (DreamHost)
# Copyright 2013 IBM Corp.
# Copyright 2013 eNovance <licensing@enovance.com>
# Copyright Ericsson AB 2013. All rights reserved
# Copyright 2014 Hewlett-Packard Company
# Copyright 2015 Huawei Technologies Co., Ltd.
#
# Licensed under the Apache License, Version 2.0 (the "License"); you may
# not use this file except in compliance with the License. You may obtain
# a copy of the License at
#
# http://www.apache.org/licenses/LICENSE-2.0
#
# Unless required by applicable law or agreed to in writing, software
# distributed under the License is distributed on an "AS IS" BASIS, WITHOUT
# WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. See the
# License for the specific language governing permissions and limitations
# under the License.
import json
import jsonschema
from oslo_log import log
from oslo_utils import timeutils
import pecan
from pecan import rest
from wsme import types as wtypes
import wsmeext.pecan as wsme_pecan
from ceilometer.api.controllers.v2 import base
from ceilometer.api.controllers.v2 import samples
from ceilometer.api.controllers.v2 import utils as v2_utils
from ceilometer.api import rbac
from ceilometer.i18n import _
from ceilometer import storage
from ceilometer import utils
LOG = log.getLogger(__name__)
class ComplexQuery(base.Base):
"""Holds a sample query encoded in json."""
filter = wtypes.text
"The filter expression encoded in json."
orderby = wtypes.text
"List of single-element dicts for specifying the ordering of the results."
limit = int
"The maximum number of results to be returned."
@classmethod
def sample(cls):
return cls(filter='{"and": [{"and": [{"=": ' +
'{"counter_name": "cpu_util"}}, ' +
'{">": {"counter_volume": 0.23}}, ' +
'{"<": {"counter_volume": 0.26}}]}, ' +
'{"or": [{"and": [{">": ' +
'{"timestamp": "2013-12-01T18:00:00"}}, ' +
'{"<": ' +
'{"timestamp": "2013-12-01T18:15:00"}}]}, ' +
'{"and": [{">": ' +
'{"timestamp": "2013-12-01T18:30:00"}}, ' +
'{"<": ' +
'{"timestamp": "2013-12-01T18:45:00"}}]}]}]}',
orderby='[{"counter_volume": "ASC"}, ' +
'{"timestamp": "DESC"}]',
limit=42
)
def _list_to_regexp(items, regexp_prefix=""):
regexp = ["^%s$" % item for item in items]
regexp = regexp_prefix + "|".join(regexp)
return regexp
class ValidatedComplexQuery(object):
complex_operators = ["and", "or"]
order_directions = ["asc", "desc"]
simple_ops = ["=", "!=", "<", ">", "<=", "=<", ">=", "=>", "=~"]
regexp_prefix = "(?i)"
complex_ops = _list_to_regexp(complex_operators, regexp_prefix)
simple_ops = _list_to_regexp(simple_ops, regexp_prefix)
order_directions = _list_to_regexp(order_directions, regexp_prefix)
timestamp_fields = ["timestamp", "state_timestamp"]
def __init__(self, query, db_model, additional_name_mapping=None,
metadata_allowed=False):
additional_name_mapping = additional_name_mapping or {}
self.name_mapping = {"user": "user_id",
"project": "project_id"}
self.name_mapping.update(additional_name_mapping)
valid_keys = db_model.get_field_names()
valid_keys = list(valid_keys) + list(self.name_mapping.keys())
valid_fields = _list_to_regexp(valid_keys)
if metadata_allowed:
valid_filter_fields = valid_fields + "|^metadata\.[\S]+$"
else:
valid_filter_fields = valid_fields
schema_value = {
"oneOf": [{"type": "string"},
{"type": "number"},
{"type": "boolean"}],
"minProperties": 1,
"maxProperties": 1}
schema_value_in = {
"type": "array",
"items": {"oneOf": [{"type": "string"},
{"type": "number"}]},
"minItems": 1}
schema_field = {
"type": "object",
"patternProperties": {valid_filter_fields: schema_value},
"additionalProperties": False,
"minProperties": 1,
"maxProperties": 1}
schema_field_in = {
"type": "object",
"patternProperties": {valid_filter_fields: schema_value_in},
"additionalProperties": False,
"minProperties": 1,
"maxProperties": 1}
schema_leaf_in = {
"type": "object",
"patternProperties": {"(?i)^in$": schema_field_in},
"additionalProperties": False,
"minProperties": 1,
"maxProperties": 1}
schema_leaf_simple_ops = {
"type": "object",
"patternProperties": {self.simple_ops: schema_field},
"additionalProperties": False,
"minProperties": 1,
"maxProperties": 1}
schema_and_or_array = {
"type": "array",
"items": {"$ref": "#"},
"minItems": 2}
schema_and_or = {
"type": "object",
"patternProperties": {self.complex_ops: schema_and_or_array},
"additionalProperties": False,
"minProperties": 1,
"maxProperties": 1}
schema_not = {
"type": "object",
"patternProperties": {"(?i)^not$": {"$ref": "#"}},
"additionalProperties": False,
"minProperties": 1,
"maxProperties": 1}
self.schema = {
"oneOf": [{"$ref": "#/definitions/leaf_simple_ops"},
{"$ref": "#/definitions/leaf_in"},
{"$ref": "#/definitions/and_or"},
{"$ref": "#/definitions/not"}],
"minProperties": 1,
"maxProperties": 1,
"definitions": {"leaf_simple_ops": schema_leaf_simple_ops,
"leaf_in": schema_leaf_in,
"and_or": schema_and_or,
"not": schema_not}}
self.orderby_schema = {
"type": "array",
"items": {
"type": "object",
"patternProperties":
{valid_fields:
{"type": "string",
"pattern": self.order_directions}},
"additionalProperties": False,
"minProperties": 1,
"maxProperties": 1}}
self.original_query = query
def validate(self, visibility_field):
"""Validates the query content and does the necessary conversions."""
if self.original_query.filter is wtypes.Unset:
self.filter_expr = None
else:
try:
self.filter_expr = json.loads(self.original_query.filter)
self._validate_filter(self.filter_expr)
except (ValueError, jsonschema.exceptions.ValidationError) as e:
raise base.ClientSideError(
_("Filter expression not valid: %s") % e)
self._replace_isotime_with_datetime(self.filter_expr)
self._convert_operator_to_lower_case(self.filter_expr)
self._normalize_field_names_for_db_model(self.filter_expr)
self._force_visibility(visibility_field)
if self.original_query.orderby is wtypes.Unset:
self.orderby = None
else:
try:
self.orderby = json.loads(self.original_query.orderby)
self._validate_orderby(self.orderby)
except (ValueError, jsonschema.exceptions.ValidationError) as e:
raise base.ClientSideError(
_("Order-by expression not valid: %s") % e)
self._convert_orderby_to_lower_case(self.orderby)
self._normalize_field_names_in_orderby(self.orderby)
self.limit = (None if self.original_query.limit is wtypes.Unset
else self.original_query.limit)
self.limit = v2_utils.enforce_limit(self.limit)
@staticmethod
def _convert_orderby_to_lower_case(orderby):
for orderby_field in orderby:
utils.lowercase_values(orderby_field)
def _normalize_field_names_in_orderby(self, orderby):
for orderby_field in orderby:
self._replace_field_names(orderby_field)
def _traverse_postorder(self, tree, visitor):
op = list(tree.keys())[0]
if op.lower() in self.complex_operators:
for i, operand in enumerate(tree[op]):
self._traverse_postorder(operand, visitor)
if op.lower() == "not":
self._traverse_postorder(tree[op], visitor)
visitor(tree)
def _check_cross_project_references(self, own_project_id,
visibility_field):
"""Do not allow other than own_project_id."""
def check_project_id(subfilter):
op, value = list(subfilter.items())[0]
if (op.lower() not in self.complex_operators
and list(value.keys())[0] == visibility_field
and value[visibility_field] != own_project_id):
raise base.ProjectNotAuthorized(value[visibility_field])
self._traverse_postorder(self.filter_expr, check_project_id)
def _force_visibility(self, visibility_field):
"""Force visibility field.
If the tenant is not admin, insert an extra
"and <visibility_field>=<tenant's project_id>" clause to the query.
"""
authorized_project = rbac.get_limited_to_project(pecan.request.headers)
is_admin = authorized_project is None
if not is_admin:
self._restrict_to_project(authorized_project, visibility_field)
self._check_cross_project_references(authorized_project,
visibility_field)
def _restrict_to_project(self, project_id, visibility_field):
restriction = {"=": {visibility_field: project_id}}
if self.filter_expr is None:
self.filter_expr = restriction
else:
self.filter_expr = {"and": [restriction, self.filter_expr]}
def _replace_isotime_with_datetime(self, filter_expr):
def replace_isotime(subfilter):
op, value = list(subfilter.items())[0]
if op.lower() not in self.complex_operators:
field = list(value.keys())[0]
if field in self.timestamp_fields:
date_time = self._convert_to_datetime(subfilter[op][field])
subfilter[op][field] = date_time
self._traverse_postorder(filter_expr, replace_isotime)
def _normalize_field_names_for_db_model(self, filter_expr):
def _normalize_field_names(subfilter):
op, value = list(subfilter.items())[0]
if op.lower() not in self.complex_operators:
self._replace_field_names(value)
self._traverse_postorder(filter_expr,
_normalize_field_names)
def _replace_field_names(self, subfilter):
field, value = list(subfilter.items())[0]
if field in self.name_mapping:
del subfilter[field]
subfilter[self.name_mapping[field]] = value
if field.startswith("metadata."):
del subfilter[field]
subfilter["resource_" + field] = value
def _convert_operator_to_lower_case(self, filter_expr):
self._traverse_postorder(filter_expr, utils.lowercase_keys)
@staticmethod
def _convert_to_datetime(isotime):
try:
date_time = timeutils.parse_isotime(isotime)
date_time = date_time.replace(tzinfo=None)
return date_time
except ValueError:
LOG.exception("String %s is not a valid isotime" % isotime)
msg = _('Failed to parse the timestamp value %s') % isotime
raise base.ClientSideError(msg)
def _validate_filter(self, filter_expr):
jsonschema.validate(filter_expr, self.schema)
def _validate_orderby(self, orderby_expr):
jsonschema.validate(orderby_expr, self.orderby_schema)
class QuerySamplesController(rest.RestController):
"""Provides complex query possibilities for samples."""
@wsme_pecan.wsexpose([samples.Sample], body=ComplexQuery)
def post(self, body):
"""Define query for retrieving Sample data.
:param body: Query rules for the samples to be returned.
"""
rbac.enforce('query_sample', pecan.request)
sample_name_mapping = {"resource": "resource_id",
"meter": "counter_name",
"type": "counter_type",
"unit": "counter_unit",
"volume": "counter_volume"}
query = ValidatedComplexQuery(body,
storage.models.Sample,
sample_name_mapping,
metadata_allowed=True)
query.validate(visibility_field="project_id")
conn = pecan.request.storage_conn
return [samples.Sample.from_db_model(s)
for s in conn.query_samples(query.filter_expr,
query.orderby,
query.limit)]
class QueryController(rest.RestController):
samples = QuerySamplesController()
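
ValidatedComplexQuery builds a self-referential JSON schema ("$ref": "#") so and/or nodes can nest to any depth. A pared-down sketch of that idea, runnable with the jsonschema package, validating a fragment of the sample filter above:

import json
import jsonschema

schema = {
    "definitions": {
        "leaf": {"type": "object",
                 "patternProperties": {"^(=|!=|<|>|<=|>=)$":
                                       {"type": "object"}},
                 "additionalProperties": False,
                 "minProperties": 1, "maxProperties": 1},
        "and_or": {"type": "object",
                   "patternProperties": {"^(and|or)$":
                                         {"type": "array",
                                          "items": {"$ref": "#"},
                                          "minItems": 2}},
                   "additionalProperties": False,
                   "minProperties": 1, "maxProperties": 1},
    },
    "oneOf": [{"$ref": "#/definitions/leaf"},
              {"$ref": "#/definitions/and_or"}],
}

filter_expr = json.loads('{"and": [{"=": {"counter_name": "cpu_util"}},'
                         ' {">": {"counter_volume": 0.23}}]}')
jsonschema.validate(filter_expr, schema)  # raises ValidationError if invalid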

View File

@ -1,158 +0,0 @@
#
# Copyright 2012 New Dream Network, LLC (DreamHost)
# Copyright 2013 IBM Corp.
# Copyright 2013 eNovance <licensing@enovance.com>
# Copyright Ericsson AB 2013. All rights reserved
# Copyright 2014 Hewlett-Packard Company
# Copyright 2015 Huawei Technologies Co., Ltd.
#
# Licensed under the Apache License, Version 2.0 (the "License"); you may
# not use this file except in compliance with the License. You may obtain
# a copy of the License at
#
# http://www.apache.org/licenses/LICENSE-2.0
#
# Unless required by applicable law or agreed to in writing, software
# distributed under the License is distributed on an "AS IS" BASIS, WITHOUT
# WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. See the
# License for the specific language governing permissions and limitations
# under the License.
import datetime
from six.moves import urllib
import pecan
from pecan import rest
import six
from wsme import types as wtypes
import wsmeext.pecan as wsme_pecan
from ceilometer.api.controllers.v2 import base
from ceilometer.api.controllers.v2 import utils
from ceilometer.api import rbac
from ceilometer.i18n import _
class Resource(base.Base):
"""An externally defined object for which samples have been received."""
resource_id = wtypes.text
"The unique identifier for the resource"
project_id = wtypes.text
"The ID of the owning project or tenant"
user_id = wtypes.text
"The ID of the user who created the resource or updated it last"
first_sample_timestamp = datetime.datetime
"UTC date & time not later than the first sample known for this resource"
last_sample_timestamp = datetime.datetime
"UTC date & time not earlier than the last sample known for this resource"
metadata = {wtypes.text: wtypes.text}
"Arbitrary metadata associated with the resource"
links = [base.Link]
"A list containing a self link and associated meter links"
source = wtypes.text
"The source where the resource come from"
def __init__(self, metadata=None, **kwds):
metadata = metadata or {}
metadata = utils.flatten_metadata(metadata)
super(Resource, self).__init__(metadata=metadata, **kwds)
@classmethod
def sample(cls):
return cls(
resource_id='bd9431c1-8d69-4ad3-803a-8d4a6b89fd36',
project_id='35b17138-b364-4e6a-a131-8f3099c5be68',
user_id='efd87807-12d2-4b38-9c70-5f5c2ac427ff',
timestamp=datetime.datetime(2015, 1, 1, 12, 0, 0, 0),
source="openstack",
metadata={'name1': 'value1',
'name2': 'value2'},
links=[
base.Link(href=('http://localhost:8777/v2/resources/'
'bd9431c1-8d69-4ad3-803a-8d4a6b89fd36'),
rel='self'),
base.Link(href=('http://localhost:8777/v2/meters/volume?'
'q.field=resource_id&q.value='
'bd9431c1-8d69-4ad3-803a-8d4a6b89fd36'),
rel='volume')
],
)
class ResourcesController(rest.RestController):
"""Works on resources."""
@staticmethod
def _make_link(rel_name, url, type, type_arg, query=None):
query_str = ''
if query:
query_str = '?q.field=%s&q.value=%s' % (query['field'],
query['value'])
return base.Link(href='%s/v2/%s/%s%s' % (url, type,
type_arg, query_str),
rel=rel_name)
def _resource_links(self, resource_id, meter_links=1):
links = [self._make_link('self', pecan.request.application_url,
'resources', resource_id)]
if meter_links:
for meter in pecan.request.storage_conn.get_meters(
resource=resource_id):
query = {'field': 'resource_id', 'value': resource_id}
links.append(self._make_link(meter.name,
pecan.request.application_url,
'meters', meter.name,
query=query))
return links
@wsme_pecan.wsexpose(Resource, six.text_type)
def get_one(self, resource_id):
"""Retrieve details about one resource.
:param resource_id: The UUID of the resource.
"""
rbac.enforce('get_resource', pecan.request)
# In case we have special character in resource id, for example, swift
# can generate samples with resource id like
# 29f809d9-88bb-4c40-b1ba-a77a1fcf8ceb/glance
resource_id = urllib.parse.unquote(resource_id)
authorized_project = rbac.get_limited_to_project(pecan.request.headers)
resources = list(pecan.request.storage_conn.get_resources(
resource=resource_id, project=authorized_project))
if not resources:
raise base.EntityNotFound(_('Resource'), resource_id)
return Resource.from_db_and_links(resources[0],
self._resource_links(resource_id))
@wsme_pecan.wsexpose([Resource], [base.Query], int, int)
def get_all(self, q=None, limit=None, meter_links=1):
"""Retrieve definitions of all of the resources.
:param q: Filter rules for the resources to be returned.
:param limit: Maximum number of resources to return.
:param meter_links: option to include related meter links.
"""
rbac.enforce('get_resources', pecan.request)
q = q or []
limit = utils.enforce_limit(limit)
kwargs = utils.query_to_kwargs(
q, pecan.request.storage_conn.get_resources, ['limit'])
resources = [
Resource.from_db_and_links(r,
self._resource_links(r.resource_id,
meter_links))
for r in pecan.request.storage_conn.get_resources(limit=limit,
**kwargs)]
return resources
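
_make_link above is how the 'self' and per-meter links in the resource samples earlier in this commit are assembled. Extracted standalone, minus the wsme Link type:

def make_link(rel_name, url, type_, type_arg, query=None):
    query_str = ''
    if query:
        query_str = '?q.field=%s&q.value=%s' % (query['field'],
                                                query['value'])
    return {'rel': rel_name,
            'href': '%s/v2/%s/%s%s' % (url, type_, type_arg, query_str)}

print(make_link('volume', 'http://localhost:8777', 'meters', 'volume',
                {'field': 'resource_id',
                 'value': 'bd9431c1-8d69-4ad3-803a-8d4a6b89fd36'}))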

View File

@ -1,218 +0,0 @@
#
# Copyright 2012 New Dream Network, LLC (DreamHost)
# Copyright 2013 IBM Corp.
# Copyright 2013 eNovance <licensing@enovance.com>
# Copyright Ericsson AB 2013. All rights reserved
# Copyright 2014 Hewlett-Packard Company
# Copyright 2015 Huawei Technologies Co., Ltd.
#
# Licensed under the Apache License, Version 2.0 (the "License"); you may
# not use this file except in compliance with the License. You may obtain
# a copy of the License at
#
# http://www.apache.org/licenses/LICENSE-2.0
#
# Unless required by applicable law or agreed to in writing, software
# distributed under the License is distributed on an "AS IS" BASIS, WITHOUT
# WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. See the
# License for the specific language governing permissions and limitations
# under the License.
from keystoneauth1 import exceptions
from oslo_config import cfg
from oslo_log import log
from oslo_utils import strutils
import pecan
from ceilometer.api.controllers.v2 import capabilities
from ceilometer.api.controllers.v2 import meters
from ceilometer.api.controllers.v2 import query
from ceilometer.api.controllers.v2 import resources
from ceilometer.api.controllers.v2 import samples
from ceilometer.i18n import _
from ceilometer import keystone_client
API_OPTS = [
cfg.BoolOpt('gnocchi_is_enabled',
help=('Set True to disable resource/meter/sample URLs. '
'Default autodetection by querying keystone.')),
cfg.BoolOpt('aodh_is_enabled',
help=('Set True to redirect alarms URLs to aodh. '
'Default autodetection by querying keystone.')),
cfg.StrOpt('aodh_url',
help=('The endpoint of Aodh to redirect alarms URLs '
'to Aodh API. Default autodetection by querying '
'keystone.')),
cfg.BoolOpt('panko_is_enabled',
help=('Set True to redirect events URLs to Panko. '
'Default autodetection by querying keystone.')),
cfg.StrOpt('panko_url',
help=('The endpoint of Panko to redirect events URLs '
'to Panko API. Default autodetection by querying '
'keystone.')),
]
LOG = log.getLogger(__name__)
def gnocchi_abort():
pecan.abort(410, ("This telemetry installation is configured to use "
"Gnocchi. Please use the Gnocchi API available on "
"the metric endpoint to retrieve data."))
def aodh_abort():
pecan.abort(410, _("alarms URLs is unavailable when Aodh is "
"disabled or unavailable."))
def _redirect(url):
pecan.redirect(location=url + pecan.request.path_qs,
code=308)
class QueryController(object):
def __init__(self, gnocchi_is_enabled=False,
aodh_url=None):
self.gnocchi_is_enabled = gnocchi_is_enabled
self.aodh_url = aodh_url
@pecan.expose()
def _lookup(self, kind, *remainder):
if kind == 'alarms' and self.aodh_url:
_redirect(self.aodh_url)
elif kind == 'alarms':
aodh_abort()
elif kind == 'samples' and self.gnocchi_is_enabled:
gnocchi_abort()
elif kind == 'samples':
return query.QuerySamplesController(), remainder
else:
pecan.abort(404)
class V2Controller(object):
"""Version 2 API controller root."""
capabilities = capabilities.CapabilitiesController()
def __init__(self):
self._gnocchi_is_enabled = None
self._aodh_is_enabled = None
self._aodh_url = None
self._panko_is_enabled = None
self._panko_url = None
@property
def gnocchi_is_enabled(self):
if self._gnocchi_is_enabled is None:
if pecan.request.cfg.api.gnocchi_is_enabled is not None:
self._gnocchi_is_enabled = (
pecan.request.cfg.api.gnocchi_is_enabled)
else:
try:
catalog = keystone_client.get_service_catalog(
keystone_client.get_client(pecan.request.cfg))
catalog.url_for(service_type='metric')
except exceptions.EndpointNotFound:
self._gnocchi_is_enabled = False
except exceptions.ClientException:
LOG.warning("Can't connect to keystone, assuming "
"gnocchi is disabled and retry later")
else:
self._gnocchi_is_enabled = True
LOG.warning("ceilometer-api started with gnocchi "
"enabled. The resources/meters/samples "
"URLs are disabled.")
return self._gnocchi_is_enabled
@property
def aodh_url(self):
if self._aodh_url is None:
if pecan.request.cfg.api.aodh_is_enabled is False:
self._aodh_url = ""
elif pecan.request.cfg.api.aodh_url is not None:
self._aodh_url = self._normalize_url(
pecan.request.cfg.api.aodh_url)
else:
try:
catalog = keystone_client.get_service_catalog(
keystone_client.get_client(pecan.request.cfg))
self._aodh_url = self._normalize_url(
catalog.url_for(service_type='alarming'))
except exceptions.EndpointNotFound:
self._aodh_url = ""
except exceptions.ClientException:
LOG.warning("Can't connect to keystone, assuming aodh "
"is disabled and retry later.")
else:
LOG.warning("ceilometer-api started with aodh "
"enabled. Alarms URLs will be redirected "
"to aodh endpoint.")
return self._aodh_url
@property
def panko_url(self):
if self._panko_url is None:
if pecan.request.cfg.api.panko_is_enabled is False:
self._panko_url = ""
elif pecan.request.cfg.api.panko_url is not None:
self._panko_url = self._normalize_url(
pecan.request.cfg.api.panko_url)
else:
try:
catalog = keystone_client.get_service_catalog(
keystone_client.get_client(pecan.request.cfg))
self._panko_url = self._normalize_url(
catalog.url_for(service_type='event'))
except exceptions.EndpointNotFound:
self._panko_url = ""
except exceptions.ClientException:
LOG.warning(
"Can't connect to keystone, assuming Panko "
"is disabled and retry later.")
else:
LOG.warning("ceilometer-api started with Panko "
"enabled. Events URLs will be redirected "
"to Panko endpoint.")
return self._panko_url
@pecan.expose()
def _lookup(self, kind, *remainder):
if (kind in ['meters', 'resources', 'samples']
and self.gnocchi_is_enabled):
if kind == 'meters' and pecan.request.method == 'POST':
direct = pecan.request.params.get('direct', '')
if strutils.bool_from_string(direct):
pecan.abort(400, _('direct option cannot be true when '
'Gnocchi is enabled.'))
return meters.MetersController(), remainder
gnocchi_abort()
elif kind == 'meters':
return meters.MetersController(), remainder
elif kind == 'resources':
return resources.ResourcesController(), remainder
elif kind == 'samples':
return samples.SamplesController(), remainder
elif kind == 'query':
return QueryController(
gnocchi_is_enabled=self.gnocchi_is_enabled,
aodh_url=self.aodh_url,
), remainder
elif kind == 'alarms' and (not self.aodh_url):
aodh_abort()
elif kind == 'alarms' and self.aodh_url:
_redirect(self.aodh_url)
elif kind == 'events' and self.panko_url:
return _redirect(self.panko_url)
elif kind == 'event_types' and self.panko_url:
return _redirect(self.panko_url)
else:
pecan.abort(404)
@staticmethod
def _normalize_url(url):
if url.endswith("/"):
return url[:-1]
return url
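
The dispatch above boils down to: when a replacement service is known, issue a 308 redirect preserving the original path and query string; otherwise abort. A sketch of the URL handling, with the endpoint values as placeholder assumptions:

def normalize_url(url):
    # Strip one trailing slash, as V2Controller._normalize_url does.
    return url[:-1] if url.endswith('/') else url

def redirect_target(kind, path_qs, aodh_url=None, panko_url=None):
    # Return the new location for 'alarms'/'events', or None when the
    # request should be aborted instead of redirected.
    base = {'alarms': aodh_url, 'events': panko_url}.get(kind)
    return normalize_url(base) + path_qs if base else None

print(redirect_target('alarms', '/v2/alarms?limit=10',
                      aodh_url='http://aodh.example.org:8042/'))
# http://aodh.example.org:8042/v2/alarms?limit=10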

View File

@ -1,145 +0,0 @@
#
# Copyright 2012 New Dream Network, LLC (DreamHost)
# Copyright 2013 IBM Corp.
# Copyright 2013 eNovance <licensing@enovance.com>
# Copyright Ericsson AB 2013. All rights reserved
# Copyright 2014 Hewlett-Packard Company
# Copyright 2015 Huawei Technologies Co., Ltd.
#
# Licensed under the Apache License, Version 2.0 (the "License"); you may
# not use this file except in compliance with the License. You may obtain
# a copy of the License at
#
# http://www.apache.org/licenses/LICENSE-2.0
#
# Unless required by applicable law or agreed to in writing, software
# distributed under the License is distributed on an "AS IS" BASIS, WITHOUT
# WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. See the
# License for the specific language governing permissions and limitations
# under the License.
import datetime
import uuid
import pecan
from pecan import rest
from wsme import types as wtypes
import wsmeext.pecan as wsme_pecan
from ceilometer.api.controllers.v2 import base
from ceilometer.api.controllers.v2 import utils
from ceilometer.api import rbac
from ceilometer.i18n import _
from ceilometer import sample
from ceilometer import storage
class Sample(base.Base):
"""One measurement."""
id = wtypes.text
"The unique identifier for the sample."
meter = wtypes.text
"The meter name this sample is for."
type = wtypes.Enum(str, *sample.TYPES)
"The meter type (see :ref:`meter_types`)"
unit = wtypes.text
"The unit of measure."
volume = float
"The metered value."
user_id = wtypes.text
"The user this sample was taken for."
project_id = wtypes.text
"The project this sample was taken for."
resource_id = wtypes.text
"The :class:`Resource` this sample was taken for."
source = wtypes.text
"The source that identifies where the sample comes from."
timestamp = datetime.datetime
"When the sample has been generated."
recorded_at = datetime.datetime
"When the sample has been recorded."
metadata = {wtypes.text: wtypes.text}
"Arbitrary metadata associated with the sample."
@classmethod
def from_db_model(cls, m):
return cls(id=m.message_id,
meter=m.counter_name,
type=m.counter_type,
unit=m.counter_unit,
volume=m.counter_volume,
user_id=m.user_id,
project_id=m.project_id,
resource_id=m.resource_id,
source=m.source,
timestamp=m.timestamp,
recorded_at=m.recorded_at,
metadata=utils.flatten_metadata(m.resource_metadata))
@classmethod
def sample(cls):
return cls(id=str(uuid.uuid1()),
meter='instance',
type='gauge',
unit='instance',
volume=1,
resource_id='bd9431c1-8d69-4ad3-803a-8d4a6b89fd36',
project_id='35b17138-b364-4e6a-a131-8f3099c5be68',
user_id='efd87807-12d2-4b38-9c70-5f5c2ac427ff',
timestamp=datetime.datetime(2015, 1, 1, 12, 0, 0, 0),
recorded_at=datetime.datetime(2015, 1, 1, 12, 0, 0, 0),
source='openstack',
metadata={'name1': 'value1',
'name2': 'value2'},
)
class SamplesController(rest.RestController):
"""Controller managing the samples."""
@wsme_pecan.wsexpose([Sample], [base.Query], int)
def get_all(self, q=None, limit=None):
"""Return all known samples, based on the data recorded so far.
:param q: Filter rules for the samples to be returned.
:param limit: Maximum number of samples to be returned.
"""
rbac.enforce('get_samples', pecan.request)
q = q or []
limit = utils.enforce_limit(limit)
kwargs = utils.query_to_kwargs(q, storage.SampleFilter.__init__)
f = storage.SampleFilter(**kwargs)
return map(Sample.from_db_model,
pecan.request.storage_conn.get_samples(f, limit=limit))
@wsme_pecan.wsexpose(Sample, wtypes.text)
def get_one(self, sample_id):
"""Return a sample.
:param sample_id: the id of the sample.
"""
rbac.enforce('get_sample', pecan.request)
f = storage.SampleFilter(message_id=sample_id)
samples = list(pecan.request.storage_conn.get_samples(f))
if len(samples) < 1:
raise base.EntityNotFound(_('Sample'), sample_id)
return Sample.from_db_model(samples[0])
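For context, a sketch of how a client would exercise this controller over HTTP. The endpoint, port, and token are illustrative assumptions; the q.field/q.op/q.value encoding mirrors the query parameters expanded by the functional test helper later in this commit:

import requests

resp = requests.get(
    'http://localhost:8777/v2/samples',
    headers={'X-Auth-Token': '<token>'},
    params={'q.field': 'meter',
            'q.op': 'eq',
            'q.value': 'meter.test',
            'limit': 10})
for s in resp.json():
    print(s['id'], s['meter'], s['volume'], s['timestamp'])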


@ -1,316 +0,0 @@
#
# Copyright 2012 New Dream Network, LLC (DreamHost)
# Copyright 2013 IBM Corp.
# Copyright 2013 eNovance <licensing@enovance.com>
# Copyright Ericsson AB 2013. All rights reserved
# Copyright 2014 Hewlett-Packard Company
# Copyright 2015 Huawei Technologies Co., Ltd.
#
# Licensed under the Apache License, Version 2.0 (the "License"); you may
# not use this file except in compliance with the License. You may obtain
# a copy of the License at
#
# http://www.apache.org/licenses/LICENSE-2.0
#
# Unless required by applicable law or agreed to in writing, software
# distributed under the License is distributed on an "AS IS" BASIS, WITHOUT
# WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. See the
# License for the specific language governing permissions and limitations
# under the License.
import copy
import datetime
import inspect
from oslo_log import log
from oslo_utils import timeutils
import pecan
import six
import wsme
from ceilometer.api.controllers.v2 import base
from ceilometer.api import rbac
from ceilometer.i18n import _
from ceilometer import utils
LOG = log.getLogger(__name__)
def enforce_limit(limit):
"""Ensure limit is defined and is valid. if not, set a default."""
if limit is None:
limit = pecan.request.cfg.api.default_api_return_limit
LOG.info('No limit value provided, result set will be'
' limited to %(limit)d.', {'limit': limit})
if not limit or limit <= 0:
raise base.ClientSideError(_("Limit must be positive"))
return limit
def get_auth_project(on_behalf_of=None):
auth_project = rbac.get_limited_to_project(pecan.request.headers)
created_by = pecan.request.headers.get('X-Project-Id')
is_admin = auth_project is None
if is_admin and on_behalf_of != created_by:
auth_project = on_behalf_of
return auth_project
def sanitize_query(query, db_func, on_behalf_of=None):
"""Check the query.
See if:
1) the request is coming from admin - then allow full visibility
2) non-admin - make sure that the query includes the requester's project.
"""
q = copy.copy(query)
auth_project = get_auth_project(on_behalf_of)
if auth_project:
_verify_query_segregation(q, auth_project)
proj_q = [i for i in q if i.field == 'project_id']
valid_keys = inspect.getargspec(db_func)[0]
if not proj_q and 'on_behalf_of' not in valid_keys:
# The user is restricted, but they didn't specify a project
# so add it for them.
q.append(base.Query(field='project_id',
op='eq',
value=auth_project))
return q
def _verify_query_segregation(query, auth_project=None):
"""Ensure non-admin queries are not constrained to another project."""
auth_project = (auth_project or
rbac.get_limited_to_project(pecan.request.headers))
if not auth_project:
return
for q in query:
if q.field in ('project', 'project_id') and auth_project != q.value:
raise base.ProjectNotAuthorized(q.value)
def validate_query(query, db_func, internal_keys=None,
allow_timestamps=True):
"""Validates the syntax of the query and verifies the query.
Verification check if the query request is authorized for the included
project.
:param query: Query expression that should be validated
:param db_func: the function on the storage level, of which arguments
will form the valid_keys list, which defines the valid fields for a
query expression
:param internal_keys: internally used field names, that should not be
used for querying
:param allow_timestamps: defines whether the timestamp-based constraint is
applicable for this query or not
:raises InvalidInput: if an operator is not supported for a given field
:raises InvalidInput: if timestamp constraints are allowed, but
search_offset was included without timestamp constraint
:raises: UnknownArgument: if a field name is not a timestamp field, nor
in the list of valid keys
"""
internal_keys = internal_keys or []
_verify_query_segregation(query)
valid_keys = inspect.getargspec(db_func)[0]
internal_timestamp_keys = ['end_timestamp', 'start_timestamp',
'end_timestamp_op', 'start_timestamp_op']
if 'start_timestamp' in valid_keys:
internal_keys += internal_timestamp_keys
valid_keys += ['timestamp', 'search_offset']
internal_keys.append('self')
internal_keys.append('metaquery')
valid_keys = set(valid_keys) - set(internal_keys)
translation = {'user_id': 'user',
'project_id': 'project',
'resource_id': 'resource'}
has_timestamp_query = _validate_timestamp_fields(query,
'timestamp',
('lt', 'le', 'gt', 'ge'),
allow_timestamps)
has_search_offset_query = _validate_timestamp_fields(query,
'search_offset',
'eq',
allow_timestamps)
if has_search_offset_query and not has_timestamp_query:
raise wsme.exc.InvalidInput('field', 'search_offset',
"search_offset cannot be used without " +
"timestamp")
def _is_field_metadata(field):
return (field.startswith('metadata.') or
field.startswith('resource_metadata.'))
for i in query:
if i.field not in ('timestamp', 'search_offset'):
key = translation.get(i.field, i.field)
operator = i.op
if key in valid_keys or _is_field_metadata(i.field):
if operator == 'eq':
if key == 'enabled':
i._get_value_as_type('boolean')
elif _is_field_metadata(key):
i._get_value_as_type()
else:
raise wsme.exc.InvalidInput('op', i.op,
'unimplemented operator for '
'%s' % i.field)
else:
msg = ("unrecognized field in query: %s, "
"valid keys: %s") % (query, sorted(valid_keys))
raise wsme.exc.UnknownArgument(key, msg)
def _validate_timestamp_fields(query, field_name, operator_list,
allow_timestamps):
"""Validates the timestamp related constraints in a query if there are any.
:param query: query expression that may contain the timestamp fields
:param field_name: timestamp name, which should be checked (timestamp,
search_offset)
:param operator_list: list of operators that are supported for that
timestamp, which was specified in the parameter field_name
:param allow_timestamps: defines whether the timestamp-based constraint is
applicable to this query or not
:returns: True, if there was a timestamp constraint, containing
a timestamp field named as defined in field_name, in the query and it
was allowed and syntactically correct.
:returns: False, if there was no timestamp constraint containing a
timestamp field named as defined in field_name in the query
:raises InvalidInput: if an operator is unsupported for a given timestamp
field
:raises UnknownArgument: if the timestamp constraint is not allowed in
the query
"""
for item in query:
if item.field == field_name:
# If *timestamp* or *search_offset* field was specified in the
# query, but timestamp is not supported on that resource, on
# which the query was invoked, then raise an exception.
if not allow_timestamps:
raise wsme.exc.UnknownArgument(field_name,
"not valid for " +
"this resource")
if item.op not in operator_list:
raise wsme.exc.InvalidInput('op', item.op,
'unimplemented operator for %s' %
item.field)
return True
return False
def query_to_kwargs(query, db_func, internal_keys=None,
allow_timestamps=True):
validate_query(query, db_func, internal_keys=internal_keys,
allow_timestamps=allow_timestamps)
query = sanitize_query(query, db_func)
translation = {'user_id': 'user',
'project_id': 'project',
'resource_id': 'resource'}
stamp = {}
metaquery = {}
kwargs = {}
for i in query:
if i.field == 'timestamp':
if i.op in ('lt', 'le'):
stamp['end_timestamp'] = i.value
stamp['end_timestamp_op'] = i.op
elif i.op in ('gt', 'ge'):
stamp['start_timestamp'] = i.value
stamp['start_timestamp_op'] = i.op
else:
if i.op == 'eq':
if i.field == 'search_offset':
stamp['search_offset'] = i.value
elif i.field == 'enabled':
kwargs[i.field] = i._get_value_as_type('boolean')
elif i.field.startswith('metadata.'):
metaquery[i.field] = i._get_value_as_type()
elif i.field.startswith('resource_metadata.'):
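# len('resource_') == 9: strip the prefix so the key lines up with 'metadata.*'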
metaquery[i.field[9:]] = i._get_value_as_type()
else:
key = translation.get(i.field, i.field)
kwargs[key] = i.value
if metaquery and 'metaquery' in inspect.getargspec(db_func)[0]:
kwargs['metaquery'] = metaquery
if stamp:
kwargs.update(_get_query_timestamps(stamp))
return kwargs
def _get_query_timestamps(args=None):
"""Return any optional timestamp information in the request.
Determine the desired range, if any, from the GET arguments. Set
up the query range using the specified offset.
[query_start ... start_timestamp ... end_timestamp ... query_end]
Returns a dictionary containing:
start_timestamp: First timestamp to use for query
start_timestamp_op: First timestamp operator to use for query
end_timestamp: Final timestamp to use for query
end_timestamp_op: Final timestamp operator to use for query
"""
if args is None:
return {}
search_offset = int(args.get('search_offset', 0))
def _parse_timestamp(timestamp):
if not timestamp:
return None
try:
iso_timestamp = timeutils.parse_isotime(timestamp)
iso_timestamp = iso_timestamp.replace(tzinfo=None)
except ValueError:
raise wsme.exc.InvalidInput('timestamp', timestamp,
'invalid timestamp format')
return iso_timestamp
start_timestamp = _parse_timestamp(args.get('start_timestamp'))
end_timestamp = _parse_timestamp(args.get('end_timestamp'))
start_timestamp = start_timestamp - datetime.timedelta(
minutes=search_offset) if start_timestamp else None
end_timestamp = end_timestamp + datetime.timedelta(
minutes=search_offset) if end_timestamp else None
return {'start_timestamp': start_timestamp,
'end_timestamp': end_timestamp,
'start_timestamp_op': args.get('start_timestamp_op'),
'end_timestamp_op': args.get('end_timestamp_op')}
def flatten_metadata(metadata):
"""Return flattened resource metadata.
Metadata is returned with flattened nested structures (except nested sets)
and with all values converted to unicode strings.
"""
if metadata:
# After changing recursive_keypairs' output we need to keep
# the flattening output unchanged.
# Example: recursive_keypairs({'a': {'b':{'c':'d'}}}, '.')
# output before: a.b:c=d
# output now: a.b.c=d
# So to keep the first variant just replace all dots except the first
return dict((k.replace('.', ':').replace(':', '.', 1),
six.text_type(v))
for k, v in utils.recursive_keypairs(metadata,
separator='.')
if type(v) is not set)
return {}
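Two behaviour sketches for the helpers above, derived from the code and its comments. They assume an active API request context (pecan.request), the Query type from ceilometer.api.controllers.v2.base, and SampleFilter from ceilometer.storage:

# flatten_metadata(): nested keys are joined with '.', then every dot
# except the first is replaced with ':'; values become unicode strings.
flatten_metadata({'a': {'b': {'c': 'd'}}, 'flavor': {'ram': 512}})
# -> {'a.b:c': 'd', 'flavor.ram': '512'}

# query_to_kwargs(): field names are translated and timestamp
# constraints are split into start/end bounds.
q = [base.Query(field='user_id', op='eq', value='user-1'),
     base.Query(field='timestamp', op='ge', value='2015-01-01T00:00:00')]
query_to_kwargs(q, storage.SampleFilter.__init__)
# -> roughly {'user': 'user-1',
#             'start_timestamp': datetime.datetime(2015, 1, 1, 0, 0),
#             'start_timestamp_op': 'ge'}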


@ -1,91 +0,0 @@
#
# Copyright 2012 New Dream Network, LLC (DreamHost)
#
# Licensed under the Apache License, Version 2.0 (the "License"); you may
# not use this file except in compliance with the License. You may obtain
# a copy of the License at
#
# http://www.apache.org/licenses/LICENSE-2.0
#
# Unless required by applicable law or agreed to in writing, software
# distributed under the License is distributed on an "AS IS" BASIS, WITHOUT
# WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. See the
# License for the specific language governing permissions and limitations
# under the License.
from oslo_log import log
import oslo_messaging
from oslo_policy import policy
from pecan import hooks
from ceilometer import messaging
from ceilometer import storage
LOG = log.getLogger(__name__)
class ConfigHook(hooks.PecanHook):
"""Attach the configuration object to the request.
That allows controllers to get it.
"""
def __init__(self, conf):
super(ConfigHook, self).__init__()
self.conf = conf
self.enforcer = policy.Enforcer(conf)
self.enforcer.load_rules()
def on_route(self, state):
state.request.cfg = self.conf
state.request.enforcer = self.enforcer
class DBHook(hooks.PecanHook):
def __init__(self, conf):
self.storage_connection = self.get_connection(conf)
if not self.storage_connection:
raise Exception(
"API failed to start. Failed to connect to database")
def before(self, state):
state.request.storage_conn = self.storage_connection
@staticmethod
def get_connection(conf):
try:
return storage.get_connection_from_config(conf)
except Exception as err:
LOG.exception("Failed to connect to db" "retry later: %s",
err)
class NotifierHook(hooks.PecanHook):
"""Create and attach a notifier to the request.
Usually, samples will be pushed to the notification bus by the notifier
when they are posted via the /v2/meters/ API.
"""
def __init__(self, conf):
transport = messaging.get_transport(conf)
self.notifier = oslo_messaging.Notifier(
transport, driver=conf.publisher_notifier.telemetry_driver,
publisher_id="ceilometer.api")
def before(self, state):
state.request.notifier = self.notifier
class TranslationHook(hooks.PecanHook):
def after(self, state):
# After a request has been done, we need to see if
# ClientSideError has added an error onto the response.
# If it has we need to get it info the thread-safe WSGI
# environ to be used by the ParsableErrorMiddleware.
if hasattr(state.response, 'translatable_error'):
state.request.environ['translatable_error'] = (
state.response.translatable_error)
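A hedged sketch of how these hooks were typically attached when the Pecan app was assembled; the actual wiring lived in ceilometer.api.app, which this commit also removes, so the root controller path below is only an assumption:

import pecan

app_hooks = [ConfigHook(conf),
             DBHook(conf),
             NotifierHook(conf),
             TranslationHook()]
app = pecan.make_app('ceilometer.api.controllers.root.RootController',
                     hooks=app_hooks)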


@ -1,127 +0,0 @@
#
# Copyright 2013 IBM Corp.
# Copyright 2012 New Dream Network, LLC (DreamHost)
#
# Licensed under the Apache License, Version 2.0 (the "License"); you may
# not use this file except in compliance with the License. You may obtain
# a copy of the License at
#
# http://www.apache.org/licenses/LICENSE-2.0
#
# Unless required by applicable law or agreed to in writing, software
# distributed under the License is distributed on an "AS IS" BASIS, WITHOUT
# WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. See the
# License for the specific language governing permissions and limitations
# under the License.
"""Middleware to replace the plain text message body of an error
response with one formatted so the client can parse it.
Based on pecan.middleware.errordocument
"""
import json
from lxml import etree
from oslo_log import log
import six
import webob
from ceilometer import i18n
LOG = log.getLogger(__name__)
class ParsableErrorMiddleware(object):
"""Replace error body with something the client can parse."""
@staticmethod
def best_match_language(accept_language):
"""Determines best available locale from the Accept-Language header.
:returns: the best language match or None if the 'Accept-Language'
header was not available in the request.
"""
if not accept_language:
return None
all_languages = i18n.get_available_languages()
return accept_language.best_match(all_languages)
def __init__(self, app):
self.app = app
def __call__(self, environ, start_response):
# State for this request, modified by replacement_start_response()
# and used when an error is being reported.
state = {}
def replacement_start_response(status, headers, exc_info=None):
"""Overrides the default response to make errors parsable."""
try:
status_code = int(status.split(' ')[0])
state['status_code'] = status_code
except (ValueError, TypeError): # pragma: nocover
raise Exception((
'ErrorDocumentMiddleware received an invalid '
'status %s' % status
))
else:
if (state['status_code'] // 100) not in (2, 3):
# Remove some headers so we can replace them later
# when we have the full error message and can
# compute the length.
headers = [(h, v)
for (h, v) in headers
if h not in ('Content-Length', 'Content-Type')
]
# Save the headers in case we need to modify them.
state['headers'] = headers
return start_response(status, headers, exc_info)
app_iter = self.app(environ, replacement_start_response)
if (state['status_code'] // 100) not in (2, 3):
req = webob.Request(environ)
error = environ.get('translatable_error')
user_locale = self.best_match_language(req.accept_language)
if (req.accept.best_match(['application/json', 'application/xml'])
== 'application/xml'):
content_type = 'application/xml'
try:
# simple check xml is valid
fault = etree.fromstring(b'\n'.join(app_iter))
# Add the translated error to the xml data
if error is not None:
for fault_string in fault.findall('faultstring'):
fault_string.text = i18n.translate(error,
user_locale)
error_message = etree.tostring(fault)
body = b''.join((b'<error_message>',
error_message,
b'</error_message>'))
except etree.XMLSyntaxError as err:
LOG.error('Error parsing HTTP response: %s', err)
error_message = state['status_code']
body = '<error_message>%s</error_message>' % error_message
if six.PY3:
body = body.encode('utf-8')
else:
content_type = 'application/json'
app_data = b'\n'.join(app_iter)
if six.PY3:
app_data = app_data.decode('utf-8')
try:
fault = json.loads(app_data)
if error is not None and 'faultstring' in fault:
fault['faultstring'] = i18n.translate(error,
user_locale)
except ValueError as err:
fault = app_data
body = json.dumps({'error_message': fault})
if six.PY3:
body = body.encode('utf-8')
state['headers'].append(('Content-Length', str(len(body))))
state['headers'].append(('Content-Type', content_type))
body = [body]
else:
body = app_iter
return body
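A minimal usage sketch, assuming a plain WSGI callable underneath; it exercises the JSON branch above, which rewraps the error body under an error_message key:

import webob

def failing_app(environ, start_response):
    start_response('404 Not Found', [('Content-Type', 'text/plain')])
    return [b'{"faultstring": "no such path"}']

wrapped = ParsableErrorMiddleware(failing_app)
req = webob.Request.blank('/missing', accept='application/json')
resp = req.get_response(wrapped)
# resp.body == b'{"error_message": {"faultstring": "no such path"}}'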


@ -1,86 +0,0 @@
#
# Copyright 2012 New Dream Network, LLC (DreamHost)
# Copyright 2014 Hewlett-Packard Company
#
# Licensed under the Apache License, Version 2.0 (the "License"); you may
# not use this file except in compliance with the License. You may obtain
# a copy of the License at
#
# http://www.apache.org/licenses/LICENSE-2.0
#
# Unless required by applicable law or agreed to in writing, software
# distributed under the License is distributed on an "AS IS" BASIS, WITHOUT
# WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. See the
# License for the specific language governing permissions and limitations
# under the License.
"""Access Control Lists (ACL's) control access the API server."""
import pecan
def _has_rule(name):
return name in pecan.request.enforcer.rules.keys()
def enforce(policy_name, request):
"""Checks authorization of a rule against the request.
:param request: HTTP request
:param policy_name: the policy name to validate authz against.
"""
rule_method = "telemetry:" + policy_name
headers = request.headers
policy_dict = dict()
policy_dict['roles'] = headers.get('X-Roles', "").split(",")
policy_dict['user_id'] = (headers.get('X-User-Id'))
policy_dict['project_id'] = (headers.get('X-Project-Id'))
# maintain backward compat with Juno and previous by allowing the action if
# there is no rule defined for it
if ((_has_rule('default') or _has_rule(rule_method)) and
not pecan.request.enforcer.enforce(rule_method, {}, policy_dict)):
pecan.core.abort(status_code=403, detail='RBAC Authorization Failed')
# TODO(fabiog): these methods are still used because the scoping part is really
# convoluted and difficult to separate out.
def get_limited_to(headers):
"""Return the user and project the request should be limited to.
:param headers: HTTP headers dictionary
:return: A tuple of (user, project), set to None if there's no limit on
one of these.
"""
policy_dict = dict()
policy_dict['roles'] = headers.get('X-Roles', "").split(",")
policy_dict['user_id'] = (headers.get('X-User-Id'))
policy_dict['project_id'] = (headers.get('X-Project-Id'))
# maintain backward compat with Juno and previous by using context_is_admin
# rule if the segregation rule (added in Kilo) is not defined
rule_name = 'segregation' if _has_rule(
'segregation') else 'context_is_admin'
if not pecan.request.enforcer.enforce(rule_name,
{},
policy_dict):
return headers.get('X-User-Id'), headers.get('X-Project-Id')
return None, None
def get_limited_to_project(headers):
"""Return the project the request should be limited to.
:param headers: HTTP headers dictionary
:return: A project, or None if there's no limit on it.
"""
return get_limited_to(headers)[1]
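For reference, a sketch of the policy rules these helpers consult; enforce() prefixes rule names with "telemetry:", and get_limited_to() prefers the segregation rule (Kilo and later) over context_is_admin. The entries below are illustrative, not the shipped defaults:

policy_rules = {
    "context_is_admin": "role:admin",         # pre-Kilo admin detection
    "segregation": "rule:context_is_admin",   # scoping rule since Kilo
    "telemetry:get_samples": "",              # empty rule: allow everyone
}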


@ -1,34 +0,0 @@
#
# Copyright 2012 New Dream Network, LLC (DreamHost)
# Copyright 2015-2016 Red Hat, Inc.
#
# Licensed under the Apache License, Version 2.0 (the "License"); you may
# not use this file except in compliance with the License. You may obtain
# a copy of the License at
#
# http://www.apache.org/licenses/LICENSE-2.0
#
# Unless required by applicable law or agreed to in writing, software
# distributed under the License is distributed on an "AS IS" BASIS, WITHOUT
# WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. See the
# License for the specific language governing permissions and limitations
# under the License.
import logging
from cotyledon import oslo_config_glue
from oslo_log import log
from ceilometer.api import app
from ceilometer import service
LOG = log.getLogger(__name__)
def build_wsgi_app(argv=None):
conf = service.prepare_service(argv=argv)
conf.register_opts(oslo_config_glue.service_opts)
if conf.log_options:
LOG.debug('Full set of CONF:')
conf.log_opt_values(LOG, logging.DEBUG)
return app.load_app(conf)
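For illustration, how a WSGI container would consume this builder; the module path in the gunicorn example is an assumption, since the file name is not visible in this hunk:

# e.g. with gunicorn's factory-call syntax (module path hypothetical):
#   gunicorn -b 0.0.0.0:8777 'ceilometer.api.app_wsgi:build_wsgi_app()'
application = build_wsgi_app(argv=[])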


@ -1,37 +0,0 @@
# Copyright 2016 Hewlett Packard Enterprise Development Company, L.P.
#
# Licensed under the Apache License, Version 2.0 (the "License"); you may
# not use this file except in compliance with the License. You may obtain
# a copy of the License at
#
# http://www.apache.org/licenses/LICENSE-2.0
#
# Unless required by applicable law or agreed to in writing, software
# distributed under the License is distributed on an "AS IS" BASIS, WITHOUT
# WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. See the
# License for the specific language governing permissions and limitations
# under the License.
from oslo_middleware import cors
def set_cors_middleware_defaults():
"""Update default configuration options for oslo.middleware."""
cors.set_defaults(
allow_headers=['X-Auth-Token',
'X-Identity-Status',
'X-Roles',
'X-Service-Catalog',
'X-User-Id',
'X-Tenant-Id',
'X-Openstack-Request-Id'],
expose_headers=['X-Auth-Token',
'X-Subject-Token',
'X-Service-Token',
'X-Openstack-Request-Id'],
allow_methods=['GET',
'PUT',
'POST',
'DELETE',
'PATCH']
)
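A sketch of the other half of the wiring: once these defaults are registered, a deployment enables CORS by wrapping its WSGI app with oslo.middleware; conf is assumed to be the loaded oslo.config instance:

from oslo_middleware import cors as cors_middleware

set_cors_middleware_defaults()
app = cors_middleware.CORS(app, conf)  # allow_*/expose_* fall back to the defaults above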


@ -18,8 +18,6 @@ from keystoneauth1 import loading
from oslo_config import cfg
import ceilometer.agent.manager
import ceilometer.api.app
import ceilometer.api.controllers.v2.root
import ceilometer.compute.discovery
import ceilometer.compute.virt.inspector
import ceilometer.compute.virt.libvirt.utils
@ -75,7 +73,6 @@ def list_opts():
return [
('DEFAULT',
itertools.chain(ceilometer.agent.manager.OPTS,
ceilometer.api.app.OPTS,
ceilometer.compute.virt.inspector.OPTS,
ceilometer.compute.virt.libvirt.utils.OPTS,
ceilometer.dispatcher.OPTS,
@ -84,8 +81,6 @@ def list_opts():
ceilometer.sample.OPTS,
ceilometer.utils.OPTS,
OPTS)),
('api', itertools.chain(ceilometer.api.app.API_OPTS,
ceilometer.api.controllers.v2.root.API_OPTS)),
('compute', ceilometer.compute.discovery.OPTS),
('coordination', [
cfg.StrOpt(


@ -18,10 +18,8 @@ from oslo_config import cfg
from oslo_db import options as db_options
import oslo_i18n
from oslo_log import log
from oslo_policy import opts as policy_opts
from oslo_reports import guru_meditation_report as gmr
from ceilometer.conf import defaults
from ceilometer import keystone_client
from ceilometer import messaging
from ceilometer import opts
@ -47,8 +45,6 @@ def prepare_service(argv=None, config_files=None, conf=None):
['futurist=INFO', 'neutronclient=INFO',
'keystoneclient=INFO'])
log.set_defaults(default_log_levels=log_levels)
defaults.set_cors_middleware_defaults()
policy_opts.set_defaults(conf)
db_options.set_defaults(conf)
conf(argv[1:], project='ceilometer', validate_default_values=True,
@ -61,10 +57,6 @@ def prepare_service(argv=None, config_files=None, conf=None):
utils.setup_root_helper(conf)
sample.setup(conf)
# NOTE(liusheng): guru cannot run with a service under the apache daemon,
# so when ceilometer-api runs with mod_wsgi the argv is [] and we don't
# start guru.
if argv:
gmr.TextGuruMeditation.setup_autorun(version)
gmr.TextGuruMeditation.setup_autorun(version)
messaging.setup()
return conf


@ -37,10 +37,10 @@ class TelemetryBase(plugin_base.NotificationBase):
class TelemetryIpc(TelemetryBase):
"""Handle sample from notification bus
Telemetry samples can be posted via API or polled by Polling agent.
Telemetry samples polled by the polling agent.
"""
event_types = ['telemetry.api', 'telemetry.polling']
event_types = ['telemetry.polling']
def process_notification(self, message):
samples = message['payload']['samples']


@ -23,7 +23,6 @@ from oslo_utils import timeutils
from oslotest import base
import six
from testtools import testcase
import webtest
import yaml
import ceilometer
@ -98,10 +97,6 @@ def _skip_decorator(func):
return func(*args, **kwargs)
except ceilometer.NotImplementedError as e:
raise testcase.TestSkipped(six.text_type(e))
except webtest.app.AppError as e:
if 'not implemented' in six.text_type(e):
raise testcase.TestSkipped(six.text_type(e))
raise
return skip_if_not_implemented


@ -1,177 +0,0 @@
#
# Copyright 2012 New Dream Network, LLC (DreamHost)
#
# Licensed under the Apache License, Version 2.0 (the "License"); you may
# not use this file except in compliance with the License. You may obtain
# a copy of the License at
#
# http://www.apache.org/licenses/LICENSE-2.0
#
# Unless required by applicable law or agreed to in writing, software
# distributed under the License is distributed on an "AS IS" BASIS, WITHOUT
# WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. See the
# License for the specific language governing permissions and limitations
# under the License.
"""Base classes for API tests.
"""
import pecan
import pecan.testing
from ceilometer import service
from ceilometer.tests import db as db_test_base
class FunctionalTest(db_test_base.TestBase):
"""Used for functional tests of Pecan controllers.
Use this when you need to test your actual application and its
integration with the framework.
"""
PATH_PREFIX = ''
def setUp(self):
super(FunctionalTest, self).setUp()
self.CONF = service.prepare_service([], [])
self.setup_messaging(self.CONF)
self.CONF.set_override("policy_file",
self.path_get('etc/ceilometer/policy.json'),
group='oslo_policy')
self.CONF.set_override('gnocchi_is_enabled', False, group='api')
self.CONF.set_override('aodh_is_enabled', False, group='api')
self.CONF.set_override('panko_is_enabled', False, group='api')
self.app = self._make_app()
def _make_app(self, enable_acl=False):
self.config = {
'app': {
'root': 'ceilometer.api.controllers.root.RootController',
'modules': ['ceilometer.api'],
'enable_acl': enable_acl,
},
'wsme': {
'debug': True,
},
}
return pecan.testing.load_test_app(self.config, conf=self.CONF)
def tearDown(self):
super(FunctionalTest, self).tearDown()
pecan.set_config({}, overwrite=True)
def put_json(self, path, params, expect_errors=False, headers=None,
extra_environ=None, status=None):
"""Sends simulated HTTP PUT request to Pecan test app.
:param path: url path of target service
:param params: content for wsgi.input of request
:param expect_errors: boolean value whether an error is expected based
on request
:param headers: A dictionary of headers to send along with the request
:param extra_environ: A dictionary of environ variables to send along
with the request
:param status: Expected status code of response
"""
return self.post_json(path=path, params=params,
expect_errors=expect_errors,
headers=headers, extra_environ=extra_environ,
status=status, method="put")
def post_json(self, path, params, expect_errors=False, headers=None,
method="post", extra_environ=None, status=None):
"""Sends simulated HTTP POST request to Pecan test app.
:param path: url path of target service
:param params: content for wsgi.input of request
:param expect_errors: boolean value whether an error is expected based
on request
:param headers: A dictionary of headers to send along with the request
:param method: HTTP method to use. Prefer the dedicated helper (such as
put_json) over passing this attribute in.
:param extra_environ: A dictionary of environ variables to send along
with the request
:param status: Expected status code of response
"""
full_path = self.PATH_PREFIX + path
response = getattr(self.app, "%s_json" % method)(
str(full_path),
params=params,
headers=headers,
status=status,
extra_environ=extra_environ,
expect_errors=expect_errors
)
return response
def delete(self, path, expect_errors=False, headers=None,
extra_environ=None, status=None):
"""Sends simulated HTTP DELETE request to Pecan test app.
:param path: url path of target service
:param expect_errors: boolean value whether an error is expected based
on request
:param headers: A dictionary of headers to send along with the request
:param extra_environ: A dictionary of environ variables to send along
with the request
:param status: Expected status code of response
"""
full_path = self.PATH_PREFIX + path
response = self.app.delete(str(full_path),
headers=headers,
status=status,
extra_environ=extra_environ,
expect_errors=expect_errors)
return response
def get_json(self, path, expect_errors=False, headers=None,
extra_environ=None, q=None, groupby=None, status=None,
override_params=None, **params):
"""Sends simulated HTTP GET request to Pecan test app.
:param path: url path of target service
:param expect_errors: boolean value whether an error is expected based
on request
:param headers: A dictionary of headers to send along with the request
:param extra_environ: A dictionary of environ variables to send along
with the request
:param q: list of queries consisting of: field, value, op, and type
keys
:param groupby: list of fields to group by
:param status: Expected status code of response
:param override_params: literally encoded query param string
:param params: content for wsgi.input of request
"""
q = q or []
groupby = groupby or []
full_path = self.PATH_PREFIX + path
if override_params:
all_params = override_params
else:
query_params = {'q.field': [],
'q.value': [],
'q.op': [],
'q.type': [],
}
for query in q:
for name in ['field', 'op', 'value', 'type']:
query_params['q.%s' % name].append(query.get(name, ''))
all_params = {}
all_params.update(params)
if q:
all_params.update(query_params)
if groupby:
all_params.update({'groupby': groupby})
response = self.app.get(full_path,
params=all_params,
headers=headers,
extra_environ=extra_environ,
expect_errors=expect_errors,
status=status)
if not expect_errors:
response = response.json
return response
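A hedged sketch of a test case built on this base class, showing the q list that the helper expands into q.field/q.op/q.value parameters (meter and resource names are illustrative):

class MeterListTest(FunctionalTest):
    PATH_PREFIX = '/v2'

    def test_list_meters_filtered(self):
        data = self.get_json('/meters',
                             q=[{'field': 'resource_id',
                                 'op': 'eq',
                                 'value': 'resource-good'}])
        self.assertIsInstance(data, list)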


@ -1,20 +0,0 @@
#
# Copyright 2012 New Dream Network, LLC (DreamHost)
#
# Licensed under the Apache License, Version 2.0 (the "License"); you may
# not use this file except in compliance with the License. You may obtain
# a copy of the License at
#
# http://www.apache.org/licenses/LICENSE-2.0
#
# Unless required by applicable law or agreed to in writing, software
# distributed under the License is distributed on an "AS IS" BASIS, WITHOUT
# WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. See the
# License for the specific language governing permissions and limitations
# under the License.
from ceilometer.tests.functional import api
class FunctionalTest(api.FunctionalTest):
PATH_PREFIX = '/v2'


@ -1,180 +0,0 @@
#
# Copyright 2012 New Dream Network, LLC (DreamHost)
#
# Licensed under the Apache License, Version 2.0 (the "License"); you may
# not use this file except in compliance with the License. You may obtain
# a copy of the License at
#
# http://www.apache.org/licenses/LICENSE-2.0
#
# Unless required by applicable law or agreed to in writing, software
# distributed under the License is distributed on an "AS IS" BASIS, WITHOUT
# WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. See the
# License for the specific language governing permissions and limitations
# under the License.
"""Test ACL."""
import datetime
import uuid
from keystonemiddleware import fixture as ksm_fixture
import webtest
from ceilometer.api import app
from ceilometer.publisher import utils
from ceilometer import sample
from ceilometer.tests.functional.api import v2
VALID_TOKEN = uuid.uuid4().hex
VALID_TOKEN2 = uuid.uuid4().hex
class TestAPIACL(v2.FunctionalTest):
def setUp(self):
super(TestAPIACL, self).setUp()
self.auth_token_fixture = self.useFixture(
ksm_fixture.AuthTokenFixture())
self.auth_token_fixture.add_token_data(
token_id=VALID_TOKEN,
# FIXME(morganfainberg): The project-id should be a proper uuid
project_id='123i2910',
role_list=['admin'],
user_name='user_id2',
user_id='user_id2',
is_v2=True
)
self.auth_token_fixture.add_token_data(
token_id=VALID_TOKEN2,
# FIXME(morganfainberg): The project-id should be a proper uuid
project_id='project-good',
role_list=['Member'],
user_name='user_id1',
user_id='user_id1',
is_v2=True)
for cnt in [
sample.Sample(
'meter.test',
'cumulative',
'',
1,
'user-good',
'project-good',
'resource-good',
timestamp=datetime.datetime(2012, 7, 2, 10, 40),
resource_metadata={'display_name': 'test-server',
'tag': 'self.sample'},
source='test_source'),
sample.Sample(
'meter.mine',
'gauge',
'',
1,
'user-fred',
'project-good',
'resource-56',
timestamp=datetime.datetime(2012, 7, 2, 10, 43),
resource_metadata={'display_name': 'test-server',
'tag': 'self.sample4'},
source='test_source')]:
msg = utils.meter_message_from_counter(
cnt, self.CONF.publisher.telemetry_secret)
self.conn.record_metering_data(msg)
def get_json(self, path, expect_errors=False, headers=None,
q=None, **params):
return super(TestAPIACL, self).get_json(path,
expect_errors=expect_errors,
headers=headers,
q=q or [],
**params)
def _make_app(self):
file_name = self.path_get('etc/ceilometer/api_paste.ini')
self.CONF.set_override("api_paste_config", file_name)
return webtest.TestApp(app.load_app(self.CONF))
def test_non_authenticated(self):
response = self.get_json('/meters', expect_errors=True)
self.assertEqual(401, response.status_int)
def test_authenticated_wrong_role(self):
response = self.get_json('/meters',
expect_errors=True,
headers={
"X-Roles": "Member",
"X-Tenant-Name": "admin",
"X-Project-Id":
"bc23a9d531064583ace8f67dad60f6bb",
})
self.assertEqual(401, response.status_int)
# FIXME(dhellmann): This test is not properly looking at the tenant
# info. We do not correctly detect the improper tenant. That's
# really something the keystone middleware would have to do using
# the incoming token, which we aren't providing.
#
# def test_authenticated_wrong_tenant(self):
# response = self.get_json('/meters',
# expect_errors=True,
# headers={
# "X-Roles": "admin",
# "X-Tenant-Name": "achoo",
# "X-Project-Id": "bc23a9d531064583ace8f67dad60f6bb",
# })
# self.assertEqual(401, response.status_int)
def test_authenticated(self):
data = self.get_json('/meters',
headers={"X-Auth-Token": VALID_TOKEN,
"X-Roles": "admin",
"X-Project-Id":
"bc23a9d531064583ace8f67dad60f6bb",
})
ids = set(r['resource_id'] for r in data)
self.assertEqual(set(['resource-good', 'resource-56']), ids)
def test_with_non_admin_missing_project_query(self):
data = self.get_json('/meters',
headers={"X-Roles": "Member",
"X-Auth-Token": VALID_TOKEN2,
"X-Project-Id": "project-good"})
ids = set(r['resource_id'] for r in data)
self.assertEqual(set(['resource-good', 'resource-56']), ids)
def test_with_non_admin(self):
data = self.get_json('/meters',
headers={"X-Roles": "Member",
"X-Auth-Token": VALID_TOKEN2,
"X-Project-Id": "project-good"},
q=[{'field': 'project_id',
'value': 'project-good',
}])
ids = set(r['resource_id'] for r in data)
self.assertEqual(set(['resource-good', 'resource-56']), ids)
def test_non_admin_wrong_project(self):
data = self.get_json('/meters',
expect_errors=True,
headers={"X-Roles": "Member",
"X-Auth-Token": VALID_TOKEN2,
"X-Project-Id": "project-good"},
q=[{'field': 'project_id',
'value': 'project-wrong',
}])
self.assertEqual(401, data.status_int)
def test_non_admin_two_projects(self):
data = self.get_json('/meters',
expect_errors=True,
headers={"X-Roles": "Member",
"X-Auth-Token": VALID_TOKEN2,
"X-Project-Id": "project-good"},
q=[{'field': 'project_id',
'value': 'project-good',
},
{'field': 'project_id',
'value': 'project-naughty',
}])
self.assertEqual(401, data.status_int)


@ -1,183 +0,0 @@
#
# Licensed under the Apache License, Version 2.0 (the "License"); you may
# not use this file except in compliance with the License. You may obtain
# a copy of the License at
#
# http://www.apache.org/licenses/LICENSE-2.0
#
# Unless required by applicable law or agreed to in writing, software
# distributed under the License is distributed on an "AS IS" BASIS, WITHOUT
# WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. See the
# License for the specific language governing permissions and limitations
# under the License.
import fixtures
import mock
from oslo_utils import fileutils
import six
from ceilometer.tests.functional.api import v2
class TestAPIUpgradePath(v2.FunctionalTest):
def _make_app(self):
content = ('{"default": ""}')
if six.PY3:
content = content.encode('utf-8')
self.tempfile = fileutils.write_to_tempfile(content=content,
prefix='policy',
suffix='.json')
self.CONF.set_override("policy_file", self.tempfile,
group='oslo_policy')
return super(TestAPIUpgradePath, self)._make_app()
def _setup_osloconfig_options(self):
self.CONF.set_override('gnocchi_is_enabled', True, group='api')
self.CONF.set_override('aodh_is_enabled', True, group='api')
self.CONF.set_override('aodh_url', 'http://alarm-endpoint:8008/',
group='api')
self.CONF.set_override('panko_is_enabled', True, group='api')
self.CONF.set_override('panko_url', 'http://event-endpoint:8009/',
group='api')
def _setup_keystone_mock(self):
self.CONF.set_override('gnocchi_is_enabled', None, group='api')
self.CONF.set_override('aodh_is_enabled', None, group='api')
self.CONF.set_override('aodh_url', None, group='api')
self.CONF.set_override('panko_is_enabled', None, group='api')
self.CONF.set_override('panko_url', None, group='api')
self.CONF.set_override('meter_dispatchers', ['database'])
self.ks = mock.Mock()
self.catalog = (self.ks.session.auth.get_access.
return_value.service_catalog)
self.catalog.url_for.side_effect = self._url_for
self.useFixture(fixtures.MockPatch(
'ceilometer.keystone_client.get_client', return_value=self.ks))
@staticmethod
def _url_for(service_type=None):
if service_type == 'metric':
return 'http://gnocchi/'
elif service_type == 'alarming':
return 'http://alarm-endpoint:8008/'
elif service_type == 'event':
return 'http://event-endpoint:8009/'
def _do_test_gnocchi_enabled_without_database_backend(self):
for endpoint in ['meters', 'samples', 'resources']:
response = self.app.get(self.PATH_PREFIX + '/' + endpoint,
status=410)
self.assertIn(b'Gnocchi API', response.body)
response = self.post_json('/query/samples',
params={
"filter": '{"=": {"type": "creation"}}',
"orderby": '[{"timestamp": "DESC"}]',
"limit": 3
}, status=410)
self.assertIn(b'Gnocchi API', response.body)
sample_params = {
"counter_type": "gauge",
"counter_name": "fake_counter",
"resource_id": "fake_resource_id",
"counter_unit": "fake_unit",
"counter_volume": "1"
}
self.post_json('/meters/fake_counter',
params=[sample_params],
status=201)
response = self.post_json('/meters/fake_counter?direct=1',
params=[sample_params],
status=400)
self.assertIn(b'direct option cannot be true when Gnocchi is enabled',
response.body)
def _do_test_alarm_redirect(self):
response = self.app.get(self.PATH_PREFIX + '/alarms',
expect_errors=True)
self.assertEqual(308, response.status_code)
self.assertEqual("http://alarm-endpoint:8008/v2/alarms",
response.headers['Location'])
response = self.app.get(self.PATH_PREFIX + '/alarms/uuid',
expect_errors=True)
self.assertEqual(308, response.status_code)
self.assertEqual("http://alarm-endpoint:8008/v2/alarms/uuid",
response.headers['Location'])
response = self.app.delete(self.PATH_PREFIX + '/alarms/uuid',
expect_errors=True)
self.assertEqual(308, response.status_code)
self.assertEqual("http://alarm-endpoint:8008/v2/alarms/uuid",
response.headers['Location'])
response = self.post_json('/query/alarms',
params={
"filter": '{"=": {"type": "creation"}}',
"orderby": '[{"timestamp": "DESC"}]',
"limit": 3
}, status=308)
self.assertEqual("http://alarm-endpoint:8008/v2/query/alarms",
response.headers['Location'])
def _do_test_event_redirect(self):
response = self.app.get(self.PATH_PREFIX + '/events',
expect_errors=True)
self.assertEqual(308, response.status_code)
self.assertEqual("http://event-endpoint:8009/v2/events",
response.headers['Location'])
response = self.app.get(self.PATH_PREFIX + '/events/uuid',
expect_errors=True)
self.assertEqual(308, response.status_code)
self.assertEqual("http://event-endpoint:8009/v2/events/uuid",
response.headers['Location'])
response = self.app.delete(self.PATH_PREFIX + '/events/uuid',
expect_errors=True)
self.assertEqual(308, response.status_code)
self.assertEqual("http://event-endpoint:8009/v2/events/uuid",
response.headers['Location'])
response = self.app.get(self.PATH_PREFIX + '/event_types',
expect_errors=True)
self.assertEqual(308, response.status_code)
self.assertEqual("http://event-endpoint:8009/v2/event_types",
response.headers['Location'])
def test_gnocchi_enabled_without_database_backend_keystone(self):
self._setup_keystone_mock()
self._do_test_gnocchi_enabled_without_database_backend()
self.catalog.url_for.assert_has_calls(
[mock.call(service_type="metric")])
def test_gnocchi_enabled_without_database_backend_configoptions(self):
self._setup_osloconfig_options()
self._do_test_gnocchi_enabled_without_database_backend()
def test_alarm_redirect_keystone(self):
self._setup_keystone_mock()
self._do_test_alarm_redirect()
self.catalog.url_for.assert_has_calls(
[mock.call(service_type="alarming")])
def test_event_redirect_keystone(self):
self._setup_keystone_mock()
self._do_test_event_redirect()
self.catalog.url_for.assert_has_calls(
[mock.call(service_type="event")])
def test_alarm_redirect_configoptions(self):
self._setup_osloconfig_options()
self._do_test_alarm_redirect()
def test_event_redirect_configoptions(self):
self._setup_osloconfig_options()
self._do_test_event_redirect()
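The oslo.config options exercised by these tests map onto a deployment's ceilometer.conf roughly as follows (endpoint values copied from the test fixtures, illustrative only):

[api]
gnocchi_is_enabled = True
aodh_is_enabled = True
aodh_url = http://alarm-endpoint:8008/
panko_is_enabled = True
panko_url = http://event-endpoint:8009/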


@ -1,98 +0,0 @@
#
# Copyright 2013 IBM Corp.
# Copyright 2013 Julien Danjou
#
# Licensed under the Apache License, Version 2.0 (the "License"); you may
# not use this file except in compliance with the License. You may obtain
# a copy of the License at
#
# http://www.apache.org/licenses/LICENSE-2.0
#
# Unless required by applicable law or agreed to in writing, software
# distributed under the License is distributed on an "AS IS" BASIS, WITHOUT
# WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. See the
# License for the specific language governing permissions and limitations
# under the License.
"""Test basic ceilometer-api app
"""
from ceilometer.tests.functional.api import v2
class TestPecanApp(v2.FunctionalTest):
def test_pecan_extension_guessing_unset(self):
# check Pecan does not assume .jpg is an extension
response = self.app.get(self.PATH_PREFIX + '/meters/meter.jpg')
self.assertEqual('application/json', response.content_type)
class TestApiMiddleware(v2.FunctionalTest):
no_lang_translated_error = 'No lang translated error'
en_US_translated_error = 'en-US translated error'
def _fake_translate(self, message, user_locale):
if user_locale is None:
return self.no_lang_translated_error
else:
return self.en_US_translated_error
def test_json_parsable_error_middleware_404(self):
response = self.get_json('/invalid_path',
expect_errors=True,
headers={"Accept":
"application/json"}
)
self.assertEqual(404, response.status_int)
self.assertEqual("application/json", response.content_type)
self.assertTrue(response.json['error_message'])
response = self.get_json('/invalid_path',
expect_errors=True,
headers={"Accept":
"application/json,application/xml"}
)
self.assertEqual(404, response.status_int)
self.assertEqual("application/json", response.content_type)
self.assertTrue(response.json['error_message'])
response = self.get_json('/invalid_path',
expect_errors=True,
headers={"Accept":
"application/xml;q=0.8, \
application/json"}
)
self.assertEqual(404, response.status_int)
self.assertEqual("application/json", response.content_type)
self.assertTrue(response.json['error_message'])
response = self.get_json('/invalid_path',
expect_errors=True
)
self.assertEqual(404, response.status_int)
self.assertEqual("application/json", response.content_type)
self.assertTrue(response.json['error_message'])
response = self.get_json('/invalid_path',
expect_errors=True,
headers={"Accept":
"text/html,*/*"}
)
self.assertEqual(404, response.status_int)
self.assertEqual("application/json", response.content_type)
self.assertTrue(response.json['error_message'])
def test_xml_parsable_error_middleware_404(self):
response = self.get_json('/invalid_path',
expect_errors=True,
headers={"Accept":
"application/xml,*/*"}
)
self.assertEqual(404, response.status_int)
self.assertEqual("application/xml", response.content_type)
self.assertEqual('error_message', response.xml.tag)
response = self.get_json('/invalid_path',
expect_errors=True,
headers={"Accept":
"application/json;q=0.8 \
,application/xml"}
)
self.assertEqual(404, response.status_int)
self.assertEqual("application/xml", response.content_type)
self.assertEqual('error_message', response.xml.tag)


@ -1,30 +0,0 @@
#
# Copyright Ericsson AB 2014. All rights reserved
#
# Licensed under the Apache License, Version 2.0 (the "License"); you may
# not use this file except in compliance with the License. You may obtain
# a copy of the License at
#
# http://www.apache.org/licenses/LICENSE-2.0
#
# Unless required by applicable law or agreed to in writing, software
# distributed under the License is distributed on an "AS IS" BASIS, WITHOUT
# WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. See the
# License for the specific language governing permissions and limitations
# under the License.
from ceilometer.tests.functional.api import v2 as tests_api
class TestCapabilitiesController(tests_api.FunctionalTest):
def setUp(self):
super(TestCapabilitiesController, self).setUp()
self.url = '/capabilities'
def test_capabilities(self):
data = self.get_json(self.url)
# check that capabilities data contains both 'api' and 'storage' fields
self.assertIsNotNone(data)
self.assertNotEqual({}, data)
self.assertIn('api', data)
self.assertIn('storage', data)


@ -1,312 +0,0 @@
#
# Copyright Ericsson AB 2013. All rights reserved
#
# Licensed under the Apache License, Version 2.0 (the "License"); you may
# not use this file except in compliance with the License. You may obtain
# a copy of the License at
#
# http://www.apache.org/licenses/LICENSE-2.0
#
# Unless required by applicable law or agreed to in writing, software
# distributed under the License is distributed on an "AS IS" BASIS, WITHOUT
# WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. See the
# License for the specific language governing permissions and limitations
# under the License.
"""Tests complex queries for samples
"""
import datetime
from oslo_utils import timeutils
from ceilometer.publisher import utils
from ceilometer import sample
from ceilometer.tests.functional.api import v2 as tests_api
admin_header = {"X-Roles": "admin",
"X-Project-Id":
"project-id1"}
non_admin_header = {"X-Roles": "Member",
"X-Project-Id":
"project-id1"}
class TestQueryMetersController(tests_api.FunctionalTest):
def setUp(self):
super(TestQueryMetersController, self).setUp()
self.url = '/query/samples'
for cnt in [
sample.Sample('meter.test',
'cumulative',
'',
1,
'user-id1',
'project-id1',
'resource-id1',
timestamp=datetime.datetime(2012, 7, 2, 10, 40),
resource_metadata={'display_name': 'test-server1',
'tag': 'self.sample',
'size': 456,
'util': 0.25,
'is_public': True},
source='test_source'),
sample.Sample('meter.test',
'cumulative',
'',
2,
'user-id2',
'project-id2',
'resource-id2',
timestamp=datetime.datetime(2012, 7, 2, 10, 41),
resource_metadata={'display_name': 'test-server2',
'tag': 'self.sample',
'size': 123,
'util': 0.75,
'is_public': True},
source='test_source'),
sample.Sample('meter.test',
'cumulative',
'',
3,
'user-id3',
'project-id3',
'resource-id3',
timestamp=datetime.datetime(2012, 7, 2, 10, 42),
resource_metadata={'display_name': 'test-server3',
'tag': 'self.sample',
'size': 789,
'util': 0.95,
'is_public': True},
source='test_source')]:
msg = utils.meter_message_from_counter(
cnt, self.CONF.publisher.telemetry_secret)
self.conn.record_metering_data(msg)
def test_query_fields_are_optional(self):
data = self.post_json(self.url, params={})
self.assertEqual(3, len(data.json))
def test_query_with_isotime(self):
date_time = datetime.datetime(2012, 7, 2, 10, 41)
isotime = date_time.isoformat()
data = self.post_json(self.url,
params={"filter":
'{">=": {"timestamp": "'
+ isotime + '"}}'})
self.assertEqual(2, len(data.json))
for sample_item in data.json:
result_time = timeutils.parse_isotime(sample_item['timestamp'])
result_time = result_time.replace(tzinfo=None)
self.assertGreaterEqual(result_time, date_time)
def test_non_admin_tenant_sees_only_its_own_project(self):
data = self.post_json(self.url,
params={},
headers=non_admin_header)
for sample_item in data.json:
self.assertEqual("project-id1", sample_item['project_id'])
def test_non_admin_tenant_cannot_query_others_project(self):
data = self.post_json(self.url,
params={"filter":
'{"=": {"project_id": "project-id2"}}'},
expect_errors=True,
headers=non_admin_header)
self.assertEqual(401, data.status_int)
self.assertIn(b"Not Authorized to access project project-id2",
data.body)
def test_non_admin_tenant_can_explicitly_filter_for_own_project(self):
data = self.post_json(self.url,
params={"filter":
'{"=": {"project_id": "project-id1"}}'},
headers=non_admin_header)
for sample_item in data.json:
self.assertEqual("project-id1", sample_item['project_id'])
def test_admin_tenant_sees_every_project(self):
data = self.post_json(self.url,
params={},
headers=admin_header)
self.assertEqual(3, len(data.json))
for sample_item in data.json:
self.assertIn(sample_item['project_id'],
(["project-id1", "project-id2", "project-id3"]))
def test_admin_tenant_sees_every_project_with_complex_filter(self):
filter = ('{"OR": ' +
'[{"=": {"project_id": "project-id1"}}, ' +
'{"=": {"project_id": "project-id2"}}]}')
data = self.post_json(self.url,
params={"filter": filter},
headers=admin_header)
self.assertEqual(2, len(data.json))
for sample_item in data.json:
self.assertIn(sample_item['project_id'],
(["project-id1", "project-id2"]))
def test_admin_tenant_sees_every_project_with_in_filter(self):
filter = ('{"In": ' +
'{"project_id": ["project-id1", "project-id2"]}}')
data = self.post_json(self.url,
params={"filter": filter},
headers=admin_header)
self.assertEqual(2, len(data.json))
for sample_item in data.json:
self.assertIn(sample_item['project_id'],
(["project-id1", "project-id2"]))
def test_admin_tenant_can_query_any_project(self):
data = self.post_json(self.url,
params={"filter":
'{"=": {"project_id": "project-id2"}}'},
headers=admin_header)
self.assertEqual(1, len(data.json))
for sample_item in data.json:
self.assertIn(sample_item['project_id'], set(["project-id2"]))
def test_query_with_orderby(self):
data = self.post_json(self.url,
params={"orderby": '[{"project_id": "DESC"}]'})
self.assertEqual(3, len(data.json))
self.assertEqual(["project-id3", "project-id2", "project-id1"],
[s["project_id"] for s in data.json])
def test_query_with_field_name_project(self):
data = self.post_json(self.url,
params={"filter":
'{"=": {"project": "project-id2"}}'})
self.assertEqual(1, len(data.json))
for sample_item in data.json:
self.assertIn(sample_item['project_id'], set(["project-id2"]))
def test_query_with_field_name_resource(self):
data = self.post_json(self.url,
params={"filter":
'{"=": {"resource": "resource-id2"}}'})
self.assertEqual(1, len(data.json))
for sample_item in data.json:
self.assertIn(sample_item['resource_id'], set(["resource-id2"]))
def test_query_with_wrong_field_name(self):
data = self.post_json(self.url,
params={"filter":
'{"=": {"unknown": "resource-id2"}}'},
expect_errors=True)
self.assertEqual(400, data.status_int)
self.assertIn(b"is not valid under any of the given schemas",
data.body)
def test_query_with_wrong_json(self):
data = self.post_json(self.url,
params={"filter":
'{"=": "resource": "resource-id2"}}'},
expect_errors=True)
self.assertEqual(400, data.status_int)
self.assertIn(b"Filter expression not valid", data.body)
def test_query_with_field_name_user(self):
data = self.post_json(self.url,
params={"filter":
'{"=": {"user": "user-id2"}}'})
self.assertEqual(1, len(data.json))
for sample_item in data.json:
self.assertIn(sample_item['user_id'], set(["user-id2"]))
def test_query_with_field_name_meter(self):
data = self.post_json(self.url,
params={"filter":
'{"=": {"meter": "meter.test"}}'})
self.assertEqual(3, len(data.json))
for sample_item in data.json:
self.assertIn(sample_item['meter'], set(["meter.test"]))
def test_query_with_lower_and_upper_case_orderby(self):
data = self.post_json(self.url,
params={"orderby": '[{"project_id": "DeSc"}]'})
self.assertEqual(3, len(data.json))
self.assertEqual(["project-id3", "project-id2", "project-id1"],
[s["project_id"] for s in data.json])
def test_query_with_user_field_name_orderby(self):
data = self.post_json(self.url,
params={"orderby": '[{"user": "aSc"}]'})
self.assertEqual(3, len(data.json))
self.assertEqual(["user-id1", "user-id2", "user-id3"],
[s["user_id"] for s in data.json])
def test_query_with_volume_field_name_orderby(self):
data = self.post_json(self.url,
params={"orderby": '[{"volume": "deSc"}]'})
self.assertEqual(3, len(data.json))
self.assertEqual([3, 2, 1],
[s["volume"] for s in data.json])
def test_query_with_missing_order_in_orderby(self):
data = self.post_json(self.url,
params={"orderby": '[{"project_id": ""}]'},
expect_errors=True)
self.assertEqual(400, data.status_int)
self.assertIn(b"does not match '(?i)^asc$|^desc$'", data.body)
def test_query_with_wrong_json_in_orderby(self):
data = self.post_json(self.url,
params={"orderby": '{"project_id": "desc"}]'},
expect_errors=True)
self.assertEqual(400, data.status_int)
self.assertIn(b"Order-by expression not valid: Extra data", data.body)
def test_filter_with_metadata(self):
data = self.post_json(self.url,
params={"filter":
'{">=": {"metadata.util": 0.5}}'})
self.assertEqual(2, len(data.json))
for sample_item in data.json:
self.assertGreaterEqual(float(sample_item["metadata"]["util"]),
0.5)
def test_filter_with_negation(self):
filter_expr = '{"not": {">=": {"metadata.util": 0.5}}}'
data = self.post_json(self.url,
params={"filter": filter_expr})
self.assertEqual(1, len(data.json))
for sample_item in data.json:
self.assertLess(float(sample_item["metadata"]["util"]), 0.5)
def test_limit_must_be_positive(self):
data = self.post_json(self.url,
params={"limit": 0},
expect_errors=True)
self.assertEqual(400, data.status_int)
self.assertIn(b"Limit must be positive", data.body)
def test_default_limit(self):
self.CONF.set_override('default_api_return_limit', 1, group='api')
data = self.post_json(self.url, params={})
self.assertEqual(1, len(data.json))
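Pulling the pieces together, a hedged example of a single POST body for /v2/query/samples that combines the operators exercised above (values illustrative):

body = {
    "filter": '{"OR": [{"=": {"project": "project-id1"}},'
              ' {">=": {"metadata.util": 0.5}}]}',
    "orderby": '[{"volume": "DESC"}]',
    "limit": 3,
}
data = self.post_json('/query/samples', params=body)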


@ -1,193 +0,0 @@
#
# Copyright 2012 New Dream Network, LLC (DreamHost)
#
# Licensed under the Apache License, Version 2.0 (the "License"); you may
# not use this file except in compliance with the License. You may obtain
# a copy of the License at
#
# http://www.apache.org/licenses/LICENSE-2.0
#
# Unless required by applicable law or agreed to in writing, software
# distributed under the License is distributed on an "AS IS" BASIS, WITHOUT
# WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. See the
# License for the specific language governing permissions and limitations
# under the License.
"""Test listing raw events.
"""
import datetime
import mock
from oslo_utils import timeutils
from ceilometer.storage import models
from ceilometer.tests.functional.api import v2
class TestComputeDurationByResource(v2.FunctionalTest):
def setUp(self):
super(TestComputeDurationByResource, self).setUp()
# Create events relative to the range and pretend
# that the intervening events exist.
self.early1 = datetime.datetime(2012, 8, 27, 7, 0)
self.early2 = datetime.datetime(2012, 8, 27, 17, 0)
self.start = datetime.datetime(2012, 8, 28, 0, 0)
self.middle1 = datetime.datetime(2012, 8, 28, 8, 0)
self.middle2 = datetime.datetime(2012, 8, 28, 18, 0)
self.end = datetime.datetime(2012, 8, 28, 23, 59)
self.late1 = datetime.datetime(2012, 8, 29, 9, 0)
self.late2 = datetime.datetime(2012, 8, 29, 19, 0)
def _patch_get_interval(self, start, end):
def get_interval(sample_filter, period, groupby, aggregate):
self.assertIsNotNone(sample_filter.start_timestamp)
self.assertIsNotNone(sample_filter.end_timestamp)
if (sample_filter.start_timestamp > end or
sample_filter.end_timestamp < start):
return []
duration_start = max(sample_filter.start_timestamp, start)
duration_end = min(sample_filter.end_timestamp, end)
duration = timeutils.delta_seconds(duration_start, duration_end)
return [
models.Statistics(
unit='',
min=0,
max=0,
avg=0,
sum=0,
count=0,
period=None,
period_start=None,
period_end=None,
duration=duration,
duration_start=duration_start,
duration_end=duration_end,
groupby=None,
)
]
return mock.patch.object(type(self.conn), 'get_meter_statistics',
side_effect=get_interval)
def _invoke_api(self):
return self.get_json('/meters/instance/statistics',
q=[{'field': 'timestamp',
'op': 'ge',
'value': self.start.isoformat()},
{'field': 'timestamp',
'op': 'le',
'value': self.end.isoformat()},
{'field': 'search_offset',
'value': 10}])
def test_before_range(self):
with self._patch_get_interval(self.early1, self.early2):
data = self._invoke_api()
self.assertEqual([], data)
def _assert_times_match(self, actual, expected):
if actual:
actual = timeutils.parse_isotime(actual)
actual = actual.replace(tzinfo=None)
self.assertEqual(expected, actual)
def test_overlap_range_start(self):
with self._patch_get_interval(self.early1, self.middle1):
data = self._invoke_api()
self._assert_times_match(data[0]['duration_start'], self.start)
self._assert_times_match(data[0]['duration_end'], self.middle1)
self.assertEqual(8 * 60 * 60, data[0]['duration'])
def test_within_range(self):
with self._patch_get_interval(self.middle1, self.middle2):
data = self._invoke_api()
self._assert_times_match(data[0]['duration_start'], self.middle1)
self._assert_times_match(data[0]['duration_end'], self.middle2)
self.assertEqual(10 * 60 * 60, data[0]['duration'])
def test_within_range_zero_duration(self):
with self._patch_get_interval(self.middle1, self.middle1):
data = self._invoke_api()
self._assert_times_match(data[0]['duration_start'], self.middle1)
self._assert_times_match(data[0]['duration_end'], self.middle1)
self.assertEqual(0, data[0]['duration'])
def test_overlap_range_end(self):
with self._patch_get_interval(self.middle2, self.late1):
data = self._invoke_api()
self._assert_times_match(data[0]['duration_start'], self.middle2)
self._assert_times_match(data[0]['duration_end'], self.end)
self.assertEqual(((6 * 60) - 1) * 60, data[0]['duration'])
def test_after_range(self):
with self._patch_get_interval(self.late1, self.late2):
data = self._invoke_api()
self.assertEqual([], data)
def test_without_end_timestamp(self):
statistics = [
models.Statistics(
unit=None,
count=0,
min=None,
max=None,
avg=None,
duration=None,
duration_start=self.late1,
duration_end=self.late2,
sum=0,
period=None,
period_start=None,
period_end=None,
groupby=None,
)
]
with mock.patch.object(type(self.conn), 'get_meter_statistics',
return_value=statistics):
data = self.get_json('/meters/instance/statistics',
q=[{'field': 'timestamp',
'op': 'ge',
'value': self.late1.isoformat()},
{'field': 'resource_id',
'value': 'resource-id'},
{'field': 'search_offset',
'value': 10}])
self._assert_times_match(data[0]['duration_start'], self.late1)
self._assert_times_match(data[0]['duration_end'], self.late2)
def test_without_start_timestamp(self):
statistics = [
models.Statistics(
unit=None,
count=0,
min=None,
max=None,
avg=None,
duration=None,
duration_start=self.early1,
duration_end=self.early2,
sum=0,
period=None,
period_start=None,
period_end=None,
groupby=None,
)
]
with mock.patch.object(type(self.conn), 'get_meter_statistics',
return_value=statistics):
data = self.get_json('/meters/instance/statistics',
q=[{'field': 'timestamp',
'op': 'le',
'value': self.early2.isoformat()},
{'field': 'resource_id',
'value': 'resource-id'},
{'field': 'search_offset',
'value': 10}])
self._assert_times_match(data[0]['duration_start'], self.early1)
self._assert_times_match(data[0]['duration_end'], self.early2)
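
The clipping behaviour mocked by _patch_get_interval above reduces to a few lines of arithmetic. A standalone sketch (the function name is local to this example):

import datetime
from oslo_utils import timeutils

def clip_duration(query_start, query_end, first_sample, last_sample):
    # No overlap between the queried window and the samples: no statistics.
    if query_start > last_sample or query_end < first_sample:
        return None
    # Otherwise the reported duration is the intersection of the two ranges.
    duration_start = max(query_start, first_sample)
    duration_end = min(query_end, last_sample)
    return (duration_start, duration_end,
            timeutils.delta_seconds(duration_start, duration_end))

# The test_overlap_range_end case above: samples outlive the queried window,
# so the end is clipped to 23:59 and the duration is ((6 * 60) - 1) * 60 s.
print(clip_duration(datetime.datetime(2012, 8, 28, 0, 0),
                    datetime.datetime(2012, 8, 28, 23, 59),
                    datetime.datetime(2012, 8, 28, 18, 0),
                    datetime.datetime(2012, 8, 29, 9, 0)))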


@ -1,797 +0,0 @@
#
# Copyright 2012 Red Hat, Inc.
# Copyright 2013 IBM Corp.
#
# Licensed under the Apache License, Version 2.0 (the "License"); you may
# not use this file except in compliance with the License. You may obtain
# a copy of the License at
#
# http://www.apache.org/licenses/LICENSE-2.0
#
# Unless required by applicable law or agreed to in writing, software
# distributed under the License is distributed on an "AS IS" BASIS, WITHOUT
# WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. See the
# License for the specific language governing permissions and limitations
# under the License.
"""Test listing meters.
"""
import base64
import datetime
from oslo_serialization import jsonutils
import six
import webtest.app
from ceilometer.publisher import utils
from ceilometer import sample
from ceilometer.tests.functional.api import v2
class TestListEmptyMeters(v2.FunctionalTest):
def test_empty(self):
data = self.get_json('/meters')
self.assertEqual([], data)
class TestValidateUserInput(v2.FunctionalTest):
def test_list_meters_query_float_metadata(self):
self.assertRaises(webtest.app.AppError, self.get_json,
'/meters/meter.test',
q=[{'field': 'metadata.util',
'op': 'eq',
'value': '0.7.5',
'type': 'float'}])
self.assertRaises(webtest.app.AppError, self.get_json,
'/meters/meter.test',
q=[{'field': 'metadata.util',
'op': 'eq',
'value': 'abacaba',
'type': 'boolean'}])
self.assertRaises(webtest.app.AppError, self.get_json,
'/meters/meter.test',
q=[{'field': 'metadata.util',
'op': 'eq',
'value': '45.765',
'type': 'integer'}])
class TestListMetersRestriction(v2.FunctionalTest):
def setUp(self):
super(TestListMetersRestriction, self).setUp()
self.CONF.set_override('default_api_return_limit', 3, group='api')
for x in range(5):
for i in range(5):
s = sample.Sample(
'volume.size%s' % x,
'gauge',
'GiB',
5 + i,
'user-id',
'project1',
'resource-id',
timestamp=(datetime.datetime(2012, 9, 25, 10, 30) +
datetime.timedelta(seconds=i)),
resource_metadata={'display_name': 'test-volume',
'tag': 'self.sample',
},
source='source1',
)
msg = utils.meter_message_from_counter(
s, self.CONF.publisher.telemetry_secret,
)
self.conn.record_metering_data(msg)
def test_meter_limit(self):
data = self.get_json('/meters?limit=1')
self.assertEqual(1, len(data))
def test_meter_limit_negative(self):
self.assertRaises(webtest.app.AppError,
self.get_json,
'/meters?limit=-2')
def test_meter_limit_bigger(self):
data = self.get_json('/meters?limit=42')
self.assertEqual(5, len(data))
def test_meter_default_limit(self):
data = self.get_json('/meters')
self.assertEqual(3, len(data))
def test_old_sample_limit(self):
data = self.get_json('/meters/volume.size0?limit=1')
self.assertEqual(1, len(data))
def test_old_sample_limit_negative(self):
self.assertRaises(webtest.app.AppError,
self.get_json,
'/meters/volume.size0?limit=-2')
def test_old_sample_limit_bigger(self):
data = self.get_json('/meters/volume.size0?limit=42')
self.assertEqual(5, len(data))
def test_old_sample_default_limit(self):
data = self.get_json('/meters/volume.size0')
self.assertEqual(3, len(data))
def test_sample_limit(self):
data = self.get_json('/samples?limit=1')
self.assertEqual(1, len(data))
def test_sample_limit_negative(self):
self.assertRaises(webtest.app.AppError,
self.get_json,
'/samples?limit=-2')
def test_sample_limit_bigger(self):
data = self.get_json('/samples?limit=42')
self.assertEqual(25, len(data))
def test_sample_default_limit(self):
data = self.get_json('/samples')
self.assertEqual(3, len(data))
class TestListMeters(v2.FunctionalTest):
def setUp(self):
super(TestListMeters, self).setUp()
self.messages = []
for cnt in [
sample.Sample(
'meter.test',
'cumulative',
'',
1,
'user-id',
'project-id',
'resource-id',
timestamp=datetime.datetime(2012, 7, 2, 10, 40),
resource_metadata={'display_name': 'test-server',
'tag': 'self.sample',
'size': 123,
'util': 0.75,
'is_public': True},
source='test_source'),
sample.Sample(
'meter.test',
'cumulative',
'',
3,
'user-id',
'project-id',
'resource-id',
timestamp=datetime.datetime(2012, 7, 2, 11, 40),
resource_metadata={'display_name': 'test-server',
'tag': 'self.sample1',
'size': 0,
'util': 0.47,
'is_public': False},
source='test_source'),
sample.Sample(
'meter.mine',
'gauge',
'',
1,
'user-id',
'project-id',
'resource-id2',
timestamp=datetime.datetime(2012, 7, 2, 10, 41),
resource_metadata={'display_name': 'test-server',
'tag': 'self.sample2',
'size': 456,
'util': 0.64,
'is_public': False},
source='test_source'),
sample.Sample(
'meter.test',
'cumulative',
'',
1,
'user-id2',
'project-id2',
'resource-id3',
timestamp=datetime.datetime(2012, 7, 2, 10, 42),
resource_metadata={'display_name': 'test-server',
'tag': 'self.sample3',
'size': 0,
'util': 0.75,
'is_public': False},
source='test_source'),
sample.Sample(
'meter.test.new',
'cumulative',
'',
1,
'user-id',
'project-id',
'resource-id',
timestamp=datetime.datetime(2012, 7, 2, 10, 40),
resource_metadata={'display_name': 'test-server',
'tag': 'self.sample3',
'size': 0,
'util': 0.75,
'is_public': False},
source='test_source'),
sample.Sample(
'meter.mine',
'gauge',
'',
1,
'user-id4',
'project-id2',
'resource-id4',
timestamp=datetime.datetime(2012, 7, 2, 10, 43),
resource_metadata={'display_name': 'test-server',
'tag': 'self.sample4',
'properties': {
'prop_1': 'prop_value',
'prop_2': {'sub_prop_1':
'sub_prop_value'},
'prop.3': {'$sub_prop.2':
'sub_prop_value2'}
},
'size': 0,
'util': 0.58,
'is_public': True},
source='test_source1'),
sample.Sample(
u'meter.accent\xe9\u0437',
'gauge',
'',
1,
'user-id4',
'project-id2',
'resource-id4',
timestamp=datetime.datetime(2014, 7, 2, 10, 43),
resource_metadata={},
source='test_source1')]:
msg = utils.meter_message_from_counter(
cnt, self.CONF.publisher.telemetry_secret)
self.messages.append(msg)
self.conn.record_metering_data(msg)
def test_list_meters(self):
data = self.get_json('/meters')
self.assertEqual(6, len(data))
self.assertEqual(set(['resource-id',
'resource-id2',
'resource-id3',
'resource-id4']),
set(r['resource_id'] for r in data))
self.assertEqual(set(['meter.test', 'meter.mine', 'meter.test.new',
u'meter.accent\xe9\u0437']),
set(r['name'] for r in data))
self.assertEqual(set(['test_source', 'test_source1']),
set(r['source'] for r in data))
def test_list_unique_meters(self):
data = self.get_json('/meters?unique=True')
self.assertEqual(4, len(data))
self.assertEqual(set(['meter.test', 'meter.mine', 'meter.test.new',
u'meter.accent\xe9\u0437']),
set(r['name'] for r in data))
def test_meters_query_with_timestamp(self):
date_time = datetime.datetime(2012, 7, 2, 10, 41)
isotime = date_time.isoformat()
resp = self.get_json('/meters',
q=[{'field': 'timestamp',
'op': 'gt',
'value': isotime}],
expect_errors=True)
self.assertEqual(400, resp.status_code)
self.assertEqual('Unknown argument: "timestamp": '
'not valid for this resource',
jsonutils.loads(resp.body)['error_message']
['faultstring'])
def test_list_samples(self):
data = self.get_json('/samples')
self.assertEqual(7, len(data))
def test_query_samples_with_invalid_field_name_and_non_eq_operator(self):
resp = self.get_json('/samples',
q=[{'field': 'non_valid_field_name',
'op': 'gt',
'value': 3}],
expect_errors=True)
resp_string = jsonutils.loads(resp.body)
fault_string = resp_string['error_message']['faultstring']
msg = ('Unknown argument: "non_valid_field_name"'
': unrecognized field in query: '
'[<Query {key!r} '
'gt {value!r} ')
msg = msg.format(key=u'non_valid_field_name', value=u'3')
self.assertEqual(400, resp.status_code)
self.assertTrue(fault_string.startswith(msg))
def test_query_samples_with_invalid_field_name_and_eq_operator(self):
resp = self.get_json('/samples',
q=[{'field': 'non_valid_field_name',
'op': 'eq',
'value': 3}],
expect_errors=True)
resp_string = jsonutils.loads(resp.body)
fault_string = resp_string['error_message']['faultstring']
msg = ('Unknown argument: "non_valid_field_name"'
': unrecognized field in query: '
'[<Query {key!r} eq {value!r} ')
msg = msg.format(key=u'non_valid_field_name', value=u'3')
self.assertEqual(400, resp.status_code)
self.assertTrue(fault_string.startswith(msg))
def test_query_samples_with_invalid_operator_and_valid_field_name(self):
resp = self.get_json('/samples',
q=[{'field': 'project_id',
'op': 'lt',
'value': '3'}],
expect_errors=True)
resp_string = jsonutils.loads(resp.body)
fault_string = resp_string['error_message']['faultstring']
expected_error_message = ("Invalid input for field/attribute op. " +
"Value: 'lt'. unimplemented operator for" +
" project_id")
self.assertEqual(400, resp.status_code)
self.assertEqual(fault_string, expected_error_message)
def test_list_meters_query_wrong_type_metadata(self):
resp = self.get_json('/meters/meter.test',
q=[{'field': 'metadata.size',
'op': 'eq',
'value': '0',
'type': 'blob'}],
expect_errors=True
)
expected_error_message = 'The data type blob is not supported.'
resp_string = jsonutils.loads(resp.body)
fault_string = resp_string['error_message']['faultstring']
self.assertTrue(fault_string.startswith(expected_error_message))
def test_query_samples_with_search_offset(self):
resp = self.get_json('/samples',
q=[{'field': 'search_offset',
'op': 'eq',
'value': 42}],
expect_errors=True)
self.assertEqual(400, resp.status_code)
self.assertEqual("Invalid input for field/attribute field. "
"Value: 'search_offset'. "
"search_offset cannot be used without timestamp",
jsonutils.loads(resp.body)['error_message']
['faultstring'])
def test_list_meters_with_dict_metadata(self):
data = self.get_json('/meters/meter.mine',
q=[{'field':
'metadata.properties.prop_2.sub_prop_1',
'op': 'eq',
'value': 'sub_prop_value',
}])
self.assertEqual(1, len(data))
self.assertEqual('resource-id4', data[0]['resource_id'])
metadata = data[0]['resource_metadata']
self.assertIsNotNone(metadata)
self.assertEqual('self.sample4', metadata['tag'])
self.assertEqual('prop_value', metadata['properties.prop_1'])
def test_list_meters_with_dict_metadata_with_dot_dollar_in_key(self):
data = self.get_json('/meters/meter.mine',
q=[{'field':
'metadata.properties.prop.3.$sub_prop.2',
'op': 'eq',
'value': 'sub_prop_value2',
}])
self.assertEqual(1, len(data))
self.assertEqual('resource-id4', data[0]['resource_id'])
metadata = data[0]['resource_metadata']
self.assertIsNotNone(metadata)
self.assertEqual('self.sample4', metadata['tag'])
self.assertEqual('prop_value', metadata['properties.prop_1'])
self.assertEqual('sub_prop_value',
metadata['properties.prop_2:sub_prop_1'])
def test_get_one_sample(self):
sample_id = self.messages[1]['message_id']
data = self.get_json('/samples/%s' % sample_id)
self.assertIn('id', data)
del data['recorded_at']
self.assertEqual({
u'id': sample_id,
u'metadata': {u'display_name': u'test-server',
u'is_public': u'False',
u'size': u'0',
u'tag': u'self.sample1',
u'util': u'0.47'},
u'meter': u'meter.test',
u'project_id': u'project-id',
u'resource_id': u'resource-id',
u'timestamp': u'2012-07-02T11:40:00',
u'type': u'cumulative',
u'unit': u'',
u'source': 'test_source',
u'user_id': u'user-id',
u'volume': 3.0}, data)
def test_get_not_existing_sample(self):
resp = self.get_json('/samples/not_exists', expect_errors=True,
status=404)
self.assertEqual("Sample not_exists Not Found",
jsonutils.loads(resp.body)['error_message']
['faultstring'])
def test_list_samples_with_dict_metadata(self):
data = self.get_json('/samples',
q=[{'field':
'metadata.properties.prop_2.sub_prop_1',
'op': 'eq',
'value': 'sub_prop_value',
}])
self.assertIn('id', data[0])
del data[0]['id'] # Randomly generated
del data[0]['recorded_at']
self.assertEqual([{
u'user_id': u'user-id4',
u'resource_id': u'resource-id4',
u'timestamp': u'2012-07-02T10:43:00',
u'meter': u'meter.mine',
u'volume': 1.0,
u'project_id': u'project-id2',
u'type': u'gauge',
u'unit': u'',
u'source': u'test_source1',
u'metadata': {
u'display_name': u'test-server',
u'properties.prop_2:sub_prop_1': u'sub_prop_value',
u'util': u'0.58',
u'tag': u'self.sample4',
u'properties.prop_1': u'prop_value',
u'is_public': u'True',
u'size': u'0',
u'properties.prop:3:$sub_prop:2': u'sub_prop_value2',
}
}], data)
def test_list_with_field_metaquery(self):
def _helper(url):
resp = self.get_json(url,
q=[{'field':
'metaquery',
'op': 'eq',
'value': 'cow',
}],
expect_errors=True)
self.assertEqual(400, resp.status_code)
expected = ('Unknown argument: "metaquery": '
'unrecognized field in query')
self.assertIn(expected, resp.json['error_message']['faultstring'])
_helper('/samples')
_helper('/meters/meter.test')
def test_list_meters_metadata_query(self):
data = self.get_json('/meters/meter.test',
q=[{'field': 'metadata.tag',
'op': 'eq',
'value': 'self.sample1',
}],)
self.assertEqual(1, len(data))
self.assertEqual(set(['resource-id']),
set(r['resource_id'] for r in data))
self.assertEqual(set(['meter.test']),
set(r['counter_name'] for r in data))
def test_list_meters_resource_metadata_query(self):
# NOTE(jd) Same test as above, but with the alias resource_metadata
# as query field
data = self.get_json('/meters/meter.test',
q=[{'field': 'resource_metadata.tag',
'op': 'eq',
'value': 'self.sample1',
}],)
self.assertEqual(1, len(data))
self.assertEqual(set(['resource-id']),
set(r['resource_id'] for r in data))
self.assertEqual(set(['meter.test']),
set(r['counter_name'] for r in data))
def test_list_meters_multi_metadata_query(self):
data = self.get_json('/meters/meter.test',
q=[{'field': 'metadata.tag',
'op': 'eq',
'value': 'self.sample1',
},
{'field': 'metadata.display_name',
'op': 'eq',
'value': 'test-server',
}],)
self.assertEqual(1, len(data))
self.assertEqual(set(['resource-id']),
set(r['resource_id'] for r in data))
self.assertEqual(set(['meter.test']),
set(r['counter_name'] for r in data))
def test_list_meters_query_integer_metadata(self):
data = self.get_json('/meters/meter.test',
q=[{'field': 'metadata.size',
'op': 'eq',
'value': '0',
'type': 'integer'}]
)
self.assertEqual(2, len(data))
self.assertEqual(set(['resource-id',
'resource-id3']),
set(r['resource_id'] for r in data))
self.assertEqual(set(['meter.test']),
set(r['counter_name'] for r in data))
self.assertEqual(set(['0']),
set(r['resource_metadata']['size'] for r in data))
def test_list_meters_query_float_metadata(self):
data = self.get_json('/meters/meter.test',
q=[{'field': 'metadata.util',
'op': 'eq',
'value': '0.75',
'type': 'float'}]
)
self.assertEqual(2, len(data))
self.assertEqual(set(['resource-id',
'resource-id3']),
set(r['resource_id'] for r in data))
self.assertEqual(set(['meter.test']),
set(r['counter_name'] for r in data))
self.assertEqual(set(['0.75']),
set(r['resource_metadata']['util'] for r in data))
def test_list_meters_query_boolean_metadata(self):
data = self.get_json('/meters/meter.mine',
q=[{'field': 'metadata.is_public',
'op': 'eq',
'value': 'False',
'type': 'boolean'}]
)
self.assertEqual(1, len(data))
self.assertEqual(set(['resource-id2']),
set(r['resource_id'] for r in data))
self.assertEqual(set(['meter.mine']),
set(r['counter_name'] for r in data))
self.assertEqual(set(['False']),
set(r['resource_metadata']['is_public']
for r in data))
def test_list_meters_query_string_metadata(self):
data = self.get_json('/meters/meter.test',
q=[{'field': 'metadata.tag',
'op': 'eq',
'value': 'self.sample'}]
)
self.assertEqual(1, len(data))
self.assertEqual(set(['resource-id']),
set(r['resource_id'] for r in data))
self.assertEqual(set(['meter.test']),
set(r['counter_name'] for r in data))
self.assertEqual(set(['self.sample']),
set(r['resource_metadata']['tag'] for r in data))
def test_list_meters_query_integer_float_metadata_without_type(self):
data = self.get_json('/meters/meter.test',
q=[{'field': 'metadata.size',
'op': 'eq',
'value': '0'},
{'field': 'metadata.util',
'op': 'eq',
'value': '0.75'}]
)
self.assertEqual(1, len(data))
self.assertEqual(set(['resource-id3']),
set(r['resource_id'] for r in data))
self.assertEqual(set(['meter.test']),
set(r['counter_name'] for r in data))
self.assertEqual(set(['0']),
set(r['resource_metadata']['size'] for r in data))
self.assertEqual(set(['0.75']),
set(r['resource_metadata']['util'] for r in data))
def test_with_resource(self):
data = self.get_json('/meters', q=[{'field': 'resource_id',
'value': 'resource-id',
}])
nids = set(r['name'] for r in data)
self.assertEqual(set(['meter.test', 'meter.test.new']), nids)
sids = set(r['source'] for r in data)
self.assertEqual(set(['test_source']), sids)
def test_with_resource_and_source(self):
data = self.get_json('/meters', q=[{'field': 'resource_id',
'value': 'resource-id4',
},
{'field': 'source',
'value': 'test_source1',
}])
nids = set(r['name'] for r in data)
self.assertEqual(set(['meter.mine', u'meter.accent\xe9\u0437']), nids)
sids = set(r['source'] for r in data)
self.assertEqual(set(['test_source1']), sids)
def test_with_resource_and_metadata_query(self):
data = self.get_json('/meters/meter.mine',
q=[{'field': 'resource_id',
'op': 'eq',
'value': 'resource-id2',
},
{'field': 'metadata.tag',
'op': 'eq',
'value': 'self.sample2',
}])
self.assertEqual(1, len(data))
self.assertEqual(set(['resource-id2']),
set(r['resource_id'] for r in data))
self.assertEqual(set(['meter.mine']),
set(r['counter_name'] for r in data))
def test_with_source(self):
data = self.get_json('/meters', q=[{'field': 'source',
'value': 'test_source',
}])
rids = set(r['resource_id'] for r in data)
self.assertEqual(set(['resource-id',
'resource-id2',
'resource-id3']), rids)
sids = set(r['source'] for r in data)
self.assertEqual(set(['test_source']), sids)
def test_with_source_and_metadata_query(self):
data = self.get_json('/meters/meter.mine',
q=[{'field': 'source',
'op': 'eq',
'value': 'test_source',
},
{'field': 'metadata.tag',
'op': 'eq',
'value': 'self.sample2',
}])
self.assertEqual(1, len(data))
self.assertEqual(set(['test_source']),
set(r['source'] for r in data))
self.assertEqual(set(['meter.mine']),
set(r['counter_name'] for r in data))
def test_with_source_non_existent(self):
data = self.get_json('/meters',
q=[{'field': 'source',
'value': 'test_source_doesnt_exist',
}],
)
self.assertIsEmpty(data)
def test_with_user(self):
data = self.get_json('/meters',
q=[{'field': 'user_id',
'value': 'user-id',
}],
)
uids = set(r['user_id'] for r in data)
self.assertEqual(set(['user-id']), uids)
nids = set(r['name'] for r in data)
self.assertEqual(set(['meter.mine', 'meter.test', 'meter.test.new']),
nids)
rids = set(r['resource_id'] for r in data)
self.assertEqual(set(['resource-id', 'resource-id2']), rids)
sids = set(r['source'] for r in data)
self.assertEqual(set(['test_source']), sids)
def test_with_user_and_source(self):
data = self.get_json('/meters',
q=[{'field': 'user_id',
'value': 'user-id4',
},
{'field': 'source',
'value': 'test_source1',
}],
)
uids = set(r['user_id'] for r in data)
self.assertEqual(set(['user-id4']), uids)
sids = set(r['source'] for r in data)
self.assertEqual(set(['test_source1']), sids)
def test_with_user_and_metadata_query(self):
data = self.get_json('/meters/meter.test',
q=[{'field': 'user_id',
'op': 'eq',
'value': 'user-id',
},
{'field': 'metadata.tag',
'op': 'eq',
'value': 'self.sample1',
}])
self.assertEqual(1, len(data))
self.assertEqual(set(['user-id']), set(r['user_id'] for r in data))
self.assertEqual(set(['meter.test']),
set(r['counter_name'] for r in data))
def test_with_user_non_existent(self):
data = self.get_json('/meters',
q=[{'field': 'user_id',
'value': 'user-id-foobar123',
}],
)
self.assertEqual([], data)
def test_with_project(self):
data = self.get_json('/meters',
q=[{'field': 'project_id',
'value': 'project-id2',
}],
)
rids = set(r['resource_id'] for r in data)
self.assertEqual(set(['resource-id3', 'resource-id4']), rids)
sids = set(r['source'] for r in data)
self.assertEqual(set(['test_source', 'test_source1']), sids)
def test_with_project_and_source(self):
data = self.get_json('/meters',
q=[{'field': 'project_id',
'value': 'project-id2',
},
{'field': 'source',
'value': 'test_source1',
}],
)
rids = set(r['resource_id'] for r in data)
self.assertEqual(set(['resource-id4']), rids)
sids = set(r['source'] for r in data)
self.assertEqual(set(['test_source1']), sids)
def test_with_project_and_metadata_query(self):
data = self.get_json('/meters/meter.test',
q=[{'field': 'project_id',
'op': 'eq',
'value': 'project-id',
},
{'field': 'metadata.tag',
'op': 'eq',
'value': 'self.sample1',
}])
self.assertEqual(1, len(data))
self.assertEqual(set(['project-id']),
set(r['project_id'] for r in data))
self.assertEqual(set(['meter.test']),
set(r['counter_name'] for r in data))
def test_with_project_non_existent(self):
data = self.get_json('/meters',
q=[{'field': 'project_id',
'value': 'jd-was-here',
}],
)
self.assertEqual([], data)
def test_list_meters_meter_id(self):
data = self.get_json('/meters')
for i in data:
meter_id = '%s+%s' % (i['resource_id'], i['name'])
expected = base64.b64encode(meter_id.encode('utf-8'))
if six.PY3:
expected = expected.decode('ascii')
self.assertEqual(expected, i['meter_id'])
self.assertFalse(i['meter_id'].endswith('\n'))
self.assertEqual(
[i['resource_id'], i['name']],
base64.b64decode(i['meter_id']).decode('utf-8').split('+'))
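
test_list_meters_meter_id above fixes the meter_id format: base64 of '<resource_id>+<name>', decoded back to an ASCII string on Python 3. A minimal reproduction of that encoding:

import base64

def meter_id(resource_id, name):
    # The API builds meter_id as base64("<resource_id>+<name>") and
    # decodes the result to an ASCII string, as asserted above.
    raw = ('%s+%s' % (resource_id, name)).encode('utf-8')
    return base64.b64encode(raw).decode('ascii')

mid = meter_id('resource-id', 'meter.test')
# Round-trip, mirroring the assertion in the test:
assert base64.b64decode(mid).decode('utf-8').split('+') == ['resource-id',
                                                            'meter.test']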


@ -1,586 +0,0 @@
#
# Copyright 2012 New Dream Network, LLC (DreamHost)
#
# Licensed under the Apache License, Version 2.0 (the "License"); you may
# not use this file except in compliance with the License. You may obtain
# a copy of the License at
#
# http://www.apache.org/licenses/LICENSE-2.0
#
# Unless required by applicable law or agreed to in writing, software
# distributed under the License is distributed on an "AS IS" BASIS, WITHOUT
# WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. See the
# License for the specific language governing permissions and limitations
# under the License.
"""Test listing resources.
"""
import datetime
import json
import six
import webtest.app
from ceilometer.publisher import utils
from ceilometer import sample
from ceilometer.tests.functional.api import v2
class TestListResources(v2.FunctionalTest):
def test_empty(self):
data = self.get_json('/resources')
self.assertEqual([], data)
def _verify_resource_timestamps(self, res, first, last):
# Bounds need not be tight (see ceilometer bug #1288372)
self.assertIn('first_sample_timestamp', res)
self.assertGreaterEqual(first.isoformat(),
res['first_sample_timestamp'])
self.assertIn('last_sample_timestamp', res)
self.assertLessEqual(last.isoformat(), res['last_sample_timestamp'])
def test_instance_no_metadata(self):
timestamp = datetime.datetime(2012, 7, 2, 10, 40)
sample1 = sample.Sample(
'instance',
'cumulative',
'',
1,
'user-id',
'project-id',
'resource-id',
timestamp=timestamp,
resource_metadata=None,
source='test',
)
msg = utils.meter_message_from_counter(
sample1, self.CONF.publisher.telemetry_secret,
)
self.conn.record_metering_data(msg)
data = self.get_json('/resources')
self.assertEqual(1, len(data))
self._verify_resource_timestamps(data[0], timestamp, timestamp)
def test_instances(self):
timestamps = {
'resource-id': datetime.datetime(2012, 7, 2, 10, 40),
'resource-id-alternate': datetime.datetime(2012, 7, 2, 10, 41),
}
sample1 = sample.Sample(
'instance',
'cumulative',
'',
1,
'user-id',
'project-id',
'resource-id',
timestamp=timestamps['resource-id'],
resource_metadata={'display_name': 'test-server',
'tag': 'self.sample',
},
source='test',
)
msg = utils.meter_message_from_counter(
sample1, self.CONF.publisher.telemetry_secret,
)
self.conn.record_metering_data(msg)
sample2 = sample.Sample(
'instance',
'cumulative',
'',
1,
'user-id',
'project-id',
'resource-id-alternate',
timestamp=timestamps['resource-id-alternate'],
resource_metadata={'display_name': 'test-server',
'tag': 'self.sample2',
},
source='test',
)
msg2 = utils.meter_message_from_counter(
sample2, self.CONF.publisher.telemetry_secret,
)
self.conn.record_metering_data(msg2)
data = self.get_json('/resources')
self.assertEqual(2, len(data))
for res in data:
timestamp = timestamps.get(res['resource_id'])
self._verify_resource_timestamps(res, timestamp, timestamp)
def test_instance_multiple_samples(self):
timestamps = [
datetime.datetime(2012, 7, 2, 10, 41),
datetime.datetime(2012, 7, 2, 10, 42),
datetime.datetime(2012, 7, 2, 10, 40),
]
for timestamp in timestamps:
datapoint = sample.Sample(
'instance',
'cumulative',
'',
1,
'user-id',
'project-id',
'resource-id',
timestamp=timestamp,
resource_metadata={'display_name': 'test-server',
'tag': 'self.sample-%s' % timestamp,
},
source='test',
)
msg = utils.meter_message_from_counter(
datapoint,
self.CONF.publisher.telemetry_secret,
)
self.conn.record_metering_data(msg)
data = self.get_json('/resources')
self.assertEqual(1, len(data))
self._verify_resource_timestamps(data[0],
timestamps[-1], timestamps[1])
def test_instances_one(self):
sample1 = sample.Sample(
'instance',
'cumulative',
'',
1,
'user-id',
'project-id',
'resource-id',
timestamp=datetime.datetime(2012, 7, 2, 10, 40),
resource_metadata={'display_name': 'test-server',
'tag': 'self.sample',
},
source='test',
)
msg = utils.meter_message_from_counter(
sample1, self.CONF.publisher.telemetry_secret,
)
self.conn.record_metering_data(msg)
sample2 = sample.Sample(
'instance',
'cumulative',
'',
1,
'user-id',
'project-id',
'resource-id-alternate',
timestamp=datetime.datetime(2012, 7, 2, 10, 41),
resource_metadata={'display_name': 'test-server',
'tag': 'self.sample2',
},
source='test',
)
msg2 = utils.meter_message_from_counter(
sample2, self.CONF.publisher.telemetry_secret,
)
self.conn.record_metering_data(msg2)
data = self.get_json('/resources/resource-id')
self.assertEqual('resource-id', data['resource_id'])
def test_with_source(self):
sample1 = sample.Sample(
'instance',
'cumulative',
'',
1,
'user-id',
'project-id',
'resource-id',
timestamp=datetime.datetime(2012, 7, 2, 10, 40),
resource_metadata={'display_name': 'test-server',
'tag': 'self.sample',
},
source='test_list_resources',
)
msg = utils.meter_message_from_counter(
sample1, self.CONF.publisher.telemetry_secret,
)
self.conn.record_metering_data(msg)
sample2 = sample.Sample(
'instance',
'cumulative',
'',
1,
'user-id2',
'project-id',
'resource-id-alternate',
timestamp=datetime.datetime(2012, 7, 2, 10, 41),
resource_metadata={'display_name': 'test-server',
'tag': 'self.sample2',
},
source='not-test',
)
msg2 = utils.meter_message_from_counter(
sample2, self.CONF.publisher.telemetry_secret,
)
self.conn.record_metering_data(msg2)
data = self.get_json('/resources', q=[{'field': 'source',
'value': 'test_list_resources',
}])
ids = [r['resource_id'] for r in data]
self.assertEqual(['resource-id'], ids)
sources = [r['source'] for r in data]
self.assertEqual(['test_list_resources'], sources)
def test_resource_id_with_slash(self):
s = sample.Sample(
'storage.containers.objects',
'gauge',
'',
1,
'19fbed01c21f4912901057021b9e7111',
'45acc90399134206b3b41f3d3a0a06d6',
'29f809d9-88bb-4c40-b1ba-a77a1fcf8ceb/glance',
timestamp=datetime.datetime(2012, 7, 2, 10, 40).isoformat(),
resource_metadata={},
source='test_show_special_resource',
)
msg = utils.meter_message_from_counter(
s, self.CONF.publisher.telemetry_secret,
)
msg['timestamp'] = datetime.datetime(2012, 7, 2, 10, 40)
self.conn.record_metering_data(msg)
rid_encoded = '29f809d9-88bb-4c40-b1ba-a77a1fcf8ceb%252Fglance'
resp = self.get_json('/resources/%s' % rid_encoded)
self.assertEqual("19fbed01c21f4912901057021b9e7111", resp["user_id"])
self.assertEqual('29f809d9-88bb-4c40-b1ba-a77a1fcf8ceb/glance',
resp["resource_id"])
def test_with_invalid_resource_id(self):
sample1 = sample.Sample(
'instance',
'cumulative',
'',
1,
'user-id',
'project-id',
'resource-id-1',
timestamp=datetime.datetime(2012, 7, 2, 10, 40),
resource_metadata={'display_name': 'test-server',
'tag': 'self.sample',
},
source='test_list_resources',
)
msg = utils.meter_message_from_counter(
sample1, self.CONF.publisher.telemetry_secret,
)
self.conn.record_metering_data(msg)
sample2 = sample.Sample(
'instance',
'cumulative',
'',
1,
'user-id2',
'project-id',
'resource-id-2',
timestamp=datetime.datetime(2012, 7, 2, 10, 41),
resource_metadata={'display_name': 'test-server',
'tag': 'self.sample2',
},
source='test_list_resources',
)
msg2 = utils.meter_message_from_counter(
sample2, self.CONF.publisher.telemetry_secret,
)
self.conn.record_metering_data(msg2)
resp1 = self.get_json('/resources/resource-id-1')
self.assertEqual("resource-id-1", resp1["resource_id"])
resp2 = self.get_json('/resources/resource-id-2')
self.assertEqual("resource-id-2", resp2["resource_id"])
resp3 = self.get_json('/resources/resource-id-3', expect_errors=True)
self.assertEqual(404, resp3.status_code)
json_data = resp3.body
if six.PY3:
json_data = json_data.decode('utf-8')
self.assertEqual("Resource resource-id-3 Not Found",
json.loads(json_data)['error_message']
['faultstring'])
def test_with_user(self):
sample1 = sample.Sample(
'instance',
'cumulative',
'',
1,
'user-id',
'project-id',
'resource-id',
timestamp=datetime.datetime(2012, 7, 2, 10, 40),
resource_metadata={'display_name': 'test-server',
'tag': 'self.sample',
},
source='test_list_resources',
)
msg = utils.meter_message_from_counter(
sample1, self.CONF.publisher.telemetry_secret,
)
self.conn.record_metering_data(msg)
sample2 = sample.Sample(
'instance',
'cumulative',
'',
1,
'user-id2',
'project-id',
'resource-id-alternate',
timestamp=datetime.datetime(2012, 7, 2, 10, 41),
resource_metadata={'display_name': 'test-server',
'tag': 'self.sample2',
},
source='not-test',
)
msg2 = utils.meter_message_from_counter(
sample2, self.CONF.publisher.telemetry_secret,
)
self.conn.record_metering_data(msg2)
data = self.get_json('/resources', q=[{'field': 'user_id',
'value': 'user-id',
}])
ids = [r['resource_id'] for r in data]
self.assertEqual(['resource-id'], ids)
def test_with_project(self):
sample1 = sample.Sample(
'instance',
'cumulative',
'',
1,
'user-id',
'project-id',
'resource-id',
timestamp=datetime.datetime(2012, 7, 2, 10, 40),
resource_metadata={'display_name': 'test-server',
'tag': 'self.sample',
},
source='test_list_resources',
)
msg = utils.meter_message_from_counter(
sample1, self.CONF.publisher.telemetry_secret,
)
self.conn.record_metering_data(msg)
sample2 = sample.Sample(
'instance',
'cumulative',
'',
1,
'user-id2',
'project-id2',
'resource-id-alternate',
timestamp=datetime.datetime(2012, 7, 2, 10, 41),
resource_metadata={'display_name': 'test-server',
'tag': 'self.sample2',
},
source='not-test',
)
msg2 = utils.meter_message_from_counter(
sample2, self.CONF.publisher.telemetry_secret,
)
self.conn.record_metering_data(msg2)
data = self.get_json('/resources', q=[{'field': 'project_id',
'value': 'project-id',
}])
ids = [r['resource_id'] for r in data]
self.assertEqual(['resource-id'], ids)
def test_with_user_non_admin(self):
sample1 = sample.Sample(
'instance',
'cumulative',
'',
1,
'user-id2',
'project-id2',
'resource-id-alternate',
timestamp=datetime.datetime(2012, 7, 2, 10, 41),
resource_metadata={'display_name': 'test-server',
'tag': 'self.sample1',
},
source='not-test',
)
msg2 = utils.meter_message_from_counter(
sample1, self.CONF.publisher.telemetry_secret,
)
self.conn.record_metering_data(msg2)
data = self.get_json('/resources',
headers={"X-Roles": "Member",
"X-Project-Id": "project-id2"})
ids = set(r['resource_id'] for r in data)
self.assertEqual(set(['resource-id-alternate']), ids)
def test_with_user_wrong_tenant(self):
sample1 = sample.Sample(
'instance',
'cumulative',
'',
1,
'user-id2',
'project-id2',
'resource-id-alternate',
timestamp=datetime.datetime(2012, 7, 2, 10, 41),
resource_metadata={'display_name': 'test-server',
'tag': 'self.sample1',
},
source='not-test',
)
msg2 = utils.meter_message_from_counter(
sample1, self.CONF.publisher.telemetry_secret,
)
self.conn.record_metering_data(msg2)
data = self.get_json('/resources',
headers={"X-Roles": "Member",
"X-Project-Id": "project-wrong"})
ids = set(r['resource_id'] for r in data)
self.assertEqual(set(), ids)
def test_metadata(self):
sample1 = sample.Sample(
'instance',
'cumulative',
'',
1,
'user-id',
'project-id',
'resource-id',
timestamp=datetime.datetime(2012, 7, 2, 10, 40),
resource_metadata={'display_name': 'test-server',
'tag': 'self.sample',
'dict_properties': {'key.$1': {'$key': 'val'}},
'not_ignored_list': ['returned'],
},
source='test',
)
msg = utils.meter_message_from_counter(
sample1, self.CONF.publisher.telemetry_secret,
)
self.conn.record_metering_data(msg)
data = self.get_json('/resources')
metadata = data[0]['metadata']
self.assertEqual([(u'dict_properties.key:$1:$key', u'val'),
(u'display_name', u'test-server'),
(u'not_ignored_list', u"['returned']"),
(u'tag', u'self.sample')],
list(sorted(six.iteritems(metadata))))
def test_resource_meter_links(self):
sample1 = sample.Sample(
'instance',
'cumulative',
'',
1,
'user-id',
'project-id',
'resource-id',
timestamp=datetime.datetime(2012, 7, 2, 10, 40),
resource_metadata={'display_name': 'test-server',
'tag': 'self.sample',
},
source='test_list_resources',
)
msg = utils.meter_message_from_counter(
sample1, self.CONF.publisher.telemetry_secret,
)
self.conn.record_metering_data(msg)
data = self.get_json('/resources')
links = data[0]['links']
self.assertEqual(2, len(links))
self.assertEqual('self', links[0]['rel'])
self.assertIn((self.PATH_PREFIX + '/resources/resource-id'),
links[0]['href'])
self.assertEqual('instance', links[1]['rel'])
self.assertIn((self.PATH_PREFIX + '/meters/instance?'
'q.field=resource_id&q.value=resource-id'),
links[1]['href'])
def test_resource_skip_meter_links(self):
sample1 = sample.Sample(
'instance',
'cumulative',
'',
1,
'user-id',
'project-id',
'resource-id',
timestamp=datetime.datetime(2012, 7, 2, 10, 40),
resource_metadata={'display_name': 'test-server',
'tag': 'self.sample',
},
source='test_list_resources',
)
msg = utils.meter_message_from_counter(
sample1, self.CONF.publisher.telemetry_secret,
)
self.conn.record_metering_data(msg)
data = self.get_json('/resources?meter_links=0')
links = data[0]['links']
self.assertEqual(len(links), 1)
self.assertEqual(links[0]['rel'], 'self')
self.assertIn((self.PATH_PREFIX + '/resources/resource-id'),
links[0]['href'])
class TestListResourcesRestriction(v2.FunctionalTest):
def setUp(self):
super(TestListResourcesRestriction, self).setUp()
self.CONF.set_override('default_api_return_limit', 10, group='api')
for i in range(20):
s = sample.Sample(
'volume.size',
'gauge',
'GiB',
5 + i,
'user-id',
'project1',
'resource-id%s' % i,
timestamp=(datetime.datetime(2012, 9, 25, 10, 30) +
datetime.timedelta(seconds=i)),
resource_metadata={'display_name': 'test-volume',
'tag': 'self.sample',
},
source='source1',
)
msg = utils.meter_message_from_counter(
s, self.CONF.publisher.telemetry_secret,
)
self.conn.record_metering_data(msg)
def test_resource_limit(self):
data = self.get_json('/resources?limit=1')
self.assertEqual(1, len(data))
def test_resource_limit_negative(self):
self.assertRaises(webtest.app.AppError, self.get_json,
'/resources?limit=-2')
def test_resource_limit_bigger(self):
data = self.get_json('/resources?limit=42')
self.assertEqual(20, len(data))
def test_resource_default_limit(self):
data = self.get_json('/resources')
self.assertEqual(10, len(data))
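
test_resource_id_with_slash above requests '...%252Fglance', i.e. the '/' in the resource ID percent-encoded twice so it survives path routing; the double quoting is inferred from the test, not from separate API documentation. A sketch of building such a URL:

from six.moves.urllib import parse as urlparse

resource_id = '29f809d9-88bb-4c40-b1ba-a77a1fcf8ceb/glance'
once = urlparse.quote(resource_id, safe='')   # '/' -> '%2F'
twice = urlparse.quote(once, safe='')         # '%2F' -> '%252F'
print('/v2/resources/%s' % twice)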


@ -1,156 +0,0 @@
#
# Copyright 2012 New Dream Network, LLC (DreamHost)
#
# Licensed under the Apache License, Version 2.0 (the "License"); you may
# not use this file except in compliance with the License. You may obtain
# a copy of the License at
#
# http://www.apache.org/licenses/LICENSE-2.0
#
# Unless required by applicable law or agreed to in writing, software
# distributed under the License is distributed on an "AS IS" BASIS, WITHOUT
# WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. See the
# License for the specific language governing permissions and limitations
# under the License.
"""Test listing raw samples.
"""
import datetime
import mock
from oslo_utils import timeutils
import six
from ceilometer.publisher import utils
from ceilometer import sample
from ceilometer.tests.functional.api import v2
class TestListSamples(v2.FunctionalTest):
def setUp(self):
super(TestListSamples, self).setUp()
patcher = mock.patch.object(timeutils, 'utcnow')
self.addCleanup(patcher.stop)
self.mock_utcnow = patcher.start()
self.mock_utcnow.return_value = datetime.datetime(2014, 2, 11, 16, 42)
self.sample1 = sample.Sample(
'instance',
'cumulative',
'',
1,
'user-id',
'project1',
'resource-id',
timestamp=datetime.datetime(2012, 7, 2, 10, 40),
resource_metadata={'display_name': 'test-server',
'tag': 'self.sample',
'dict_properties': {'key': 'value'},
'not_ignored_list': ['returned'],
},
source='test_source',
)
msg = utils.meter_message_from_counter(
self.sample1, self.CONF.publisher.telemetry_secret,
)
self.conn.record_metering_data(msg)
self.sample2 = sample.Sample(
'instance',
'cumulative',
'',
1,
'user-id2',
'project2',
'resource-id-alternate',
timestamp=datetime.datetime(2012, 7, 2, 10, 41),
resource_metadata={'display_name': 'test-server',
'tag': 'self.sample2',
},
source='source2',
)
msg2 = utils.meter_message_from_counter(
self.sample2, self.CONF.publisher.telemetry_secret,
)
self.conn.record_metering_data(msg2)
def test_all(self):
data = self.get_json('/meters/instance')
self.assertEqual(2, len(data))
for s in data:
self.assertEqual(timeutils.utcnow().isoformat(), s['recorded_at'])
def test_all_trailing_slash(self):
data = self.get_json('/meters/instance/')
self.assertEqual(2, len(data))
def test_empty_project(self):
data = self.get_json('/meters/instance',
q=[{'field': 'project_id',
'value': 'no-such-project',
}])
self.assertEqual([], data)
def test_by_project(self):
data = self.get_json('/meters/instance',
q=[{'field': 'project_id',
'value': 'project1',
}])
self.assertEqual(1, len(data))
def test_empty_resource(self):
data = self.get_json('/meters/instance',
q=[{'field': 'resource_id',
'value': 'no-such-resource',
}])
self.assertEqual([], data)
def test_by_resource(self):
data = self.get_json('/meters/instance',
q=[{'field': 'resource_id',
'value': 'resource-id',
}])
self.assertEqual(1, len(data))
def test_empty_source(self):
data = self.get_json('/meters/instance',
q=[{'field': 'source',
'value': 'no-such-source',
}])
self.assertEqual(0, len(data))
def test_by_source(self):
data = self.get_json('/meters/instance',
q=[{'field': 'source',
'value': 'test_source',
}])
self.assertEqual(1, len(data))
def test_empty_user(self):
data = self.get_json('/meters/instance',
q=[{'field': 'user_id',
'value': 'no-such-user',
}])
self.assertEqual([], data)
def test_by_user(self):
data = self.get_json('/meters/instance',
q=[{'field': 'user_id',
'value': 'user-id',
}])
self.assertEqual(1, len(data))
def test_metadata(self):
data = self.get_json('/meters/instance',
q=[{'field': 'resource_id',
'value': 'resource-id',
}])
sample = data[0]
self.assertIn('resource_metadata', sample)
self.assertEqual(
[('dict_properties.key', 'value'),
('display_name', 'test-server'),
('not_ignored_list', "['returned']"),
('tag', 'self.sample'),
],
list(sorted(six.iteritems(sample['resource_metadata']))))
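
test_metadata above shows the shape the API gives nested resource_metadata when returning samples: one level of dict nesting is joined with '.', and list values come back stringified. A rough, illustrative flattening helper (this is not the implementation the API uses, just a sketch of the observed behaviour):

def flatten_metadata(metadata):
    flat = {}
    for key, value in metadata.items():
        if isinstance(value, dict):
            # one nesting level joined with '.', as in 'dict_properties.key'
            for subkey, subvalue in value.items():
                flat['%s.%s' % (key, subkey)] = subvalue
        elif isinstance(value, list):
            # lists are stringified, as in "['returned']"
            flat[key] = str(value)
        else:
            flat[key] = value
    return flat

print(flatten_metadata({'display_name': 'test-server',
                        'dict_properties': {'key': 'value'},
                        'not_ignored_list': ['returned'],
                        'tag': 'self.sample'}))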


@ -1,374 +0,0 @@
#
# Copyright 2013 Red Hat, Inc
#
# Licensed under the Apache License, Version 2.0 (the "License"); you may
# not use this file except in compliance with the License. You may obtain
# a copy of the License at
#
# http://www.apache.org/licenses/LICENSE-2.0
#
# Unless required by applicable law or agreed to in writing, software
# distributed under the License is distributed on an "AS IS" BASIS, WITHOUT
# WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. See the
# License for the specific language governing permissions and limitations
# under the License.
"""Test listing raw events.
"""
import copy
import datetime
import os
import fixtures
import mock
from oslo_utils import fileutils
from oslo_utils import timeutils
import six
from ceilometer.tests.functional.api import v2
class TestPostSamples(v2.FunctionalTest):
def fake_notifier_sample(self, ctxt, event_type, payload):
samples = payload['samples']
for m in samples:
del m['message_signature']
self.published.append(samples)
def _make_app(self, enable_acl=False):
content = ('{"context_is_project": "project_id:%(project_id)s",'
'"default" : "!",'
'"telemetry:create_samples": ""}')
if six.PY3:
content = content.encode('utf-8')
self.tempfile = fileutils.write_to_tempfile(content=content,
prefix='policy',
suffix='.json')
self.CONF.set_override("policy_file", self.tempfile,
group='oslo_policy')
return super(TestPostSamples, self)._make_app()
def tearDown(self):
os.remove(self.tempfile)
super(TestPostSamples, self).tearDown()
def setUp(self):
self.published = []
notifier = mock.Mock()
notifier.sample.side_effect = self.fake_notifier_sample
self.useFixture(fixtures.MockPatch('oslo_messaging.Notifier',
return_value=notifier))
super(TestPostSamples, self).setUp()
def test_one(self):
s1 = [{'counter_name': 'apples',
'counter_type': 'gauge',
'counter_unit': 'instance',
'counter_volume': 1,
'resource_id': 'bd9431c1-8d69-4ad3-803a-8d4a6b89fd36',
'project_id': '35b17138-b364-4e6a-a131-8f3099c5be68',
'user_id': 'efd87807-12d2-4b38-9c70-5f5c2ac427ff',
'resource_metadata': {'name1': 'value1',
'name2': 'value2'}}]
data = self.post_json('/meters/apples/', s1)
# timestamp not given so it is generated.
s1[0]['timestamp'] = data.json[0]['timestamp']
# Ignore message id that is randomly generated
s1[0]['message_id'] = data.json[0]['message_id']
# source is generated if not provided.
s1[0]['source'] = '%s:openstack' % s1[0]['project_id']
self.assertEqual(s1, data.json)
s1[0]["monotonic_time"] = None
self.assertEqual(s1[0], self.published[0][0])
def test_nested_metadata(self):
s1 = [{'counter_name': 'apples',
'counter_type': 'gauge',
'counter_unit': 'instance',
'counter_volume': 1,
'resource_id': 'bd9431c1-8d69-4ad3-803a-8d4a6b89fd36',
'project_id': '35b17138-b364-4e6a-a131-8f3099c5be68',
'user_id': 'efd87807-12d2-4b38-9c70-5f5c2ac427ff',
'resource_metadata': {'nest.name1': 'value1',
'name2': 'value2',
'nest.name2': 'value3'}}]
data = self.post_json('/meters/apples/', s1)
# timestamp not given so it is generated.
s1[0]['timestamp'] = data.json[0]['timestamp']
# Ignore message id that is randomly generated
s1[0]['message_id'] = data.json[0]['message_id']
# source is generated if not provided.
s1[0]['source'] = '%s:openstack' % s1[0]['project_id']
unwound = copy.copy(s1[0])
unwound['resource_metadata'] = {'nest': {'name1': 'value1',
'name2': 'value3'},
'name2': 'value2'}
unwound["monotonic_time"] = None
# only the published sample should be unwound, not the representation
# in the API response
self.assertEqual(s1[0], data.json[0])
self.assertEqual(unwound, self.published[0][0])
def test_invalid_counter_type(self):
s1 = [{'counter_name': 'my_counter_name',
'counter_type': 'INVALID_TYPE',
'counter_unit': 'instance',
'counter_volume': 1,
'source': 'closedstack',
'resource_id': 'bd9431c1-8d69-4ad3-803a-8d4a6b89fd36',
'project_id': '35b17138-b364-4e6a-a131-8f3099c5be68',
'user_id': 'efd87807-12d2-4b38-9c70-5f5c2ac427ff',
'resource_metadata': {'name1': 'value1',
'name2': 'value2'}}]
data = self.post_json('/meters/my_counter_name/', s1,
expect_errors=True)
self.assertEqual(400, data.status_int)
self.assertEqual(0, len(self.published))
def test_message_id_provided(self):
"""Do not accept a sample that already carries a message_id."""
s1 = [{'counter_name': 'my_counter_name',
'counter_type': 'gauge',
'counter_unit': 'instance',
'counter_volume': 1,
'message_id': 'evil',
'source': 'closedstack',
'resource_id': 'bd9431c1-8d69-4ad3-803a-8d4a6b89fd36',
'project_id': '35b17138-b364-4e6a-a131-8f3099c5be68',
'user_id': 'efd87807-12d2-4b38-9c70-5f5c2ac427ff',
'resource_metadata': {'name1': 'value1',
'name2': 'value2'}}]
data = self.post_json('/meters/my_counter_name/', s1,
expect_errors=True)
self.assertEqual(400, data.status_int)
self.assertEqual(0, len(self.published))
def test_wrong_project_id(self):
"""Do not accept cross posting samples to different projects."""
s1 = [{'counter_name': 'my_counter_name',
'counter_type': 'gauge',
'counter_unit': 'instance',
'counter_volume': 1,
'source': 'closedstack',
'resource_id': 'bd9431c1-8d69-4ad3-803a-8d4a6b89fd36',
'project_id': '35b17138-b364-4e6a-a131-8f3099c5be68',
'user_id': 'efd87807-12d2-4b38-9c70-5f5c2ac427ff',
'resource_metadata': {'name1': 'value1',
'name2': 'value2'}}]
data = self.post_json('/meters/my_counter_name/', s1,
expect_errors=True,
headers={
"X-Roles": "Member",
"X-Tenant-Name": "lu-tenant",
"X-Project-Id":
"bc23a9d531064583ace8f67dad60f6bb",
})
self.assertEqual(400, data.status_int)
self.assertEqual(0, len(self.published))
def test_multiple_samples(self):
"""Send multiple samples.
The use case here is to reduce chatter by sending the counters
at a slower cadence.
"""
samples = []
for x in range(6):
dt = datetime.datetime(2012, 8, 27, x, 0, tzinfo=None)
s = {'counter_name': 'apples',
'counter_type': 'gauge',
'counter_unit': 'instance',
'counter_volume': float(x * 3),
'source': 'evil',
'timestamp': dt.isoformat(),
'resource_id': 'bd9431c1-8d69-4ad3-803a-8d4a6b89fd36',
'project_id': '35b17138-b364-4e6a-a131-8f3099c5be68',
'user_id': 'efd87807-12d2-4b38-9c70-5f5c2ac427ff',
'resource_metadata': {'name1': str(x),
'name2': str(x + 4)}}
samples.append(s)
data = self.post_json('/meters/apples/', samples)
for x, s in enumerate(samples):
# source is modified to include the project_id.
s['source'] = '%s:%s' % (s['project_id'],
s['source'])
# Ignore message id that is randomly generated
s['message_id'] = data.json[x]['message_id']
# remove tzinfo to compare generated timestamp
# with the provided one
c = data.json[x]
timestamp = timeutils.parse_isotime(c['timestamp'])
c['timestamp'] = timestamp.replace(tzinfo=None).isoformat()
# do the same on the pipeline
msg = self.published[0][x]
timestamp = timeutils.parse_isotime(msg['timestamp'])
msg['timestamp'] = timestamp.replace(tzinfo=None).isoformat()
self.assertEqual(s, c)
s["monotonic_time"] = None
self.assertEqual(s, self.published[0][x])
def test_missing_mandatory_fields(self):
"""Do not accept posting samples with missing mandatory fields."""
s1 = [{'counter_name': 'my_counter_name',
'counter_type': 'gauge',
'counter_unit': 'instance',
'counter_volume': 1,
'source': 'closedstack',
'resource_id': 'bd9431c1-8d69-4ad3-803a-8d4a6b89fd36',
'project_id': '35b17138-b364-4e6a-a131-8f3099c5be68',
'user_id': 'efd87807-12d2-4b38-9c70-5f5c2ac427ff',
'resource_metadata': {'name1': 'value1',
'name2': 'value2'}}]
# one by one try posting without a mandatory field.
for m in ['counter_volume', 'counter_unit', 'counter_type',
'resource_id', 'counter_name']:
s_broke = copy.copy(s1)
del s_broke[0][m]
print('posting without %s' % m)
data = self.post_json('/meters/my_counter_name', s_broke,
expect_errors=True)
self.assertEqual(400, data.status_int)
def test_multiple_project_id_and_admin(self):
"""Allow admin is allowed to set multiple project_id."""
s1 = [{'counter_name': 'my_counter_name',
'counter_type': 'gauge',
'counter_unit': 'instance',
'counter_volume': 1,
'source': 'closedstack',
'project_id': '35b17138-b364-4e6a-a131-8f3099c5be68',
'user_id': 'efd87807-12d2-4b38-9c70-5f5c2ac427ff',
'resource_id': 'bd9431c1-8d69-4ad3-803a-8d4a6b89fd36',
},
{'counter_name': 'my_counter_name',
'counter_type': 'gauge',
'counter_unit': 'instance',
'counter_volume': 2,
'source': 'closedstack',
'project_id': '4af38dca-f6fc-11e2-94f5-14dae9283f29',
'user_id': 'efd87807-12d2-4b38-9c70-5f5c2ac427ff',
'resource_id': 'bd9431c1-8d69-4ad3-803a-8d4a6b89fd36',
'resource_metadata': {'name1': 'value1',
'name2': 'value2'}}]
data = self.post_json('/meters/my_counter_name/', s1,
headers={"X-Roles": "admin"})
self.assertEqual(201, data.status_int)
for x, s in enumerate(s1):
# source is modified to include the project_id.
s['source'] = '%s:%s' % (s['project_id'],
'closedstack')
# Ignore message id that is randomly generated
s['message_id'] = data.json[x]['message_id']
# timestamp not given so it is generated.
s['timestamp'] = data.json[x]['timestamp']
s.setdefault('resource_metadata', dict())
self.assertEqual(s, data.json[x])
s['monotonic_time'] = None
self.assertEqual(s, self.published[0][x])
def test_multiple_samples_multiple_sources(self):
"""Test posting with special conditions.
Accept a single post carrying multiple sources, some of them
null.
"""
s1 = [{'counter_name': 'my_counter_name',
'counter_type': 'gauge',
'counter_unit': 'instance',
'counter_volume': 1,
'source': 'paperstack',
'project_id': '35b17138-b364-4e6a-a131-8f3099c5be68',
'user_id': 'efd87807-12d2-4b38-9c70-5f5c2ac427ff',
'resource_id': 'bd9431c1-8d69-4ad3-803a-8d4a6b89fd36',
},
{'counter_name': 'my_counter_name',
'counter_type': 'gauge',
'counter_unit': 'instance',
'counter_volume': 5,
'source': 'waterstack',
'project_id': '35b17138-b364-4e6a-a131-8f3099c5be68',
'user_id': 'efd87807-12d2-4b38-9c70-5f5c2ac427ff',
'resource_id': 'bd9431c1-8d69-4ad3-803a-8d4a6b89fd36',
},
{'counter_name': 'my_counter_name',
'counter_type': 'gauge',
'counter_unit': 'instance',
'counter_volume': 2,
'project_id': '35b17138-b364-4e6a-a131-8f3099c5be68',
'user_id': 'efd87807-12d2-4b38-9c70-5f5c2ac427ff',
'resource_id': 'bd9431c1-8d69-4ad3-803a-8d4a6b89fd36',
'resource_metadata': {'name1': 'value1',
'name2': 'value2'}}]
data = self.post_json('/meters/my_counter_name/', s1,
expect_errors=True)
self.assertEqual(201, data.status_int)
for x, s in enumerate(s1):
# source is modified to include the project_id.
s['source'] = '%s:%s' % (
s['project_id'],
s.get('source', self.CONF.sample_source)
)
# Ignore message id that is randomly generated
s['message_id'] = data.json[x]['message_id']
# timestamp not given so it is generated.
s['timestamp'] = data.json[x]['timestamp']
s.setdefault('resource_metadata', dict())
self.assertEqual(s, data.json[x])
s['monotonic_time'] = None
self.assertEqual(s, self.published[0][x])
def test_missing_project_user_id(self):
"""Ensure missing project & user IDs are defaulted appropriately."""
s1 = [{'counter_name': 'my_counter_name',
'counter_type': 'gauge',
'counter_unit': 'instance',
'counter_volume': 1,
'source': 'closedstack',
'resource_id': 'bd9431c1-8d69-4ad3-803a-8d4a6b89fd36',
'resource_metadata': {'name1': 'value1',
'name2': 'value2'}}]
project_id = 'bc23a9d531064583ace8f67dad60f6bb'
user_id = 'fd87807-12d2-4b38-9c70-5f5c2ac427ff'
data = self.post_json('/meters/my_counter_name/', s1,
expect_errors=True,
headers={
'X-Roles': 'chief-bottle-washer',
'X-Project-Id': project_id,
'X-User-Id': user_id,
})
self.assertEqual(201, data.status_int)
for x, s in enumerate(s1):
# source is modified to include the project_id.
s['source'] = '%s:%s' % (project_id,
s['source'])
# Ignore message id that is randomly generated
s['message_id'] = data.json[x]['message_id']
# timestamp not given so it is generated.
s['timestamp'] = data.json[x]['timestamp']
s['user_id'] = user_id
s['project_id'] = project_id
self.assertEqual(s, data.json[x])
s['monotonic_time'] = None
self.assertEqual(s, self.published[0][x])
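
Taken together, the tests above document the POST /v2/meters/<name> contract: timestamp, message_id, and source are generated when omitted, source is rewritten to '<project_id>:<source>', and a client-supplied message_id is rejected. A hedged client-side sketch (the requests call and the token are illustrative, not part of this suite):

import requests  # illustrative client, not part of this suite

samples = [{'counter_name': 'apples',
            'counter_type': 'gauge',
            'counter_unit': 'instance',
            'counter_volume': 1,
            'resource_id': 'bd9431c1-8d69-4ad3-803a-8d4a6b89fd36',
            'project_id': '35b17138-b364-4e6a-a131-8f3099c5be68',
            'user_id': 'efd87807-12d2-4b38-9c70-5f5c2ac427ff'}]
resp = requests.post('http://localhost:8777/v2/meters/apples',
                     json=samples,
                     headers={'X-Auth-Token': '<token>'})  # hypothetical token
# 201 with the echoed samples; source defaults to '<project_id>:openstack'
# and timestamp/message_id are filled in, as asserted in test_one.
print(resp.status_code, resp.json()[0]['source'])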

File diff suppressed because it is too large


@ -1,65 +0,0 @@
# Copyright 2014 OpenStack Foundation
#
# Licensed under the Apache License, Version 2.0 (the "License");
# you may not use this file except in compliance with the License.
# You may obtain a copy of the License at
#
# http://www.apache.org/licenses/LICENSE-2.0
#
# Unless required by applicable law or agreed to in writing, software
# distributed under the License is distributed on an "AS IS" BASIS,
# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or
# implied.
# See the License for the specific language governing permissions and
# limitations under the License.
from ceilometer.tests.functional import api
V2_MEDIA_TYPES = [
{
'base': 'application/json',
'type': 'application/vnd.openstack.telemetry-v2+json'
}, {
'base': 'application/xml',
'type': 'application/vnd.openstack.telemetry-v2+xml'
}
]
V2_HTML_DESCRIPTION = {
'href': 'http://docs.openstack.org/',
'rel': 'describedby',
'type': 'text/html',
}
V2_EXPECTED_RESPONSE = {
'id': 'v2',
'links': [
{
'rel': 'self',
'href': 'http://localhost/v2',
},
V2_HTML_DESCRIPTION
],
'media-types': V2_MEDIA_TYPES,
'status': 'stable',
'updated': '2013-02-13T00:00:00Z',
}
V2_VERSION_RESPONSE = {
"version": V2_EXPECTED_RESPONSE
}
VERSIONS_RESPONSE = {
"versions": {
"values": [
V2_EXPECTED_RESPONSE
]
}
}
class TestVersions(api.FunctionalTest):
def test_versions(self):
data = self.get_json('/')
self.assertEqual(VERSIONS_RESPONSE, data)
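Outside the functional harness, the same versions document can be fetched over HTTP; a sketch with python-requests, where the host and the 8777 port are assumptions about the deployment:
import requests  # third-party client, assumed installed
resp = requests.get("http://localhost:8777/")  # host/port assumed
versions = resp.json()["versions"]["values"]
assert versions[0]["id"] == "v2"
assert versions[0]["media-types"][0]["base"] == "application/json"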

View File

@ -1,170 +0,0 @@
#
# Copyright 2015 Red Hat. All Rights Reserved.
#
# Licensed under the Apache License, Version 2.0 (the "License"); you may
# not use this file except in compliance with the License. You may obtain
# a copy of the License at
#
# http://www.apache.org/licenses/LICENSE-2.0
#
# Unless required by applicable law or agreed to in writing, software
# distributed under the License is distributed on an "AS IS" BASIS, WITHOUT
# WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. See the
# License for the specific language governing permissions and limitations
# under the License.
"""Fixtures used during Gabbi-based test runs."""
import datetime
import os
import random
from unittest import case
import uuid
from gabbi import fixture
from oslo_config import cfg
from oslo_utils import fileutils
import six
from six.moves.urllib import parse as urlparse
from ceilometer.api import app
from ceilometer.publisher import utils
from ceilometer import sample
from ceilometer import service
from ceilometer import storage
# TODO(chdent): For now only MongoDB is supported, because of easy
# database name handling and intentional focus on the API, not the
# data store.
ENGINES = ['mongodb']
# NOTE(chdent): Hack to restore semblance of global configuration to
# pass to the WSGI app used per test suite. LOAD_APP_KWARGS are the oslo
# configuration, and the pecan application configuration of
# which the critical part is a reference to the current indexer.
LOAD_APP_KWARGS = None
def setup_app():
global LOAD_APP_KWARGS
return app.load_app(**LOAD_APP_KWARGS)
class ConfigFixture(fixture.GabbiFixture):
"""Establish the relevant configuration for a test run."""
def start_fixture(self):
"""Set up config."""
global LOAD_APP_KWARGS
self.conf = None
# Determine the database connection.
db_url = os.environ.get('PIFPAF_URL', "sqlite://").replace(
"mysql://", "mysql+pymysql://")
if not db_url:
raise case.SkipTest('No database connection configured')
engine = urlparse.urlparse(db_url).scheme
if engine not in ENGINES:
raise case.SkipTest('Database engine not supported')
self.conf = service.prepare_service([], [])
content = ('{"default": ""}')
if six.PY3:
content = content.encode('utf-8')
self.tempfile = fileutils.write_to_tempfile(content=content,
prefix='policy',
suffix='.json')
self.conf.set_override("policy_file", self.tempfile,
group='oslo_policy')
self.conf.set_override(
'api_paste_config',
os.path.abspath(
'ceilometer/tests/functional/gabbi/gabbi_paste.ini')
)
# A special pipeline is required to use the direct publisher.
self.conf.set_override(
'pipeline_cfg_file',
'ceilometer/tests/functional/gabbi/gabbi_pipeline.yaml')
database_name = '%s-%s' % (db_url, str(uuid.uuid4()))
self.conf.set_override('connection', database_name, group='database')
self.conf.set_override('metering_connection', '', group='database')
self.conf.set_override('gnocchi_is_enabled', False, group='api')
self.conf.set_override('aodh_is_enabled', False, group='api')
self.conf.set_override('panko_is_enabled', False, group='api')
LOAD_APP_KWARGS = {
'conf': self.conf,
}
def stop_fixture(self):
"""Reset the config and remove data."""
if self.conf:
storage.get_connection_from_config(self.conf).clear()
self.conf.reset()
class SampleDataFixture(fixture.GabbiFixture):
"""Instantiate some sample data for use in testing."""
def start_fixture(self):
"""Create some samples."""
global LOAD_APP_KWARGS
conf = LOAD_APP_KWARGS['conf']
self.conn = storage.get_connection_from_config(conf)
timestamp = datetime.datetime.utcnow()
project_id = str(uuid.uuid4())
self.source = str(uuid.uuid4())
resource_metadata = {'farmed_by': 'nancy'}
for name in ['cow', 'pig', 'sheep']:
resource_metadata.update({'breed': name})
c = sample.Sample(name='livestock',
type='gauge',
unit='head',
volume=int(10 * random.random()),
user_id='farmerjon',
project_id=project_id,
resource_id=project_id,
timestamp=timestamp,
resource_metadata=resource_metadata,
source=self.source)
data = utils.meter_message_from_counter(
c, conf.publisher.telemetry_secret)
self.conn.record_metering_data(data)
def stop_fixture(self):
"""Destroy the samples."""
# NOTE(chdent): print here for sake of info during testing.
# This will go away eventually.
print('resource',
self.conn.db.resource.remove({'source': self.source}))
print('meter', self.conn.db.meter.remove({'source': self.source}))
class CORSConfigFixture(fixture.GabbiFixture):
"""Inject mock configuration for the CORS middleware."""
def start_fixture(self):
# Here we monkeypatch GroupAttr.__getattr__, necessary because the
# paste.ini method of initializing this middleware creates its own
# ConfigOpts instance, bypassing the regular config fixture.
def _mock_getattr(instance, key):
if key != 'allowed_origin':
return self._original_call_method(instance, key)
return "http://valid.example.com"
self._original_call_method = cfg.ConfigOpts.GroupAttr.__getattr__
cfg.ConfigOpts.GroupAttr.__getattr__ = _mock_getattr
def stop_fixture(self):
"""Remove the monkeypatch."""
cfg.ConfigOpts.GroupAttr.__getattr__ = self._original_call_method

View File

@ -1,24 +0,0 @@
# Ceilometer API WSGI Pipeline
# Define the filters that make up the pipeline for processing WSGI requests
# Note: This pipeline is PasteDeploy's term rather than Ceilometer's pipeline
# used for processing samples
#
# This version is specific for gabbi. It removes support for keystone while
# keeping support for CORS.
# Remove authtoken from the pipeline if you don't want to use keystone authentication
[pipeline:main]
pipeline = cors api-server
[app:api-server]
paste.app_factory = ceilometer.api.app:app_factory
[filter:authtoken]
paste.filter_factory = keystonemiddleware.auth_token:filter_factory
[filter:request_id]
paste.filter_factory = oslo_middleware:RequestId.factory
[filter:cors]
paste.filter_factory = oslo_middleware.cors:filter_factory
oslo_config_project = ceilometer

View File

@ -1,19 +0,0 @@
# A limited pipeline for use with the Gabbi spike. It writes samples
# directly to the metering database without using an
# intermediary dispatcher.
#
# This is one of several things that will need some extensive
# tidying to be more right.
---
sources:
- name: meter_source
interval: 1
meters:
- "*"
sinks:
- meter_sink
sinks:
- name: meter_sink
transformers:
publishers:
- direct://
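The direct:// publisher named above bypasses the usual dispatch path; roughly what it amounts to, reusing only the storage and publisher interfaces already shown in the gabbi fixtures file (conf and the sample c are assumed to be in scope):
# Sketch of the direct:// publisher's effect; `conf` and the sample
# `c` are assumed, as in SampleDataFixture above.
from ceilometer.publisher import utils
from ceilometer import storage
conn = storage.get_connection_from_config(conf)
data = utils.meter_message_from_counter(c, conf.publisher.telemetry_secret)
conn.record_metering_data(data)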

View File

@ -1,24 +0,0 @@
#
# Some simple tests just to confirm that the system works.
#
fixtures:
- ConfigFixture
tests:
# Root gives us some information on where to go from here.
- name: quick root check
GET: /
response_headers:
content-type: application/json
response_strings:
- '"base": "application/json"'
response_json_paths:
versions.values.[0].status: stable
versions.values.[0].media-types.[0].base: application/json
# NOTE(chdent): Ideally since / has a links ref to /v2, /v2 ought not 404!
- name: v2 visit
desc: this demonstrates a bug in the info in /
GET: $RESPONSE['versions.values.[0].links.[0].href']
status: 404

View File

@ -1,13 +0,0 @@
#
# Explore the capabilities API
#
fixtures:
- ConfigFixture
tests:
- name: get capabilities
desc: retrieve capabilities for the mongo store
GET: /v2/capabilities
response_json_paths:
$.storage.['storage:production_ready']: true

View File

@ -1,102 +0,0 @@
# Post a simple sample, sir, and then retrieve it in various ways.
fixtures:
- ConfigFixture
tests:
# POST one sample and verify its existence.
- name: post sample for meter
desc: post a single sample
POST: /v2/meters/apples?direct=True
request_headers:
content-type: application/json
data: |
[
{
"counter_name": "apples",
"project_id": "35b17138-b364-4e6a-a131-8f3099c5be68",
"user_id": "efd87807-12d2-4b38-9c70-5f5c2ac427ff",
"counter_unit": "instance",
"counter_volume": 1,
"resource_id": "bd9431c1-8d69-4ad3-803a-8d4a6b89fd36",
"resource_metadata": {
"name2": "value2",
"name1": "value1"
},
"counter_type": "gauge"
}
]
response_json_paths:
$.[0].counter_name: apples
status: 201
response_headers:
content-type: application/json
# When POSTing a sample perhaps we should get back a location header
# with the URI of the posted sample
- name: post a sample expect location
desc: https://bugs.launchpad.net/ceilometer/+bug/1426426
xfail: true
POST: /v2/meters/apples?direct=True
request_headers:
content-type: application/json
data:
- counter_name: apples
project_id: 35b17138-b364-4e6a-a131-8f3099c5be68
user_id: efd87807-12d2-4b38-9c70-5f5c2ac427ff
counter_unit: instance
counter_volume: 1
resource_id: bd9431c1-8d69-4ad3-803a-8d4a6b89fd36
resource_metadata:
name2: value2
name1: value1
counter_type: gauge
response_headers:
location: /$SCHEME://$NETLOC/
# GET all the samples created for the apples meter
- name: get samples for meter
desc: get all the samples at that meter
GET: /v2/meters/apples
response_json_paths:
$.[0].counter_name: apples
$.[0].counter_volume: 1
$.[0].resource_metadata.name2: value2
# POSTing a sample to a meter will implicitly create a resource
- name: get resources
desc: get the resources that exist because of the sample
GET: /v2/resources
response_json_paths:
$.[0].metadata.name2: value2
# NOTE(chdent): We assume that the first item in links is self.
# Need to determine how to express the more correct JSONPath here
# (if possible).
- name: get resource
desc: get just one of those resources via self
GET: $RESPONSE['$[0].links[0].href']
response_json_paths:
$.metadata.name2: value2
# GET the created samples
- name: get samples
desc: get all the created samples
GET: /v2/samples
response_json_paths:
$.[0].metadata.name2: value2
$.[0].meter: apples
- name: get one sample
desc: get the one sample that exists
GET: /v2/samples/$RESPONSE['$[0].id']
response_json_paths:
$.metadata.name2: value2
$.meter: apples
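The same POST can be issued against a running API with any HTTP client; a sketch using python-requests, where the host and the 8777 port are assumptions about the deployment:
import requests  # third-party client, assumed installed
sample = [{
    "counter_name": "apples",
    "counter_type": "gauge",
    "counter_unit": "instance",
    "counter_volume": 1,
    "project_id": "35b17138-b364-4e6a-a131-8f3099c5be68",
    "user_id": "efd87807-12d2-4b38-9c70-5f5c2ac427ff",
    "resource_id": "bd9431c1-8d69-4ad3-803a-8d4a6b89fd36",
    "resource_metadata": {"name1": "value1", "name2": "value2"},
}]
# direct=True mirrors the query string used in the tests above.
resp = requests.post("http://localhost:8777/v2/meters/apples",
                     params={"direct": "True"}, json=sample)
assert resp.status_code == 201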

View File

@ -1,18 +0,0 @@
#
# Demonstrate a simple sample fixture.
#
fixtures:
- ConfigFixture
- SampleDataFixture
tests:
- name: get fixture samples
desc: get all the samples at livestock
GET: /v2/meters/livestock
response_json_paths:
$.[0].counter_name: livestock
$.[1].counter_name: livestock
$.[2].counter_name: livestock
$.[2].user_id: farmerjon
$.[0].resource_metadata.breed: cow
$.[1].resource_metadata.farmed_by: nancy

View File

@ -1,384 +0,0 @@
#
# Tests to explore and cover the /v2/meters section of the
# Ceilometer API.
#
fixtures:
- ConfigFixture
tests:
# Generic HTTP health explorations of all meters.
- name: empty meters list
GET: /v2/meters
response_headers:
content-type: /application/json/
response_strings:
- "[]"
- name: meters list bad accept
GET: /v2/meters
request_headers:
accept: text/plain
status: 406
- name: meters list bad method
POST: /v2/meters
status: 405
response_headers:
allow: GET
- name: try to delete meters
DELETE: /v2/meters
status: 405
response_headers:
allow: GET
# Generic HTTP health explorations of single meter.
- name: get non exist meter
GET: /v2/meters/noexist
response_strings:
- "[]"
- name: meter bad accept
GET: /v2/meters/noexist?direct=True
request_headers:
accept: text/plain
status: 406
- name: meter delete noexist
DELETE: /v2/meters/noexist
status: "404 || 405"
- name: post meter no data
POST: /v2/meters/apples?direct=True
request_headers:
content-type: application/json
data: ""
status: 400
- name: post meter error is JSON
POST: /v2/meters/apples?direct=True
request_headers:
content-type: application/json
data: ""
status: 400
response_headers:
content-type: /application/json/
response_json_paths:
$.error_message.faultstring: "Samples should be included in request body"
- name: post meter bad content-type
POST: /v2/meters/apples?direct=True
request_headers:
content-type: text/plain
data: hello
status: 415
- name: post bad samples to meter
POST: /v2/meters/apples?direct=True
request_headers:
content-type: application/json
data:
samples:
- red
- blue
- yellow
status: 400
# POST variations on a malformed sample
- name: post limited counter to meter
POST: /v2/meters/apples?direct=True
request_headers:
content-type: application/json
data:
- counter_unit: instance
counter_volume: 1
resource_id: bd9431c1-8d69-4ad3-803a-8d4a6b89fd36
status: 400
response_strings:
- "Invalid input for field/attribute counter_name"
- name: post mismatched counter name to meter
POST: /v2/meters/apples?direct=True
request_headers:
content-type: application/json
data:
- counter_name: cars
counter_type: gauge
counter_unit: instance
counter_volume: 1
resource_id: bd9431c1-8d69-4ad3-803a-8d4a6b89fd36
status: 400
response_strings:
- "Invalid input for field/attribute counter_name"
- "should be apples"
- name: post counter no resource to meter
POST: /v2/meters/apples?direct=True
request_headers:
content-type: application/json
data:
- counter_name: apples
counter_type: gauge
counter_unit: instance
counter_volume: 1
status: 400
response_strings:
- "Invalid input for field/attribute resource_id"
- "Mandatory field missing."
- name: post counter bad type to meter
POST: /v2/meters/apples?direct=True
request_headers:
content-type: application/json
data:
- counter_name: apples
counter_type: elevation
counter_unit: instance
counter_volume: 1
resource_id: bd9431c1-8d69-4ad3-803a-8d4a6b89fd36
status: 400
response_strings:
- "Invalid input for field/attribute counter_type."
- "The counter type must be: gauge, delta, cumulative"
# Manipulate samples
- name: post counter to meter
POST: /v2/meters/apples?direct=True
request_headers:
content-type: application/json
data:
- counter_name: apples
counter_type: gauge
counter_unit: instance
counter_volume: 1
resource_id: bd9431c1-8d69-4ad3-803a-8d4a6b89fd36
status: 201
- name: list apple samples
GET: /v2/meters/apples
response_json_paths:
$[0].counter_volume: 1.0
$[0].counter_name: apples
$[0].resource_id: bd9431c1-8d69-4ad3-803a-8d4a6b89fd36
- name: list meters
GET: /v2/meters
response_json_paths:
$[0].name: apples
$[0].resource_id: bd9431c1-8d69-4ad3-803a-8d4a6b89fd36
$[0].type: gauge
$[-1].name: apples
- name: negative limit on meter list
GET: /v2/meters/apples?limit=-5
status: 400
response_strings:
- Limit must be positive
- name: nan limit on meter list
GET: /v2/meters/apples?limit=NaN
status: 400
response_strings:
- unable to convert to int
- name: post counter to meter different resource
POST: /v2/meters/apples?direct=True
status: 201
request_headers:
content-type: application/json
data:
- counter_name: apples
counter_type: gauge
counter_unit: instance
counter_volume: 2
resource_id: aa9431c1-8d69-4ad3-803a-8d4a6b89fdaa
- name: query for resource
GET: /v2/meters/apples?q.field=resource_id&q.value=aa9431c1-8d69-4ad3-803a-8d4a6b89fdaa&q.op=eq
response_json_paths:
$[0].resource_id: aa9431c1-8d69-4ad3-803a-8d4a6b89fdaa
$[-1].resource_id: aa9431c1-8d69-4ad3-803a-8d4a6b89fdaa
# Explore posting samples with less than perfect data.
- name: post counter with bad timestamp
POST: /v2/meters/apples?direct=True
request_headers:
content-type: application/json
data:
- counter_name: apples
counter_type: gauge
counter_unit: instance
counter_volume: 3
resource_id: aa9431c1-8d69-4ad3-803a-8d4a6b89fdaa
timestamp: "2013-01-bad 23:23:20"
status: 400
response_strings:
- 'Invalid input for field/attribute samples'
- name: post counter with good timestamp
POST: /v2/meters/apples?direct=True
status: 201
request_headers:
content-type: application/json
data:
- counter_name: apples
counter_type: gauge
counter_unit: instance
counter_volume: 3
resource_id: aa9431c1-8d69-4ad3-803a-8d4a6b89fdaa
timestamp: "2013-01-01 23:23:20"
- name: post counter with wrong metadata
POST: /v2/meters/apples?direct=True
request_headers:
content-type: application/json
data:
- counter_name: apples
counter_type: gauge
counter_unit: instance
counter_volume: 3
resource_id: aa9431c1-8d69-4ad3-803a-8d4a6b89fdaa
timestamp: "2013-01-01 23:23:20"
resource_metadata: "a string"
status: 400
response_strings:
- "Invalid input for field/attribute samples"
- name: post counter with empty metadata
POST: /v2/meters/apples?direct=True
status: 201
request_headers:
content-type: application/json
data:
- counter_name: apples
counter_type: gauge
counter_unit: instance
counter_volume: 3
resource_id: aa9431c1-8d69-4ad3-803a-8d4a6b89fdaa
timestamp: "2013-01-01 23:23:20"
resource_metadata: {}
# Statistics
- name: get sample statistics
GET: /v2/meters/apples/statistics
response_json_paths:
$[0].groupby: null
$[0].unit: instance
$[0].sum: 9.0
$[0].min: 1.0
$[0].max: 3.0
$[0].count: 4
- name: get incorrectly grouped sample statistics
GET: /v2/meters/apples/statistics?groupby=house_id
status: 400
response_strings:
- Invalid groupby fields
- name: get grouped sample statistics
GET: /v2/meters/apples/statistics?groupby=resource_id
response_json_paths:
$[1].max: 3.0
$[0].max: 1.0
- name: get sample statistics bad period
GET: /v2/meters/apples/statistics?period=seven
status: 400
response_strings:
- unable to convert to int
- name: get sample statistics negative period
GET: /v2/meters/apples/statistics?period=-7
status: 400
response_strings:
- Period must be positive.
- name: get sample statistics 600 period
GET: /v2/meters/apples/statistics?period=600
response_json_paths:
$[0].period: 600
- name: get sample statistics time limit not time
GET: /v2/meters/apples/statistics?q.field=timestamp&q.op=gt&q.value=Remember%20Remember
status: 400
response_strings:
- invalid timestamp format
- name: get sample statistics time limit gt
GET: /v2/meters/apples/statistics?q.field=timestamp&q.op=gt&q.value=2014-01-01
response_json_paths:
$[0].count: 2
- name: get sample statistics time limit lt
GET: /v2/meters/apples/statistics?q.field=timestamp&q.op=lt&q.value=2014-01-01
response_json_paths:
$[0].count: 2
- name: get sample statistics time limit bounded
GET: /v2/meters/apples/statistics?q.field=timestamp&q.op=gt&q.value=2013-06-01&q.field=timestamp&q.op=lt&q.value=2014-01-01
response_strings:
- "[]"
- name: get sample statistics select aggregate bad format
GET: /v2/meters/apples/statistics?aggregate=max
status: 400
- name: get sample statistics select aggregate
GET: /v2/meters/apples/statistics?aggregate.func=max
response_json_paths:
$[0].aggregate.max: 3.0
- name: get sample statistics select aggregate multiple
GET: /v2/meters/apples/statistics?aggregate.func=max&aggregate.func=count
response_json_paths:
$[0].aggregate.max: 3.0
$[0].aggregate.count: 4
- name: get sample statistics select aggregate bad function
GET: /v2/meters/apples/statistics?aggregate.func=mmm
status: 400
response_strings:
- 'Invalid aggregation function: mmm'
- name: get sample statistics select aggregate good function and bad function
GET: /v2/meters/apples/statistics?aggregate.func=max&aggregate.func=mmm
status: 400
response_strings:
- 'Invalid aggregation function: mmm'
# limit meters results
- name: get meters unlimited
GET: /v2/meters
response_json_paths:
$.`len`: 2
- name: get meters limited
GET: /v2/meters?limit=1
response_json_paths:
$.`len`: 1
- name: get meters double limit
GET: /v2/meters?limit=1&limit=1
status: 400
- name: get meters filter limit
desc: expressing limit this way is now disallowed
GET: /v2/meters?q.field=limit&q.op=eq&q.type=&q.value=1
status: 400
response_strings:
- 'Unknown argument: \"limit\": unrecognized field in query'
- name: get meters filter limit and limit
GET: /v2/meters?q.field=limit&q.op=eq&q.type=&q.value=1&limit=1
status: 400
response_strings:
- 'Unknown argument: \"limit\": unrecognized field in query'
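The statistics queries exercised above map onto plain GET requests; a hedged sketch (host and port are assumptions about the deployment):
import requests  # host/port below are assumptions
base = "http://localhost:8777/v2/meters/apples/statistics"
# Grouped statistics, as in 'get grouped sample statistics' above.
groups = requests.get(base, params={"groupby": "resource_id"}).json()
# Multiple selected aggregates, as in 'select aggregate multiple'.
aggs = requests.get(
    base, params={"aggregate.func": ["max", "count"]}).json()
print(aggs[0]["aggregate"]["max"], aggs[0]["aggregate"]["count"])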

View File

@ -1,44 +0,0 @@
#
# Test the middlewares. Just CORS for now.
#
fixtures:
- ConfigFixture
- CORSConfigFixture
tests:
- name: valid cors options
OPTIONS: /
status: 200
request_headers:
origin: http://valid.example.com
access-control-request-method: GET
response_headers:
access-control-allow-origin: http://valid.example.com
- name: invalid cors options
OPTIONS: /
status: 200
request_headers:
origin: http://invalid.example.com
access-control-request-method: GET
response_forbidden_headers:
- access-control-allow-origin
- name: valid cors get
GET: /
status: 200
request_headers:
origin: http://valid.example.com
access-control-request-method: GET
response_headers:
access-control-allow-origin: http://valid.example.com
- name: invalid cors get
GET: /
status: 200
request_headers:
origin: http://invalid.example.com
response_forbidden_headers:
- access-control-allow-origin
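The same CORS checks can be reproduced outside gabbi; a sketch with python-requests (host and port are assumptions):
import requests  # host/port below are assumptions
resp = requests.options(
    "http://localhost:8777/",
    headers={"Origin": "http://valid.example.com",
             "Access-Control-Request-Method": "GET"})
# For an allowed origin the middleware echoes it back; for a
# disallowed one the header is absent, as the tests above assert.
print(resp.headers.get("access-control-allow-origin"))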

View File

@ -1,59 +0,0 @@
#
# Explore and cover resources API with gabbi tests when there are no
# resources.
#
fixtures:
- ConfigFixture
tests:
# Check for a list of resources, modifying the request in various
# ways.
- name: list resources no extra
desc: Provide no additional header guidelines
GET: /v2/resources
response_headers:
content-type: /application/json/
response_strings:
- "[]"
- name: list resources but get url wrong
GET: /v2/resrces
status: 404
- name: list resources explicit accept
GET: /v2/resources
request_headers:
accept: application/json
response_strings:
- "[]"
- name: list resources bad accept
GET: /v2/resources
request_headers:
accept: text/plain
status: 406
- name: list resources with bad query field
GET: /v2/resources?q.field=id&q.value=cars
status: 400
response_strings:
- unrecognized field in query
- name: list resources with query
GET: /v2/resources?q.field=resource&q.value=cars
response_strings:
- "[]"
- name: list resource bad type meter links
GET: /v2/resources?meter_links=yes%20please
status: 400
response_strings:
- unable to convert to int
- name: list resource meter links int
GET: /v2/resources?meter_links=0
response_strings:
- "[]"

View File

@ -1,86 +0,0 @@
#
# Explore and cover resources API with gabbi tests when there are a
# small number of pre-existing resources
#
fixtures:
- ConfigFixture
- SampleDataFixture
tests:
- name: list all resources
GET: /v2/resources
response_json_paths:
$[0].user_id: farmerjon
$[0].links[1].rel: livestock
- name: get one resource
desc: get a resource via the links in the first resource listed above
GET: $RESPONSE['$[0].links[0].href']
response_json_paths:
$.resource_id: $RESPONSE['$[0].resource_id']
- name: list resources limit user_id
GET: /v2/resources?q.field=user_id&q.value=farmerjon
response_json_paths:
$[0].user_id: farmerjon
$[0].links[1].rel: livestock
- name: list resources limit metadata
GET: /v2/resources?q.field=metadata.breed&q.value=sheep
response_json_paths:
$[0].user_id: farmerjon
$[0].links[1].rel: livestock
- name: list resources limit metadata no match
GET: /v2/resources?q.field=metadata.breed&q.value=llamma
response_strings:
- "[]"
- name: fail to get one resource
GET: /v2/resources/nosirnothere
status: 404
- name: list resource meter links present
GET: /v2/resources?meter_links=1
response_json_paths:
$[0].links[0].rel: self
$[0].links[1].rel: livestock
$[0].links[-1].rel: livestock
- name: list resource meter links not present
GET: /v2/resources?meter_links=0
desc: there is only one links entry when meter_links is 0
response_json_paths:
$[0].links[0].rel: self
$[0].links[-1].rel: self
# limit resource results
- name: get resources unlimited
GET: /v2/resources
response_json_paths:
$.`len`: 1
- name: get resources limited
GET: /v2/resources?limit=1
response_json_paths:
$.`len`: 1
- name: get resources double limit
GET: /v2/resources?limit=1&limit=1
status: 400
- name: get resources filter limit
desc: expressing limit this way is now disallowed
GET: /v2/resources?q.field=limit&q.op=eq&q.type=&q.value=1
status: 400
response_strings:
- 'Unknown argument: \"limit\": unrecognized field in query'
- name: get resources filter limit and limit
GET: /v2/resources?q.field=limit&q.op=eq&q.type=&q.value=1&limit=1
status: 400
response_strings:
- 'Unknown argument: \"limit\": unrecognized field in query'

View File

@ -1,154 +0,0 @@
#
# Explore and test the samples controller, using samples supplied by
# the SampleDataFixture.
#
fixtures:
- ConfigFixture
- SampleDataFixture
tests:
# Confirm all the samples are there and expected requests behave.
# TODO(chdent): There's a danger here that the ordering of multiple
# samples will not be consistent.
- name: lists samples
GET: /v2/samples
response_headers:
content-type: /application/json/
response_json_paths:
$[0].meter: livestock
$[0].metadata.breed: cow
$[1].metadata.breed: pig
$[2].metadata.breed: sheep
- name: get just one
GET: /v2/samples/$RESPONSE['$[0].id']
response_json_paths:
$.meter: livestock
$.metadata.breed: cow
- name: list samples with limit
GET: /v2/samples?limit=1
response_json_paths:
$[0].meter: livestock
$[0].metadata.breed: cow
$[-1].metadata.breed: cow
- name: list zero samples with zero limit
GET: /v2/samples?limit=0
status: 400
- name: list samples with query
GET: /v2/samples?q.field=resource_metadata.breed&q.value=cow&q.op=eq
response_json_paths:
$[0].meter: livestock
$[0].metadata.breed: cow
$[-1].metadata.breed: cow
- name: query by user
GET: /v2/samples?q.field=user&q.value=$RESPONSE['$[0].user_id']&q.op=eq
response_json_paths:
$[0].user_id: $RESPONSE['$[0].user_id']
- name: query by user_id
GET: /v2/samples?q.field=user_id&q.value=$RESPONSE['$[0].user_id']&q.op=eq
response_json_paths:
$[0].user_id: $RESPONSE['$[0].user_id']
- name: query by project
GET: /v2/samples?q.field=project&q.value=$RESPONSE['$[0].project_id']&q.op=eq
response_json_paths:
$[0].project_id: $RESPONSE['$[0].project_id']
- name: query by project_id
GET: /v2/samples?q.field=project_id&q.value=$RESPONSE['$[0].project_id']&q.op=eq
response_json_paths:
$[0].project_id: $RESPONSE['$[0].project_id']
# Explore failure modes for listing samples
- name: list samples with bad field
GET: /v2/samples?q.field=harpoon&q.value=cow&q.op=eq
status: 400
response_strings:
- timestamp
- project
- unrecognized field in query
- name: list samples with bad metaquery field
GET: /v2/samples?q.field=metaquery&q.value=cow&q.op=eq
status: 400
response_strings:
- unrecognized field in query
- name: bad limit value
GET: /v2/samples?limit=happiness
status: 400
response_strings:
- Invalid input for field/attribute limit
- name: negative limit value 400
GET: /v2/samples?limit=-99
status: 400
- name: negative limit value error message
GET: /v2/samples?limit=-99
status: 400
response_headers:
content-type: /application/json/
response_json_paths:
$.error_message.faultstring: Limit must be positive
- name: bad accept
desc: try an unexpected content type
GET: /v2/samples
request_headers:
accept: text/plain
status: 406
- name: complex good accept
desc: client sends complex accept do we adapt
GET: /v2/samples
request_headers:
accept: text/plain, application/json; q=0.8
- name: complex bad accept
desc: client sends complex accept do we adapt
GET: /v2/samples
request_headers:
accept: text/plain, application/binary; q=0.8
status: 406
- name: bad method
POST: /v2/samples
status: 405
response_headers:
allow: GET
# Work with just one sample.
- name: list one of the samples
GET: /v2/samples?limit=1
- name: retrieve one sample
GET: /v2/samples/$RESPONSE['$[0].id']
response_headers:
content-type: /application/json/
response_json_paths:
$.meter: livestock
- name: retrieve sample with useless query
GET: /v2/samples/$RESPONSE['$.id']?limit=5
status: 400
response_strings:
- "Unknown argument:"
- name: attempt missing sample
GET: /v2/samples/davesnothere
status: 404
response_headers:
content-type: /application/json/
response_json_paths:
$.error_message.faultstring: Sample davesnothere Not Found

View File

@ -1,20 +0,0 @@
#
# Confirm root reports the right data including a prefixed URL
#
fixtures:
- ConfigFixture
tests:
# Root gives us some information on where to go from here.
- name: quick root check
GET: /
response_headers:
content-type: application/json
response_strings:
- '"base": "application/json"'
response_json_paths:
versions.values.[0].status: stable
versions.values.[0].media-types.[0].base: application/json
response_strings:
- /telemetry/

View File

@ -1,50 +0,0 @@
# Post a simple sample and confirm the created resource has
# reasonable URLs
fixtures:
- ConfigFixture
tests:
# POST one sample and verify its existence.
- name: post sample for meter
desc: post a single sample
POST: /v2/meters/apples?direct=True
request_headers:
content-type: application/json
data: |
[
{
"counter_name": "apples",
"project_id": "35b17138-b364-4e6a-a131-8f3099c5be68",
"user_id": "efd87807-12d2-4b38-9c70-5f5c2ac427ff",
"counter_unit": "instance",
"counter_volume": 1,
"resource_id": "bd9431c1-8d69-4ad3-803a-8d4a6b89fd36",
"resource_metadata": {
"name2": "value2",
"name1": "value1"
},
"counter_type": "gauge"
}
]
response_json_paths:
$.[0].counter_name: apples
status: 201
response_headers:
content-type: application/json
- name: get resources
desc: get the resources that exist because of the sample
GET: /v2/resources
response_json_paths:
$.[0].metadata.name2: value2
- name: get resource
desc: get just one of those resources via self
GET: $RESPONSE['$[0].links[0].href']
response_json_paths:
$.metadata.name2: value2
response_strings:
- /telemetry/

View File

@ -1,24 +0,0 @@
#
# Explore and cover resources API with gabbi tests when there are a
# small number of pre-existing resources
#
fixtures:
- ConfigFixture
- SampleDataFixture
tests:
- name: list all resources
GET: /v2/resources
response_json_paths:
$[0].user_id: farmerjon
$[0].links[1].rel: livestock
response_strings:
- /telemetry/
- name: get one resource
desc: get a resource via the links in the first resource listed above
GET: $RESPONSE['$[0].links[0].href']
response_json_paths:
$.resource_id: $RESPONSE['$[0].resource_id']

View File

@ -1,35 +0,0 @@
#
# Copyright 2015 Red Hat. All Rights Reserved.
#
# Licensed under the Apache License, Version 2.0 (the "License"); you may
# not use this file except in compliance with the License. You may obtain
# a copy of the License at
#
# http://www.apache.org/licenses/LICENSE-2.0
#
# Unless required by applicable law or agreed to in writing, software
# distributed under the License is distributed on an "AS IS" BASIS, WITHOUT
# WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. See the
# License for the specific language governing permissions and limitations
# under the License.
"""A test module to exercise the Ceilometer API with gabbi
For the sake of exploratory development.
"""
import os
from gabbi import driver
from ceilometer.tests.functional.gabbi import fixtures as fixture_module
TESTS_DIR = 'gabbits'
def load_tests(loader, tests, pattern):
"""Provide a TestSuite to the discovery process."""
test_dir = os.path.join(os.path.dirname(__file__), TESTS_DIR)
return driver.build_tests(test_dir, loader, host=None,
intercept=fixture_module.setup_app,
fixture_module=fixture_module)

View File

@ -1,33 +0,0 @@
#
# Copyright 2015 Red Hat. All Rights Reserved.
#
# Licensed under the Apache License, Version 2.0 (the "License"); you may
# not use this file except in compliance with the License. You may obtain
# a copy of the License at
#
# http://www.apache.org/licenses/LICENSE-2.0
#
# Unless required by applicable law or agreed to in writing, software
# distributed under the License is distributed on an "AS IS" BASIS, WITHOUT
# WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. See the
# License for the specific language governing permissions and limitations
# under the License.
"""A module to exercise the Ceilometer API with gabbi with a URL prefix"""
import os
from gabbi import driver
from ceilometer.tests.functional.gabbi import fixtures as fixture_module
TESTS_DIR = 'gabbits_prefix'
def load_tests(loader, tests, pattern):
"""Provide a TestSuite to the discovery process."""
test_dir = os.path.join(os.path.dirname(__file__), TESTS_DIR)
return driver.build_tests(test_dir, loader, host=None,
prefix='/telemetry',
intercept=fixture_module.setup_app,
fixture_module=fixture_module)

View File

@ -1,145 +0,0 @@
# Licensed under the Apache License, Version 2.0 (the "License"); you may
# not use this file except in compliance with the License. You may obtain
# a copy of the License at
#
# http://www.apache.org/licenses/LICENSE-2.0
#
# Unless required by applicable law or agreed to in writing, software
# distributed under the License is distributed on an "AS IS" BASIS, WITHOUT
# WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. See the
# License for the specific language governing permissions and limitations
# under the License.
import time
from oslo_utils import timeutils
from tempest.common import compute
from tempest import config
from tempest.lib.common.utils import data_utils
from tempest.lib import exceptions as lib_exc
import tempest.test
from ceilometer.tests.tempest.service import client
CONF = config.CONF
class ClientManager(client.Manager):
load_clients = [
'servers_client',
'compute_networks_client',
'compute_floating_ips_client',
'flavors_client',
'image_client_v2',
'telemetry_client',
]
class BaseTelemetryTest(tempest.test.BaseTestCase):
"""Base test case class for all Telemetry API tests."""
credentials = ['primary']
client_manager = ClientManager
@classmethod
def skip_checks(cls):
super(BaseTelemetryTest, cls).skip_checks()
if (not CONF.service_available.ceilometer or
not CONF.telemetry.deprecated_api_enabled):
raise cls.skipException("Ceilometer API support is required")
@classmethod
def setup_credentials(cls):
cls.set_network_resources()
super(BaseTelemetryTest, cls).setup_credentials()
@classmethod
def setup_clients(cls):
super(BaseTelemetryTest, cls).setup_clients()
cls.telemetry_client = cls.os_primary.telemetry_client
cls.servers_client = cls.os_primary.servers_client
cls.flavors_client = cls.os_primary.flavors_client
cls.image_client_v2 = cls.os_primary.image_client_v2
@classmethod
def resource_setup(cls):
super(BaseTelemetryTest, cls).resource_setup()
cls.nova_notifications = ['memory', 'vcpus', 'disk.root.size',
'disk.ephemeral.size']
cls.glance_v2_notifications = ['image.download', 'image.serve']
cls.server_ids = []
cls.image_ids = []
@classmethod
def create_server(cls):
tenant_network = cls.get_tenant_network()
body, server = compute.create_test_server(
cls.os_primary,
tenant_network=tenant_network,
name=data_utils.rand_name('ceilometer-instance'),
wait_until='ACTIVE')
cls.server_ids.append(body['id'])
return body
@classmethod
def create_image(cls, client, **kwargs):
body = client.create_image(name=data_utils.rand_name('image'),
container_format='bare',
disk_format='raw',
**kwargs)
# TODO(jswarren) Move ['image'] up to initial body value assignment
# once both v1 and v2 glance clients include the full response
# object.
if 'image' in body:
body = body['image']
cls.image_ids.append(body['id'])
return body
@staticmethod
def cleanup_resources(method, list_of_ids):
for resource_id in list_of_ids:
try:
method(resource_id)
except lib_exc.NotFound:
pass
@classmethod
def resource_cleanup(cls):
cls.cleanup_resources(cls.servers_client.delete_server, cls.server_ids)
cls.cleanup_resources(cls.image_client_v2.delete_image, cls.image_ids)
super(BaseTelemetryTest, cls).resource_cleanup()
def await_samples(self, metric, query):
"""This method is to wait for sample to add it to database.
There are long time delays when using Postgresql (or Mysql)
database as ceilometer backend
"""
timeout = CONF.compute.build_timeout
start = timeutils.utcnow()
while timeutils.delta_seconds(start, timeutils.utcnow()) < timeout:
body = self.telemetry_client.list_samples(metric, query)
if body:
return body
time.sleep(CONF.compute.build_interval)
raise lib_exc.TimeoutException(
'Sample for metric:%s with query:%s has not been added to the '
'database within %d seconds' % (metric, query,
CONF.compute.build_timeout))
class BaseTelemetryAdminTest(BaseTelemetryTest):
"""Base test case class for admin Telemetry API tests."""
credentials = ['primary', 'admin']
@classmethod
def setup_clients(cls):
super(BaseTelemetryAdminTest, cls).setup_clients()
cls.telemetry_admin_client = cls.os_admin.telemetry_client

View File

@ -1,87 +0,0 @@
# Licensed under the Apache License, Version 2.0 (the "License"); you may
# not use this file except in compliance with the License. You may obtain
# a copy of the License at
#
# http://www.apache.org/licenses/LICENSE-2.0
#
# Unless required by applicable law or agreed to in writing, software
# distributed under the License is distributed on an "AS IS" BASIS, WITHOUT
# WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. See the
# License for the specific language governing permissions and limitations
# under the License.
# Change-Id: I14e16a1a7d9813b324ee40545c07f0e88fb637b7
import six
import testtools
from ceilometer.tests.tempest.api import base
from tempest.common import utils
from tempest import config
from tempest.lib.common.utils import data_utils
from tempest.lib import decorators
CONF = config.CONF
class TelemetryNotificationAPITest(base.BaseTelemetryTest):
@classmethod
def skip_checks(cls):
super(TelemetryNotificationAPITest, cls).skip_checks()
if ("gnocchi" in CONF.service_available and
CONF.service_available.gnocchi):
skip_msg = ("%s skipped as gnocchi is enabled" %
cls.__name__)
raise cls.skipException(skip_msg)
@decorators.idempotent_id('d7f8c1c8-d470-4731-8604-315d3956caae')
@utils.services('compute')
def test_check_nova_notification(self):
body = self.create_server()
query = ('resource', 'eq', body['id'])
for metric in self.nova_notifications:
self.await_samples(metric, query)
@decorators.idempotent_id('c240457d-d943-439b-8aea-85e26d64fe8f')
@utils.services("image")
@testtools.skipIf(not CONF.image_feature_enabled.api_v2,
"Glance api v2 is disabled")
def test_check_glance_v2_notifications(self):
body = self.create_image(self.image_client_v2, visibility='private')
file_content = data_utils.random_bytes()
image_file = six.BytesIO(file_content)
self.image_client_v2.store_image_file(body['id'], image_file)
self.image_client_v2.show_image_file(body['id'])
query = 'resource', 'eq', body['id']
for metric in self.glance_v2_notifications:
self.await_samples(metric, query)
class TelemetryNotificationAdminAPITest(base.BaseTelemetryAdminTest):
@classmethod
def skip_checks(cls):
super(TelemetryNotificationAdminAPITest, cls).skip_checks()
if ("gnocchi" in CONF.service_available and
CONF.service_available.gnocchi):
skip_msg = ("%s skipped as gnocchi is enabled" %
cls.__name__)
raise cls.skipException(skip_msg)
@decorators.idempotent_id('29604198-8b45-4fc0-8af8-1cae4f94ebea')
@utils.services('compute')
def test_check_nova_notification_event_and_meter(self):
body = self.create_server()
query = ('resource', 'eq', body['id'])
for metric in self.nova_notifications:
self.await_samples(metric, query)

View File

@ -25,20 +25,6 @@ telemetry_group = cfg.OptGroup(name='telemetry',
title='Telemetry Service Options')
TelemetryGroup = [
cfg.StrOpt('catalog_type',
default='metering',
help="Catalog type of the Telemetry service."),
cfg.StrOpt('endpoint_type',
default='publicURL',
choices=['public', 'admin', 'internal',
'publicURL', 'adminURL', 'internalURL'],
help="The endpoint type to use for the telemetry service."),
cfg.BoolOpt('event_enabled',
default=True,
help="Runs Ceilometer event-related tests"),
cfg.BoolOpt('deprecated_api_enabled',
default=True,
help="Runs Ceilometer deprecated API tests"),
cfg.IntOpt('notification_wait',
default=120,
help="The seconds to wait for notifications which "

View File

@ -1,146 +0,0 @@
# Copyright 2014 Red Hat
#
# All Rights Reserved.
#
# Licensed under the Apache License, Version 2.0 (the "License"); you may
# not use this file except in compliance with the License. You may obtain
# a copy of the License at
#
# http://www.apache.org/licenses/LICENSE-2.0
#
# Unless required by applicable law or agreed to in writing, software
# distributed under the License is distributed on an "AS IS" BASIS, WITHOUT
# WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. See the
# License for the specific language governing permissions and limitations
# under the License.
from oslo_log import log as logging
from tempest.common import utils
from tempest import config
from tempest.lib.common.utils import data_utils
from tempest.lib.common.utils import test_utils
from tempest.lib import decorators
from tempest import test
from ceilometer.tests.tempest.service import client
CONF = config.CONF
LOG = logging.getLogger(__name__)
class ClientManager(client.Manager):
load_clients = [
'telemetry_client',
'container_client',
'object_client',
]
class TestObjectStorageTelemetry(test.BaseTestCase):
"""Test that swift uses the ceilometer middleware.
* create container.
* upload a file to the created container.
* retrieve the file from the created container.
* wait for notifications from ceilometer.
"""
credentials = ['primary']
client_manager = ClientManager
@classmethod
def skip_checks(cls):
super(TestObjectStorageTelemetry, cls).skip_checks()
if ("gnocchi" in CONF.service_available and
CONF.service_available.gnocchi):
skip_msg = ("%s skipped as gnocchi is enabled" %
cls.__name__)
raise cls.skipException(skip_msg)
if not CONF.service_available.swift:
skip_msg = ("%s skipped as swift is not available" %
cls.__name__)
raise cls.skipException(skip_msg)
if not CONF.service_available.ceilometer:
skip_msg = ("%s skipped as ceilometer is not available" %
cls.__name__)
raise cls.skipException(skip_msg)
@classmethod
def setup_credentials(cls):
cls.set_network_resources()
super(TestObjectStorageTelemetry, cls).setup_credentials()
@classmethod
def setup_clients(cls):
super(TestObjectStorageTelemetry, cls).setup_clients()
cls.telemetry_client = cls.os_primary.telemetry_client
cls.container_client = cls.os_primary.container_client
cls.object_client = cls.os_primary.object_client
def _confirm_notifications(self, container_name, obj_name):
# NOTE: Loop until the expected notifications about the containers
# and objects sent to swift arrive.
def _check_samples():
# NOTE: Return True only if we have notifications about some
# containers and some objects and the notifications are about
# the expected containers and objects.
# Otherwise returning False will cause _check_samples to be
# called again.
results = self.telemetry_client.list_samples(
'storage.objects.incoming.bytes')
LOG.debug('got samples %s', results)
# Extract container info from samples.
containers, objects = [], []
for sample in results:
meta = sample['resource_metadata']
if meta.get('container') and meta['container'] != 'None':
containers.append(meta['container'])
elif (meta.get('target.metadata:container') and
meta['target.metadata:container'] != 'None'):
containers.append(meta['target.metadata:container'])
if meta.get('object') and meta['object'] != 'None':
objects.append(meta['object'])
elif (meta.get('target.metadata:object') and
meta['target.metadata:object'] != 'None'):
objects.append(meta['target.metadata:object'])
return (container_name in containers and obj_name in objects)
self.assertTrue(
test_utils.call_until_true(_check_samples,
CONF.telemetry.notification_wait,
CONF.telemetry.notification_sleep),
'Correct notifications were not received after '
'%s seconds.' % CONF.telemetry.notification_wait)
def create_container(self):
name = data_utils.rand_name('swift-scenario-container')
self.container_client.create_container(name)
# look for the container to assure it is created
self.container_client.list_container_objects(name)
LOG.debug('Container %s created', name)
self.addCleanup(self.container_client.delete_container,
name)
return name
def upload_object_to_container(self, container_name):
obj_name = data_utils.rand_name('swift-scenario-object')
obj_data = data_utils.arbitrary_string()
self.object_client.create_object(container_name, obj_name, obj_data)
self.addCleanup(self.object_client.delete_object,
container_name,
obj_name)
return obj_name
@decorators.idempotent_id('6d6b88e5-3e38-41bc-b34a-79f713a6cb85')
@utils.services('object_storage')
def test_swift_middleware_notifies(self):
container_name = self.create_container()
obj_name = self.upload_object_to_container(container_name)
self._confirm_notifications(container_name, obj_name)

View File

@ -1,110 +0,0 @@
# Copyright 2014 OpenStack Foundation
# All Rights Reserved.
#
# Licensed under the Apache License, Version 2.0 (the "License"); you may
# not use this file except in compliance with the License. You may obtain
# a copy of the License at
#
# http://www.apache.org/licenses/LICENSE-2.0
#
# Unless required by applicable law or agreed to in writing, software
# distributed under the License is distributed on an "AS IS" BASIS, WITHOUT
# WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. See the
# License for the specific language governing permissions and limitations
# under the License.
from oslo_serialization import jsonutils as json
from six.moves.urllib import parse as urllib
from tempest import clients
from tempest import config
from tempest.lib.common import rest_client
CONF = config.CONF
class TelemetryClient(rest_client.RestClient):
version = '2'
uri_prefix = "v2"
def deserialize(self, body):
return json.loads(body.replace("\n", ""))
def serialize(self, body):
return json.dumps(body)
def create_sample(self, meter_name, sample_list):
uri = "%s/meters/%s" % (self.uri_prefix, meter_name)
body = self.serialize(sample_list)
resp, body = self.post(uri, body)
self.expected_success(200, resp.status)
body = self.deserialize(body)
return rest_client.ResponseBody(resp, body)
def _helper_list(self, uri, query=None, period=None):
uri_dict = {}
if query:
uri_dict = {'q.field': query[0],
'q.op': query[1],
'q.value': query[2]}
if period:
uri_dict['period'] = period
if uri_dict:
uri += "?%s" % urllib.urlencode(uri_dict)
resp, body = self.get(uri)
self.expected_success(200, resp.status)
body = self.deserialize(body)
return rest_client.ResponseBodyList(resp, body)
def list_resources(self, query=None):
uri = '%s/resources' % self.uri_prefix
return self._helper_list(uri, query)
def list_meters(self, query=None):
uri = '%s/meters' % self.uri_prefix
return self._helper_list(uri, query)
def list_statistics(self, meter, period=None, query=None):
uri = "%s/meters/%s/statistics" % (self.uri_prefix, meter)
return self._helper_list(uri, query, period)
def list_samples(self, meter_id, query=None):
uri = '%s/meters/%s' % (self.uri_prefix, meter_id)
return self._helper_list(uri, query)
def show_resource(self, resource_id):
uri = '%s/resources/%s' % (self.uri_prefix, resource_id)
resp, body = self.get(uri)
self.expected_success(200, resp.status)
body = self.deserialize(body)
return rest_client.ResponseBody(resp, body)
class Manager(clients.Manager):
default_params = config.service_client_config()
telemetry_params = {
'service': CONF.telemetry.catalog_type,
'region': CONF.identity.region,
'endpoint_type': CONF.telemetry.endpoint_type,
}
telemetry_params.update(default_params)
def __init__(self, credentials):
# TODO(andreaf) Overriding Manager is a workaround. The "proper" way
# would be to expose the ceilometer service client via the plugin
# interface, use tempest.lib.clients and tempest master.
# Then ceilometer service client would be loaded and configured
# automatically into ServiceClients.
# In any case we're about to declare clients.Manager a stable
# interface for plugins and we won't change it, so this code won't
# break.
super(Manager, self).__init__(credentials=credentials)
self.set_telemetry_client()
def set_telemetry_client(self):
self.telemetry_client = TelemetryClient(self.auth_provider,
**self.telemetry_params)
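A short usage sketch of the client defined above, as a test might drive it; creds is assumed to come from tempest's credential machinery, and cpu_util is just an example meter name:
# Usage sketch; `creds` and the meter name are assumptions.
manager = Manager(credentials=creds)
client = manager.telemetry_client
meters = client.list_meters()
samples = client.list_samples(
    'cpu_util',
    query=('resource', 'eq', 'bd9431c1-8d69-4ad3-803a-8d4a6b89fd36'))
stats = client.list_statistics('cpu_util', period=600)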

View File

@ -1,35 +0,0 @@
# Copyright 2014 IBM Corp.
# All Rights Reserved.
#
# Licensed under the Apache License, Version 2.0 (the "License"); you may
# not use this file except in compliance with the License. You may obtain
# a copy of the License at
#
# http://www.apache.org/licenses/LICENSE-2.0
#
# Unless required by applicable law or agreed to in writing, software
# distributed under the License is distributed on an "AS IS" BASIS, WITHOUT
# WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. See the
# License for the specific language governing permissions and limitations
# under the License.
import mock
from oslo_config import cfg
from ceilometer.api import app
from ceilometer import service
from ceilometer.tests import base
class TestApp(base.BaseTestCase):
def setUp(self):
super(TestApp, self).setUp()
self.CONF = service.prepare_service([], [])
def test_api_paste_file_not_exist(self):
self.CONF.set_override('api_paste_config', 'non-existent-file')
with mock.patch.object(self.CONF, 'find_file') as ff:
ff.return_value = None
self.assertRaises(cfg.ConfigFilesNotFoundError, app.load_app,
self.CONF)

Some files were not shown because too many files have changed in this diff