Initial commit for MagnetoDB client

Implements: bp magnetodb-python-client
Andrei V. Ostapenko 2014-04-24 18:59:37 +03:00
commit c014b7938e
42 changed files with 4166 additions and 0 deletions

7
.coveragerc Normal file

@@ -0,0 +1,7 @@
[run]
branch = True
source = magnetodbclient
omit = magnetodbclient/openstack/*,magnetodbclient/tests/*
[report]
ignore-errors = True

21
.gitignore vendored Normal file

@@ -0,0 +1,21 @@
*.pyc
*.DS_Store
*.egg
*.sw?
AUTHORS
ChangeLog
build/*
build-stamp
cover/*
doc/build/
doc/source/api/
python_magnetodbclient.egg-info/*
magnetodb/vcsversion.py
magnetodbclient/versioninfo
run_tests.err.log
run_tests.log
.autogenerated
.coverage
.testrepository/
.tox/
.venv/

4
.gitreview Normal file

@@ -0,0 +1,4 @@
[gerrit]
host=review.openstack.org
port=29418
project=openstack/python-magnetodbclient.git

42
.pylintrc Normal file

@@ -0,0 +1,42 @@
# The format of this file isn't really documented; just use --generate-rcfile
[MASTER]
# Add <file or directory> to the black list. It should be a base name, not a
# path. You may set this option multiple times.
ignore=test
[Messages Control]
# NOTE(justinsb): We might want to have a 2nd strict pylintrc in future
# C0111: Don't require docstrings on every method
# W0511: TODOs in code comments are fine.
# W0142: *args and **kwargs are fine.
# W0622: Redefining id is fine.
disable=C0111,W0511,W0142,W0622
[Basic]
# Variable names can be 1 to 31 characters long, with lowercase and underscores
variable-rgx=[a-z_][a-z0-9_]{0,30}$
# Argument names can be 2 to 31 characters long, with lowercase and underscores
argument-rgx=[a-z_][a-z0-9_]{1,30}$
# Method names should be at least 3 characters long
# and be lowercased with underscores
method-rgx=([a-z_][a-z0-9_]{2,50}|setUp|tearDown)$
# Module names matching quantum-* are ok (files in bin/)
module-rgx=(([a-z_][a-z0-9_]*)|([A-Z][a-zA-Z0-9]+)|(quantum-[a-z0-9_-]+))$
# Don't require docstrings on tests.
no-docstring-rgx=((__.*__)|([tT]est.*)|setUp|tearDown)$
[Design]
max-public-methods=100
min-public-methods=0
max-args=6
[Variables]
# List of additional names supposed to be defined in builtins. Remember that
# you should avoid to define new builtins when possible.
# _ is used by our localization
additional-builtins=_

4
.testr.conf Normal file

@@ -0,0 +1,4 @@
[DEFAULT]
test_command=OS_STDOUT_CAPTURE=1 OS_STDERR_CAPTURE=1 ${PYTHON:-python} -m subunit.run discover -t ./ . $LISTOPT $IDOPTION
test_id_option=--load-list $IDFILE
test_list_option=--list

26
HACKING.rst Normal file

@@ -0,0 +1,26 @@
MagnetoDB Style Commandments
================================
- Step 1: Read the OpenStack Style Commandments
http://docs.openstack.org/developer/hacking/
- Step 2: Read on
Running Tests
-------------
The testing system is based on a combination of tox and testr. The canonical
approach to running tests is to simply run the command `tox`. This will
create virtual environments, populate them with dependencies and run all of
the tests that OpenStack CI systems run. Behind the scenes, tox is running
`testr run --parallel`, but it is set up so that you can pass any additional
testr arguments to tox. For example, you can run:
`tox -- --analyze-isolation` to cause tox to tell testr to add
--analyze-isolation to its argument list.
It is also possible to run the tests inside of a virtual environment
you have created, or it is possible that you have all of the dependencies
installed locally already. In this case, you can interact with the testr
command directly. Running `testr run` will run the entire test suite. `testr
run --parallel` will run it in parallel (this is the default incantation tox
uses.) More information about testr can be found at:
http://wiki.openstack.org/testr

176
LICENSE Normal file

@@ -0,0 +1,176 @@
Apache License
Version 2.0, January 2004
http://www.apache.org/licenses/
TERMS AND CONDITIONS FOR USE, REPRODUCTION, AND DISTRIBUTION
1. Definitions.
"License" shall mean the terms and conditions for use, reproduction,
and distribution as defined by Sections 1 through 9 of this document.
"Licensor" shall mean the copyright owner or entity authorized by
the copyright owner that is granting the License.
"Legal Entity" shall mean the union of the acting entity and all
other entities that control, are controlled by, or are under common
control with that entity. For the purposes of this definition,
"control" means (i) the power, direct or indirect, to cause the
direction or management of such entity, whether by contract or
otherwise, or (ii) ownership of fifty percent (50%) or more of the
outstanding shares, or (iii) beneficial ownership of such entity.
"You" (or "Your") shall mean an individual or Legal Entity
exercising permissions granted by this License.
"Source" form shall mean the preferred form for making modifications,
including but not limited to software source code, documentation
source, and configuration files.
"Object" form shall mean any form resulting from mechanical
transformation or translation of a Source form, including but
not limited to compiled object code, generated documentation,
and conversions to other media types.
"Work" shall mean the work of authorship, whether in Source or
Object form, made available under the License, as indicated by a
copyright notice that is included in or attached to the work
(an example is provided in the Appendix below).
"Derivative Works" shall mean any work, whether in Source or Object
form, that is based on (or derived from) the Work and for which the
editorial revisions, annotations, elaborations, or other modifications
represent, as a whole, an original work of authorship. For the purposes
of this License, Derivative Works shall not include works that remain
separable from, or merely link (or bind by name) to the interfaces of,
the Work and Derivative Works thereof.
"Contribution" shall mean any work of authorship, including
the original version of the Work and any modifications or additions
to that Work or Derivative Works thereof, that is intentionally
submitted to Licensor for inclusion in the Work by the copyright owner
or by an individual or Legal Entity authorized to submit on behalf of
the copyright owner. For the purposes of this definition, "submitted"
means any form of electronic, verbal, or written communication sent
to the Licensor or its representatives, including but not limited to
communication on electronic mailing lists, source code control systems,
and issue tracking systems that are managed by, or on behalf of, the
Licensor for the purpose of discussing and improving the Work, but
excluding communication that is conspicuously marked or otherwise
designated in writing by the copyright owner as "Not a Contribution."
"Contributor" shall mean Licensor and any individual or Legal Entity
on behalf of whom a Contribution has been received by Licensor and
subsequently incorporated within the Work.
2. Grant of Copyright License. Subject to the terms and conditions of
this License, each Contributor hereby grants to You a perpetual,
worldwide, non-exclusive, no-charge, royalty-free, irrevocable
copyright license to reproduce, prepare Derivative Works of,
publicly display, publicly perform, sublicense, and distribute the
Work and such Derivative Works in Source or Object form.
3. Grant of Patent License. Subject to the terms and conditions of
this License, each Contributor hereby grants to You a perpetual,
worldwide, non-exclusive, no-charge, royalty-free, irrevocable
(except as stated in this section) patent license to make, have made,
use, offer to sell, sell, import, and otherwise transfer the Work,
where such license applies only to those patent claims licensable
by such Contributor that are necessarily infringed by their
Contribution(s) alone or by combination of their Contribution(s)
with the Work to which such Contribution(s) was submitted. If You
institute patent litigation against any entity (including a
cross-claim or counterclaim in a lawsuit) alleging that the Work
or a Contribution incorporated within the Work constitutes direct
or contributory patent infringement, then any patent licenses
granted to You under this License for that Work shall terminate
as of the date such litigation is filed.
4. Redistribution. You may reproduce and distribute copies of the
Work or Derivative Works thereof in any medium, with or without
modifications, and in Source or Object form, provided that You
meet the following conditions:
(a) You must give any other recipients of the Work or
Derivative Works a copy of this License; and
(b) You must cause any modified files to carry prominent notices
stating that You changed the files; and
(c) You must retain, in the Source form of any Derivative Works
that You distribute, all copyright, patent, trademark, and
attribution notices from the Source form of the Work,
excluding those notices that do not pertain to any part of
the Derivative Works; and
(d) If the Work includes a "NOTICE" text file as part of its
distribution, then any Derivative Works that You distribute must
include a readable copy of the attribution notices contained
within such NOTICE file, excluding those notices that do not
pertain to any part of the Derivative Works, in at least one
of the following places: within a NOTICE text file distributed
as part of the Derivative Works; within the Source form or
documentation, if provided along with the Derivative Works; or,
within a display generated by the Derivative Works, if and
wherever such third-party notices normally appear. The contents
of the NOTICE file are for informational purposes only and
do not modify the License. You may add Your own attribution
notices within Derivative Works that You distribute, alongside
or as an addendum to the NOTICE text from the Work, provided
that such additional attribution notices cannot be construed
as modifying the License.
You may add Your own copyright statement to Your modifications and
may provide additional or different license terms and conditions
for use, reproduction, or distribution of Your modifications, or
for any such Derivative Works as a whole, provided Your use,
reproduction, and distribution of the Work otherwise complies with
the conditions stated in this License.
5. Submission of Contributions. Unless You explicitly state otherwise,
any Contribution intentionally submitted for inclusion in the Work
by You to the Licensor shall be under the terms and conditions of
this License, without any additional terms or conditions.
Notwithstanding the above, nothing herein shall supersede or modify
the terms of any separate license agreement you may have executed
with Licensor regarding such Contributions.
6. Trademarks. This License does not grant permission to use the trade
names, trademarks, service marks, or product names of the Licensor,
except as required for reasonable and customary use in describing the
origin of the Work and reproducing the content of the NOTICE file.
7. Disclaimer of Warranty. Unless required by applicable law or
agreed to in writing, Licensor provides the Work (and each
Contributor provides its Contributions) on an "AS IS" BASIS,
WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or
implied, including, without limitation, any warranties or conditions
of TITLE, NON-INFRINGEMENT, MERCHANTABILITY, or FITNESS FOR A
PARTICULAR PURPOSE. You are solely responsible for determining the
appropriateness of using or redistributing the Work and assume any
risks associated with Your exercise of permissions under this License.
8. Limitation of Liability. In no event and under no legal theory,
whether in tort (including negligence), contract, or otherwise,
unless required by applicable law (such as deliberate and grossly
negligent acts) or agreed to in writing, shall any Contributor be
liable to You for damages, including any direct, indirect, special,
incidental, or consequential damages of any character arising as a
result of this License or out of the use or inability to use the
Work (including but not limited to damages for loss of goodwill,
work stoppage, computer failure or malfunction, or any and all
other commercial damages or losses), even if such Contributor
has been advised of the possibility of such damages.
9. Accepting Warranty or Additional Liability. While redistributing
the Work or Derivative Works thereof, You may choose to offer,
and charge a fee for, acceptance of support, warranty, indemnity,
or other liability obligations and/or rights consistent with this
License. However, in accepting such obligations, You may act only
on Your own behalf and on Your sole responsibility, not on behalf
of any other Contributor, and only if You agree to indemnify,
defend, and hold each Contributor harmless for any liability
incurred by, or claims asserted against, such Contributor by reason
of your accepting any such warranty or additional liability.

6
MANIFEST.in Normal file

@@ -0,0 +1,6 @@
include tox.ini
include LICENSE README.rst HACKING.rst
include AUTHORS
include ChangeLog
include tools/*
recursive-include tests *

1
README.rst Normal file

@@ -0,0 +1 @@
This is the client API library for MagnetoDB.

55
doc/source/conf.py Normal file

@@ -0,0 +1,55 @@
# -*- coding: utf-8 -*-
#
project = 'python-magnetodbclient'
# -- General configuration ---------------------------------------------
# Add any Sphinx extension module names here, as strings. They can be
# extensions coming with Sphinx (named 'sphinx.ext.*') or your custom ones.
extensions = ['sphinx.ext.autodoc', 'sphinx.ext.intersphinx']
# Add any paths that contain templates here, relative to this directory.
templates_path = ['_templates']
# The suffix of source filenames.
source_suffix = '.rst'
# The master toctree document.
master_doc = 'index'
# General information about the project.
copyright = u'OpenStack Foundation'
# If true, '()' will be appended to :func: etc. cross-reference text.
add_function_parentheses = True
# If true, the current module name will be prepended to all description
# unit titles (such as .. function::).
add_module_names = True
# The name of the Pygments (syntax highlighting) style to use.
pygments_style = 'sphinx'
# -- Options for HTML output ---------------------------------------------
# The theme to use for HTML and HTML Help pages. Major themes that come with
# Sphinx are currently 'default' and 'sphinxdoc'.
html_theme = 'nature'
# Output file base name for HTML help builder.
htmlhelp_basename = '%sdoc' % project
# Grouping the document tree into LaTeX files. List of tuples
# (source start file, target name, title, author,
# documentclass [howto/manual]).
latex_documents = [
('index',
'%s.tex' % project,
u'%s Documentation' % project,
u'OpenStack Foundation', 'manual'),
]
# Example configuration for intersphinx: refer to the Python standard library.
intersphinx_mapping = {'http://docs.python.org/': None}

31
doc/source/index.rst Normal file

@@ -0,0 +1,31 @@
Python bindings to the OpenStack KeyValue Storage API
======================================================
In order to use the python magnetodb client directly, you must first obtain an auth token and identify which endpoint you wish to speak to. Once you have done so, you can use the API like so::
>>> import logging
>>> from magnetodbclient.magnetodb import client
>>> logging.basicConfig(level=logging.DEBUG)
>>> magnetodb = client.Client('1', endpoint_url=OS_URL, token=OS_TOKEN)
>>> magnetodb.format = 'json'
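If the MagnetoDB endpoint does not require authentication, a sketch of the same flow is to point the client at it directly with the ``noauth`` strategy (any non-empty value is accepted as the token in this mode)::
>>> magnetodb = client.Client('1', endpoint_url=OS_URL, token=OS_TOKEN,
...                           auth_strategy='noauth')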
Command-line Tool
=================
In order to use the CLI, you must provide your OpenStack username, password, tenant, and auth endpoint. Use the corresponding configuration options (``--os-username``, ``--os-password``, ``--os-tenant-name``, and ``--os-auth-url``) or set them in environment variables::
export OS_USERNAME=user
export OS_PASSWORD=pass
export OS_TENANT_NAME=tenant
export OS_AUTH_URL=http://auth.example.com:5000/v2.0
The command line tool will attempt to reauthenticate using your provided credentials for every request. You can override this behavior by manually supplying an auth token using ``--os-url`` and ``--os-auth-token``. You can alternatively set these environment variables::
export OS_URL=http://magnetodb.example.org:9696/
export OS_TOKEN=3bcc3d3a03f44e3d8377f9247b0ad155
If the MagnetoDB server does not require authentication, you must also supply ``--os-auth-strategy`` or set the corresponding environment variable in addition to these two options (any value is accepted as the token)::
export OS_AUTH_STRATEGY=noauth
Once you've configured your authentication parameters, you can run ``magnetodb -h`` to see a complete listing of available commands.

312
magnetodbclient/client.py Normal file

@@ -0,0 +1,312 @@
# Copyright 2012 OpenStack Foundation.
# All Rights Reserved
#
# Licensed under the Apache License, Version 2.0 (the "License"); you may
# not use this file except in compliance with the License. You may obtain
# a copy of the License at
#
# http://www.apache.org/licenses/LICENSE-2.0
#
# Unless required by applicable law or agreed to in writing, software
# distributed under the License is distributed on an "AS IS" BASIS, WITHOUT
# WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. See the
# License for the specific language governing permissions and limitations
# under the License.
#
try:
import json
except ImportError:
import simplejson as json
import logging
import os
import httplib2
from magnetodbclient.common import exceptions
from magnetodbclient.common import utils
from magnetodbclient.openstack.common.gettextutils import _
_logger = logging.getLogger(__name__)
# httplib2 retries requests on socket.timeout which
# is not idempotent and can lead to orphan objects.
# See: https://code.google.com/p/httplib2/issues/detail?id=124
httplib2.RETRIES = 1
if os.environ.get('MAGNETODBCLIENT_DEBUG'):
ch = logging.StreamHandler()
_logger.setLevel(logging.DEBUG)
_logger.addHandler(ch)
class ServiceCatalog(object):
"""Helper methods for dealing with a Keystone Service Catalog."""
def __init__(self, resource_dict):
self.catalog = resource_dict
def get_token(self):
"""Fetch token details from service catalog."""
token = {'id': self.catalog['access']['token']['id'],
'expires': self.catalog['access']['token']['expires'], }
try:
token['user_id'] = self.catalog['access']['user']['id']
token['tenant_id'] = (
self.catalog['access']['token']['tenant']['id'])
except Exception:
# just leave the tenant and user out if it doesn't exist
pass
return token
def url_for(self, attr=None, filter_value=None,
service_type='kv-storage', endpoint_type='publicURL'):
"""Fetch the URL from the MagnetoDB service for
a particular endpoint type. If none given, return
publicURL.
"""
catalog = self.catalog['access'].get('serviceCatalog', [])
matching_endpoints = []
for service in catalog:
if service['type'] != service_type:
continue
endpoints = service['endpoints']
for endpoint in endpoints:
if not filter_value or endpoint.get(attr) == filter_value:
matching_endpoints.append(endpoint)
if not matching_endpoints:
raise exceptions.EndpointNotFound()
elif len(matching_endpoints) > 1:
raise exceptions.AmbiguousEndpoints(
matching_endpoints=matching_endpoints)
else:
if endpoint_type not in matching_endpoints[0]:
raise exceptions.EndpointTypeNotFound(type_=endpoint_type)
return matching_endpoints[0][endpoint_type]
class HTTPClient(httplib2.Http):
"""Handles the REST calls and responses, include authn."""
USER_AGENT = 'python-magnetodbclient'
def __init__(self, username=None, tenant_name=None, tenant_id=None,
password=None, auth_url=None,
token=None, region_name=None, timeout=None,
endpoint_url=None, insecure=False,
endpoint_type='publicURL',
auth_strategy='keystone', ca_cert=None, log_credentials=False,
service_type='kv-storage',
**kwargs):
super(HTTPClient, self).__init__(timeout=timeout, ca_certs=ca_cert)
self.username = username
self.tenant_name = tenant_name
self.tenant_id = tenant_id
self.password = password
self.auth_url = auth_url.rstrip('/') if auth_url else None
self.service_type = service_type
self.endpoint_type = endpoint_type
self.region_name = region_name
self.auth_token = token
self.auth_tenant_id = None
self.auth_user_id = None
self.content_type = 'application/json'
self.endpoint_url = endpoint_url
self.auth_strategy = auth_strategy
self.log_credentials = log_credentials
# httplib2 overrides
self.disable_ssl_certificate_validation = insecure
def _cs_request(self, *args, **kwargs):
kargs = {}
kargs.setdefault('headers', kwargs.get('headers', {}))
kargs['headers']['User-Agent'] = self.USER_AGENT
if 'content_type' in kwargs:
kargs['headers']['Content-Type'] = kwargs['content_type']
kargs['headers']['Accept'] = kwargs['content_type']
else:
kargs['headers']['Content-Type'] = self.content_type
kargs['headers']['Accept'] = self.content_type
if 'body' in kwargs:
kargs['body'] = kwargs['body']
args = utils.safe_encode_list(args)
kargs = utils.safe_encode_dict(kargs)
if self.log_credentials:
log_kargs = kargs
else:
log_kargs = self._strip_credentials(kargs)
utils.http_log_req(_logger, args, log_kargs)
try:
resp, body = self.request(*args, **kargs)
except httplib2.SSLHandshakeError as e:
raise exceptions.SslCertificateValidationError(reason=e)
except Exception as e:
# Wrap the low-level connection error (socket timeout, redirect
# limit, decompression error, etc) into our custom high-level
# connection exception (it is caught in the upper layers of the code)
_logger.debug("throwing ConnectionFailed : %s", e)
raise exceptions.ConnectionFailed(reason=e)
finally:
# Temporary Fix for gate failures. RPC calls and HTTP requests
# seem to be stepping on each other resulting in bogus fd's being
# picked up for making http requests
self.connections.clear()
utils.http_log_resp(_logger, resp, body)
status_code = self.get_status_code(resp)
if status_code == 401:
raise exceptions.Unauthorized(message=body)
return resp, body
def _strip_credentials(self, kwargs):
if kwargs.get('body') and self.password:
log_kwargs = kwargs.copy()
log_kwargs['body'] = kwargs['body'].replace(self.password,
'REDACTED')
return log_kwargs
else:
return kwargs
def authenticate_and_fetch_endpoint_url(self):
if not self.auth_token:
self.authenticate()
elif not self.endpoint_url:
self.endpoint_url = self._get_endpoint_url()
def do_request(self, url, method, **kwargs):
self.authenticate_and_fetch_endpoint_url()
# Perform the request once. If we get a 401 back then it
# might be because the auth token expired, so try to
# re-authenticate and try again. If it still fails, bail.
try:
kwargs.setdefault('headers', {})
if self.auth_token is None:
self.auth_token = ""
kwargs['headers']['X-Auth-Token'] = self.auth_token
resp, body = self._cs_request(self.endpoint_url + url, method,
**kwargs)
return resp, body
except exceptions.Unauthorized:
self.authenticate()
kwargs.setdefault('headers', {})
kwargs['headers']['X-Auth-Token'] = self.auth_token
resp, body = self._cs_request(
self.endpoint_url + url, method, **kwargs)
return resp, body
def _extract_service_catalog(self, body):
"""Set the client's service catalog from the response data."""
self.service_catalog = ServiceCatalog(body)
try:
sc = self.service_catalog.get_token()
self.auth_token = sc['id']
self.auth_tenant_id = sc.get('tenant_id')
self.auth_user_id = sc.get('user_id')
except KeyError:
raise exceptions.Unauthorized()
if not self.endpoint_url:
self.endpoint_url = self.service_catalog.url_for(
attr='region', filter_value=self.region_name,
service_type=self.service_type,
endpoint_type=self.endpoint_type)
def _authenticate_keystone(self):
if self.tenant_id:
body = {'auth': {'passwordCredentials':
{'username': self.username,
'password': self.password, },
'tenantId': self.tenant_id, }, }
else:
body = {'auth': {'passwordCredentials':
{'username': self.username,
'password': self.password, },
'tenantName': self.tenant_name, }, }
if self.auth_url is None:
raise exceptions.NoAuthURLProvided()
token_url = self.auth_url + "/tokens"
# Make sure we follow redirects when trying to reach Keystone
tmp_follow_all_redirects = self.follow_all_redirects
self.follow_all_redirects = True
try:
resp, resp_body = self._cs_request(token_url, "POST",
body=json.dumps(body),
content_type="application/json")
finally:
self.follow_all_redirects = tmp_follow_all_redirects
status_code = self.get_status_code(resp)
if status_code != 200:
raise exceptions.Unauthorized(message=resp_body)
if resp_body:
try:
resp_body = json.loads(resp_body)
except ValueError:
pass
else:
resp_body = None
self._extract_service_catalog(resp_body)
def _authenticate_noauth(self):
if not self.endpoint_url:
message = _('For "noauth" authentication strategy, the endpoint '
'must be specified either in the constructor or '
'using --os-url')
raise exceptions.Unauthorized(message=message)
def authenticate(self):
if self.auth_strategy == 'keystone':
self._authenticate_keystone()
elif self.auth_strategy == 'noauth':
self._authenticate_noauth()
else:
err_msg = _('Unknown auth strategy: %s') % self.auth_strategy
raise exceptions.Unauthorized(message=err_msg)
def _get_endpoint_url(self):
if self.auth_url is None:
raise exceptions.NoAuthURLProvided()
url = self.auth_url + '/tokens/%s/endpoints' % self.auth_token
try:
resp, body = self._cs_request(url, "GET")
except exceptions.Unauthorized:
self.authenticate()
return self.endpoint_url
body = json.loads(body)
for endpoint in body.get('endpoints', []):
if (endpoint['type'] == 'kv-storage' and
endpoint.get('region') == self.region_name):
if self.endpoint_type not in endpoint:
raise exceptions.EndpointTypeNotFound(
type_=self.endpoint_type)
return endpoint[self.endpoint_type]
raise exceptions.EndpointNotFound()
def get_auth_info(self):
return {'auth_token': self.auth_token,
'auth_tenant_id': self.auth_tenant_id,
'auth_user_id': self.auth_user_id,
'endpoint_url': self.endpoint_url}
def get_status_code(self, response):
"""Returns the integer status code from the response.
Either a Webob.Response (used in testing) or httplib.Response
is returned.
"""
if hasattr(response, 'status_int'):
return response.status_int
else:
return response.status
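# Usage sketch (demo only): authenticate against Keystone and issue a request.
# The credentials, auth URL and request path below are placeholders; the
# 'kv-storage' service type and 'publicURL' endpoint type are this class's
# defaults.
if __name__ == '__main__':
    http = HTTPClient(username='demo', password='secret',
                      tenant_name='demo',
                      auth_url='http://keystone.example.com:5000/v2.0')
    http.authenticate()              # fills auth_token and endpoint_url
    resp, body = http.do_request('/tables', 'GET')
    print('%s %s' % (resp.status, http.get_auth_info()['endpoint_url']))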

@@ -0,0 +1,26 @@
# Copyright 2011 VMware, Inc.
# All Rights Reserved.
#
# Licensed under the Apache License, Version 2.0 (the "License"); you may
# not use this file except in compliance with the License. You may obtain
# a copy of the License at
#
# http://www.apache.org/licenses/LICENSE-2.0
#
# Unless required by applicable law or agreed to in writing, software
# distributed under the License is distributed on an "AS IS" BASIS, WITHOUT
# WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. See the
# License for the specific language governing permissions and limitations
# under the License.
import gettext
t = gettext.translation('magnetodbclient', fallback=True)
try:
ugettext = t.ugettext # Python 2
except AttributeError:
ugettext = t.gettext # Python 3
def _(msg):
return ugettext(msg)

@@ -0,0 +1,100 @@
# Copyright 2012 OpenStack Foundation.
# All Rights Reserved
#
# Licensed under the Apache License, Version 2.0 (the "License"); you may
# not use this file except in compliance with the License. You may obtain
# a copy of the License at
#
# http://www.apache.org/licenses/LICENSE-2.0
#
# Unless required by applicable law or agreed to in writing, software
# distributed under the License is distributed on an "AS IS" BASIS, WITHOUT
# WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. See the
# License for the specific language governing permissions and limitations
# under the License.
#
"""Manage access to the clients, including authenticating when needed.
"""
import logging
from magnetodbclient import client
from magnetodbclient.magnetodb import client as magnetodb_client
LOG = logging.getLogger(__name__)
class ClientCache(object):
"""Descriptor class for caching created client handles.
"""
def __init__(self, factory):
self.factory = factory
self._handle = None
def __get__(self, instance, owner):
# Tell the ClientManager to login to keystone
if self._handle is None:
self._handle = self.factory(instance)
return self._handle
class ClientManager(object):
"""Manages access to API clients, including authentication.
"""
magnetodb = ClientCache(magnetodb_client.make_client)
# Provide support for old quantum commands (for example
# in stable versions)
quantum = magnetodb
def __init__(self, token=None, url=None,
auth_url=None,
endpoint_type=None,
tenant_name=None, tenant_id=None,
username=None, password=None,
region_name=None,
api_version=None,
auth_strategy=None,
insecure=False,
ca_cert=None,
log_credentials=False,
service_type=None,
):
self._token = token
self._url = url
self._auth_url = auth_url
self._service_type = service_type
self._endpoint_type = endpoint_type
self._tenant_name = tenant_name
self._tenant_id = tenant_id
self._username = username
self._password = password
self._region_name = region_name
self._api_version = api_version
self._service_catalog = None
self._auth_strategy = auth_strategy
self._insecure = insecure
self._ca_cert = ca_cert
self._log_credentials = log_credentials
return
def initialize(self):
if not self._url:
httpclient = client.HTTPClient(
username=self._username,
tenant_name=self._tenant_name,
tenant_id=self._tenant_id,
password=self._password,
region_name=self._region_name,
auth_url=self._auth_url,
service_type=self._service_type,
endpoint_type=self._endpoint_type,
insecure=self._insecure,
ca_cert=self._ca_cert,
log_credentials=self._log_credentials)
httpclient.authenticate()
# Populate other password flow attributes
self._token = httpclient.auth_token
self._url = httpclient.endpoint_url
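# Usage sketch (demo only): the ``magnetodb`` attribute is a ClientCache
# descriptor, so the first access calls magnetodb_client.make_client(), which
# in turn calls initialize() and authenticates.  All values are placeholders.
if __name__ == '__main__':
    manager = ClientManager(username='demo', password='secret',
                            tenant_name='demo',
                            auth_url='http://keystone.example.com:5000/v2.0',
                            api_version={'keyvalue': '1'},
                            auth_strategy='keystone',
                            service_type='kv-storage',
                            endpoint_type='publicURL')
    kv = manager.magnetodb       # lazily builds the versioned client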

@@ -0,0 +1,40 @@
# Copyright 2012 OpenStack Foundation.
# All Rights Reserved
#
# Licensed under the Apache License, Version 2.0 (the "License"); you may
# not use this file except in compliance with the License. You may obtain
# a copy of the License at
#
# http://www.apache.org/licenses/LICENSE-2.0
#
# Unless required by applicable law or agreed to in writing, software
# distributed under the License is distributed on an "AS IS" BASIS, WITHOUT
# WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. See the
# License for the specific language governing permissions and limitations
# under the License.
#
"""
OpenStack base command
"""
from cliff import command
class OpenStackCommand(command.Command):
"""Base class for OpenStack commands
"""
api = None
def run(self, parsed_args):
if not self.api:
return
else:
return super(OpenStackCommand, self).run(parsed_args)
def get_data(self, parsed_args):
pass
def take_action(self, parsed_args):
return self.get_data(parsed_args)

@@ -0,0 +1,219 @@
# Copyright 2011 VMware, Inc
# All Rights Reserved.
#
# Licensed under the Apache License, Version 2.0 (the "License"); you may
# not use this file except in compliance with the License. You may obtain
# a copy of the License at
#
# http://www.apache.org/licenses/LICENSE-2.0
#
# Unless required by applicable law or agreed to in writing, software
# distributed under the License is distributed on an "AS IS" BASIS, WITHOUT
# WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. See the
# License for the specific language governing permissions and limitations
# under the License.
"""
Neutron base exception handling.
Exceptions are classified into three categories:
* Exceptions corresponding to exceptions from the neutron server:
This type of exception should inherit one of the exceptions
in HTTP_EXCEPTION_MAP.
* Exceptions from the client library:
This type of exception should inherit NeutronClientException.
* Exceptions from CLI code:
This type of exception should inherit NeutronCLIError.
"""
from magnetodbclient.openstack.common.gettextutils import _
class NeutronException(Exception):
"""Base Neutron Exception
To correctly use this class, inherit from it and define
a 'message' property. That message will get printf'd
with the keyword arguments provided to the constructor.
"""
message = _("An unknown exception occurred.")
def __init__(self, message=None, **kwargs):
if message:
self.message = message
try:
self._error_string = self.message % kwargs
except Exception:
# at least get the core message out if something happened
self._error_string = self.message
def __str__(self):
return self._error_string
class NeutronClientException(NeutronException):
"""Base exception which exceptions from Neutron are mapped into.
NOTE: on the client side, we use different exception types in order
to allow client library users to handle server exceptions in try...except
blocks. The actual error message is the one generated on the server side.
"""
def __init__(self, message=None, **kwargs):
if 'status_code' in kwargs:
self.status_code = kwargs['status_code']
super(NeutronClientException, self).__init__(message, **kwargs)
# Base exceptions from Neutron
class BadRequest(NeutronClientException):
status_code = 400
class Unauthorized(NeutronClientException):
status_code = 401
message = _("Unauthorized: bad credentials.")
class Forbidden(NeutronClientException):
status_code = 403
message = _("Forbidden: your credentials don't give you access to this "
"resource.")
class NotFound(NeutronClientException):
status_code = 404
class Conflict(NeutronClientException):
status_code = 409
class InternalServerError(NeutronClientException):
status_code = 500
class ServiceUnavailable(NeutronClientException):
status_code = 503
HTTP_EXCEPTION_MAP = {
400: BadRequest,
401: Unauthorized,
403: Forbidden,
404: NotFound,
409: Conflict,
500: InternalServerError,
503: ServiceUnavailable,
}
# Exceptions mapped to Neutron server exceptions
# These are defined if a user of client library needs specific exception.
# Exception name should be <Neutron Exception Name> + 'Client'
# e.g., NetworkNotFound -> NetworkNotFoundClient
class NetworkNotFoundClient(NotFound):
pass
class PortNotFoundClient(NotFound):
pass
class StateInvalidClient(BadRequest):
pass
class NetworkInUseClient(Conflict):
pass
class PortInUseClient(Conflict):
pass
class IpAddressInUseClient(Conflict):
pass
# TODO(amotoki): It is unused in Neutron, but it is referred to
# in Horizon code. After Horizon code is updated, remove it.
class AlreadyAttachedClient(Conflict):
pass
class IpAddressGenerationFailureClient(Conflict):
pass
class ExternalIpAddressExhaustedClient(BadRequest):
pass
# Exceptions from client library
class NoAuthURLProvided(Unauthorized):
message = _("auth_url was not provided to the Neutron client")
class EndpointNotFound(NeutronClientException):
message = _("Could not find Service or Region in Service Catalog.")
class EndpointTypeNotFound(NeutronClientException):
message = _("Could not find endpoint type %(type_)s in Service Catalog.")
class AmbiguousEndpoints(NeutronClientException):
message = _("Found more than one matching endpoint in Service Catalog: "
"%(matching_endpoints)")
class RequestURITooLong(NeutronClientException):
"""Raised when a request fails with HTTP error 414."""
def __init__(self, **kwargs):
self.excess = kwargs.get('excess', 0)
super(RequestURITooLong, self).__init__(**kwargs)
class ConnectionFailed(NeutronClientException):
message = _("Connection to neutron failed: %(reason)s")
class SslCertificateValidationError(NeutronClientException):
message = _("SSL certificate validation has failed: %(reason)s")
class MalformedResponseBody(NeutronClientException):
message = _("Malformed response body: %(reason)s")
class InvalidContentType(NeutronClientException):
message = _("Invalid content type %(content_type)s.")
# Command line exceptions
class NeutronCLIError(NeutronException):
"""Exception raised when command line parsing fails."""
pass
class CommandError(NeutronCLIError):
pass
class UnsupportedVersion(NeutronCLIError):
"""Indicates that the user is trying to use an unsupported
version of the API
"""
pass
class NeutronClientNoUniqueMatch(NeutronCLIError):
message = _("Multiple %(resource)s matches found for name '%(name)s',"
" use an ID to be more specific.")

@@ -0,0 +1,125 @@
# Copyright 2013 OpenStack Foundation.
# All Rights Reserved
#
# Licensed under the Apache License, Version 2.0 (the "License"); you may
# not use this file except in compliance with the License. You may obtain
# a copy of the License at
#
# http://www.apache.org/licenses/LICENSE-2.0
#
# Unless required by applicable law or agreed to in writing, software
# distributed under the License is distributed on an "AS IS" BASIS, WITHOUT
# WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. See the
# License for the specific language governing permissions and limitations
# under the License.
#
import logging
from magnetodbclient.common import exceptions as exception
from magnetodbclient.openstack.common.gettextutils import _
from magnetodbclient.openstack.common import jsonutils
LOG = logging.getLogger(__name__)
class ActionDispatcher(object):
"""Maps method name to local methods through action name."""
def dispatch(self, *args, **kwargs):
"""Find and call local method."""
action = kwargs.pop('action', 'default')
action_method = getattr(self, str(action), self.default)
return action_method(*args, **kwargs)
def default(self, data):
raise NotImplementedError()
class DictSerializer(ActionDispatcher):
"""Default request body serialization."""
def serialize(self, data, action='default'):
return self.dispatch(data, action=action)
def default(self, data):
return ""
class JSONDictSerializer(DictSerializer):
"""Default JSON request body serialization."""
def default(self, data):
def sanitizer(obj):
return unicode(obj)
return jsonutils.dumps(data, default=sanitizer)
class TextDeserializer(ActionDispatcher):
"""Default request body deserialization."""
def deserialize(self, datastring, action='default'):
return self.dispatch(datastring, action=action)
def default(self, datastring):
return {}
class JSONDeserializer(TextDeserializer):
def _from_json(self, datastring):
try:
return jsonutils.loads(datastring)
except ValueError:
msg = _("Cannot understand JSON")
raise exception.MalformedResponseBody(reason=msg)
def default(self, datastring):
return {'body': self._from_json(datastring)}
class Serializer(object):
"""Serializes and deserializes dictionaries to certain MIME types."""
def __init__(self, metadata=None, default_xmlns=None):
"""Create a serializer based on the given WSGI environment.
'metadata' is an optional dict mapping MIME types to information
needed to serialize a dictionary to that type.
"""
self.metadata = metadata or {}
self.default_xmlns = default_xmlns
def _get_serialize_handler(self, content_type):
handlers = {
'application/json': JSONDictSerializer(),
}
try:
return handlers[content_type]
except Exception:
raise exception.InvalidContentType(content_type=content_type)
def serialize(self, data, content_type):
"""Serialize a dictionary into the specified content type."""
return self._get_serialize_handler(content_type).serialize(data)
def deserialize(self, datastring, content_type):
"""Deserialize a string to a dictionary.
The string must be in the format of a supported MIME type.
"""
return self.get_deserialize_handler(content_type).deserialize(
datastring)
def get_deserialize_handler(self, content_type):
handlers = {
'application/json': JSONDeserializer(),
}
try:
return handlers[content_type]
except Exception:
raise exception.InvalidContentType(content_type=content_type)
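# Usage sketch (demo only): round trip through the only registered MIME type;
# any other content type raises InvalidContentType.
if __name__ == '__main__':
    serializer = Serializer()
    payload = serializer.serialize({'table': {'name': 'test'}},
                                   'application/json')
    data = serializer.deserialize(payload, 'application/json')
    # data == {'body': {'table': {'name': 'test'}}}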

@@ -0,0 +1,202 @@
# Copyright 2011, VMware, Inc.
#
# Licensed under the Apache License, Version 2.0 (the "License"); you may
# not use this file except in compliance with the License. You may obtain
# a copy of the License at
#
# http://www.apache.org/licenses/LICENSE-2.0
#
# Unless required by applicable law or agreed to in writing, software
# distributed under the License is distributed on an "AS IS" BASIS, WITHOUT
# WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. See the
# License for the specific language governing permissions and limitations
# under the License.
#
# Borrowed from nova code base, more utilities will be added/borrowed as and
# when needed.
"""Utilities and helper functions."""
import datetime
import json
import logging
import os
import sys
from magnetodbclient.common import _
from magnetodbclient.common import exceptions
from magnetodbclient.openstack.common import strutils
def env(*vars, **kwargs):
"""Returns the first environment variable set.
if none are non-empty, defaults to '' or keyword arg default.
"""
for v in vars:
value = os.environ.get(v)
if value:
return value
return kwargs.get('default', '')
def to_primitive(value):
if isinstance(value, list) or isinstance(value, tuple):
o = []
for v in value:
o.append(to_primitive(v))
return o
elif isinstance(value, dict):
o = {}
for k, v in value.iteritems():
o[k] = to_primitive(v)
return o
elif isinstance(value, datetime.datetime):
return str(value)
elif hasattr(value, 'iteritems'):
return to_primitive(dict(value.iteritems()))
elif hasattr(value, '__iter__'):
return to_primitive(list(value))
else:
return value
def dumps(value, indent=None):
try:
return json.dumps(value, indent=indent)
except TypeError:
pass
return json.dumps(to_primitive(value))
def loads(s):
return json.loads(s)
def import_class(import_str):
"""Returns a class from a string including module and class.
:param import_str: a string representation of the class name
:rtype: the requested class
"""
mod_str, _sep, class_str = import_str.rpartition('.')
__import__(mod_str)
return getattr(sys.modules[mod_str], class_str)
def get_client_class(api_name, version, version_map):
"""Returns the client class for the requested API version
:param api_name: the name of the API, e.g. 'compute', 'image', etc
:param version: the requested API version
:param version_map: a dict of client classes keyed by version
:rtype: a client class for the requested API version
"""
try:
client_path = version_map[str(version)]
except (KeyError, ValueError):
msg = _("Invalid %(api_name)s client version '%(version)s'. must be "
"one of: %(map_keys)s")
msg = msg % {'api_name': api_name, 'version': version,
'map_keys': ', '.join(version_map.keys())}
raise exceptions.UnsupportedVersion(msg)
return import_class(client_path)
def get_item_properties(item, fields, mixed_case_fields=[], formatters={}):
"""Return a tuple containing the item properties.
:param item: a single item resource (e.g. Server, Tenant, etc)
:param fields: tuple of strings with the desired field names
:param mixed_case_fields: tuple of field names to preserve case
:param formatters: dictionary mapping field names to callables
to format the values
"""
row = []
for field in fields:
if field in formatters:
row.append(formatters[field](item))
else:
if field in mixed_case_fields:
field_name = field.replace(' ', '_')
else:
field_name = field.lower().replace(' ', '_')
if not hasattr(item, field_name) and isinstance(item, dict):
data = item[field_name]
else:
data = getattr(item, field_name, '')
if data is None:
data = ''
row.append(data)
return tuple(row)
def str2bool(strbool):
if strbool is None:
return None
else:
return strbool.lower() == 'true'
def str2dict(strdict):
'''Convert key1=value1,key2=value2,... string into dictionary.
:param strdict: key1=value1,key2=value2
'''
_info = {}
if not strdict:
return _info
for kv_str in strdict.split(","):
k, v = kv_str.split("=", 1)
_info.update({k: v})
return _info
def http_log_req(_logger, args, kwargs):
if not _logger.isEnabledFor(logging.DEBUG):
return
string_parts = ['curl -i']
for element in args:
if element in ('GET', 'POST', 'DELETE', 'PUT'):
string_parts.append(' -X %s' % element)
else:
string_parts.append(' %s' % element)
for element in kwargs['headers']:
header = ' -H "%s: %s"' % (element, kwargs['headers'][element])
string_parts.append(header)
if 'body' in kwargs and kwargs['body']:
string_parts.append(" -d '%s'" % (kwargs['body']))
string_parts = safe_encode_list(string_parts)
_logger.debug(_("\nREQ: %s\n"), "".join(string_parts))
def http_log_resp(_logger, resp, body):
if not _logger.isEnabledFor(logging.DEBUG):
return
_logger.debug(_("RESP:%(resp)s %(body)s\n"), {'resp': resp, 'body': body})
def _safe_encode_without_obj(data):
if isinstance(data, basestring):
return strutils.safe_encode(data)
return data
def safe_encode_list(data):
return map(_safe_encode_without_obj, data)
def safe_encode_dict(data):
def _encode_item((k, v)):
if isinstance(v, list):
return (k, safe_encode_list(v))
elif isinstance(v, dict):
return (k, safe_encode_dict(v))
return (k, _safe_encode_without_obj(v))
return dict(map(_encode_item, data.items()))
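# Usage sketch (demo only) for a few of the helpers above; the environment
# variable names and example values are arbitrary.
if __name__ == '__main__':
    assert str2bool('True') is True
    assert str2dict('name=test4,region=one') == {'name': 'test4',
                                                 'region': 'one'}
    assert get_item_properties({'name': 'test4', 'status': 'ACTIVE'},
                               ('Name', 'Status')) == ('test4', 'ACTIVE')
    print(env('OS_USERNAME', 'USER', default='demo'))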

@@ -0,0 +1,55 @@
# Copyright 2014 NEC Corporation
# All Rights Reserved
#
# Licensed under the Apache License, Version 2.0 (the "License"); you may
# not use this file except in compliance with the License. You may obtain
# a copy of the License at
#
# http://www.apache.org/licenses/LICENSE-2.0
#
# Unless required by applicable law or agreed to in writing, software
# distributed under the License is distributed on an "AS IS" BASIS, WITHOUT
# WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. See the
# License for the specific language governing permissions and limitations
# under the License.
from magnetodbclient.common import exceptions
from magnetodbclient.openstack.common.gettextutils import _
def validate_int_range(parsed_args, attr_name, min_value=None, max_value=None):
val = getattr(parsed_args, attr_name, None)
if val is None:
return
try:
if not isinstance(val, int):
int_val = int(val, 0)
else:
int_val = val
if ((min_value is None or min_value <= int_val) and
(max_value is None or int_val <= max_value)):
return
except (ValueError, TypeError):
pass
if min_value is not None and max_value is not None:
msg = (_('%(attr_name)s "%(val)s" should be an integer '
'[%(min)i:%(max)i].') %
{'attr_name': attr_name.replace('_', '-'),
'val': val, 'min': min_value, 'max': max_value})
elif min_value is not None:
msg = (_('%(attr_name)s "%(val)s" should be an integer '
'greater than or equal to %(min)i.') %
{'attr_name': attr_name.replace('_', '-'),
'val': val, 'min': min_value})
elif max_value is not None:
msg = (_('%(attr_name)s "%(val)s" should be an integer '
'smaller than or equal to %(max)i.') %
{'attr_name': attr_name.replace('_', '-'),
'val': val, 'max': max_value})
else:
msg = (_('%(attr_name)s "%(val)s" should be an integer.') %
{'attr_name': attr_name.replace('_', '-'),
'val': val})
raise exceptions.CommandError(msg)
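# Usage sketch (demo only): values inside the range pass silently, anything
# else raises exceptions.CommandError.  The attribute name and bounds below
# are arbitrary examples.
if __name__ == '__main__':
    import argparse

    validate_int_range(argparse.Namespace(page_size=20), 'page_size', 1, 100)
    try:
        validate_int_range(argparse.Namespace(page_size=0),
                           'page_size', 1, 100)
    except exceptions.CommandError as error:
        print(error)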

@@ -0,0 +1,65 @@
# Copyright 2012 OpenStack Foundation.
# All Rights Reserved
#
# Licensed under the Apache License, Version 2.0 (the "License"); you may
# not use this file except in compliance with the License. You may obtain
# a copy of the License at
#
# http://www.apache.org/licenses/LICENSE-2.0
#
# Unless required by applicable law or agreed to in writing, software
# distributed under the License is distributed on an "AS IS" BASIS, WITHOUT
# WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. See the
# License for the specific language governing permissions and limitations
# under the License.
#
from magnetodbclient.common import exceptions
from magnetodbclient.common import utils
from magnetodbclient.openstack.common.gettextutils import _
API_NAME = 'keyvalue'
API_VERSIONS = {
'1': 'magnetodbclient.v1.client.Client',
}
def make_client(instance):
"""Returns an magnetodb client.
"""
magnetodb_client = utils.get_client_class(
API_NAME,
instance._api_version[API_NAME],
API_VERSIONS,
)
instance.initialize()
url = instance._url
url = url.rstrip("/")
if '1' == instance._api_version[API_NAME]:
client = magnetodb_client(username=instance._username,
tenant_name=instance._tenant_name,
password=instance._password,
region_name=instance._region_name,
auth_url=instance._auth_url,
endpoint_url=url,
token=instance._token,
auth_strategy=instance._auth_strategy,
insecure=instance._insecure,
ca_cert=instance._ca_cert)
return client
else:
raise exceptions.UnsupportedVersion(_("API version %s is not "
"supported") %
instance._api_version[API_NAME])
def Client(api_version, *args, **kwargs):
"""Return an magnetodb client.
"""
magnetodb_client = utils.get_client_class(
API_NAME,
api_version,
API_VERSIONS,
)
return magnetodb_client(*args, **kwargs)

@@ -0,0 +1,661 @@
# Copyright 2012 OpenStack Foundation.
# All Rights Reserved
#
# Licensed under the Apache License, Version 2.0 (the "License"); you may
# not use this file except in compliance with the License. You may obtain
# a copy of the License at
#
# http://www.apache.org/licenses/LICENSE-2.0
#
# Unless required by applicable law or agreed to in writing, software
# distributed under the License is distributed on an "AS IS" BASIS, WITHOUT
# WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. See the
# License for the specific language governing permissions and limitations
# under the License.
#
from __future__ import print_function
import argparse
import logging
import re
from cliff.formatters import table
from cliff import lister
from cliff import show
from magnetodbclient.common import command
from magnetodbclient.common import exceptions
from magnetodbclient.common import utils
from magnetodbclient.openstack.common.gettextutils import _
HEX_ELEM = '[0-9A-Fa-f]'
UUID_PATTERN = '-'.join([HEX_ELEM + '{8}', HEX_ELEM + '{4}',
HEX_ELEM + '{4}', HEX_ELEM + '{4}',
HEX_ELEM + '{12}'])
def _get_resource_plural(resource, client):
plurals = getattr(client, 'EXTED_PLURALS', [])
for k in plurals:
if plurals[k] == resource:
return k
return resource + 's'
def find_resourceid_by_id(client, resource, resource_id):
resource_plural = _get_resource_plural(resource, client)
obj_lister = getattr(client, "list_%s" % resource_plural)
# perform search by id only if we are passing a valid UUID
match = re.match(UUID_PATTERN, resource_id)
collection = resource_plural
if match:
data = obj_lister(id=resource_id, fields='id')
if data and data[collection]:
return data[collection][0]['id']
not_found_message = (_("Unable to find %(resource)s with id "
"'%(id)s'") %
{'resource': resource, 'id': resource_id})
# 404 is used to simulate server side behavior
raise exceptions.MagnetoDBClientException(
message=not_found_message, status_code=404)
def _find_resourceid_by_name(client, resource, name):
resource_plural = _get_resource_plural(resource, client)
obj_lister = getattr(client, "list_%s" % resource_plural)
data = obj_lister(name=name, fields='id')
collection = resource_plural
info = data[collection]
if len(info) > 1:
raise exceptions.MagnetoDBClientNoUniqueMatch(resource=resource,
name=name)
elif len(info) == 0:
not_found_message = (_("Unable to find %(resource)s with name "
"'%(name)s'") %
{'resource': resource, 'name': name})
# 404 is used to simulate server side behavior
raise exceptions.MagnetoDBClientException(
message=not_found_message, status_code=404)
else:
return info[0]['id']
def find_resourceid_by_name_or_id(client, resource, name_or_id):
try:
return find_resourceid_by_id(client, resource, name_or_id)
except exceptions.MagnetoDBClientException:
return _find_resourceid_by_name(client, resource, name_or_id)
def add_show_list_common_argument(parser):
parser.add_argument(
'-D', '--show-details',
help=_('Show detailed info'),
action='store_true',
default=False, )
parser.add_argument(
'--show_details',
action='store_true',
help=argparse.SUPPRESS)
parser.add_argument(
'--fields',
help=argparse.SUPPRESS,
action='append',
default=[])
parser.add_argument(
'-F', '--field',
dest='fields', metavar='FIELD',
help=_('Specify the field(s) to be returned by server,'
' can be repeated'),
action='append',
default=[])
def add_pagination_argument(parser):
parser.add_argument(
'-P', '--page-size',
dest='page_size', metavar='SIZE', type=int,
help=_("Specify retrieve unit of each request, then split one request "
"to several requests"),
default=None)
def add_sorting_argument(parser):
parser.add_argument(
'--sort-key',
dest='sort_key', metavar='FIELD',
action='append',
help=_("Sort list by specified fields (This option can be repeated), "
"The number of sort_dir and sort_key should match each other, "
"more sort_dir specified will be omitted, less will be filled "
"with asc as default direction "),
default=[])
parser.add_argument(
'--sort-dir',
dest='sort_dir', metavar='{asc,desc}',
help=_("Sort list in specified directions "
"(This option can be repeated)"),
action='append',
default=[],
choices=['asc', 'desc'])
def is_number(s):
try:
float(s) # for int, long and float
except ValueError:
try:
complex(s) # for complex
except ValueError:
return False
return True
def _process_previous_argument(current_arg, _value_number, current_type_str,
_list_flag, _values_specs, _clear_flag,
values_specs):
if current_arg is not None:
if _value_number == 0 and (current_type_str or _list_flag):
# This kind of argument should have value
raise exceptions.CommandError(
_("Invalid values_specs %s") % ' '.join(values_specs))
if _value_number > 1 or _list_flag or current_type_str == 'list':
current_arg.update({'nargs': '+'})
elif _value_number == 0:
if _clear_flag:
# if we have action=clear, we use argument's default
# value None for argument
_values_specs.pop()
else:
# We assume non value argument as bool one
current_arg.update({'action': 'store_true'})
def parse_args_to_dict(values_specs):
'''Analyze the extra options passed after the known command arguments.
Besides known options and arguments, our commands also let the user
append extra options at the end of the command line. For example,
list_nets -- --tag x y --key1 value1, where '-- --tag x y --key1 value1'
is the extra part of the list_nets invocation. This feature supports the
V1 API's field selection and filters. For example, to list tables named
'test4', we can run list_tables -- --name=test4.
A value spec has the form: --key type=int|bool|... value. Type is one of
the Python built-in types (string by default). A key without a value is
treated as a boolean option, and a key with two or more values becomes a
list option.
'''
# values_specs for example: '-- --tag x y --key1 type=int value1'
# -- is a pseudo argument
values_specs_copy = values_specs[:]
if values_specs_copy and values_specs_copy[0] == '--':
del values_specs_copy[0]
# converted ArgumentParser arguments for each of the options
_options = {}
# the argument part for current option in _options
current_arg = None
# the string after remove meta info in values_specs
# for example, '--tag x y --key1 value1'
_values_specs = []
# record the count of values for an option
# for example: for '--tag x y', it is 2, while for '--key1 value1', it is 1
_value_number = 0
# list=true
_list_flag = False
# action=clear
_clear_flag = False
# the current item in values_specs
current_item = None
# the str after 'type='
current_type_str = None
for _item in values_specs_copy:
if _item.startswith('--'):
# Deal with previous argument if any
_process_previous_argument(
current_arg, _value_number, current_type_str,
_list_flag, _values_specs, _clear_flag, values_specs)
# Init variables for current argument
current_item = _item
_list_flag = False
_clear_flag = False
current_type_str = None
if "=" in _item:
_value_number = 1
_item = _item.split('=')[0]
else:
_value_number = 0
if _item in _options:
raise exceptions.CommandError(
_("Duplicated options %s") % ' '.join(values_specs))
else:
_options.update({_item: {}})
current_arg = _options[_item]
_item = current_item
elif _item.startswith('type='):
if current_arg is None:
raise exceptions.CommandError(
_("Invalid values_specs %s") % ' '.join(values_specs))
if 'type' not in current_arg:
current_type_str = _item.split('=', 2)[1]
current_arg.update({'type': eval(current_type_str)})
if current_type_str == 'bool':
current_arg.update({'type': utils.str2bool})
elif current_type_str == 'dict':
current_arg.update({'type': utils.str2dict})
continue
elif _item == 'list=true':
_list_flag = True
continue
elif _item == 'action=clear':
_clear_flag = True
continue
if not _item.startswith('--'):
# All others are value items
# Make sure '--' occurs first and allow minus value
if (not current_item or '=' in current_item or
_item.startswith('-') and not is_number(_item)):
raise exceptions.CommandError(
_("Invalid values_specs %s") % ' '.join(values_specs))
_value_number += 1
_values_specs.append(_item)
# Deal with last one argument
_process_previous_argument(
current_arg, _value_number, current_type_str,
_list_flag, _values_specs, _clear_flag, values_specs)
# populate the parser with arguments
_parser = argparse.ArgumentParser(add_help=False)
for opt, optspec in _options.iteritems():
_parser.add_argument(opt, **optspec)
_args = _parser.parse_args(_values_specs)
result_dict = {}
for opt in _options.iterkeys():
_opt = opt.split('--', 2)[1]
_opt = _opt.replace('-', '_')
_value = getattr(_args, _opt)
result_dict.update({_opt: _value})
return result_dict
def _merge_args(qCmd, parsed_args, _extra_values, value_specs):
"""Merge arguments from _extra_values into parsed_args.
If an argument value are provided in both and it is a list,
the values in _extra_values will be merged into parsed_args.
@param parsed_args: the parsed args from known options
@param _extra_values: the other parsed arguments in unknown parts
@param value_specs: the unparsed unknown parts
"""
temp_values = _extra_values.copy()
for key, value in temp_values.iteritems():
if hasattr(parsed_args, key):
arg_value = getattr(parsed_args, key)
if arg_value is not None and value is not None:
if isinstance(arg_value, list):
if value and isinstance(value, list):
if (not arg_value or
type(arg_value[0]) == type(value[0])):
arg_value.extend(value)
_extra_values.pop(key)
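# Illustrative sketch of the merge behaviour (attribute names and values are
# assumed): if parsed_args.tag == ['a'] and _extra_values == {'tag': ['b', 'c']},
# then after _merge_args(cmd, parsed_args, _extra_values, values_specs)
# parsed_args.tag becomes ['a', 'b', 'c'] and 'tag' is popped from
# _extra_values, so the same option is not sent twice in the request body.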
def update_dict(obj, dict, attributes):
"""Update dict with fields from obj.attributes
:param obj: the object updated into dict
:param dict: the result dictionary
:param attributes: a list of attributes belonging to obj
"""
for attribute in attributes:
if hasattr(obj, attribute) and getattr(obj, attribute) is not None:
dict[attribute] = getattr(obj, attribute)
class TableFormater(table.TableFormatter):
"""This class is used to keep consistency with prettytable 0.6.
https://bugs.launchpad.net/python-magnetodbclient/+bug/1165962
"""
def emit_list(self, column_names, data, stdout, parsed_args):
if column_names:
super(TableFormater, self).emit_list(column_names, data, stdout,
parsed_args)
else:
stdout.write('\n')
class MagnetoDBCommand(command.OpenStackCommand):
api = 'keyvalue'
log = logging.getLogger(__name__ + '.MagnetoDBCommand')
values_specs = []
json_indent = None
def __init__(self, app, app_args):
super(MagnetoDBCommand, self).__init__(app, app_args)
# NOTE(markmcclain): This is no longer supported in cliff version 1.5.2
# see https://bugs.launchpad.net/python-neutronclient/+bug/1265926
#if hasattr(self, 'formatters'):
#self.formatters['table'] = TableFormater()
def get_client(self):
return self.app.client_manager.magnetodb
def get_parser(self, prog_name):
parser = super(MagnetoDBCommand, self).get_parser(prog_name)
parser.add_argument(
'--request-format',
help=_('The XML or JSON request format'),
default='json',
choices=['json', 'xml', ], )
parser.add_argument(
'--request_format',
choices=['json', 'xml', ],
help=argparse.SUPPRESS)
return parser
def format_output_data(self, data):
# Modify data to make it more readable
if self.resource in data:
for k, v in data[self.resource].iteritems():
if isinstance(v, list):
value = '\n'.join(utils.dumps(
i, indent=self.json_indent) if isinstance(i, dict)
else str(i) for i in v)
data[self.resource][k] = value
elif isinstance(v, dict):
value = utils.dumps(v, indent=self.json_indent)
data[self.resource][k] = value
elif v is None:
data[self.resource][k] = ''
def add_known_arguments(self, parser):
pass
def args2body(self, parsed_args):
return {}
class CreateCommand(MagnetoDBCommand, show.ShowOne):
"""Create a resource for a given tenant
"""
api = 'keyvalue'
resource = None
log = None
def get_parser(self, prog_name):
parser = super(CreateCommand, self).get_parser(prog_name)
parser.add_argument(
'--tenant-id', metavar='TENANT_ID',
help=_('The owner tenant ID'), )
parser.add_argument(
'--tenant_id',
help=argparse.SUPPRESS)
self.add_known_arguments(parser)
return parser
def get_data(self, parsed_args):
self.log.debug('get_data(%s)' % parsed_args)
magnetodb_client = self.get_client()
magnetodb_client.format = parsed_args.request_format
_extra_values = parse_args_to_dict(self.values_specs)
_merge_args(self, parsed_args, _extra_values,
self.values_specs)
body = self.args2body(parsed_args)
body[self.resource].update(_extra_values)
obj_creator = getattr(magnetodb_client,
"create_%s" % self.resource)
data = obj_creator(body)
self.format_output_data(data)
info = self.resource in data and data[self.resource] or None
if info:
print(_('Created a new %s:') % self.resource,
file=self.app.stdout)
else:
info = {'': ''}
return zip(*sorted(info.iteritems()))
class UpdateCommand(MagnetoDBCommand):
"""Update resource's information
"""
api = 'keyvalue'
resource = None
log = None
allow_names = True
def get_parser(self, prog_name):
parser = super(UpdateCommand, self).get_parser(prog_name)
parser.add_argument(
'id', metavar=self.resource.upper(),
help=_('ID or name of %s to update') % self.resource)
self.add_known_arguments(parser)
return parser
def run(self, parsed_args):
self.log.debug('run(%s)', parsed_args)
magnetodb_client = self.get_client()
magnetodb_client.format = parsed_args.request_format
_extra_values = parse_args_to_dict(self.values_specs)
_merge_args(self, parsed_args, _extra_values,
self.values_specs)
body = self.args2body(parsed_args)
if self.resource in body:
body[self.resource].update(_extra_values)
else:
body[self.resource] = _extra_values
if not body[self.resource]:
raise exceptions.CommandError(
_("Must specify new values to update %s") % self.resource)
if self.allow_names:
_id = find_resourceid_by_name_or_id(
magnetodb_client, self.resource, parsed_args.id)
else:
_id = find_resourceid_by_id(
magnetodb_client, self.resource, parsed_args.id)
obj_updator = getattr(magnetodb_client,
"update_%s" % self.resource)
obj_updator(_id, body)
print((_('Updated %(resource)s: %(id)s') %
{'id': parsed_args.id, 'resource': self.resource}),
file=self.app.stdout)
return
class DeleteCommand(MagnetoDBCommand):
"""Delete a given resource
"""
api = 'keyvalue'
resource = None
log = None
allow_names = True
def get_parser(self, prog_name):
parser = super(DeleteCommand, self).get_parser(prog_name)
if self.allow_names:
help_str = _('ID or name of %s to delete')
else:
help_str = _('ID of %s to delete')
parser.add_argument(
'id', metavar=self.resource.upper(),
help=help_str % self.resource)
return parser
def run(self, parsed_args):
self.log.debug('run(%s)', parsed_args)
magnetodb_client = self.get_client()
magnetodb_client.format = parsed_args.request_format
obj_deleter = getattr(magnetodb_client,
"delete_%s" % self.resource)
if self.allow_names:
_id = find_resourceid_by_name_or_id(magnetodb_client,
self.resource,
parsed_args.id)
else:
_id = parsed_args.id
obj_deleter(_id)
print((_('Deleted %(resource)s: %(id)s')
% {'id': parsed_args.id,
'resource': self.resource}),
file=self.app.stdout)
return
class ListCommand(MagnetoDBCommand, lister.Lister):
"""List resources that belong to a given tenant
"""
api = 'keyvalue'
resource = None
log = None
_formatters = {}
list_columns = []
unknown_parts_flag = True
pagination_support = False
sorting_support = False
def get_parser(self, prog_name):
parser = super(ListCommand, self).get_parser(prog_name)
add_show_list_common_argument(parser)
if self.pagination_support:
add_pagination_argument(parser)
if self.sorting_support:
add_sorting_argument(parser)
return parser
def args2search_opts(self, parsed_args):
search_opts = {}
fields = parsed_args.fields
if parsed_args.fields:
search_opts.update({'fields': fields})
if parsed_args.show_details:
search_opts.update({'verbose': 'True'})
return search_opts
def call_server(self, magnetodb_client, search_opts, parsed_args):
resource_plural = _get_resource_plural(self.resource, magnetodb_client)
obj_lister = getattr(magnetodb_client, "list_%s" % resource_plural)
data = obj_lister(**search_opts)
return data
def retrieve_list(self, parsed_args):
"""Retrieve a list of resources from MagnetoDB server"""
magnetodb_client = self.get_client()
magnetodb_client.format = parsed_args.request_format
_extra_values = parse_args_to_dict(self.values_specs)
_merge_args(self, parsed_args, _extra_values,
self.values_specs)
search_opts = self.args2search_opts(parsed_args)
search_opts.update(_extra_values)
if self.pagination_support:
page_size = parsed_args.page_size
if page_size:
search_opts.update({'limit': page_size})
if self.sorting_support:
keys = parsed_args.sort_key
if keys:
search_opts.update({'sort_key': keys})
dirs = parsed_args.sort_dir
len_diff = len(keys) - len(dirs)
if len_diff > 0:
dirs += ['asc'] * len_diff
elif len_diff < 0:
dirs = dirs[:len(keys)]
if dirs:
search_opts.update({'sort_dir': dirs})
data = self.call_server(magnetodb_client, search_opts, parsed_args)
collection = _get_resource_plural(self.resource, magnetodb_client)
return data.get(collection, [])
def extend_list(self, data, parsed_args):
"""Update a retrieved list.
"""
pass
def setup_columns(self, info, parsed_args):
_columns = len(info) > 0 and sorted(info[0].keys()) or []
if not _columns:
# clean the parsed_args.columns so that cliff will not break
parsed_args.columns = []
elif parsed_args.columns:
_columns = [x for x in parsed_args.columns if x in _columns]
elif self.list_columns:
# if no -c(s) by user and list_columns, we use columns in
# both list_columns and returned resource.
# Also keep their order the same as in list_columns
_columns = [x for x in self.list_columns if x in _columns]
return (_columns, (utils.get_item_properties(
s, _columns, formatters=self._formatters, )
for s in info), )
def get_data(self, parsed_args):
self.log.debug('get_data(%s)', parsed_args)
data = self.retrieve_list(parsed_args)
self.extend_list(data, parsed_args)
return self.setup_columns(data, parsed_args)
class ShowCommand(MagnetoDBCommand, show.ShowOne):
"""Show information of a given resource
"""
api = 'keyvalue'
resource = None
log = None
allow_names = True
def get_parser(self, prog_name):
parser = super(ShowCommand, self).get_parser(prog_name)
add_show_list_common_argument(parser)
if self.allow_names:
help_str = _('ID or name of %s to look up')
else:
help_str = _('ID of %s to look up')
parser.add_argument(
'id', metavar=self.resource.upper(),
help=help_str % self.resource)
return parser
def get_data(self, parsed_args):
self.log.debug('get_data(%s)', parsed_args)
magnetodb_client = self.get_client()
magnetodb_client.format = parsed_args.request_format
params = {}
if parsed_args.show_details:
params = {'verbose': 'True'}
if parsed_args.fields:
params = {'fields': parsed_args.fields}
if self.allow_names:
_id = find_resourceid_by_name_or_id(magnetodb_client,
self.resource,
parsed_args.id)
else:
_id = parsed_args.id
obj_shower = getattr(magnetodb_client, "show_%s" % self.resource)
data = obj_shower(_id, **params)
self.format_output_data(data)
if self.resource in data:
resource = data[self.resource]
return zip(*sorted(resource.iteritems()))
else:
return None

View File

@ -0,0 +1,320 @@
# Copyright 2012 Red Hat, Inc.
# Copyright 2013 IBM Corp.
# All Rights Reserved.
#
# Licensed under the Apache License, Version 2.0 (the "License"); you may
# not use this file except in compliance with the License. You may obtain
# a copy of the License at
#
# http://www.apache.org/licenses/LICENSE-2.0
#
# Unless required by applicable law or agreed to in writing, software
# distributed under the License is distributed on an "AS IS" BASIS, WITHOUT
# WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. See the
# License for the specific language governing permissions and limitations
# under the License.
"""
gettext for openstack-common modules.
Usual usage in an openstack.common module:
from magnetodbclient.openstack.common.gettextutils import _
"""
import copy
import gettext
import logging
import os
import re
import UserString
from babel import localedata
import six
_localedir = os.environ.get('magnetodbclient'.upper() + '_LOCALEDIR')
_t = gettext.translation('magnetodbclient', localedir=_localedir, fallback=True)
_AVAILABLE_LANGUAGES = {}
USE_LAZY = False
def enable_lazy():
"""Convenience function for configuring _() to use lazy gettext
Call this at the start of execution to enable the gettextutils._
function to use lazy gettext functionality. This is useful if
your project is importing _ directly instead of using the
gettextutils.install() way of importing the _ function.
"""
global USE_LAZY
USE_LAZY = True
def _(msg):
if USE_LAZY:
return Message(msg, 'magnetodbclient')
else:
return _t.ugettext(msg)
def install(domain, lazy=False):
"""Install a _() function using the given translation domain.
Given a translation domain, install a _() function using gettext's
install() function.
The main difference from gettext.install() is that we allow
overriding the default localedir (e.g. /usr/share/locale) using
a translation-domain-specific environment variable (e.g.
NOVA_LOCALEDIR).
:param domain: the translation domain
:param lazy: indicates whether or not to install the lazy _() function.
The lazy _() introduces a way to do deferred translation
of messages by installing a _ that builds Message objects,
instead of strings, which can then be lazily translated into
any available locale.
"""
if lazy:
# NOTE(mrodden): Lazy gettext functionality.
#
# The following introduces a deferred way to do translations on
# messages in OpenStack. We override the standard _() function
# and % (format string) operation to build Message objects that can
# later be translated when we have more information.
#
# Also included below is an example LocaleHandler that translates
# Messages to an associated locale, effectively allowing many logs,
# each with their own locale.
def _lazy_gettext(msg):
"""Create and return a Message object.
Lazy gettext function for a given domain; it is a factory method
for a project/module to get a lazy gettext function for its own
translation domain (e.g. nova, glance, cinder, etc.).
Message encapsulates a string so that we can translate
it later when needed.
"""
return Message(msg, domain)
import __builtin__
__builtin__.__dict__['_'] = _lazy_gettext
else:
localedir = '%s_LOCALEDIR' % domain.upper()
gettext.install(domain,
localedir=os.environ.get(localedir),
unicode=True)
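# Illustrative sketch (the locale path and the lazy flag usage are assumptions,
# not part of the original module):
#
#     os.environ['MAGNETODBCLIENT_LOCALEDIR'] = '/opt/locale'  # optional override
#     install('magnetodbclient')              # plain gettext.install()
#     install('magnetodbclient', lazy=True)   # _() returns Message objects instead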
class Message(UserString.UserString, object):
"""Class used to encapsulate translatable messages."""
def __init__(self, msg, domain):
# _msg is the gettext msgid and should never change
self._msg = msg
self._left_extra_msg = ''
self._right_extra_msg = ''
self.params = None
self.locale = None
self.domain = domain
@property
def data(self):
# NOTE(mrodden): this should always resolve to a unicode string
# that best represents the state of the message currently
localedir = os.environ.get(self.domain.upper() + '_LOCALEDIR')
if self.locale:
lang = gettext.translation(self.domain,
localedir=localedir,
languages=[self.locale],
fallback=True)
else:
# use system locale for translations
lang = gettext.translation(self.domain,
localedir=localedir,
fallback=True)
full_msg = (self._left_extra_msg +
lang.ugettext(self._msg) +
self._right_extra_msg)
if self.params is not None:
full_msg = full_msg % self.params
return six.text_type(full_msg)
def _save_dictionary_parameter(self, dict_param):
full_msg = self.data
# look for %(blah) fields in string;
# ignore %% and deal with the
# case where % is first character on the line
keys = re.findall('(?:[^%]|^)?%\((\w*)\)[a-z]', full_msg)
# if we don't find any %(blah) blocks but have a %s
if not keys and re.findall('(?:[^%]|^)%[a-z]', full_msg):
# apparently the full dictionary is the parameter
params = copy.deepcopy(dict_param)
else:
params = {}
for key in keys:
try:
params[key] = copy.deepcopy(dict_param[key])
except TypeError:
# cast uncopyable thing to unicode string
params[key] = unicode(dict_param[key])
return params
def _save_parameters(self, other):
# we check for None later to see if
# we actually have parameters to inject,
# so encapsulate if our parameter is actually None
if other is None:
self.params = (other, )
elif isinstance(other, dict):
self.params = self._save_dictionary_parameter(other)
else:
# fallback to casting to unicode,
# this will handle the problematic python code-like
# objects that cannot be deep-copied
try:
self.params = copy.deepcopy(other)
except TypeError:
self.params = unicode(other)
return self
# overrides to be more string-like
def __unicode__(self):
return self.data
def __str__(self):
return self.data.encode('utf-8')
def __getstate__(self):
to_copy = ['_msg', '_right_extra_msg', '_left_extra_msg',
'domain', 'params', 'locale']
new_dict = self.__dict__.fromkeys(to_copy)
for attr in to_copy:
new_dict[attr] = copy.deepcopy(self.__dict__[attr])
return new_dict
def __setstate__(self, state):
for (k, v) in state.items():
setattr(self, k, v)
# operator overloads
def __add__(self, other):
copied = copy.deepcopy(self)
copied._right_extra_msg += other.__str__()
return copied
def __radd__(self, other):
copied = copy.deepcopy(self)
copied._left_extra_msg += other.__str__()
return copied
def __mod__(self, other):
# do a format string to catch and raise
# any possible KeyErrors from missing parameters
self.data % other
copied = copy.deepcopy(self)
return copied._save_parameters(other)
def __mul__(self, other):
return self.data * other
def __rmul__(self, other):
return other * self.data
def __getitem__(self, key):
return self.data[key]
def __getslice__(self, start, end):
return self.data.__getslice__(start, end)
def __getattribute__(self, name):
# NOTE(mrodden): handle lossy operations that we can't deal with yet
# These override the UserString implementation, since UserString
# uses our __class__ attribute to try and build a new message
# after running the inner data string through the operation.
# At that point, we have lost the gettext message id and can just
# safely resolve to a string instead.
ops = ['capitalize', 'center', 'decode', 'encode',
'expandtabs', 'ljust', 'lstrip', 'replace', 'rjust', 'rstrip',
'strip', 'swapcase', 'title', 'translate', 'upper', 'zfill']
if name in ops:
return getattr(self.data, name)
else:
return UserString.UserString.__getattribute__(self, name)
def get_available_languages(domain):
"""Lists the available languages for the given translation domain.
:param domain: the domain to get languages for
"""
if domain in _AVAILABLE_LANGUAGES:
return copy.copy(_AVAILABLE_LANGUAGES[domain])
localedir = '%s_LOCALEDIR' % domain.upper()
find = lambda x: gettext.find(domain,
localedir=os.environ.get(localedir),
languages=[x])
# NOTE(mrodden): en_US should always be available (and first in case
# order matters) since our in-line message strings are en_US
language_list = ['en_US']
# NOTE(luisg): Babel <1.0 used a function called list(), which was
# renamed to locale_identifiers() in >=1.0, the requirements master list
# requires >=0.9.6, uncapped, so defensively work with both. We can remove
# this check when the master list updates to >=1.0, and all projects update
list_identifiers = (getattr(localedata, 'list', None) or
getattr(localedata, 'locale_identifiers'))
locale_identifiers = list_identifiers()
for i in locale_identifiers:
if find(i) is not None:
language_list.append(i)
_AVAILABLE_LANGUAGES[domain] = language_list
return copy.copy(language_list)
def get_localized_message(message, user_locale):
"""Gets a localized version of the given message in the given locale."""
if isinstance(message, Message):
if user_locale:
message.locale = user_locale
return unicode(message)
else:
return message
class LocaleHandler(logging.Handler):
"""Handler that can have a locale associated to translate Messages.
A quick example of how to utilize the Message class above.
LocaleHandler takes a locale and a target logging.Handler object
to forward LogRecord objects to after translating the internal Message.
"""
def __init__(self, locale, target):
"""Initialize a LocaleHandler
:param locale: locale to use for translating messages
:param target: logging.Handler object to forward
LogRecord objects to after translation
"""
logging.Handler.__init__(self)
self.locale = locale
self.target = target
def emit(self, record):
if isinstance(record.msg, Message):
# set the locale and resolve to a string
record.msg.locale = self.locale
self.target.emit(record)

View File

@ -0,0 +1,67 @@
# Copyright 2011 OpenStack Foundation.
# All Rights Reserved.
#
# Licensed under the Apache License, Version 2.0 (the "License"); you may
# not use this file except in compliance with the License. You may obtain
# a copy of the License at
#
# http://www.apache.org/licenses/LICENSE-2.0
#
# Unless required by applicable law or agreed to in writing, software
# distributed under the License is distributed on an "AS IS" BASIS, WITHOUT
# WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. See the
# License for the specific language governing permissions and limitations
# under the License.
"""
Import related utilities and helper functions.
"""
import sys
import traceback
from magnetodbclient.openstack.common.gettextutils import _
def import_class(import_str):
"""Returns a class from a string including module and class."""
mod_str, _sep, class_str = import_str.rpartition('.')
try:
__import__(mod_str)
return getattr(sys.modules[mod_str], class_str)
except (ValueError, AttributeError):
raise ImportError(_('Class %s cannot be found (%s)') %
(class_str,
traceback.format_exception(*sys.exc_info())))
def import_object(import_str, *args, **kwargs):
"""Import a class and return an instance of it."""
return import_class(import_str)(*args, **kwargs)
def import_object_ns(name_space, import_str, *args, **kwargs):
"""Tries to import object from default namespace.
Imports a class and returns an instance of it, first by trying
to find the class in a default namespace, then falling back to
a full path if not found in the default namespace.
"""
import_value = "%s.%s" % (name_space, import_str)
try:
return import_class(import_value)(*args, **kwargs)
except ImportError:
return import_class(import_str)(*args, **kwargs)
def import_module(import_str):
"""Import a module."""
__import__(import_str)
return sys.modules[import_str]
def try_import(import_str, default=None):
"""Try to import a module and if it fails return default."""
try:
return import_module(import_str)
except ImportError:
return default
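# Illustrative usage (module names chosen only as examples):
#
#     dt_cls = import_class('datetime.datetime')   # -> the datetime.datetime class
#     json_mod = import_module('json')              # -> the json module
#     missing = try_import('no_such_module')        # -> None instead of ImportError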

View File

@ -0,0 +1,171 @@
# Copyright 2010 United States Government as represented by the
# Administrator of the National Aeronautics and Space Administration.
# Copyright 2011 Justin Santa Barbara
# All Rights Reserved.
#
# Licensed under the Apache License, Version 2.0 (the "License"); you may
# not use this file except in compliance with the License. You may obtain
# a copy of the License at
#
# http://www.apache.org/licenses/LICENSE-2.0
#
# Unless required by applicable law or agreed to in writing, software
# distributed under the License is distributed on an "AS IS" BASIS, WITHOUT
# WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. See the
# License for the specific language governing permissions and limitations
# under the License.
'''
JSON related utilities.
This module provides a few things:
1) A handy function for getting an object down to something that can be
JSON serialized. See to_primitive().
2) Wrappers around loads() and dumps(). The dumps() wrapper will
automatically use to_primitive() for you if needed.
3) This sets up anyjson to use the loads() and dumps() wrappers if anyjson
is available.
'''
import datetime
import functools
import inspect
import itertools
import json
import types
import xmlrpclib
import six
from magnetodbclient.openstack.common import importutils
from magnetodbclient.openstack.common import timeutils
netaddr = importutils.try_import("netaddr")
_nasty_type_tests = [inspect.ismodule, inspect.isclass, inspect.ismethod,
inspect.isfunction, inspect.isgeneratorfunction,
inspect.isgenerator, inspect.istraceback, inspect.isframe,
inspect.iscode, inspect.isbuiltin, inspect.isroutine,
inspect.isabstract]
_simple_types = (types.NoneType, int, basestring, bool, float, long)
def to_primitive(value, convert_instances=False, convert_datetime=True,
level=0, max_depth=3):
"""Convert a complex object into primitives.
Handy for JSON serialization. We can optionally handle instances,
but since this is a recursive function, we could have cyclical
data structures.
To handle cyclical data structures we could track the actual objects
visited in a set, but not all objects are hashable. Instead we just
track the depth of the object inspections and don't go too deep.
Therefore, convert_instances=True is lossy ... be aware.
"""
# handle obvious types first - order of basic types determined by running
# full tests on nova project, resulting in the following counts:
# 572754 <type 'NoneType'>
# 460353 <type 'int'>
# 379632 <type 'unicode'>
# 274610 <type 'str'>
# 199918 <type 'dict'>
# 114200 <type 'datetime.datetime'>
# 51817 <type 'bool'>
# 26164 <type 'list'>
# 6491 <type 'float'>
# 283 <type 'tuple'>
# 19 <type 'long'>
if isinstance(value, _simple_types):
return value
if isinstance(value, datetime.datetime):
if convert_datetime:
return timeutils.strtime(value)
else:
return value
# value of itertools.count doesn't get caught by nasty_type_tests
# and results in infinite loop when list(value) is called.
if type(value) == itertools.count:
return six.text_type(value)
# FIXME(vish): Workaround for LP bug 852095. Without this workaround,
# tests that raise an exception in a mocked method that
# has a @wrap_exception with a notifier will fail. If
# we up the dependency to 0.5.4 (when it is released) we
# can remove this workaround.
if getattr(value, '__module__', None) == 'mox':
return 'mock'
if level > max_depth:
return '?'
# The try block may not be necessary after the class check above,
# but just in case ...
try:
recursive = functools.partial(to_primitive,
convert_instances=convert_instances,
convert_datetime=convert_datetime,
level=level,
max_depth=max_depth)
if isinstance(value, dict):
return dict((k, recursive(v)) for k, v in value.iteritems())
elif isinstance(value, (list, tuple)):
return [recursive(lv) for lv in value]
# It's not clear why xmlrpclib created their own DateTime type, but
# for our purposes, make it a datetime type which is explicitly
# handled
if isinstance(value, xmlrpclib.DateTime):
value = datetime.datetime(*tuple(value.timetuple())[:6])
if convert_datetime and isinstance(value, datetime.datetime):
return timeutils.strtime(value)
elif hasattr(value, 'iteritems'):
return recursive(dict(value.iteritems()), level=level + 1)
elif hasattr(value, '__iter__'):
return recursive(list(value))
elif convert_instances and hasattr(value, '__dict__'):
# Likely an instance of something. Watch for cycles.
# Ignore class member vars.
return recursive(value.__dict__, level=level + 1)
elif netaddr and isinstance(value, netaddr.IPAddress):
return six.text_type(value)
else:
if any(test(value) for test in _nasty_type_tests):
return six.text_type(value)
return value
except TypeError:
# Class objects are tricky since they may define something like
# __iter__ defined but it isn't callable as list().
return six.text_type(value)
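# Illustrative sketch (values are assumed): nested containers are walked
# recursively and datetimes are rendered through timeutils.strtime():
#
#     to_primitive({'when': datetime.datetime(2014, 4, 24, 18, 59, 37),
#                   'tags': ('a', 'b')})
#     # -> {'when': '2014-04-24T18:59:37.000000', 'tags': ['a', 'b']}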
def dumps(value, default=to_primitive, **kwargs):
return json.dumps(value, default=default, **kwargs)
def loads(s):
return json.loads(s)
def load(s):
return json.load(s)
try:
import anyjson
except ImportError:
pass
else:
anyjson._modules.append((__name__, 'dumps', TypeError,
'loads', ValueError, 'load'))
anyjson.force_implementation(__name__)

View File

@ -0,0 +1,216 @@
# Copyright 2011 OpenStack Foundation.
# All Rights Reserved.
#
# Licensed under the Apache License, Version 2.0 (the "License"); you may
# not use this file except in compliance with the License. You may obtain
# a copy of the License at
#
# http://www.apache.org/licenses/LICENSE-2.0
#
# Unless required by applicable law or agreed to in writing, software
# distributed under the License is distributed on an "AS IS" BASIS, WITHOUT
# WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. See the
# License for the specific language governing permissions and limitations
# under the License.
"""
System-level utilities and helper functions.
"""
import re
import sys
import unicodedata
import six
from magnetodbclient.openstack.common.gettextutils import _ # noqa
# Used for looking up extensions of text
# to their 'multiplied' byte amount
BYTE_MULTIPLIERS = {
'': 1,
't': 1024 ** 4,
'g': 1024 ** 3,
'm': 1024 ** 2,
'k': 1024,
}
BYTE_REGEX = re.compile(r'(^-?\d+)(\D*)')
TRUE_STRINGS = ('1', 't', 'true', 'on', 'y', 'yes')
FALSE_STRINGS = ('0', 'f', 'false', 'off', 'n', 'no')
SLUGIFY_STRIP_RE = re.compile(r"[^\w\s-]")
SLUGIFY_HYPHENATE_RE = re.compile(r"[-\s]+")
def int_from_bool_as_string(subject):
"""Interpret a string as a boolean and return either 1 or 0.
Any string value in:
('True', 'true', 'On', 'on', '1')
is interpreted as a boolean True.
Useful for JSON-decoded stuff and config file parsing
"""
return bool_from_string(subject) and 1 or 0
def bool_from_string(subject, strict=False):
"""Interpret a string as a boolean.
A case-insensitive match is performed such that strings matching 't',
'true', 'on', 'y', 'yes', or '1' are considered True and, when
`strict=False`, anything else is considered False.
Useful for JSON-decoded stuff and config file parsing.
If `strict=True`, unrecognized values, including None, will raise a
ValueError which is useful when parsing values passed in from an API call.
Strings yielding False are 'f', 'false', 'off', 'n', 'no', or '0'.
"""
if not isinstance(subject, six.string_types):
subject = str(subject)
lowered = subject.strip().lower()
if lowered in TRUE_STRINGS:
return True
elif lowered in FALSE_STRINGS:
return False
elif strict:
acceptable = ', '.join(
"'%s'" % s for s in sorted(TRUE_STRINGS + FALSE_STRINGS))
msg = _("Unrecognized value '%(val)s', acceptable values are:"
" %(acceptable)s") % {'val': subject,
'acceptable': acceptable}
raise ValueError(msg)
else:
return False
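# Illustrative examples (inputs are assumed):
#
#     bool_from_string('YES')                  # True
#     bool_from_string('maybe')                # False (lenient default)
#     bool_from_string('maybe', strict=True)   # raises ValueError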
def safe_decode(text, incoming=None, errors='strict'):
"""Decodes incoming str using `incoming` if they're not already unicode.
:param incoming: Text's current encoding
:param errors: Errors handling policy. See here for valid
values http://docs.python.org/2/library/codecs.html
:returns: text or a unicode `incoming` encoded
representation of it.
:raises TypeError: If text is not an instance of str
"""
if not isinstance(text, six.string_types):
raise TypeError("%s can't be decoded" % type(text))
if isinstance(text, six.text_type):
return text
if not incoming:
incoming = (sys.stdin.encoding or
sys.getdefaultencoding())
try:
return text.decode(incoming, errors)
except UnicodeDecodeError:
# Note(flaper87) If we get here, it means that
# sys.stdin.encoding / sys.getdefaultencoding
# didn't return a suitable encoding to decode
# text. This happens mostly when global LANG
# var is not set correctly and there's no
# default encoding. In this case, most likely
# python will use ASCII or ANSI encoders as
# default encodings but they won't be capable
# of decoding non-ASCII characters.
#
# Also, UTF-8 is being used since it's an ASCII
# extension.
return text.decode('utf-8', errors)
def safe_encode(text, incoming=None,
encoding='utf-8', errors='strict'):
"""Encodes incoming str/unicode using `encoding`.
If incoming is not specified, text is expected to be encoded with
current python's default encoding. (`sys.getdefaultencoding`)
:param incoming: Text's current encoding
:param encoding: Expected encoding for text (Default UTF-8)
:param errors: Errors handling policy. See here for valid
values http://docs.python.org/2/library/codecs.html
:returns: text or a bytestring `encoding` encoded
representation of it.
:raises TypeError: If text is not an instance of str
"""
if not isinstance(text, six.string_types):
raise TypeError(_("%s can't be encoded") % type(text).capitalize())
if not incoming:
incoming = (sys.stdin.encoding or
sys.getdefaultencoding())
if isinstance(text, six.text_type):
return text.encode(encoding, errors)
elif text and encoding != incoming:
# Decode text before encoding it with `encoding`
text = safe_decode(text, incoming, errors)
return text.encode(encoding, errors)
return text
def to_bytes(text, default=0):
"""Converts a string into an integer of bytes.
Looks at the last characters of the text to determine
what conversion is needed to turn the input text into a byte number.
Supports "B, K(B), M(B), G(B), and T(B)". (case insensitive)
:param text: String input for bytes size conversion.
:param default: Default return value when text is blank.
"""
match = BYTE_REGEX.search(text)
if match:
magnitude = int(match.group(1))
mult_key_org = match.group(2)
if not mult_key_org:
return magnitude
elif text:
msg = _('Invalid string format: %s') % text
raise TypeError(msg)
else:
return default
mult_key = mult_key_org.lower().replace('b', '', 1)
multiplier = BYTE_MULTIPLIERS.get(mult_key)
if multiplier is None:
msg = _('Unknown byte multiplier: %s') % mult_key_org
raise TypeError(msg)
return magnitude * multiplier
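# Illustrative examples (inputs are assumed):
#
#     to_bytes('10KB')   # 10 * 1024      -> 10240
#     to_bytes('1G')     # 1 * 1024 ** 3  -> 1073741824
#     to_bytes('')       # empty input    -> 0 (the default)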
def to_slug(value, incoming=None, errors="strict"):
"""Normalize string.
Convert to lowercase, remove non-word characters, and convert spaces
to hyphens.
Inspired by Django's `slugify` filter.
:param value: Text to slugify
:param incoming: Text's current encoding
:param errors: Errors handling policy. See here for valid
values http://docs.python.org/2/library/codecs.html
:returns: slugified unicode representation of `value`
:raises TypeError: If text is not an instance of str
"""
value = safe_decode(value, incoming, errors)
# NOTE(aababilov): no need to use safe_(encode|decode) here:
# encodings are always "ascii", error handling is always "ignore"
# and types are always known (first: unicode; second: str)
value = unicodedata.normalize("NFKD", value).encode(
"ascii", "ignore").decode("ascii")
value = SLUGIFY_STRIP_RE.sub("", value).strip().lower()
return SLUGIFY_HYPHENATE_RE.sub("-", value)
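# Illustrative example (input is assumed):
#
#     to_slug('Hello, World!')   # -> u'hello-world'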

View File

@ -0,0 +1,186 @@
# Copyright 2011 OpenStack Foundation.
# All Rights Reserved.
#
# Licensed under the Apache License, Version 2.0 (the "License"); you may
# not use this file except in compliance with the License. You may obtain
# a copy of the License at
#
# http://www.apache.org/licenses/LICENSE-2.0
#
# Unless required by applicable law or agreed to in writing, software
# distributed under the License is distributed on an "AS IS" BASIS, WITHOUT
# WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. See the
# License for the specific language governing permissions and limitations
# under the License.
"""
Time related utilities and helper functions.
"""
import calendar
import datetime
import iso8601
import six
# ISO 8601 extended time format with microseconds
_ISO8601_TIME_FORMAT_SUBSECOND = '%Y-%m-%dT%H:%M:%S.%f'
_ISO8601_TIME_FORMAT = '%Y-%m-%dT%H:%M:%S'
PERFECT_TIME_FORMAT = _ISO8601_TIME_FORMAT_SUBSECOND
def isotime(at=None, subsecond=False):
"""Stringify time in ISO 8601 format."""
if not at:
at = utcnow()
st = at.strftime(_ISO8601_TIME_FORMAT
if not subsecond
else _ISO8601_TIME_FORMAT_SUBSECOND)
tz = at.tzinfo.tzname(None) if at.tzinfo else 'UTC'
st += ('Z' if tz == 'UTC' else tz)
return st
def parse_isotime(timestr):
"""Parse time from ISO 8601 format."""
try:
return iso8601.parse_date(timestr)
except iso8601.ParseError as e:
raise ValueError(unicode(e))
except TypeError as e:
raise ValueError(unicode(e))
def strtime(at=None, fmt=PERFECT_TIME_FORMAT):
"""Returns formatted utcnow."""
if not at:
at = utcnow()
return at.strftime(fmt)
def parse_strtime(timestr, fmt=PERFECT_TIME_FORMAT):
"""Turn a formatted time back into a datetime."""
return datetime.datetime.strptime(timestr, fmt)
def normalize_time(timestamp):
"""Normalize time in arbitrary timezone to UTC naive object."""
offset = timestamp.utcoffset()
if offset is None:
return timestamp
return timestamp.replace(tzinfo=None) - offset
def is_older_than(before, seconds):
"""Return True if before is older than seconds."""
if isinstance(before, six.string_types):
before = parse_strtime(before).replace(tzinfo=None)
return utcnow() - before > datetime.timedelta(seconds=seconds)
def is_newer_than(after, seconds):
"""Return True if after is newer than seconds."""
if isinstance(after, six.string_types):
after = parse_strtime(after).replace(tzinfo=None)
return after - utcnow() > datetime.timedelta(seconds=seconds)
def utcnow_ts():
"""Timestamp version of our utcnow function."""
return calendar.timegm(utcnow().timetuple())
def utcnow():
"""Overridable version of utils.utcnow."""
if utcnow.override_time:
try:
return utcnow.override_time.pop(0)
except AttributeError:
return utcnow.override_time
return datetime.datetime.utcnow()
def iso8601_from_timestamp(timestamp):
"""Returns a iso8601 formated date from timestamp."""
return isotime(datetime.datetime.utcfromtimestamp(timestamp))
utcnow.override_time = None
def set_time_override(override_time=datetime.datetime.utcnow()):
"""Overrides utils.utcnow.
Make it return a constant time or a list thereof, one at a time.
"""
utcnow.override_time = override_time
def advance_time_delta(timedelta):
"""Advance overridden time using a datetime.timedelta."""
assert(not utcnow.override_time is None)
try:
for dt in utcnow.override_time:
dt += timedelta
except TypeError:
utcnow.override_time += timedelta
def advance_time_seconds(seconds):
"""Advance overridden time by seconds."""
advance_time_delta(datetime.timedelta(0, seconds))
def clear_time_override():
"""Remove the overridden time."""
utcnow.override_time = None
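# Illustrative sketch of how tests can freeze and advance time (dates are
# assumed):
#
#     set_time_override(datetime.datetime(2014, 1, 1))
#     utcnow()                   # -> datetime.datetime(2014, 1, 1, 0, 0)
#     advance_time_seconds(60)
#     utcnow()                   # -> datetime.datetime(2014, 1, 1, 0, 1)
#     clear_time_override()      # back to the real clock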
def marshall_now(now=None):
"""Make an rpc-safe datetime with microseconds.
Note: tzinfo is stripped, but not required for relative times.
"""
if not now:
now = utcnow()
return dict(day=now.day, month=now.month, year=now.year, hour=now.hour,
minute=now.minute, second=now.second,
microsecond=now.microsecond)
def unmarshall_time(tyme):
"""Unmarshall a datetime dict."""
return datetime.datetime(day=tyme['day'],
month=tyme['month'],
year=tyme['year'],
hour=tyme['hour'],
minute=tyme['minute'],
second=tyme['second'],
microsecond=tyme['microsecond'])
def delta_seconds(before, after):
"""Return the difference between two timing objects.
Compute the difference in seconds between two date, time, or
datetime objects (as a float, to microsecond resolution).
"""
delta = after - before
try:
return delta.total_seconds()
except AttributeError:
return ((delta.days * 24 * 3600) + delta.seconds +
float(delta.microseconds) / (10 ** 6))
def is_soon(dt, window):
"""Determines if time is going to happen in the next window seconds.
:param dt: the time
:param window: minimum seconds to remain to consider the time not soon
:return: True if expiration is within the given duration
"""
soon = (utcnow() + datetime.timedelta(seconds=window))
return normalize_time(dt) <= soon

488
magnetodbclient/shell.py Normal file
View File

@ -0,0 +1,488 @@
# Copyright 2012 OpenStack Foundation.
# All Rights Reserved
#
# Licensed under the Apache License, Version 2.0 (the "License"); you may
# not use this file except in compliance with the License. You may obtain
# a copy of the License at
#
# http://www.apache.org/licenses/LICENSE-2.0
#
# Unless required by applicable law or agreed to in writing, software
# distributed under the License is distributed on an "AS IS" BASIS, WITHOUT
# WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. See the
# License for the specific language governing permissions and limitations
# under the License.
#
"""
Command-line interface to the MagnetoDB APIs
"""
from __future__ import print_function
import argparse
import logging
import os
import sys
from cliff import app
from cliff import commandmanager
from magnetodbclient.common import clientmanager
from magnetodbclient.common import exceptions as exc
from magnetodbclient.common import utils
from magnetodbclient.openstack.common.gettextutils import _
from magnetodbclient.openstack.common import strutils
from magnetodbclient.version import __version__
VERSION = '1'
MAGNETODB_API_VERSION = '1'
def run_command(cmd, cmd_parser, sub_argv):
_argv = sub_argv
index = -1
values_specs = []
if '--' in sub_argv:
index = sub_argv.index('--')
_argv = sub_argv[:index]
values_specs = sub_argv[index:]
known_args, _values_specs = cmd_parser.parse_known_args(_argv)
cmd.values_specs = (index == -1 and _values_specs or values_specs)
return cmd.run(known_args)
def env(*_vars, **kwargs):
"""Search for the first defined of possibly many env vars.
Returns the first environment variable defined in vars, or
returns the default defined in kwargs.
"""
for v in _vars:
value = os.environ.get(v, None)
if value:
return value
return kwargs.get('default', '')
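# Illustrative example (variable name and default value are assumed):
#
#     env('OS_REGION_NAME', default='RegionOne')
#     # -> $OS_REGION_NAME if set and non-empty, otherwise 'RegionOne'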
COMMAND_V1 = {}
COMMANDS = {'1': COMMAND_V1}
class HelpAction(argparse.Action):
"""Provide a custom action so the -h and --help options
to the main app will print a list of the commands.
The commands are determined by checking the CommandManager
instance, passed in as the "default" value for the action.
"""
def __call__(self, parser, namespace, values, option_string=None):
outputs = []
max_len = 0
app = self.default
parser.print_help(app.stdout)
app.stdout.write(_('\nCommands for API v%s:\n') % app.api_version)
command_manager = app.command_manager
for name, ep in sorted(command_manager):
factory = ep.load()
cmd = factory(self, None)
one_liner = cmd.get_description().split('\n')[0]
outputs.append((name, one_liner))
max_len = max(len(name), max_len)
for (name, one_liner) in outputs:
app.stdout.write(' %s %s\n' % (name.ljust(max_len), one_liner))
sys.exit(0)
class MagnetoDBShell(app.App):
# verbose logging levels
WARNING_LEVEL = 0
INFO_LEVEL = 1
DEBUG_LEVEL = 2
CONSOLE_MESSAGE_FORMAT = '%(message)s'
DEBUG_MESSAGE_FORMAT = '%(levelname)s: %(name)s %(message)s'
log = logging.getLogger(__name__)
def __init__(self, apiversion):
super(MagnetoDBShell, self).__init__(
description=__doc__.strip(),
version=VERSION,
command_manager=commandmanager.CommandManager('magnetodb.cli'), )
self.commands = COMMANDS
for k, v in self.commands[apiversion].items():
self.command_manager.add_command(k, v)
# This is instantiated in initialize_app() only when using
# password flow auth
self.auth_client = None
self.api_version = apiversion
def build_option_parser(self, description, version):
"""Return an argparse option parser for this application.
Subclasses may override this method to extend
the parser with more global options.
:param description: full description of the application
:paramtype description: str
:param version: version number for the application
:paramtype version: str
"""
parser = argparse.ArgumentParser(
description=description,
add_help=False, )
parser.add_argument(
'--version',
action='version',
version=__version__, )
parser.add_argument(
'-v', '--verbose', '--debug',
action='count',
dest='verbose_level',
default=self.DEFAULT_VERBOSE_LEVEL,
help=_('Increase verbosity of output and show tracebacks on'
' errors. Can be repeated.'))
parser.add_argument(
'-q', '--quiet',
action='store_const',
dest='verbose_level',
const=0,
help=_('Suppress output except warnings and errors'))
parser.add_argument(
'-h', '--help',
action=HelpAction,
nargs=0,
default=self, # tricky
help=_("Show this help message and exit"))
# Global arguments
parser.add_argument(
'--os-auth-strategy', metavar='<auth-strategy>',
default=env('OS_AUTH_STRATEGY', default='keystone'),
help=_('Authentication strategy (Env: OS_AUTH_STRATEGY'
', default keystone). For now, any other value will'
' disable the authentication'))
parser.add_argument(
'--os_auth_strategy',
help=argparse.SUPPRESS)
parser.add_argument(
'--os-auth-url', metavar='<auth-url>',
default=env('OS_AUTH_URL'),
help=_('Authentication URL (Env: OS_AUTH_URL)'))
parser.add_argument(
'--os_auth_url',
help=argparse.SUPPRESS)
parser.add_argument(
'--os-tenant-name', metavar='<auth-tenant-name>',
default=env('OS_TENANT_NAME'),
help=_('Authentication tenant name (Env: OS_TENANT_NAME)'))
parser.add_argument(
'--os_tenant_name',
help=argparse.SUPPRESS)
parser.add_argument(
'--os-tenant-id', metavar='<auth-tenant-id>',
default=env('OS_TENANT_ID'),
help=_('Authentication tenant ID (Env: OS_TENANT_ID)'))
parser.add_argument(
'--os-username', metavar='<auth-username>',
default=utils.env('OS_USERNAME'),
help=_('Authentication username (Env: OS_USERNAME)'))
parser.add_argument(
'--os_username',
help=argparse.SUPPRESS)
parser.add_argument(
'--os-password', metavar='<auth-password>',
default=utils.env('OS_PASSWORD'),
help=_('Authentication password (Env: OS_PASSWORD)'))
parser.add_argument(
'--os_password',
help=argparse.SUPPRESS)
parser.add_argument(
'--os-region-name', metavar='<auth-region-name>',
default=env('OS_REGION_NAME'),
help=_('Authentication region name (Env: OS_REGION_NAME)'))
parser.add_argument(
'--os_region_name',
help=argparse.SUPPRESS)
parser.add_argument(
'--os-token', metavar='<token>',
default=env('OS_TOKEN'),
help=_('Defaults to env[OS_TOKEN]'))
parser.add_argument(
'--os_token',
help=argparse.SUPPRESS)
parser.add_argument(
'--service-type', metavar='<service-type>',
default=env('OS_KEYVALUE_SERVICE_TYPE', default='kv-storage'),
help=_('Defaults to env[OS_KEYVALUE_SERVICE_TYPE] or kv-storage.'))
parser.add_argument(
'--endpoint-type', metavar='<endpoint-type>',
default=env('OS_ENDPOINT_TYPE', default='publicURL'),
help=_('Defaults to env[OS_ENDPOINT_TYPE] or publicURL.'))
parser.add_argument(
'--os-url', metavar='<url>',
default=env('OS_URL'),
help=_('Defaults to env[OS_URL]'))
parser.add_argument(
'--os_url',
help=argparse.SUPPRESS)
parser.add_argument(
'--os-cacert',
metavar='<ca-certificate>',
default=env('OS_CACERT', default=None),
help=_("Specify a CA bundle file to use in "
"verifying a TLS (https) server certificate. "
"Defaults to env[OS_CACERT]"))
parser.add_argument(
'--insecure',
action='store_true',
default=env('MAGNETODBCLIENT_INSECURE', default=False),
help=_("Explicitly allow magnetodbclient to perform \"insecure\" "
"SSL (https) requests. The server's certificate will "
"not be verified against any certificate authorities. "
"This option should be used with caution."))
return parser
def _bash_completion(self):
"""Prints all of the commands and options for bash-completion."""
commands = set()
options = set()
for option, _action in self.parser._option_string_actions.items():
options.add(option)
for command_name, command in self.command_manager:
commands.add(command_name)
cmd_factory = command.load()
cmd = cmd_factory(self, None)
cmd_parser = cmd.get_parser('')
for option, _action in cmd_parser._option_string_actions.items():
options.add(option)
print(' '.join(commands | options))
def run(self, argv):
"""Equivalent to the main program for the application.
:param argv: input arguments and options
:paramtype argv: list of str
"""
try:
index = 0
command_pos = -1
help_pos = -1
help_command_pos = -1
for arg in argv:
if arg == 'bash-completion':
self._bash_completion()
return 0
if arg in self.commands[self.api_version]:
if command_pos == -1:
command_pos = index
elif arg in ('-h', '--help'):
if help_pos == -1:
help_pos = index
elif arg == 'help':
if help_command_pos == -1:
help_command_pos = index
index = index + 1
if command_pos > -1 and help_pos > command_pos:
argv = ['help', argv[command_pos]]
if help_command_pos > -1 and command_pos == -1:
argv[help_command_pos] = '--help'
self.options, remainder = self.parser.parse_known_args(argv)
self.configure_logging()
self.interactive_mode = not remainder
self.initialize_app(remainder)
except Exception as err:
if self.options.verbose_level == self.DEBUG_LEVEL:
self.log.exception(unicode(err))
raise
else:
self.log.error(unicode(err))
return 1
result = 1
if self.interactive_mode:
_argv = [sys.argv[0]]
sys.argv = _argv
result = self.interact()
else:
result = self.run_subcommand(remainder)
return result
def run_subcommand(self, argv):
subcommand = self.command_manager.find_command(argv)
cmd_factory, cmd_name, sub_argv = subcommand
cmd = cmd_factory(self, self.options)
err = None
result = 1
try:
self.prepare_to_run_command(cmd)
full_name = (cmd_name
if self.interactive_mode
else ' '.join([self.NAME, cmd_name])
)
cmd_parser = cmd.get_parser(full_name)
return run_command(cmd, cmd_parser, sub_argv)
except Exception as err:
if self.options.verbose_level == self.DEBUG_LEVEL:
self.log.exception(unicode(err))
else:
self.log.error(unicode(err))
try:
self.clean_up(cmd, result, err)
except Exception as err2:
if self.options.verbose_level == self.DEBUG_LEVEL:
self.log.exception(unicode(err2))
else:
self.log.error(_('Could not clean up: %s'), unicode(err2))
if self.options.verbose_level == self.DEBUG_LEVEL:
raise
else:
try:
self.clean_up(cmd, result, None)
except Exception as err3:
if self.options.verbose_level == self.DEBUG_LEVEL:
self.log.exception(unicode(err3))
else:
self.log.error(_('Could not clean up: %s'), unicode(err3))
return result
def authenticate_user(self):
"""Make sure the user has provided all of the authentication
info we need.
"""
if self.options.os_auth_strategy == 'keystone':
if self.options.os_token or self.options.os_url:
# Token flow auth takes priority
if not self.options.os_token:
raise exc.CommandError(
_("You must provide a token via"
" either --os-token or env[OS_TOKEN]"))
if not self.options.os_url:
raise exc.CommandError(
_("You must provide a service URL via"
" either --os-url or env[OS_URL]"))
else:
# Validate password flow auth
if not self.options.os_username:
raise exc.CommandError(
_("You must provide a username via"
" either --os-username or env[OS_USERNAME]"))
if not self.options.os_password:
raise exc.CommandError(
_("You must provide a password via"
" either --os-password or env[OS_PASSWORD]"))
if (not self.options.os_tenant_name
and not self.options.os_tenant_id):
raise exc.CommandError(
_("You must provide a tenant_name or tenant_id via"
" --os-tenant-name, env[OS_TENANT_NAME]"
" --os-tenant-id, or via env[OS_TENANT_ID]"))
if not self.options.os_auth_url:
raise exc.CommandError(
_("You must provide an auth url via"
" either --os-auth-url or via env[OS_AUTH_URL]"))
else: # not keystone
if not self.options.os_url:
raise exc.CommandError(
_("You must provide a service URL via"
" either --os-url or env[OS_URL]"))
self.client_manager = clientmanager.ClientManager(
token=self.options.os_token,
url=self.options.os_url,
auth_url=self.options.os_auth_url,
tenant_name=self.options.os_tenant_name,
tenant_id=self.options.os_tenant_id,
username=self.options.os_username,
password=self.options.os_password,
region_name=self.options.os_region_name,
api_version=self.api_version,
auth_strategy=self.options.os_auth_strategy,
service_type=self.options.service_type,
endpoint_type=self.options.endpoint_type,
insecure=self.options.insecure,
ca_cert=self.options.os_cacert,
log_credentials=True)
return
def initialize_app(self, argv):
"""Global app init bits:
* set up API versions
* validate authentication info
"""
super(MagnetoDBShell, self).initialize_app(argv)
self.api_version = {'keyvalue': self.api_version}
# If the user is not asking for help, make sure they
# have given us auth.
cmd_name = None
if argv:
cmd_info = self.command_manager.find_command(argv)
cmd_factory, cmd_name, sub_argv = cmd_info
if self.interactive_mode or cmd_name != 'help':
self.authenticate_user()
def clean_up(self, cmd, result, err):
self.log.debug('clean_up %s', cmd.__class__.__name__)
if err:
self.log.debug(_('Got an error: %s'), unicode(err))
def configure_logging(self):
"""Create logging handlers for any log output."""
root_logger = logging.getLogger('')
# Set up logging to a file
root_logger.setLevel(logging.DEBUG)
# Send higher-level messages to the console via stderr
console = logging.StreamHandler(self.stderr)
console_level = {self.WARNING_LEVEL: logging.WARNING,
self.INFO_LEVEL: logging.INFO,
self.DEBUG_LEVEL: logging.DEBUG,
}.get(self.options.verbose_level, logging.DEBUG)
console.setLevel(console_level)
if logging.DEBUG == console_level:
formatter = logging.Formatter(self.DEBUG_MESSAGE_FORMAT)
else:
formatter = logging.Formatter(self.CONSOLE_MESSAGE_FORMAT)
console.setFormatter(formatter)
root_logger.addHandler(console)
return
def main(argv=sys.argv[1:]):
try:
return MagnetoDBShell(MAGNETODB_API_VERSION).run(
map(strutils.safe_decode, argv))
except exc.MagnetoDBClientException:
return 1
except Exception as e:
print(unicode(e))
return 1
if __name__ == "__main__":
sys.exit(main(sys.argv[1:]))

View File

@ -0,0 +1,363 @@
# Copyright 2012 OpenStack Foundation.
# All Rights Reserved
#
# Licensed under the Apache License, Version 2.0 (the "License"); you may
# not use this file except in compliance with the License. You may obtain
# a copy of the License at
#
# http://www.apache.org/licenses/LICENSE-2.0
#
# Unless required by applicable law or agreed to in writing, software
# distributed under the License is distributed on an "AS IS" BASIS, WITHOUT
# WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. See the
# License for the specific language governing permissions and limitations
# under the License.
#
import httplib
import logging
import time
import urllib
import six.moves.urllib.parse as urlparse
from magnetodbclient import client
from magnetodbclient.common import _
from magnetodbclient.common import exceptions
from magnetodbclient.common import serializer
from magnetodbclient.common import utils
_logger = logging.getLogger(__name__)
def exception_handler_v1(status_code, error_content):
"""Exception handler for API v1 client
This routine generates the appropriate
MagnetoDB exception according to the contents of the
response body
:param status_code: HTTP error status code
:param error_content: deserialized body of error response
"""
error_dict = None
if isinstance(error_content, dict):
error_dict = error_content.get('MagnetoDBError')
# Find real error type
bad_magnetodb_error_flag = False
if error_dict:
# If the MagnetoDBError key is found, it should contain
# 'message' and 'type' keys.
try:
error_type = error_dict['type']
error_message = error_dict['message']
if error_dict['detail']:
error_message += "\n" + error_dict['detail']
except Exception:
bad_magnetodb_error_flag = True
if not bad_magnetodb_error_flag:
# If corresponding exception is defined, use it.
client_exc = getattr(exceptions, '%sClient' % error_type, None)
# Otherwise look up per status-code client exception
if not client_exc:
client_exc = exceptions.HTTP_EXCEPTION_MAP.get(status_code)
if client_exc:
raise client_exc(message=error_message,
status_code=status_code)
else:
raise exceptions.MagnetoDBClientException(
status_code=status_code, message=error_message)
else:
raise exceptions.MagnetoDBClientException(status_code=status_code,
message=error_dict)
else:
message = None
if isinstance(error_content, dict):
message = error_content.get('message')
if message:
raise exceptions.MagnetoDBClientException(status_code=status_code,
message=message)
# If we end up here the exception was not a magnetodb error
msg = "%s-%s" % (status_code, error_content)
raise exceptions.MagnetoDBClientException(status_code=status_code,
message=msg)
class Client(object):
"""Client for the OpenStack MagnetoDB v1 API.
:param string username: Username for authentication. (optional)
:param string password: Password for authentication. (optional)
:param string token: Token for authentication. (optional)
:param string tenant_name: Tenant name. (optional)
:param string tenant_id: Tenant id. (optional)
:param string auth_url: Keystone service endpoint for authorization.
:param string service_type: Keyvalue service type to pull from the
keystone catalog (e.g. 'keyvalue') (optional)
:param string endpoint_type: Keyvalue service endpoint type to pull from
the keystone catalog (e.g. 'publicURL',
'internalURL', or 'adminURL') (optional)
:param string region_name: Name of a region to select when choosing an
endpoint from the service catalog.
:param string endpoint_url: A user-supplied endpoint URL for the magnetodb
service. Lazy-authentication is possible for API
service calls if endpoint is set at
instantiation. (optional)
:param integer timeout: Allows customization of the timeout for client
http requests. (optional)
:param bool insecure: SSL certificate validation. (optional)
:param string ca_cert: SSL CA bundle file to use. (optional)
Example::
from magnetodbclient.v1 import client
magnetodb = client.Client(username=USER,
password=PASS,
tenant_name=TENANT_NAME,
auth_url=KEYSTONE_URL)
tables = magnetodb.list_tables(TENANT_ID)
...
"""
base_path = "/%(tenant_id)s/data"
tables_path = "/tables"
table_path = "/tables/%s"
put_item_path = "/put_item"
delete_item_path = "/delete_item"
get_item_path = "/get_item"
query_path = "/query"
scan_path = "/scan"
batch_write_item_path = "/batch_write_item"
batch_read_item_path = "/batch_read_item"
# 8192 is the default max URI len for eventlet.wsgi.server
MAX_URI_LEN = 8192
def get_base_path(self, tenant_id):
return self.base_path % {'tenant_id': tenant_id}
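# Illustrative sketch (the tenant id is assumed): with tenant_id 'abc123',
# get_base_path() yields '/abc123/data', so list_tables() issues a GET to
# endpoint_url + '/v1/abc123/data/tables' once do_request() adds the
# '/v1' action prefix.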
def create_table(self, tenant_id, request_body):
"""Create table."""
url = self.get_base_path(tenant_id) + self.tables_path
return self.post(url, request_body)
def delete_table(self, tenant_id, table_name):
"""Delete the specified table."""
url = self.get_base_path(tenant_id) + self.table_path % table_name
return self.delete(url)
def list_tables(self, tenant_id):
"""List tables."""
url = self.get_base_path(tenant_id) + self.tables_path
return self.get(url)
def describe_table(self, tenant_id, table_name):
"""Describe the specified table."""
url = self.get_base_path(tenant_id) + self.table_path % table_name
return self.get(url)
def put_item(self, tenant_id, table_name, request_body):
"""Put item to the specified table."""
url = self.get_base_path(tenant_id) + self.table_path % table_name
url += self.put_item_path
return self.post(url, request_body)
    def update_item(self, tenant_id, table_name, request_body):
        """Update item in the specified table."""
        url = self.get_base_path(tenant_id) + self.table_path % table_name
        url += self.update_item_path
        return self.post(url, request_body)
def delete_item(self, tenant_id, table_name, request_body):
"""Delete item."""
url = self.get_base_path(tenant_id) + self.table_path % table_name
url += self.delete_item_path
return self.post(url, request_body)
def get_item(self, tenant_id, table_name, request_body):
"""Get item."""
url = self.get_base_path(tenant_id) + self.table_path % table_name
url += self.get_item_path
return self.post(url, request_body)
def query(self, tenant_id, table_name, request_body):
"""Query the specified table."""
url = self.get_base_path(tenant_id) + self.table_path % table_name
url += self.query_path
return self.post(url, request_body)
def scan(self, tenant_id, table_name, request_body):
"""Scan the specified table."""
url = self.get_base_path(tenant_id) + self.table_path % table_name
url += self.scan_path
return self.post(url, request_body)
def batch_write_item(self, tenant_id, request_items):
"""Batch write item."""
url = self.get_base_path(tenant_id)
url += self.batch_write_item_path
return self.post(url, request_items)
def batch_read_item(self, tenant_id, request_items):
"""Batch read item."""
url = self.get_base_path(tenant_id)
url += self.batch_read_item_path
return self.post(url, request_items)
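
    # Example of the composition above: put_item for tenant id "abc123" and
    # table "users" posts to "/abc123/data/tables/users/put_item"; do_request()
    # then prepends the version prefix, giving
    # "/v1/abc123/data/tables/users/put_item".
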
def __init__(self, **kwargs):
"""Initialize a new client for the MagnetoDB v1 API."""
super(Client, self).__init__()
self.httpclient = client.HTTPClient(**kwargs)
self.version = '1'
self.content_type = 'application/json'
self.action_prefix = "/v%s" % (self.version)
self.retries = 0
self.retry_interval = 1
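        # retries and retry_interval drive retry_request() below: the default
        # of zero retries means each idempotent request is attempted exactly
        # once; set retries (and, if desired, retry_interval) on the instance
        # to enable retries.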
def _handle_fault_response(self, status_code, response_body):
# Create exception with HTTP status code and message
_logger.debug(_("Error message: %s"), response_body)
# Add deserialized error message to exception arguments
try:
des_error_body = self.deserialize(response_body, status_code)
except Exception:
            # If the body cannot be deserialized, it is probably not a
            # MagnetoDB error
des_error_body = {'message': response_body}
# Raise the appropriate exception
exception_handler_v1(status_code, des_error_body)
def _check_uri_length(self, action):
uri_len = len(self.httpclient.endpoint_url) + len(action)
if uri_len > self.MAX_URI_LEN:
raise exceptions.RequestURITooLong(
excess=uri_len - self.MAX_URI_LEN)
def do_request(self, method, action, body=None, headers=None, params=None):
action = self.action_prefix + action
if type(params) is dict and params:
params = utils.safe_encode_dict(params)
action += '?' + urllib.urlencode(params, doseq=1)
# Ensure client always has correct uri - do not guesstimate anything
self.httpclient.authenticate_and_fetch_endpoint_url()
self._check_uri_length(action)
if body:
body = self.serialize(body)
self.httpclient.content_type = self.content_type
resp, replybody = self.httpclient.do_request(action, method, body=body)
status_code = self.get_status_code(resp)
if status_code in (httplib.OK,
httplib.CREATED,
httplib.ACCEPTED,
httplib.NO_CONTENT):
return self.deserialize(replybody, status_code)
else:
if not replybody:
replybody = resp.reason
self._handle_fault_response(status_code, replybody)
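
    # Only OK/CREATED/ACCEPTED/NO_CONTENT replies are deserialized by
    # do_request(); every other status is routed through
    # _handle_fault_response(), which always raises.
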
def get_auth_info(self):
return self.httpclient.get_auth_info()
def get_status_code(self, response):
"""Returns the integer status code from the response.
Either a Webob.Response (used in testing) or httplib.Response
is returned.
"""
if hasattr(response, 'status_int'):
return response.status_int
else:
return response.status
def serialize(self, data):
"""Serializes a dictionary into either xml or json.
A dictionary with a single key can be passed and
it can contain any structure.
"""
if data is None:
return None
elif type(data) is dict:
return serializer.Serializer().serialize(data, self.content_type)
else:
raise Exception(_("Unable to serialize object of type = '%s'") %
type(data))
def deserialize(self, data, status_code):
"""Deserializes an xml or json string into a dictionary."""
if status_code == 204:
return data
return serializer.Serializer().deserialize(
data, self.content_type)['body']
def retry_request(self, method, action, body=None,
headers=None, params=None):
"""Call do_request with the default retry configuration.
Only idempotent requests should retry failed connection attempts.
        :raises: ConnectionFailed if the maximum number of retries is exceeded
"""
max_attempts = self.retries + 1
for i in range(max_attempts):
try:
return self.do_request(method, action, body=body,
headers=headers, params=params)
except exceptions.ConnectionFailed:
# Exception has already been logged by do_request()
if i < self.retries:
_logger.debug(_('Retrying connection '
'to MagnetoDB service'))
time.sleep(self.retry_interval)
raise exceptions.ConnectionFailed(reason=_("Maximum attempts reached"))
def delete(self, action, body=None, headers=None, params=None):
return self.retry_request("DELETE", action, body=body,
headers=headers, params=params)
def get(self, action, body=None, headers=None, params=None):
return self.retry_request("GET", action, body=body,
headers=headers, params=params)
def post(self, action, body=None, headers=None, params=None):
        # Do not retry POST requests: a timed-out request may already have
        # been applied server-side, which would create orphaned objects.
return self.do_request("POST", action, body=body,
headers=headers, params=params)
def put(self, action, body=None, headers=None, params=None):
return self.retry_request("PUT", action, body=body,
headers=headers, params=params)
def list(self, collection, path, retrieve_all=True, **params):
if retrieve_all:
res = []
for r in self._pagination(collection, path, **params):
res.extend(r[collection])
return {collection: res}
else:
return self._pagination(collection, path, **params)
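
    # list() either walks every page via _pagination() and returns a single
    # {collection: [...]} dict, or, with retrieve_all=False, returns the
    # generator itself so callers can consume pages lazily.
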
def _pagination(self, collection, path, **params):
if params.get('page_reverse', False):
linkrel = 'previous'
else:
linkrel = 'next'
next = True
while next:
res = self.get(path, params=params)
yield res
next = False
try:
for link in res['%s_links' % collection]:
if link['rel'] == linkrel:
query_str = urlparse.urlparse(link['href']).query
params = urlparse.parse_qs(query_str)
next = True
break
except KeyError:
break
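
A minimal usage sketch for the client above, assembled only from the
constructor keywords in the Client docstring and the methods defined in this
file. The request-body layouts (table_name, attribute_definitions, key_schema,
item) and the tenant/table values are illustrative assumptions about the
MagnetoDB data API rather than anything this module defines, so check them
against the service documentation before relying on them.

    from magnetodbclient.v1 import client

    # Username/password flow; per the docstring, token= plus endpoint_url=
    # (lazy authentication) would work as well.
    magnetodb = client.Client(username='demo',
                              password='secret',
                              tenant_name='demo',
                              auth_url='http://127.0.0.1:5000/v2.0')

    tenant_id = 'abc123'  # placeholder project id

    # Assumed request-body layout for table creation.
    magnetodb.create_table(tenant_id, {
        'table_name': 'users',
        'attribute_definitions': [{'attribute_name': 'id',
                                   'attribute_type': 'S'}],
        'key_schema': [{'attribute_name': 'id', 'key_type': 'HASH'}],
    })

    # Assumed item layout for a single write.
    magnetodb.put_item(tenant_id, 'users',
                       {'item': {'id': {'S': 'user-1'}}})

    print(magnetodb.list_tables(tenant_id))
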

@ -0,0 +1,22 @@
# Copyright (c) 2013 Hewlett-Packard Development Company, L.P.
# All Rights Reserved
#
# Licensed under the Apache License, Version 2.0 (the "License"); you may
# not use this file except in compliance with the License. You may obtain
# a copy of the License at
#
# http://www.apache.org/licenses/LICENSE-2.0
#
# Unless required by applicable law or agreed to in writing, software
# distributed under the License is distributed on an "AS IS" BASIS, WITHOUT
# WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. See the
# License for the specific language governing permissions and limitations
# under the License.
#
# vim: tabstop=4 shiftwidth=4 softtabstop=4
# @author: Carl Baldwin, Hewlett-Packard
import pbr.version

__version__ = pbr.version.VersionInfo('python-magnetodbclient').version_string()

7
openstack-common.conf Normal file
@ -0,0 +1,7 @@
[DEFAULT]

# The list of modules to copy from openstack-common
modules=gettextutils,jsonutils,strutils,timeutils

# The base module to hold the copy of openstack.common
base=magnetodbclient

9
requirements.txt Normal file
@ -0,0 +1,9 @@
pbr>=0.6,!=0.7,<1.0
argparse
cliff>=1.4.3
httplib2>=0.7.5
iso8601>=0.1.9
netaddr>=0.7.6
simplejson>=2.0.9
six>=1.5.2
Babel>=1.3

39
setup.cfg Normal file
@ -0,0 +1,39 @@
[metadata]
name = python-magnetodbclient
summary = CLI and Client Library for OpenStack KeyValue Storage
description-file =
    README.rst
author = OpenStack
author-email = openstack-dev@lists.openstack.org
home-page = http://www.openstack.org/
classifier =
    Environment :: OpenStack
    Intended Audience :: Developers
    Intended Audience :: Information Technology
    Intended Audience :: System Administrators
    License :: OSI Approved :: Apache Software License
    Operating System :: POSIX :: Linux
    Programming Language :: Python
    Programming Language :: Python :: 2
    Programming Language :: Python :: 2.7
    Programming Language :: Python :: 2.6

[files]
packages =
    magnetodbclient

[global]
setup-hooks =
    pbr.hooks.setup_hook

[entry_points]
console_scripts =
    magnetodb = magnetodbclient.shell:main

[build_sphinx]
all_files = 1
build-dir = doc/build
source-dir = doc/source

[wheel]
universal = 1

22
setup.py Normal file
@ -0,0 +1,22 @@
#!/usr/bin/env python
# Copyright (c) 2013 Hewlett-Packard Development Company, L.P.
#
# Licensed under the Apache License, Version 2.0 (the "License");
# you may not use this file except in compliance with the License.
# You may obtain a copy of the License at
#
# http://www.apache.org/licenses/LICENSE-2.0
#
# Unless required by applicable law or agreed to in writing, software
# distributed under the License is distributed on an "AS IS" BASIS,
# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or
# implied.
# See the License for the specific language governing permissions and
# limitations under the License.

# THIS FILE IS MANAGED BY THE GLOBAL REQUIREMENTS REPO - DO NOT EDIT
import setuptools

setuptools.setup(
setup_requires=['pbr'],
pbr=True)

11
test-requirements.txt Normal file
@ -0,0 +1,11 @@
hacking>=0.8.0,<0.9
cliff-tablib>=1.0
coverage>=3.6
discover
fixtures>=0.3.14
mox>=0.5.3
python-subunit>=0.0.18
sphinx>=1.1.2,<1.2
testrepository>=0.0.18
testtools>=0.9.34

@ -0,0 +1,27 @@
_magnetodb_opts="" # lazy init
_magnetodb_flags="" # lazy init
_magnetodb_opts_exp="" # lazy init
_magnetodb()
{
local cur prev nbc cflags
COMPREPLY=()
cur="${COMP_WORDS[COMP_CWORD]}"
prev="${COMP_WORDS[COMP_CWORD-1]}"
if [ "x$_magnetodb_opts" == "x" ] ; then
nbc="`magnetodb bash-completion`"
_magnetodb_opts="`echo "$nbc" | sed -e "s/--[a-z0-9_-]*//g" -e "s/\s\s*/ /g"`"
_magnetodb_flags="`echo " $nbc" | sed -e "s/ [^-][^-][a-z0-9_-]*//g" -e "s/\s\s*/ /g"`"
_magnetodb_opts_exp="`echo "$_magnetodb_opts" | sed -e "s/\s/|/g"`"
fi
if [[ " ${COMP_WORDS[@]} " =~ " "($_magnetodb_opts_exp)" " && "$prev" != "help" ]] ; then
COMPLETION_CACHE=~/.magnetodbclient/*/*-cache
cflags="$_magnetodb_flags "$(cat $COMPLETION_CACHE 2> /dev/null | tr '\n' ' ')
COMPREPLY=($(compgen -W "${cflags}" -- ${cur}))
else
COMPREPLY=($(compgen -W "${_magnetodb_opts}" -- ${cur}))
fi
return 0
}
complete -F _magnetodb magnetodb

40
tox.ini Normal file
@ -0,0 +1,40 @@
[tox]
envlist = py26,py27,py33,pypy,pep8
minversion = 1.6
skipsdist = True

[testenv]
setenv = VIRTUAL_ENV={envdir}
         LANG=en_US.UTF-8
         LANGUAGE=en_US:en
         LC_ALL=C
usedevelop = True
install_command = pip install -U {opts} {packages}
deps = -r{toxinidir}/requirements.txt
       -r{toxinidir}/test-requirements.txt
commands = python setup.py testr --testr-args='{posargs}'

[testenv:pypy]
deps = setuptools<3.2
       -r{toxinidir}/requirements.txt
       -r{toxinidir}/test-requirements.txt

[testenv:pep8]
commands = flake8
distribute = false

[testenv:venv]
commands = {posargs}

[testenv:cover]
commands = python setup.py testr --coverage --testr-args='{posargs}'

[tox:jenkins]
downloadcache = ~/cache/pip

[flake8]
# E125 continuation line does not distinguish itself from next logical line
# H302 import only modules
ignore = E125,H302
show-source = true
exclude=.venv,.git,.tox,dist,doc,*openstack/common*,*lib/python*,*egg,tools