Freezer API support for client registration

Adds endpoints to the API for the registration of freezer clients

Change-Id: I1ca2a5e0021d383df70dfd001ab12967714c35bc
Implements: blueprint freezerclient-registration
Fabrizio Vanni 2015-04-27 17:23:13 +01:00
parent 94d558c25a
commit c86f04978e
17 changed files with 785 additions and 428 deletions

View File

@ -94,6 +94,14 @@ GET /v1/backups/{backup_id} Get backup details
UPDATE /v1/backups/{backup_id} Updates the specified backup
DELETE /v1/backups/{backup_id} Deletes the specified backup
Freezer clients management
--------------------------
GET /v1/clients(?limit,offset) Lists registered clients
POST /v1/clients Creates client entry
GET /v1/clients/{freezerc_id} Get client details
UPDATE /v1/clients/{freezerc_id} Updates the specified client information
DELETE /v1/clients/{freezerc_id} Deletes the specified client information
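A hedged usage sketch of the endpoints above (not part of the committed documentation): the base URL, token and client values are placeholders, and authentication is assumed to be handled by the keystone middleware in front of the API.

    import json
    import requests

    API = 'http://localhost:9090/v1'                    # placeholder endpoint
    HEADERS = {'X-Auth-Token': '<keystone token>',      # placeholder credential
               'Content-Type': 'application/json'}

    # POST /v1/clients -- register this freezer client
    client_doc = {'client_id': 'mytenant_myhostname',
                  'description': 'example freezer client'}
    r = requests.post(API + '/clients', headers=HEADERS,
                      data=json.dumps(client_doc))
    print(r.status_code, r.json())    # 201 {'client_id': 'mytenant_myhostname'}

    # GET /v1/clients?limit=10&offset=0 -- list registered clients
    r = requests.get(API + '/clients', headers=HEADERS,
                     params={'limit': 10, 'offset': 0})
    print(r.json())                   # {'clients': [...]}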
Data Structures
===============
@ -146,3 +154,24 @@ It stores and returns the information provided in this form:
...
}
}
Freezer Client document structure
---------------------------------
Identifies a freezer client for the purpose of sending actions to it
# client_info document contains information relevant for client identification
client_info:=
{
"client_id": string actually a concatenation "tenant-id_hostname"
"description": string
"config_id": string # configuration in use by the client
}
# client_type document embeds the client_info and adds user_id
client_type :=
{
"client" : client_info document,
"user_id": string, # owner of the information (OS X-User-Id, keystone provided, added by api)
}
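For concreteness, a hedged example of the two documents with illustrative values (modelled on the fixtures used in the unit tests further below):

    # example client_info document as posted by a freezer client
    client_info = {
        "client_id": "test-tenant_5253_test-hostname_09544",
        "description": "some useful text here",
        "config_id": "config_id_contains_uuid_of_config"
    }

    # example client_type document as stored by the api: the posted
    # client_info is wrapped and the keystone-provided user_id is added
    client_entry = {
        "client": client_info,
        "user_id": "user_id-is-provided-keystone"
    }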

View File

@ -20,6 +20,7 @@ Hudson (tjh@cryptsoft.com).
"""
from freezer_api.api.v1 import backups
from freezer_api.api.v1 import clients
from freezer_api.api.v1 import homedoc
VERSION = {
@ -44,5 +45,12 @@ def public_endpoints(storage_driver):
backups.BackupsCollectionResource(storage_driver)),
('/backups/{backup_id}',
backups.BackupsResource(storage_driver))
backups.BackupsResource(storage_driver)),
('/clients',
clients.ClientsCollectionResource(storage_driver)),
('/clients/{client_id}',
clients.ClientsResource(storage_driver)),
]
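For context, a hedged sketch of how (route, resource) pairs like the ones returned by public_endpoints() are typically wired into a falcon WSGI application; the factory function and the '/v1' prefix here are illustrative, not the actual freezer_api.cmd.api code.

    import falcon

    def build_app(storage_driver):
        # illustrative only: register every (route, resource) pair
        # returned by public_endpoints() on a fresh falcon application
        app = falcon.API()
        for route, resource in public_endpoints(storage_driver):
            app.add_route('/v1' + route, resource)
        return app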

View File

@ -21,6 +21,7 @@ Hudson (tjh@cryptsoft.com).
import falcon
from freezer_api.common import exceptions
import logging
class BackupsCollectionResource(object):
@ -31,9 +32,13 @@ class BackupsCollectionResource(object):
self.db = storage_driver
def on_get(self, req, resp):
# GET /v1/backups(?limit,marker) Lists backups
# GET /v1/backups(?limit,offset) Lists backups
user_id = req.get_header('X-User-ID')
obj_list = self.db.get_backup_list(user_id=user_id)
offset = req.get_param_as_int('offset') or 0
limit = req.get_param_as_int('limit') or 10
search = req.context.get('doc', {})
obj_list = self.db.get_backup(user_id=user_id, offset=offset,
limit=limit, search=search)
req.context['result'] = {'backups': obj_list}
def on_post(self, req, resp):
@ -47,7 +52,7 @@ class BackupsCollectionResource(object):
user_name = req.get_header('X-User-Name')
user_id = req.get_header('X-User-ID')
backup_id = self.db.add_backup(
user_id=user_id, user_name=user_name, data=doc)
user_id=user_id, user_name=user_name, doc=doc)
resp.status = falcon.HTTP_201
req.context['result'] = {'backup_id': backup_id}
@ -63,7 +68,10 @@ class BackupsResource(object):
# GET /v1/backups/{backup_id} Get backup details
user_id = req.get_header('X-User-ID')
obj = self.db.get_backup(user_id=user_id, backup_id=backup_id)
req.context['result'] = obj
if obj:
req.context['result'] = obj
else:
resp.status = falcon.HTTP_404
def on_delete(self, req, resp, backup_id):
# DELETE /v1/backups/{backup_id} Deletes the specified backup

View File

@ -0,0 +1,80 @@
"""
Copyright 2014 Hewlett-Packard
Licensed under the Apache License, Version 2.0 (the "License");
you may not use this file except in compliance with the License.
You may obtain a copy of the License at
http://www.apache.org/licenses/LICENSE-2.0
Unless required by applicable law or agreed to in writing, software
distributed under the License is distributed on an "AS IS" BASIS,
WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
See the License for the specific language governing permissions and
limitations under the License.
This product includes cryptographic software written by Eric Young
(eay@cryptsoft.com). This product includes software written by Tim
Hudson (tjh@cryptsoft.com).
========================================================================
"""
import falcon
from freezer_api.common import exceptions
class ClientsCollectionResource(object):
"""
Handler for endpoint: /v1/clients
"""
def __init__(self, storage_driver):
self.db = storage_driver
def on_get(self, req, resp):
# GET /v1/clients(?limit,offset)     Lists registered clients
user_id = req.get_header('X-User-ID')
offset = req.get_param_as_int('offset') or 0
limit = req.get_param_as_int('limit') or 10
search = req.context.get('doc', {})
obj_list = self.db.get_client(user_id=user_id, offset=offset,
limit=limit, search=search)
req.context['result'] = {'clients': obj_list}
def on_post(self, req, resp):
# POST /v1/clients Creates client entry
try:
doc = req.context['doc']
except KeyError:
raise exceptions.BadDataFormat(
message='Missing request body',
resp_body={'error': 'missing request body'})
user_id = req.get_header('X-User-ID')
client_id = self.db.add_client(
user_id=user_id, doc=doc)
resp.status = falcon.HTTP_201
req.context['result'] = {'client_id': client_id}
class ClientsResource(object):
"""
Handler for endpoint: /v1/clients/{client_id}
"""
def __init__(self, storage_driver):
self.db = storage_driver
def on_get(self, req, resp, client_id):
# GET /v1/clients/{client_id}     Get client details
user_id = req.get_header('X-User-ID') or ''
obj = self.db.get_client(user_id=user_id, client_id=client_id)
if obj:
req.context['result'] = obj[0]
else:
resp.status = falcon.HTTP_404
def on_delete(self, req, resp, client_id):
# DELETE /v1/clients/{client_id}     Deletes the specified client information
user_id = req.get_header('X-User-ID')
self.db.delete_client(
user_id=user_id, client_id=client_id)
req.context['result'] = {'client_id': client_id}
resp.status = falcon.HTTP_204

View File

@ -43,14 +43,6 @@ class FreezerAPIException(Exception):
req.context['result'] = {'error': 'internal server error'}
class ObjectNotFound(FreezerAPIException):
@staticmethod
def handle(ex, req, resp, params):
resp.status = falcon.HTTP_404
ex.resp_body.update({'found': False})
req.context['result'] = ex.resp_body
class BadDataFormat(FreezerAPIException):
@staticmethod
def handle(ex, req, resp, params):
@ -76,7 +68,6 @@ class StorageEngineError(FreezerAPIException):
exception_handlers_catalog = [
ObjectNotFound,
BadDataFormat,
DocumentExists,
StorageEngineError

View File

@ -30,9 +30,8 @@ opt_group = cfg.OptGroup(name='storage',
storage_opts = [
cfg.StrOpt('db',
default='simpledict',
help='specify the storage db to use: simpledoct (default),'
' elasticsearch'),
default='elasticsearch',
help='specify the storage db to use: elasticsearch (default)'),
cfg.StrOpt('endpoint',
default='http://localhost:9200',
help='specify the storage endpoint')
@ -45,10 +44,7 @@ CONF.register_opts(storage_opts, opt_group)
def get_db():
db_engine = CONF.storage.db
if db_engine == 'simpledict':
logging.info('Storage backend: simple dictionary')
db = simpledict.SimpleDictStorageEngine()
elif db_engine == 'elasticsearch':
if db_engine == 'elasticsearch':
endpoint = CONF.storage.endpoint
logging.info('Storage backend: Elasticsearch at {0}'.format(endpoint))
db = elastic.ElasticSearchEngine(endpoint)

View File

@ -25,23 +25,36 @@ from freezer_api.common.utils import BackupMetadataDoc
from freezer_api.common import exceptions
class ElasticSearchEngine(object):
class TypeManager:
def __init__(self, es, doc_type, index):
self.es = es
self.index = index
self.doc_type = doc_type
def __init__(self, hosts):
# logging.getLogger('elasticsearch').addHandler(logging.NullHandler())
self.es = elasticsearch.Elasticsearch(hosts)
logging.info('Using Elasticsearch host {0}'.format(hosts))
self.index = "freezer"
@staticmethod
def get_base_search_filter(user_id, search={}):
user_id_filter = {"term": {"user_id": user_id}}
base_filter = [user_id_filter]
match_list = [{"match": m} for m in search.get('match', [])]
if match_list:
base_filter.append({"query": {"bool": {"must": match_list}}})
return base_filter
def _get_backup(self, user_id, backup_id=None):
# raises only on severe engine errors
if backup_id:
query = '+user_id:{0} +backup_id:{1}'.format(user_id, backup_id)
else:
query = '+user_id:{0}'.format(user_id)
@staticmethod
def get_search_query(user_id, doc_id, search={}):
base_filter = TypeManager.get_base_search_filter(user_id, search)
return {"filter": {"bool": {"must": base_filter}}}
def search(self, user_id, doc_id, search={}, offset=0, limit=10):
try:
res = self.es.search(index=self.index, doc_type='backups',
q=query)
query_dsl = self.get_search_query(user_id, doc_id, search)
except:
raise exceptions.StorageEngineError(
message='search operation failed: query not valid',
resp_body={'engine exception': 'invalid query'})
try:
res = self.es.search(index=self.index, doc_type=self.doc_type,
size=limit, from_=offset, body=query_dsl)
except Exception as e:
raise exceptions.StorageEngineError(
message='search operation failed',
@ -49,10 +62,9 @@ class ElasticSearchEngine(object):
hit_list = res['hits']['hits']
return [x['_source'] for x in hit_list]
def _index(self, doc):
# raises only on severe engine errors
def insert(self, doc):
try:
res = self.es.index(index=self.index, doc_type='backups',
res = self.es.index(index=self.index, doc_type=self.doc_type,
body=doc)
except Exception as e:
raise exceptions.StorageEngineError(
@ -60,44 +72,86 @@ class ElasticSearchEngine(object):
resp_body={'engine exception': '{0}'.format(e)})
return res['created']
def _delete_backup(self, user_id, backup_id):
query = '+user_id:{0} +backup_id:{1}'.format(user_id, backup_id)
def delete(self, user_id, doc_id):
try:
query_dsl = self.get_search_query(user_id, doc_id)
except:
raise exceptions.StorageEngineError(
message='delete operation failed: query not valid',
resp_body={'engine exception': 'invalid query'})
try:
self.es.delete_by_query(index=self.index,
doc_type='backups',
q=query)
doc_type=self.doc_type,
body=query_dsl)
except Exception as e:
raise exceptions.StorageEngineError(
message='search operation failed',
message='delete operation failed',
resp_body={'engine exception': '{0}'.format(e)})
return doc_id
def get_backup(self, user_id, backup_id):
# raises if data not found, so reply will be HTTP_404
backup_metadata = self._get_backup(user_id, backup_id)
if not backup_metadata:
raise exceptions.ObjectNotFound(
message='Requested backup data not found: {0}'.
format(backup_id),
resp_body={'backup_id': backup_id})
return backup_metadata
def get_backup_list(self, user_id):
# TODO: elasticsearch reindex for paging
return self._get_backup(user_id)
class BackupTypeManager(TypeManager):
def __init__(self, es, doc_type, index='freezer'):
TypeManager.__init__(self, es, doc_type, index=index)
def add_backup(self, user_id, user_name, data):
@staticmethod
def get_search_query(user_id, doc_id, search={}):
base_filter = TypeManager.get_base_search_filter(user_id, search)
if doc_id is not None:
base_filter.append({"term": {"backup_id": doc_id}})
if 'time_after' in search:
base_filter.append(
{"range": {"timestamp": {"gte": int(search['time_after'])}}}
)
if 'time_before' in search:
base_filter.append(
{"range": {"timestamp": {"lte": int(search['time_before'])}}}
)
return {"filter": {"bool": {"must": base_filter}}}
class ClientTypeManager(TypeManager):
def __init__(self, es, doc_type, index='freezer'):
TypeManager.__init__(self, es, doc_type, index=index)
@staticmethod
def get_search_query(user_id, doc_id, search={}):
base_filter = TypeManager.get_base_search_filter(user_id, search)
if doc_id is not None:
base_filter.append({"term": {"client_id": doc_id}})
return {"filter": {"bool": {"must": base_filter}}}
class ElasticSearchEngine(object):
def __init__(self, hosts, index='freezer'):
self.index = index
self.es = elasticsearch.Elasticsearch(hosts)
logging.info('Using Elasticsearch host {0}'.format(hosts))
self.backup_manager = BackupTypeManager(self.es, 'backups')
self.client_manager = ClientTypeManager(self.es, 'clients')
def get_backup(self, user_id, backup_id=None, offset=0, limit=10, search={}):
return self.backup_manager.search(user_id,
backup_id,
search=search,
offset=offset,
limit=limit)
def add_backup(self, user_id, user_name, doc):
# raises if data is malformed (HTTP_400) or already present (HTTP_409)
backup_metadata_doc = BackupMetadataDoc(user_id, user_name, data)
backup_metadata_doc = BackupMetadataDoc(user_id, user_name, doc)
if not backup_metadata_doc.is_valid():
raise exceptions.BadDataFormat(message='Bad Data Format')
backup_id = backup_metadata_doc.backup_id
existing_data = self._get_backup(user_id, backup_id)
if existing_data:
existing = self.backup_manager.search(user_id, backup_id)
if existing: # len(existing) > 0
raise exceptions.DocumentExists(
message='Backup data already existing ({0})'.format(backup_id),
resp_body={'backup_id': backup_id})
if not self._index(backup_metadata_doc.serialize()):
# should never happen
if not self.backup_manager.insert(backup_metadata_doc.serialize()):
raise exceptions.StorageEngineError(
message='index operation failed',
resp_body={'backup_id': backup_id})
@ -106,5 +160,36 @@ class ElasticSearchEngine(object):
return backup_id
def delete_backup(self, user_id, backup_id):
self._delete_backup(user_id, backup_id)
return backup_id
return self.backup_manager.delete(user_id, backup_id)
def get_client(self, user_id, client_id=None, offset=0, limit=10, search={}):
return self.client_manager.search(user_id,
client_id,
search=search,
offset=offset,
limit=limit)
def add_client(self, user_id, doc):
client_id = doc.get('client_id', None)
if client_id is None:
raise exceptions.BadDataFormat(message='Bad Data Format')
existing = self.client_manager.search(user_id, client_id)
if existing: # len(existing) > 0
raise exceptions.DocumentExists(
message='Client already registered ({0})'.format(client_id),
resp_body={'client_id': client_id})
client_doc = {'client': doc,
'user_id': user_id}
if not self.client_manager.insert(client_doc):
raise exceptions.StorageEngineError(
message='index operation failed',
resp_body={'client_id': client_id})
logging.info('Client registered, client_id: {0}'.
format(client_id))
return client_id
def delete_client(self, user_id, client_id):
return self.client_manager.delete(user_id, client_id)

View File

@ -1,69 +0,0 @@
"""
Copyright 2014 Hewlett-Packard
Licensed under the Apache License, Version 2.0 (the "License");
you may not use this file except in compliance with the License.
You may obtain a copy of the License at
http://www.apache.org/licenses/LICENSE-2.0
Unless required by applicable law or agreed to in writing, software
distributed under the License is distributed on an "AS IS" BASIS,
WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
See the License for the specific language governing permissions and
limitations under the License.
This product includes cryptographic software written by Eric Young
(eay@cryptsoft.com). This product includes software written by Tim
Hudson (tjh@cryptsoft.com).
========================================================================
"""
import logging
from freezer_api.common.utils import BackupMetadataDoc
from freezer_api.common import exceptions
class SimpleDictStorageEngine(object):
def __init__(self):
self._map = {}
def get_backup(self, user_id, backup_id):
try:
backup_data = self._map[(user_id, backup_id)]
except:
raise exceptions.ObjectNotFound(
message='Requested backup data not found: {0}'.
format(backup_id),
resp_body={'backup_id': backup_id})
return backup_data
def get_backup_list(self, user_id):
backup_list = []
for (key, backup_data) in self._map.iteritems():
if key[0] == user_id:
backup_list.append(backup_data)
return backup_list
def add_backup(self, user_id, user_name, data):
backup_metadata_doc = BackupMetadataDoc(user_id, user_name, data)
if not backup_metadata_doc.is_valid():
raise exceptions.BadDataFormat(message='Bad Data Format')
backup_id = backup_metadata_doc.backup_id
if (user_id, backup_id) in self._map:
raise exceptions.DocumentExists(
message='Backup data already existing ({0})'.format(backup_id),
resp_body={'backup_id': backup_id})
self._map[(user_id, backup_id)] = backup_metadata_doc.serialize()
logging.info('Adding backup data with backup_id {0}'.format(backup_id))
return backup_id
def delete_backup(self, user_id, backup_id):
try:
self._map.pop((user_id, backup_id))
except KeyError:
raise exceptions.ObjectNotFound(
message='Object to remove not found: {0}'.format(backup_id),
resp_body={'backup_id': backup_id})
return backup_id

View File

@ -170,6 +170,58 @@ fake_data_0_elasticsearch_miss = {
"took": 1
}
fake_data_1_wrapped_backup_metadata = {
'backup_id': 'freezer_container_alpha_important_data_backup_125235431_1',
'user_id': 'qwerty1234',
'user_name': 'asdffdsa',
'backup_metadata': {
"container": "freezer_container",
"host_name": "alpha",
"backup_name": "important_data_backup",
"timestamp": 125235431,
"level": 1,
"backup_session": 8475903425,
"max_level": 5,
"mode" : "fs",
"fs_real_path": "/blabla",
"vol_snap_path": "/blablasnap",
"total_broken_links" : 0,
"total_fs_files" : 11,
"total_directories" : 2,
"backup_size_uncompressed" : 4567,
"backup_size_compressed" : 1212,
"total_backup_session_size" : 6789,
"compression_alg": "None",
"encrypted": "false",
"client_os": "linux",
"broken_links": ["link_01", "link_02"],
"excluded_files": ["excluded_file_01", "excluded_file_02"],
"cli": ""
}
}
fake_client_info_0 = {
"client_id": "test-tenant_5253_test-hostname_09544",
"description": "some usefule text here",
"config_id": "config_id_contains_uuid_of_config"
}
fake_client_info_1 = {
"client_id": "test-tenant_5253_test-hostname_6543",
"description": "also some useful text here",
"config_id": "config_id_blablawhatever"
}
fake_client_entry_0 = {
"client" : fake_client_info_0,
"user_id": "user_id-is-provided-keystone"
}
fake_client_entry_1 = {
"client" : fake_client_info_0,
"user_id": "user_id-is-provided-keystone"
}
class FakeReqResp:
@ -183,119 +235,3 @@ class FakeReqResp:
def get_header(self, key):
return self.header.get(key, None)
class FakeElasticsearch_hit:
def __init__(self, host=None):
pass
def search(self, index, doc_type, q):
return fake_data_0_elasticsearch_hit
def index(self, index, doc_type, body):
return {'created': True}
def delete_by_query(self, index, doc_type, q):
pass
class FakeElasticsearch_insert_ok:
def __init__(self, host=None):
pass
def search(self, index, doc_type, q):
return fake_data_0_elasticsearch_miss
def index(self, index, doc_type, body):
return {'created': True}
def delete_by_query(self, index, doc_type, q):
pass
class FakeElasticsearch_miss:
def __init__(self, host=None):
pass
def search(self, index, doc_type, q):
return fake_data_0_elasticsearch_miss
def index(self, index, doc_type, body):
return {'created': False}
def delete_by_query(self, index, doc_type, q):
pass
class FakeElasticsearch_index_raise:
def __init__(self, host=None):
pass
def search(self, index, doc_type, q):
return fake_data_0_elasticsearch_miss
def index(self, index, doc_type, body):
raise Exception
def delete_by_query(self, index, doc_type, q):
pass
class FakeElasticsearch_search_raise:
def __init__(self, host=None):
pass
def search(self, index, doc_type, q):
raise Exception
def index(self, index, doc_type, body):
return {'created': True}
def delete_by_query(self, index, doc_type, q):
pass
class FakeElasticsearch_delete_raise:
def __init__(self, host=None):
pass
def search(self, index, doc_type, q):
return fake_data_0_elasticsearch_miss
def index(self, index, doc_type, body):
return {'created': True}
def delete_by_query(self, index, doc_type, q):
raise Exception
class FakeLogging:
def __init__(self):
return None
def __call__(self, *args, **kwargs):
return True
@classmethod
def logging(cls, opt1=True):
return True
@classmethod
def info(cls, opt1=True):
return True
@classmethod
def warning(cls, opt1=True):
return True
@classmethod
def critical(cls, opt1=True):
return True
@classmethod
def exception(cls, opt1=True):
return True
@classmethod
def error(cls, opt1=True):
return True

View File

@ -20,27 +20,19 @@ Hudson (tjh@cryptsoft.com).
========================================================================
"""
import pytest
import unittest
from mock import Mock, patch
from common import *
from freezer_api.common.exceptions import *
from keystonemiddleware import auth_token
from freezer_api.cmd import api
class TestAPI:
class TestAPI(unittest.TestCase):
def patch_logging(self, monkeypatch):
fakelogging = FakeLogging()
monkeypatch.setattr(logging, 'critical', fakelogging.critical)
monkeypatch.setattr(logging, 'warning', fakelogging.warning)
monkeypatch.setattr(logging, 'exception', fakelogging.exception)
monkeypatch.setattr(logging, 'error', fakelogging.error)
def test_auth_install(self, monkeypatch):
self.patch_logging(monkeypatch)
@patch('freezer_api.storage.elastic.logging')
def test_auth_install(self, mock_logging):
app = api.get_application(None)
assert isinstance(app, auth_token.AuthProtocol)

View File

@ -20,99 +20,85 @@ Hudson (tjh@cryptsoft.com).
========================================================================
"""
import unittest
from mock import Mock, patch
import falcon
from freezer_api.api.v1 import backups
from freezer_api.storage import simpledict
from freezer_api.common.exceptions import *
from common import *
from freezer_api.common.exceptions import *
class TestBackupsCollectionResource(unittest.TestCase):
def setUp(self):
self.db = simpledict.SimpleDictStorageEngine()
self.resource = backups.BackupsCollectionResource(self.db)
self.req = FakeReqResp()
self.req.header['X-User-ID'] = fake_data_0_user_id
self.mock_db = Mock()
self.mock_req = Mock()
self.mock_req.get_header.return_value = {'X-User-ID': fake_data_0_user_id}
self.mock_req.context = {}
self.mock_req.status = falcon.HTTP_200
self.resource = backups.BackupsCollectionResource(self.mock_db)
def test_on_get_return_empty_list(self):
self.mock_db.get_backup.return_value = []
expected_result = {'backups': []}
self.resource.on_get(self.req, self.req)
result = self.req.context['result']
self.resource.on_get(self.mock_req, self.mock_req)
result = self.mock_req.context['result']
self.assertEqual(result, expected_result)
self.assertEqual(self.mock_req.status, falcon.HTTP_200)
def test_on_get_return_correct_list(self):
self.db.add_backup(user_id=fake_data_0_user_id,
user_name=fake_data_0_user_name,
data=fake_data_0_backup_metadata)
self.resource.on_get(self.req, self.req)
result = self.req.context['result']
expected_result = {'backups': [fake_data_0_wrapped_backup_metadata]}
self.assertEqual(result, expected_result)
def test_on_get_return_empty_list_without_user_id(self):
self.req.header.pop('X-User-ID')
self.db.add_backup(user_id=fake_data_0_user_id,
user_name=fake_data_0_user_name,
data=fake_data_0_backup_metadata)
self.resource.on_get(self.req, self.req)
result = self.req.context['result']
expected_result = {'backups': []}
self.assertEqual(result, expected_result)
def test_on_get_return_empty_list_with_different_user_id(self):
self.req.header['X-User-ID'] = 'LupinIII'
self.db.add_backup(user_id=fake_data_0_user_id,
user_name=fake_data_0_user_name,
data=fake_data_0_backup_metadata)
self.resource.on_get(self.req, self.req)
result = self.req.context['result']
expected_result = {'backups': []}
self.mock_db.get_backup.return_value = [fake_data_0_backup_metadata]
expected_result = {'backups': [fake_data_0_backup_metadata]}
self.resource.on_get(self.mock_req, self.mock_req)
result = self.mock_req.context['result']
self.assertEqual(result, expected_result)
self.assertEqual(self.mock_req.status, falcon.HTTP_200)
def test_on_post_raises_when_missing_body(self):
self.assertRaises(BadDataFormat, self.resource.on_post, self.req, self.req)
self.mock_db.add_backup.return_value = [fake_data_0_wrapped_backup_metadata['backup_id']]
expected_result = {'backup_id': fake_data_0_wrapped_backup_metadata['backup_id']}
self.assertRaises(BadDataFormat, self.resource.on_post, self.mock_req, self.mock_req)
def test_on_post_inserts_correct_data(self):
self.req.context['doc'] = fake_data_0_backup_metadata
self.resource.on_post(self.req, self.req)
self.assertEquals(self.req.status, falcon.HTTP_201)
expected_result = {'backup_id': fake_data_0_backup_id}
self.assertEquals(self.req.context['result'], expected_result)
self.mock_req.context['doc'] = fake_data_0_backup_metadata
self.mock_db.add_backup.return_value = fake_data_0_wrapped_backup_metadata['backup_id']
self.resource.on_post(self.mock_req, self.mock_req)
expected_result = {'backup_id': fake_data_0_wrapped_backup_metadata['backup_id']}
self.assertEqual(self.mock_req.status, falcon.HTTP_201)
self.assertEqual(self.mock_req.context['result'], expected_result)
self.assertEqual(self.mock_req.status, falcon.HTTP_201)
class TestBackupsResource(unittest.TestCase):
def setUp(self):
self.db = simpledict.SimpleDictStorageEngine()
self.resource = backups.BackupsResource(self.db)
self.req = FakeReqResp()
self.req.header['X-User-ID'] = fake_data_0_user_id
self.mock_db = Mock()
self.mock_req = Mock()
self.mock_req.get_header.return_value = {'X-User-ID': fake_data_0_user_id}
self.mock_req.context = {}
self.mock_req.status = falcon.HTTP_200
self.resource = backups.BackupsResource(self.mock_db)
def test_on_get_raises_when_not_found(self):
self.assertRaises(ObjectNotFound, self.resource.on_get, self.req, self.req, fake_data_0_backup_id)
def test_on_get_return_no_result_and_404_when_not_found(self):
self.mock_db.get_backup.return_value = []
self.resource.on_get(self.mock_req, self.mock_req, fake_data_0_wrapped_backup_metadata['backup_id'])
self.assertNotIn('result', self.mock_req.context)
self.assertEqual(self.mock_req.status, falcon.HTTP_404)
def test_on_get_return_correct_data(self):
self.db.add_backup(user_id=fake_data_0_user_id,
user_name=fake_data_0_user_name,
data=fake_data_0_backup_metadata)
self.resource.on_get(self.req, self.req, fake_data_0_backup_id)
result = self.req.context['result']
self.assertEqual(result, fake_data_0_wrapped_backup_metadata)
def test_on_delete_raises_when_not_found(self):
self.assertRaises(ObjectNotFound, self.resource.on_delete, self.req, self.req, fake_data_0_backup_id)
self.mock_db.get_backup.return_value = [fake_data_0_wrapped_backup_metadata]
expected_result = [fake_data_0_wrapped_backup_metadata]
self.resource.on_get(self.mock_req, self.mock_req, fake_data_0_wrapped_backup_metadata['backup_id'])
result = self.mock_req.context['result']
self.assertEqual(result, expected_result)
self.assertEqual(self.mock_req.status, falcon.HTTP_200)
def test_on_delete_removes_proper_data(self):
self.db.add_backup(user_id=fake_data_0_user_id,
user_name=fake_data_0_user_name,
data=fake_data_0_backup_metadata)
self.resource.on_delete(self.req, self.req, fake_data_0_backup_id)
result = self.req.context['result']
#self.mock_db.delete_backup.return_value = True
self.resource.on_delete(self.mock_req, self.mock_req, fake_data_0_backup_id)
result = self.mock_req.context['result']
expected_result = {'backup_id': fake_data_0_backup_id}
self.assertEquals(self.req.status, falcon.HTTP_204)
self.assertEqual(self.mock_req.status, falcon.HTTP_204)
self.assertEqual(result, expected_result)

tests/test_clients.py (new file, 85 lines)
View File

@ -0,0 +1,85 @@
import unittest
from mock import Mock, patch
import falcon
from common import *
from freezer_api.common.exceptions import *
from freezer_api.api.v1 import clients as v1_clients
class TestClientsCollectionResource(unittest.TestCase):
def setUp(self):
self.mock_db = Mock()
self.mock_req = Mock()
self.mock_req.get_header.return_value = {'X-User-ID': fake_data_0_user_id}
self.mock_req.context = {}
self.mock_req.status = falcon.HTTP_200
self.resource = v1_clients.ClientsCollectionResource(self.mock_db)
def test_on_get_return_empty_list(self):
self.mock_db.get_client.return_value = []
expected_result = {'clients': []}
self.resource.on_get(self.mock_req, self.mock_req)
result = self.mock_req.context['result']
self.assertEqual(result, expected_result)
self.assertEqual(self.mock_req.status, falcon.HTTP_200)
def test_on_get_return_correct_list(self):
self.mock_db.get_client.return_value = [fake_client_entry_0, fake_client_entry_1]
expected_result = {'clients': [fake_client_entry_0, fake_client_entry_1]}
self.resource.on_get(self.mock_req, self.mock_req)
result = self.mock_req.context['result']
self.assertEqual(result, expected_result)
self.assertEqual(self.mock_req.status, falcon.HTTP_200)
def test_on_post_raises_when_missing_body(self):
self.mock_db.add_client.return_value = [fake_client_info_0['client_id']]
self.assertRaises(BadDataFormat, self.resource.on_post, self.mock_req, self.mock_req)
def test_on_post_inserts_correct_data(self):
self.mock_req.context['doc'] = fake_client_info_0
self.mock_db.add_client.return_value = fake_client_info_0['client_id']
expected_result = {'client_id': fake_client_info_0['client_id']}
self.resource.on_post(self.mock_req, self.mock_req)
self.assertEqual(self.mock_req.status, falcon.HTTP_201)
self.assertEqual(self.mock_req.context['result'], expected_result)
class TestClientsResource(unittest.TestCase):
def setUp(self):
self.mock_db = Mock()
self.mock_req = Mock()
self.mock_req.get_header.return_value = {'X-User-ID': fake_data_0_user_id}
self.mock_req.context = {}
self.mock_req.status = falcon.HTTP_200
self.resource = v1_clients.ClientsResource(self.mock_db)
def test_create_resource(self):
self.assertIsInstance(self.resource, v1_clients.ClientsResource)
def test_on_get_return_no_result_and_404_when_not_found(self):
self.mock_db.get_client.return_value = []
expected_result = []
self.resource.on_get(self.mock_req, self.mock_req, fake_client_info_0['client_id'])
self.assertNotIn('result', self.mock_req.context)
self.assertEqual(self.mock_req.status, falcon.HTTP_404)
def test_on_get_return_correct_data(self):
self.mock_db.get_client.return_value = [fake_client_entry_0]
expected_result = fake_client_entry_0
self.resource.on_get(self.mock_req, self.mock_req, fake_client_info_0['client_id'])
result = self.mock_req.context['result']
self.assertEqual(result, expected_result)
self.assertEqual(self.mock_req.status, falcon.HTTP_200)
def test_on_delete_removes_proper_data(self):
self.resource.on_delete(self.mock_req, self.mock_req, fake_client_info_0['client_id'])
result = self.mock_req.context['result']
expected_result = {'client_id': fake_client_info_0['client_id']}
self.assertEqual(self.mock_req.status, falcon.HTTP_204)
self.assertEqual(result, expected_result)

View File

@ -22,41 +22,27 @@ Hudson (tjh@cryptsoft.com).
import unittest
from mock import patch
import pytest
#import pytest
#import falcon
#from common import *
import falcon
from common import *
from freezer_api.common.exceptions import *
from oslo.config import cfg
from freezer_api.common.exceptions import *
from freezer_api.storage import driver, elastic, simpledict
class TestStorageDriver:
class TestStorageDriver(unittest.TestCase):
def patch_logging(self, monkeypatch):
fakelogging = FakeLogging()
monkeypatch.setattr(logging, 'critical', fakelogging.critical)
monkeypatch.setattr(logging, 'warning', fakelogging.warning)
monkeypatch.setattr(logging, 'exception', fakelogging.exception)
monkeypatch.setattr(logging, 'error', fakelogging.error)
def test_get_db_raises_when_db_not_supported(self, monkeypatch):
self.patch_logging(monkeypatch)
@patch('freezer_api.storage.elastic.logging')
def test_get_db_raises_when_db_not_supported(self, mock_logging):
cfg.CONF.storage.db = 'nodb'
pytest.raises(Exception, driver.get_db)
self.assertRaises(Exception, driver.get_db)
def test_get_db_simpledict(self, monkeypatch):
self.patch_logging(monkeypatch)
cfg.CONF.storage.db = 'simpledict'
db = driver.get_db()
assert isinstance(db, simpledict.SimpleDictStorageEngine)
def test_get_db_elastic(self, monkeypatch):
self.patch_logging(monkeypatch)
@patch('freezer_api.storage.elastic.logging')
def test_get_db_elastic(self, mock_logging):
cfg.CONF.storage.db = 'elasticsearch'
db = driver.get_db()
assert isinstance(db, elastic.ElasticSearchEngine)
self.assertIsInstance(db, elastic.ElasticSearchEngine)

View File

@ -20,105 +20,346 @@ Hudson (tjh@cryptsoft.com).
========================================================================
"""
import unittest
import pytest
from mock import Mock, patch
from freezer_api.storage import elastic
from common import *
from freezer_api.common.exceptions import *
import elasticsearch
class TypeManager(unittest.TestCase):
def setUp(self):
self.mock_es = Mock()
self.type_manager = elastic.TypeManager(self.mock_es, 'base_doc_type', 'freezer')
def test_get_base_search_filter(self):
my_search = {'match': [{'some_field': 'some text'},
{'description': 'some other text'}]}
q = self.type_manager.get_base_search_filter('my_user_id', search=my_search)
expected_q = [{'term': {'user_id': 'my_user_id'}},
{'query':
{'bool':
{'must':
[{'match': {'some_field': 'some text'}},
{'match': {'description': 'some other text'}}
]}}}]
self.assertEqual(q, expected_q)
def test_search_ok(self):
self.mock_es.search.return_value = fake_data_0_elasticsearch_hit
expected_q = {'filter':
{'bool':
{'must':
[{'term': {'user_id': 'my_user_id'}},
{'query':
{'bool':
{'must':
[{'match': {'some_field': 'some text'}},
{'match': {'description': 'some other text'}}]}}}
]}}}
my_search = {'match': [{'some_field': 'some text'},
{'description': 'some other text'}]}
res = self.type_manager.search(user_id='my_user_id', doc_id='mydocid', search=my_search, offset=7, limit=19)
self.mock_es.search.assert_called_with(index='freezer', doc_type='base_doc_type', size=19, from_=7, body=expected_q)
self.assertEqual(res, [fake_data_0_backup_metadata])
def test_search_raise_StorageEngineError_when_search_raises(self):
self.mock_es.search.side_effect = Exception('regular test failure')
self.assertRaises(StorageEngineError, self.type_manager.search, user_id='my_user_id', doc_id='mydocid')
def test_insert_ok(self):
self.mock_es.index.return_value = {'created': True} # question: elasticsearch returns bool or string ?
test_doc = {'test_key_412': 'test_value_412'}
res = self.type_manager.insert(doc=test_doc)
self.assertEqual(res, True)
self.mock_es.index.assert_called_with(index='freezer', doc_type='base_doc_type', body=test_doc)
def test_insert_fails(self):
self.mock_es.index.side_effect = Exception('regular test failure')
test_doc = {'test_key_412': 'test_value_412'}
self.assertRaises(StorageEngineError, self.type_manager.insert, doc=test_doc)
self.mock_es.index.assert_called_with(index='freezer', doc_type='base_doc_type', body=test_doc)
def test_delete(self):
#self.mock_es.delete_by_query.return_value = True
doc_id='mydocid345'
res = self.type_manager.delete(user_id='my_user_id', doc_id=doc_id)
self.assertEqual(res, doc_id)
#self.mock_es.delete_by_query.assert_called_with(index='freezer', doc_type='base_doc_type', body=expected_q)
def test_delete_fails(self):
self.mock_es.delete_by_query.side_effect = Exception('regular test failure')
doc_id='mydocid345'
self.assertRaises(StorageEngineError, self.type_manager.delete, user_id='my_user_id', doc_id=doc_id)
#self.mock_es.delete_by_query.assert_called_with(index='freezer', doc_type='base_doc_type', body=expected_q)
class TestElasticSearchEngine:
class TestBackupManager(unittest.TestCase):
def patch_logging(self, monkeypatch):
fakelogging = FakeLogging()
monkeypatch.setattr(logging, 'critical', fakelogging.critical)
monkeypatch.setattr(logging, 'warning', fakelogging.warning)
monkeypatch.setattr(logging, 'exception', fakelogging.exception)
monkeypatch.setattr(logging, 'error', fakelogging.error)
def setUp(self):
self.mock_es = Mock()
self.backup_manager = elastic.BackupTypeManager(self.mock_es, 'backups')
def test_get_search_query(self):
my_search = {'match': [{'backup_name': 'my_backup'} , {'mode': 'fs'}],
"time_before": 1428510506,
"time_after": 1428510506
}
q = self.backup_manager.get_search_query('my_user_id', 'my_doc_id', search=my_search)
expected_q = {'filter':
{'bool':
{'must':
[{'term': {'user_id': 'my_user_id'}},
{'query': {'bool': {'must': [{'match': {'backup_name': 'my_backup'}},
{'match': {'mode': 'fs'}}]}}},
{'term': {'backup_id': 'my_doc_id'}},
{'range': {'timestamp': {'gte': 1428510506}}},
{'range': {'timestamp': {'lte': 1428510506}}}
]}}}
self.assertEqual(q, expected_q)
class TestElasticSearchEngine_get_backup(TestElasticSearchEngine):
class ClientTypeManager(unittest.TestCase):
def test_get_backup_userid_and_backup_id_return_ok(self, monkeypatch):
self.patch_logging(monkeypatch)
monkeypatch.setattr(elasticsearch, 'Elasticsearch', FakeElasticsearch_hit)
engine = elastic.ElasticSearchEngine('host')
res = engine.get_backup(fake_data_0_user_id, fake_data_0_backup_id)
assert (res == [fake_data_0_backup_metadata, ])
def setUp(self):
self.mock_es = Mock()
self.client_manager = elastic.ClientTypeManager(self.mock_es, 'clients')
def test_get_backup_raises_when_query_has_no_hits(self, monkeypatch):
self.patch_logging(monkeypatch)
monkeypatch.setattr(elasticsearch, 'Elasticsearch', FakeElasticsearch_miss)
engine = elastic.ElasticSearchEngine('host')
pytest.raises(ObjectNotFound, engine.get_backup, fake_data_0_user_id, fake_data_0_backup_id)
def test_get_search_query(self):
my_search = {'match': [{'some_field': 'some text'},
{'description': 'some other text'}]}
q = self.client_manager.get_search_query('my_user_id', 'my_doc_id', search=my_search)
expected_q = {'filter':
{'bool':
{'must':
[{'term': {'user_id': 'my_user_id'}},
{'query':
{'bool':
{'must':
[{'match': {'some_field': 'some text'}},
{'match': {'description': 'some other text'}}]}}},
{'term': {'client_id': 'my_doc_id'}}
]}}}
self.assertEqual(q, expected_q)
class TestElasticSearchEngine_get_backup_list(TestElasticSearchEngine):
class TestElasticSearchEngine_backup(unittest.TestCase):
def test_get_backup_list_return_ok(self, monkeypatch):
self.patch_logging(monkeypatch)
monkeypatch.setattr(elasticsearch, 'Elasticsearch', FakeElasticsearch_hit)
engine = elastic.ElasticSearchEngine('host')
res = engine.get_backup_list(fake_data_0_user_id)
assert (res == [fake_data_0_backup_metadata, ])
@patch('freezer_api.storage.elastic.logging')
@patch('freezer_api.storage.elastic.elasticsearch')
def setUp(self, mock_logging, mock_elasticsearch):
mock_elasticsearch.Elasticsearch.return_value = Mock()
self.eng = elastic.ElasticSearchEngine('http://elasticservaddr:1997')
self.eng.backup_manager = Mock()
def test_get_backup_userid_and_backup_id_return_ok(self):
self.eng.backup_manager.search.return_value = [fake_data_0_wrapped_backup_metadata]
my_search = {'match': [{'some_field': 'some text'},
{'description': 'some other text'}]}
res = self.eng.get_backup(user_id=fake_data_0_user_id,
backup_id=fake_data_0_backup_id,
offset=3, limit=7,
search=my_search)
self.assertEqual(res, [fake_data_0_wrapped_backup_metadata])
self.eng.backup_manager.search.assert_called_with(
fake_data_0_wrapped_backup_metadata['user_id'],
fake_data_0_wrapped_backup_metadata['backup_id'],
search=my_search,
limit=7, offset=3)
def test_get_backup_list_with_userid_and_search_return_list(self):
self.eng.backup_manager.search.return_value = [fake_data_0_wrapped_backup_metadata,
fake_data_1_wrapped_backup_metadata]
my_search = {'match': [{'some_field': 'some text'},
{'description': 'some other text'}]}
res = self.eng.get_backup(user_id=fake_data_0_user_id,
offset=3, limit=7,
search=my_search)
self.assertEqual(res, [fake_data_0_wrapped_backup_metadata,
fake_data_1_wrapped_backup_metadata])
self.eng.backup_manager.search.assert_called_with(
fake_data_0_wrapped_backup_metadata['user_id'],
None,
search=my_search,
limit=7, offset=3)
def test_get_backup_list_with_userid_and_search_return_empty(self):
self.eng.backup_manager.search.return_value = []
my_search = {'match': [{'some_field': 'some text'},
{'description': 'some other text'}]}
res = self.eng.get_backup(user_id=fake_data_0_user_id,
offset=3, limit=7,
search=my_search)
self.assertEqual(res, [])
self.eng.backup_manager.search.assert_called_with(
fake_data_0_wrapped_backup_metadata['user_id'],
None,
search=my_search,
limit=7, offset=3)
def test_get_backup_userid_and_backup_id_not_found_returns_empty(self):
self.eng.backup_manager.search.return_value = []
my_search = {'match': [{'some_field': 'some text'},
{'description': 'some other text'}]}
res = self.eng.get_backup(user_id=fake_data_0_user_id,
backup_id=fake_data_0_backup_id,
offset=3, limit=7,
search=my_search)
self.assertEqual(res, [])
self.eng.backup_manager.search.assert_called_with(
fake_data_0_wrapped_backup_metadata['user_id'],
fake_data_0_wrapped_backup_metadata['backup_id'],
search=my_search,
limit=7, offset=3)
def test_add_backup_raises_when_data_is_malformed(self):
self.assertRaises(BadDataFormat, self.eng.add_backup,
user_id=fake_data_0_user_id,
user_name=fake_data_0_user_name,
doc=fake_malformed_data_0_backup_metadata)
def test_add_backup_ok(self):
self.eng.backup_manager.search.return_value = []
res = self.eng.add_backup(fake_data_0_user_id,
user_name=fake_data_0_user_name,
doc=fake_data_0_backup_metadata)
self.assertEqual(res, fake_data_0_wrapped_backup_metadata['backup_id'])
def test_add_backup_raises_when_doc_exists(self):
self.eng.backup_manager.search.return_value = [fake_data_0_wrapped_backup_metadata]
self.assertRaises(DocumentExists, self.eng.add_backup,
user_id=fake_data_0_user_id,
user_name=fake_data_0_user_name,
doc=fake_data_0_backup_metadata)
def test_add_backup_raises_when_manager_insert_raises(self):
self.eng.backup_manager.search.return_value = []
self.eng.backup_manager.insert.side_effect = StorageEngineError('regular test failure')
self.assertRaises(StorageEngineError, self.eng.add_backup,
user_id=fake_data_0_user_id,
user_name=fake_data_0_user_name,
doc=fake_data_0_backup_metadata)
def test_add_backup_raises_when_manager_insert_fails(self):
self.eng.backup_manager.search.return_value = []
self.eng.backup_manager.insert.return_value = False
self.assertRaises(StorageEngineError, self.eng.add_backup,
user_id=fake_data_0_user_id,
user_name=fake_data_0_user_name,
doc=fake_data_0_backup_metadata)
def test_delete_backup_ok(self):
self.eng.backup_manager.delete.return_value = fake_data_0_backup_id
res = self.eng.delete_backup(user_id=fake_data_0_user_id,
backup_id=fake_data_0_backup_id)
self.assertEqual(res, fake_data_0_backup_id)
def test_delete_backup_raises_when_es_delete_raises(self):
self.eng.backup_manager.delete.side_effect = StorageEngineError()
self.assertRaises(StorageEngineError, self.eng.delete_backup,
user_id=fake_data_0_user_id,
backup_id=fake_data_0_backup_id)
class TestElasticSearchEngine_add_backup(TestElasticSearchEngine):
class TestElasticSearchEngine_client(unittest.TestCase):
def test_index_backup_success(self, monkeypatch):
self.patch_logging(monkeypatch)
monkeypatch.setattr(elasticsearch, 'Elasticsearch', FakeElasticsearch_insert_ok)
engine = elastic.ElasticSearchEngine('host')
res = engine.add_backup(fake_data_0_user_id, fake_data_0_user_name, fake_data_0_backup_metadata)
assert (res == fake_data_0_backup_id)
@patch('freezer_api.storage.elastic.logging')
@patch('freezer_api.storage.elastic.elasticsearch')
def setUp(self, mock_logging, mock_elasticsearch):
mock_elasticsearch.Elasticsearch.return_value = Mock()
self.eng = elastic.ElasticSearchEngine('http://elasticservaddr:1997')
self.eng.client_manager = Mock()
def test_index_backup_raise_when_data_exists(self, monkeypatch):
self.patch_logging(monkeypatch)
monkeypatch.setattr(elasticsearch, 'Elasticsearch', FakeElasticsearch_hit)
engine = elastic.ElasticSearchEngine('host')
pytest.raises(DocumentExists, engine.add_backup, fake_data_0_user_id,
fake_data_0_user_name, fake_data_0_backup_metadata)
def test_get_client_userid_and_backup_id_return_1elem_list_(self):
self.eng.client_manager.search.return_value = [fake_client_entry_0]
my_search = {'match': [{'some_field': 'some text'},
{'description': 'some other text'}]}
res = self.eng.get_client(user_id=fake_client_entry_0['user_id'],
client_id=fake_client_info_0['client_id'],
offset=6, limit=15,
search=my_search)
self.assertEqual(res, [fake_client_entry_0])
self.eng.client_manager.search.assert_called_with(
fake_client_entry_0['user_id'],
fake_client_info_0['client_id'],
search=my_search,
limit=15, offset=6)
def test_index_backup_raise_when_es_index_raises(self, monkeypatch):
self.patch_logging(monkeypatch)
monkeypatch.setattr(elasticsearch, 'Elasticsearch', FakeElasticsearch_index_raise)
engine = elastic.ElasticSearchEngine('host')
pytest.raises(StorageEngineError, engine.add_backup, fake_data_0_user_id,
fake_data_0_user_name, fake_data_0_backup_metadata)
def test_get_client_list_with_userid_and_search_return_list(self):
self.eng.client_manager.search.return_value = [fake_client_entry_0, fake_client_entry_1]
my_search = {'match': [{'some_field': 'some text'},
{'description': 'some other text'}]}
res = self.eng.get_client(user_id=fake_client_entry_0['user_id'],
offset=6, limit=15,
search=my_search)
self.assertEqual(res, [fake_client_entry_0, fake_client_entry_1])
self.eng.client_manager.search.assert_called_with(
fake_client_entry_0['user_id'],
None,
search=my_search,
limit=15, offset=6)
def test_index_backup_raise_when_es_search_raises(self, monkeypatch):
self.patch_logging(monkeypatch)
monkeypatch.setattr(elasticsearch, 'Elasticsearch', FakeElasticsearch_search_raise)
engine = elastic.ElasticSearchEngine('host')
pytest.raises(StorageEngineError, engine.add_backup, fake_data_0_user_id,
fake_data_0_user_name, fake_data_0_backup_metadata)
def test_get_client_list_with_userid_and_search_return_empty_list(self):
self.eng.client_manager.search.return_value = []
my_search = {'match': [{'some_field': 'some text'},
{'description': 'some other text'}]}
res = self.eng.get_client(user_id=fake_client_entry_0['user_id'],
offset=6, limit=15,
search=my_search)
self.assertEqual(res, [])
self.eng.client_manager.search.assert_called_with(
fake_client_entry_0['user_id'],
None,
search=my_search,
limit=15, offset=6)
def test_index_backup_raise_when_data_is_malformed(self, monkeypatch):
self.patch_logging(monkeypatch)
monkeypatch.setattr(elasticsearch, 'Elasticsearch', FakeElasticsearch_insert_ok)
engine = elastic.ElasticSearchEngine('host')
pytest.raises(BadDataFormat, engine.add_backup, fake_data_0_user_id,
fake_data_0_user_name, fake_malformed_data_0_backup_metadata)
def test_add_client_raises_when_data_is_malformed(self):
doc = fake_client_info_0.copy()
doc.pop('client_id')
self.assertRaises(BadDataFormat, self.eng.add_client,
user_id=fake_data_0_user_id,
doc=doc)
def test_add_client_raises_when_doc_exists(self):
self.eng.client_manager.search.return_value = [fake_client_entry_0]
self.assertRaises(DocumentExists, self.eng.add_client,
user_id=fake_data_0_user_id,
doc=fake_client_info_0)
class TestElasticSearchEngine_delete_backup(TestElasticSearchEngine):
def test_add_client_ok(self):
self.eng.client_manager.search.return_value = []
res = self.eng.add_client(user_id=fake_data_0_user_id,
doc=fake_client_info_0)
self.assertEqual(res, fake_client_info_0['client_id'])
self.eng.client_manager.search.assert_called_with(
fake_data_0_user_id,
fake_client_info_0['client_id'])
def test_delete_backup_raise_when_es_delete_raises(self, monkeypatch):
self.patch_logging(monkeypatch)
monkeypatch.setattr(elasticsearch, 'Elasticsearch', FakeElasticsearch_delete_raise)
engine = elastic.ElasticSearchEngine('host')
pytest.raises(StorageEngineError, engine.delete_backup, fake_data_0_user_id, fake_data_0_backup_id)
def test_add_client_raises_when_manager_insert_raises(self):
self.eng.client_manager.search.return_value = []
self.eng.client_manager.insert.side_effect = StorageEngineError('regular test failure')
self.assertRaises(StorageEngineError, self.eng.add_client,
user_id=fake_data_0_user_id,
doc=fake_client_info_0)
def test_delete_backup_ok(self, monkeypatch):
self.patch_logging(monkeypatch)
monkeypatch.setattr(elasticsearch, 'Elasticsearch', FakeElasticsearch_hit)
engine = elastic.ElasticSearchEngine('host')
res = engine.delete_backup(fake_data_0_user_id, fake_data_0_backup_id)
assert (res == fake_data_0_backup_id)
def test_add_client_raises_when_manager_insert_fails_without_raise(self):
self.eng.client_manager.search.return_value = []
self.eng.client_manager.insert.return_value = False
self.assertRaises(StorageEngineError, self.eng.add_client,
user_id=fake_data_0_user_id,
doc=fake_client_info_0)
def test_delete_client_ok(self):
self.eng.client_manager.delete.return_value = fake_client_info_0['client_id']
res = self.eng.delete_client(user_id=fake_data_0_user_id,
client_id=fake_client_info_0['client_id'])
self.assertEqual(res, fake_client_info_0['client_id'])
def test_delete_client_raises_when_es_delete_raises(self):
self.eng.client_manager.delete.side_effect = StorageEngineError()
self.assertRaises(StorageEngineError, self.eng.delete_client,
user_id=fake_data_0_user_id,
client_id=fake_client_info_0['client_id'])

View File

@ -21,7 +21,8 @@ Hudson (tjh@cryptsoft.com).
"""
import unittest
from common import FakeReqResp
from mock import Mock, patch
from freezer_api.api import v1
import json
@ -30,7 +31,7 @@ class TestHomedocResource(unittest.TestCase):
def setUp(self):
self.resource = v1.homedoc.Resource()
self.req = FakeReqResp()
self.req = Mock()
def test_on_get_return_resources_information(self):
self.resource.on_get(self.req, self.req)

View File

@ -21,9 +21,10 @@ Hudson (tjh@cryptsoft.com).
"""
import json
import unittest
#from mock import Mock
import json
import falcon
from freezer_api.api.common import middleware

View File

@ -21,8 +21,9 @@ Hudson (tjh@cryptsoft.com).
"""
import unittest
from mock import Mock
import falcon
from common import FakeReqResp
from freezer_api.api import versions
from freezer_api.api import v1
import json
@ -32,7 +33,7 @@ class TestVersionResource(unittest.TestCase):
def setUp(self):
self.resource = versions.Resource()
self.req = FakeReqResp()
self.req = Mock()
def test_on_get_return_versions(self):
self.resource.on_get(self.req, self.req)