Add api support for jobs

removes distinction between actions and configs in the api

The v1/jobs endpoint is the single point of access to the api
to store, search and retrieve operating instructions about
a single freezer run.

Optional scheduling information can be added to a job to
allow for future/recurrent job execution.

removes api endpoints
  - v1/actions
  - v1/configs

adds api endpoints:
  - v1/jobs

Implements blueprint: freezer-api-jobs

Change-Id: Ideeef14dfccd21ddd10b4faa438124c04d2e1ff8
This commit is contained in:
Fabrizio Vanni 2015-06-08 12:05:08 +01:00 committed by Fausto Marzi
parent 1ecfecdfd3
commit f4a9d19a12
18 changed files with 1131 additions and 893 deletions

View File

@ -103,15 +103,16 @@ GET /v1/clients/{freezerc_id} Get client details
UPDATE /v1/clients/{freezerc_id} Updates the specified client information
DELETE /v1/clients/{freezerc_id} Deletes the specified client information
Freezer actions management
---------------------------
GET /v1/actions(?limit,offset) Lists registered actions
POST /v1/actions Creates action entry
Freezer jobs management
-----------------------
GET /v1/jobs(?limit,offset) Lists registered jobs
POST /v1/jobs Creates job entry
GET /v1/actions/{action_id} Get action details
UPDATE /v1/actions/{action_id} Updates the specified action information
DELETE /v1/actions/{action_id} Deletes the specified action information
PATCH /v1/actions/{action_id} updates part of the document (such as status information)
GET /v1/jobs/{jobs_id} Get job details
POST /v1/jobs/{jobs_id} creates or replaces a job entry using the specified job_id
UPDATE /v1/jobs/{jobs_id} Updates the existing job information
DELETE /v1/jobs/{jobs_id} Deletes the specified job information
PATCH /v1/jobs/{jobs_id} Updates part of the document
Data Structures
===============
@ -188,33 +189,120 @@ client_type :=
}
Jobs and Actions
----------------
Jobs
----
A job describes a single action to be executed by a freezer client, for example a backup, or a restore.
It contains the necessary information, as if it were provided on the command line.
job_info
{
parameters for freezer to execute a specific job.
A job is stored in the api together with some metadata information such as:
job_id, user_id, client_id, status, scheduling information etc
Scheduling information enables future/recurrent execution of jobs
+---------------------+
| Job |
| | job_action +-------------------+
| +job_id +------------------>| job action dict |
| +client_id | +-------------------+
| +user_id |
| +description | job_schedule
| +---------------+
| | | +-------------------+
+---------------------+ +-->| job schedule dict |
+-------------------+
job document structure:
"job": {
"job_action": { parameters for freezer to execute a specific action }
"job_schedule": { scheduling information }
"job_id": string
"client_id": string
"user_id": string
"description": string
}
example backup job_info
"job_action": {
{
"action" = "backup"
"mode" = "fs"
"src_file" = /home/tylerdurden/project_mayhem
"backup_name" = project_mayhem_backup
"container" = my_backup_container
"action" : string
"mode" : string
"src_file" : string
"backup_name" : string
"container" : string
...
}
"job_schedule": {
"time_created": int (timestamp)
"time_started": int (timestamp)
"time_ended": int (timestamp)
"status": string ["stop", "scheduled", "running", "aborting", "removed"]
"event": string ["", "stop", "start", "abort", "remove"]
"result": string ["", "success", "fail", "aborted"]
SCHEDULING TIME INFORMATION
}
Scheduling Time Information
---------------------------
Three types of scheduling can be identified:
* date - used for single run jobs
* interval - periodic jobs, providing an interval value
* cron-like jobs
Each type has specific parameters which can be given.
date scheduling
---------------
"schedule_date": : datetime isoformat
interval scheduling
-------------------
"schedule_interval" : "continuous", "N weeks" / "N days" / "N hours" / "N minutes" / "N seconds"
"schedule_start_date" : datetime isoformat
"schedule_end_date" : datetime isoformat
cron-like scheduling
--------------------
"schedule_year" : 4 digit year
"schedule_month" : 1-12
"schedule_day" : 1-31
"schedule_week" : 1-53
"schedule_day_of_week": 0-6 or string mon,tue,wed,thu,fri,sat,sun
"schedule_hour" : 0-23
"schedule_minute" : 0-59
"schedule_second" : 0-59
"schedule_start_date" : datetime isoformat
"schedule_end_date" : datetime isoformat
example backup job_action
"job_action": {
"action" : "backup"
"mode" : "fs"
"src_file" : "/home/tylerdurden/project_mayhem"
"backup_name" : "project_mayhem_backup"
"container" : "my_backup_container"
"max_backup_level" : int
"always_backup_level": int
"restart_always_backup": int
"no_incremental" : bool
"encrypt_pass_file" = private_key_file
"log_file" = /var/log/freezer.log
"hostname" = false
"max_cpu_priority" = false
"encrypt_pass_file" : private_key_file
"log_file" : "/var/log/freezer.log"
"hostname" : false
"max_cpu_priority" : false
}
example restore job_info
{
example restore job_action
"job_action": {
"action": "restore"
"restore-abs-path": "/home/tylerdurden/project_mayhem"
"container" : "my_backup_container"
@ -223,20 +311,129 @@ example restore job_info
"max_cpu_priority": true
}
action_info
{
"action_id": string uuid4, not analyzed
"job": job_info list ?
"client_id": string
"description": string
"time_created": int (timestamp)
"time_started": int (timestamp)
"time_ended": int (timestamp)
"status": string: pending | notified(?) | started | abort_req | aborting | aborted | success | fail
example scheduled backup job
job will be executed once at the provided datetime
"job": {
"job_action": {
"action" : "backup",
"mode" : "fs",
"src_file" : "/home/tylerdurden/project_mayhem",
"backup_name" : "project_mayhem_backup",
"container" : "my_backup_container",
}
"job_schedule": {
"time_created": 1234,
"time_started": 1234,
"time_ended": 0,
"status": "scheduled",
"schedule_date": "2015-06-02T16:20:00"
}
"job_id": "blabla",
"client_id": "blabla",
"user_id": "blabla",
"description": "scheduled one shot",
}
Action document (the actual document stored in elasticsearch)
{
"action": action_info
"user_id": string, # owner of the information (OS X-User-Id, keystone provided, added by api)
new job, in stop status, with pending start request
job will be executed daily at the provided hour:min:sec
while year,month,day are ignored, if provided
"job": {
"job_action": {
"action" : "backup"
"mode" : "fs"
"src_file" : "/home/tylerdurden/project_mayhem"
"backup_name" : "project_mayhem_backup"
"container" : "my_backup_container"
},
"job_schedule": {
"time_created": 1234,
"time_started": 1234,
"time_ended": 0,
"status": "stop",
"event": "start"
"schedule_period" : "daily"
"schedule_time": "2015-06-02T16:20:00"
},
"job_id": "blabla",
"client_id": "blabla",
"user_id": "blabla",
"description": "daily backup",
}
multiple scheduling choices allowed
"job": {
"job_action": {
"action" : "backup"
"mode" : "fs"
"src_file" : "/home/tylerdurden/project_mayhem"
"backup_name" : "project_mayhem_backup"
"container" : "my_backup_container"
}
"job_schedule": {
"time_created": 1234,
"time_started": 1234,
"time_ended": 0,
"status": "scheduled"
"schedule_month" : "1-6, 9-12"
"schedule_day" : "mon, wed, fri"
"schedule_hour": "03"
"schedule_minute": "25"
}
"job_id": "blabla",
"client_id": "blabla",
"user_id": "blabla",
"description": "daily backup",
}
Finished job with result:
"job": {
"job_action": {
"action" : "backup"
"mode" : "fs"
"src_file" : "/home/tylerdurden/project_mayhem"
"backup_name" : "project_mayhem_backup"
"container" : "my_backup_container"
},
"job_schedule": {
"time_created": 1234,
"time_started": 1234,
"time_ended": 4321,
"status": "stop",
"event": "",
"result": "success",
"schedule_time": "2015-06-02T16:20:00"
},
"job_id": "blabla",
"client_id": "blabla",
"user_id": "blabla",
"description": "one shot job",
}
Ini version:
[job]
job_id = 12344321
client_id = 12344321
user_id = qwerty
description = scheduled one shot
[job_action]
action = backup
mode = fs
src_file = /home/tylerdurden/project_mayhem
backup_name = project_mayhem_backup
container = my_backup_container
[job_schedule]
time_created = 1234
time_started = 1234
time_ended =
status = scheduled
schedule_time = 2015-06-02T16:20:00

View File

@ -21,8 +21,7 @@ Hudson (tjh@cryptsoft.com).
from freezer_api.api.v1 import backups
from freezer_api.api.v1 import clients
from freezer_api.api.v1 import actions
from freezer_api.api.v1 import configs
from freezer_api.api.v1 import jobs
from freezer_api.api.v1 import homedoc
VERSION = {
@ -55,15 +54,9 @@ def public_endpoints(storage_driver):
('/clients/{client_id}',
clients.ClientsResource(storage_driver)),
('/actions',
actions.ActionsCollectionResource(storage_driver)),
('/jobs',
jobs.JobsCollectionResource(storage_driver)),
('/actions/{action_id}',
actions.ActionsResource(storage_driver)),
('/configs',
configs.ConfigsCollectionResource(storage_driver)),
('/configs/{config_id}',
configs.ConfigsResource(storage_driver))
('/jobs/{job_id}',
jobs.JobsResource(storage_driver)),
]

View File

@ -1,116 +0,0 @@
"""
Copyright 2015 Hewlett-Packard
Licensed under the Apache License, Version 2.0 (the "License");
you may not use this file except in compliance with the License.
You may obtain a copy of the License at
http://www.apache.org/licenses/LICENSE-2.0
Unless required by applicable law or agreed to in writing, software
distributed under the License is distributed on an "AS IS" BASIS,
WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
See the License for the specific language governing permissions and
limitations under the License.
This product includes cryptographic software written by Eric Young
(eay@cryptsoft.com). This product includes software written by Tim
Hudson (tjh@cryptsoft.com).
========================================================================
"""
import time
import uuid
import falcon
from freezer_api.common import exceptions
class ActionsCollectionResource(object):
    """
    Handler for endpoint: /v1/actions

    Lists existing actions and creates new action entries on behalf of the
    user identified by the X-User-ID header.
    """

    def __init__(self, storage_driver):
        self.db = storage_driver

    def on_get(self, req, resp):
        # GET /v1/actions(?limit,offset)   Lists actions
        owner = req.get_header('X-User-ID')
        first = req.get_param_as_int('offset') or 0
        page_size = req.get_param_as_int('limit') or 10
        # an optional search document may be supplied in the request body
        query = req.context.get('doc', {})
        matches = self.db.search_action(user_id=owner, offset=first,
                                        limit=page_size, search=query)
        req.context['result'] = {'actions': matches}

    def on_post(self, req, resp):
        # POST /v1/actions    Creates action entry
        if 'doc' not in req.context:
            raise exceptions.BadDataFormat(
                message='Missing request body')
        doc = req.context['doc']
        owner = req.get_header('X-User-ID')
        # stamp server-side metadata; -1 marks "not happened yet"
        doc.update({
            'action_id': str(uuid.uuid4()),
            'time_created': int(time.time()),
            'time_started': -1,
            'time_ended': -1,
            'status': 'pending'
        })
        action_id = self.db.add_action(
            user_id=owner, doc=doc)
        resp.status = falcon.HTTP_201
        req.context['result'] = {'action_id': action_id}
class ActionsResource(object):
    """
    Handler for endpoint: /v1/actions/{action_id}

    Retrieves, deletes and patches a single action document.
    """

    # statuses that mark the beginning / end of an action's lifecycle;
    # they drive the timestamp updates applied in on_patch
    starting_states = ['started']
    ending_states = ['aborted', 'success', 'fail']

    def __init__(self, storage_driver):
        self.db = storage_driver

    def on_get(self, req, resp, action_id):
        # GET /v1/actions/{action_id}     retrieves the specified action
        # search in body
        owner = req.get_header('X-User-ID') or ''
        found = self.db.get_action(user_id=owner, action_id=action_id)
        if not found:
            resp.status = falcon.HTTP_404
        else:
            req.context['result'] = found

    def on_delete(self, req, resp, action_id):
        # DELETE /v1/actions/{action_id}     Deletes the specified action
        owner = req.get_header('X-User-ID')
        self.db.delete_action(
            user_id=owner, action_id=action_id)
        req.context['result'] = {'action_id': action_id}
        resp.status = falcon.HTTP_204

    def on_patch(self, req, resp, action_id):
        # PATCH /v1/actions/{action_id}     updates the specified action
        owner = req.get_header('X-User-ID') or ''
        partial = req.context.get('doc', {})
        # Some knowledge of internal workings here:
        # a status change implies the matching timestamp update
        new_status = partial.get('status', '')
        if new_status in self.starting_states:
            partial['time_started'] = int(time.time())
        elif new_status in self.ending_states:
            partial['time_ended'] = int(time.time())
        version = self.db.update_action(user_id=owner,
                                        action_id=action_id,
                                        patch=partial)
        req.context['result'] = {'action_id': action_id,
                                 'patch': partial,
                                 'version': version}

View File

@ -21,7 +21,6 @@ Hudson (tjh@cryptsoft.com).
import falcon
from freezer_api.common import exceptions
import logging
class BackupsCollectionResource(object):

View File

@ -22,6 +22,7 @@ Hudson (tjh@cryptsoft.com).
import falcon
from freezer_api.common import exceptions
class ClientsCollectionResource(object):
"""
Handler for endpoint: /v1/clients

View File

@ -1,87 +0,0 @@
"""
Copyright 2014 Hewlett-Packard
Licensed under the Apache License, Version 2.0 (the "License");
you may not use this file except in compliance with the License.
You may obtain a copy of the License at
http://www.apache.org/licenses/LICENSE-2.0
Unless required by applicable law or agreed to in writing, software
distributed under the License is distributed on an "AS IS" BASIS,
WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
See the License for the specific language governing permissions and
limitations under the License.
This product includes cryptographic software written by Eric Young
(eay@cryptsoft.com). This product includes software written by Tim
Hudson (tjh@cryptsoft.com).
========================================================================
"""
import falcon
from freezer_api.common import exceptions
class ConfigsCollectionResource(object):
    """
    Handler for endpoint: /v1/configs

    Lists stored configs and creates new config entries for the user
    identified by the X-User-ID header.
    """

    def __init__(self, storage_driver):
        self.db = storage_driver

    def on_get(self, req, resp):
        # GET /v1/configs(?limit,offset)   lists stored configs
        owner = req.get_header('X-User-ID') or ''
        # NOTE: offset is read as a plain string here (unlike the other
        # collection handlers) and defaults to ''
        first = req.get_param('offset') or ''
        page_size = req.get_param_as_int('limit') or 10
        query = req.context.get('doc', {})
        matches = self.db.get_config(user_id=owner, offset=first,
                                     limit=page_size, search=query)
        req.context['result'] = {'configs': matches}

    def on_post(self, req, resp):
        # POST /v1/configs   creates a config entry
        if 'doc' not in req.context:
            raise exceptions.BadDataFormat(
                message='Missing request body',
                resp_body={'error': 'missing request body'})
        doc = req.context['doc']
        name = req.get_header('X-User-Name')
        owner = req.get_header('X-User-ID')
        config_id = self.db.add_config(
            user_id=owner, user_name=name, doc=doc)
        resp.status = falcon.HTTP_201
        req.context['result'] = {'config_id': config_id}
class ConfigsResource(object):
    """
    Handler for endpoint: /v1/configs/{config_id}

    Retrieves, deletes and patches a single config document.
    """

    def __init__(self, storage_driver):
        self.db = storage_driver

    def on_get(self, req, resp, config_id):
        # GET /v1/configs/{config_id}   retrieves the specified config
        owner = req.get_header('X-User-ID')
        found = self.db.get_config(user_id=owner, config_id=config_id)
        req.context['result'] = found

    def on_delete(self, req, resp, config_id):
        # DELETE /v1/configs/{config_id}   deletes the specified config
        owner = req.get_header('X-User-ID')
        self.db.delete_config(
            user_id=owner, config_id=config_id)
        req.context['result'] = {'config_id': config_id}
        resp.status = falcon.HTTP_204

    def on_patch(self, req, resp, config_id):
        # PATCH /v1/configs/{config_id}   partially updates the config
        owner = req.get_header('X-User-ID') or ''
        partial = req.context.get('doc', {})
        version = self.db.update_config(user_id=owner,
                                        config_id=config_id,
                                        patch=partial)
        req.context['result'] = {'config_id': config_id,
                                 'patch': partial,
                                 'version': version}

101
freezer_api/api/v1/jobs.py Normal file
View File

@ -0,0 +1,101 @@
"""
Copyright 2015 Hewlett-Packard
Licensed under the Apache License, Version 2.0 (the "License");
you may not use this file except in compliance with the License.
You may obtain a copy of the License at
http://www.apache.org/licenses/LICENSE-2.0
Unless required by applicable law or agreed to in writing, software
distributed under the License is distributed on an "AS IS" BASIS,
WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
See the License for the specific language governing permissions and
limitations under the License.
This product includes cryptographic software written by Eric Young
(eay@cryptsoft.com). This product includes software written by Tim
Hudson (tjh@cryptsoft.com).
========================================================================
"""
import falcon
from freezer_api.common import exceptions
class JobsCollectionResource(object):
    """
    Handler for endpoint: /v1/jobs

    Lists existing jobs and creates new job entries for the user
    identified by the X-User-ID header.
    """

    def __init__(self, storage_driver):
        self.db = storage_driver

    def on_get(self, req, resp):
        # GET /v1/jobs(?limit,offset)   lists the user's jobs
        owner = req.get_header('X-User-ID')
        first = req.get_param_as_int('offset') or 0
        page_size = req.get_param_as_int('limit') or 10
        # an optional search document may be supplied in the request body
        query = req.context.get('doc', {})
        matches = self.db.search_job(user_id=owner, offset=first,
                                     limit=page_size, search=query)
        req.context['result'] = {'jobs': matches}

    def on_post(self, req, resp):
        # POST /v1/jobs   creates a new job entry
        if 'doc' not in req.context:
            raise exceptions.BadDataFormat(
                message='Missing request body')
        doc = req.context['doc']
        owner = req.get_header('X-User-ID')
        new_id = self.db.add_job(user_id=owner, doc=doc)
        resp.status = falcon.HTTP_201
        req.context['result'] = {'job_id': new_id}
class JobsResource(object):
    """
    Handler for endpoint: /v1/jobs/{job_id}

    Retrieves, deletes, patches and creates/replaces a single job document.
    """

    def __init__(self, storage_driver):
        self.db = storage_driver

    def on_get(self, req, resp, job_id):
        # GET /v1/jobs/{job_id}   retrieves the specified job
        # search in body
        owner = req.get_header('X-User-ID') or ''
        found = self.db.get_job(user_id=owner, job_id=job_id)
        if not found:
            resp.status = falcon.HTTP_404
        else:
            req.context['result'] = found

    def on_delete(self, req, resp, job_id):
        # DELETE /v1/jobs/{job_id}   deletes the specified job
        owner = req.get_header('X-User-ID')
        self.db.delete_job(user_id=owner, job_id=job_id)
        req.context['result'] = {'job_id': job_id}
        resp.status = falcon.HTTP_204

    def on_patch(self, req, resp, job_id):
        # PATCH /v1/jobs/{job_id}   partially updates the stored job
        owner = req.get_header('X-User-ID') or ''
        partial = req.context.get('doc', {})
        version = self.db.update_job(user_id=owner,
                                     job_id=job_id,
                                     patch_doc=partial)
        req.context['result'] = {'job_id': job_id,
                                 'version': version}

    def on_post(self, req, resp, job_id):
        # POST /v1/jobs/{job_id}   creates or replaces the job document
        # using the job_id taken from the URL
        owner = req.get_header('X-User-ID') or ''
        replacement = req.context.get('doc', {})
        version = self.db.replace_job(user_id=owner,
                                      job_id=job_id,
                                      doc=replacement)
        resp.status = falcon.HTTP_201
        req.context['result'] = {'job_id': job_id,
                                 'version': version}

View File

@ -23,6 +23,7 @@ Hudson (tjh@cryptsoft.com).
import falcon
import logging
class FreezerAPIException(falcon.HTTPError):
"""
Base Freezer API Exception
@ -73,10 +74,17 @@ class DocumentNotFound(FreezerAPIException):
title="Not Found",
description=ex.message)
class AccessForbidden(FreezerAPIException):
    # Raised when a stored document exists but belongs to a different user
    # (see the owner check in the storage layer's get()).

    @staticmethod
    def handle(ex, req, resp, params):
        # falcon error-handler hook: translate the exception into an
        # HTTP 403 response carrying the original message
        raise falcon.HTTPForbidden(
            title="Access Forbidden",
            description=ex.message)
exception_handlers_catalog = [
BadDataFormat,
DocumentExists,
StorageEngineError,
DocumentNotFound
DocumentNotFound,
AccessForbidden
]

View File

@ -0,0 +1,212 @@
"""
Copyright 2015 Hewlett-Packard
Licensed under the Apache License, Version 2.0 (the "License");
you may not use this file except in compliance with the License.
You may obtain a copy of the License at
http://www.apache.org/licenses/LICENSE-2.0
Unless required by applicable law or agreed to in writing, software
distributed under the License is distributed on an "AS IS" BASIS,
WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
See the License for the specific language governing permissions and
limitations under the License.
This product includes cryptographic software written by Eric Young
(eay@cryptsoft.com). This product includes software written by Tim
Hudson (tjh@cryptsoft.com).
========================================================================
"""
# JSON-Schema (Draft 4) property definitions for the "job_action" object.
# Every named property here is a plain string-typed field with the schema
# shape {"id": <name>, "type": "string"}, so they are generated uniformly.
job_action_properties = {
    field: {"id": field, "type": "string"}
    for field in ("action", "mode", "src_file", "backup_name", "container")
}
job_schedule_properties = {
"time_created": {
"id": "time_created",
"type": "integer"
},
"time_started": {
"id": "time_started",
"type": "integer"
},
"time_ended": {
"id": "time_ended",
"type": "integer"
},
"event": {
"id": "event",
"type": "string",
"enum": ["", "stop", "start", "abort", "remove"]
},
"status": {
"id": "status",
"type": "string",
"enum": ["completed", "stop", "scheduled", "running", "aborting", "removed"]
},
"result": {
"id": "result",
"type": "string",
"enum": ["", "success", "fail", "aborted"]
},
"schedule_date": {
"id": "schedule_date",
"type": "string",
"pattern": "^(-?(?:[1-9][0-9]*)?[0-9]{4})-(1[0-2]|0[1-9])-(3[01]|0[1-9]|[12][0-9])T(2[0-3]|[01][0-9]):([0-5][0-9]):([0-5][0-9])(\.[0-9]+)?(Z|[+-](?:2[0-3]|[01][0-9]):[0-5][0-9])?$"
},
"schedule_interval": {
"id": "schedule_interval",
"type": "string",
"pattern": "^(continuous|(\d+ +(weeks|weeks|days|hours|minutes|seconds)))$"
},
"schedule_start_date": {
"id": "schedule_start_date",
"type": "string",
"pattern": "^(-?(?:[1-9][0-9]*)?[0-9]{4})-(1[0-2]|0[1-9])-(3[01]|0[1-9]|[12][0-9])T(2[0-3]|[01][0-9]):([0-5][0-9]):([0-5][0-9])(\.[0-9]+)?(Z|[+-](?:2[0-3]|[01][0-9]):[0-5][0-9])?$"
},
"schedule_end_date": {
"id": "schedule_end_date",
"type": "string",
"pattern": "^(-?(?:[1-9][0-9]*)?[0-9]{4})-(1[0-2]|0[1-9])-(3[01]|0[1-9]|[12][0-9])T(2[0-3]|[01][0-9]):([0-5][0-9]):([0-5][0-9])(\.[0-9]+)?(Z|[+-](?:2[0-3]|[01][0-9]):[0-5][0-9])?$"
},
"schedule_year": {
"id": "schedule_year",
"type": "string",
"pattern": "^\d{4}$"
},
"schedule_month": {
"id": "schedule_month",
"type": "string"
},
"schedule_day": {
"id": "schedule_day",
"type": "string"
},
"schedule_week": {
"id": "schedule_week",
"type": "string"
},
"schedule_day_of_week": {
"id": "schedule_day_of_week",
"type": "string"
},
"schedule_hour": {
"id": "schedule_hour",
"type": "string"
},
"schedule_minute": {
"id": "schedule_minute",
"type": "string"
},
"schedule_second": {
"id": "schedule_second",
"type": "string"
},
}
# JSON-Schema (Draft 4) for a complete job document as stored by the api.
# Fix vs the original: the id patterns are raw strings so "\w" is a regex
# escape rather than a (deprecated) invalid Python string escape.
job_schema = {
    "id": "/",
    "type": "object",
    "properties": {
        "job_action": {
            "id": "job_action",
            "type": "object",
            "properties": job_action_properties,
            # freezer options not listed in job_action_properties are allowed
            "additionalProperties": True,
            "required": [
                "action"
            ]
        },
        "job_schedule": {
            "id": "job_schedule",
            "type": "object",
            "properties": job_schedule_properties,
            "additionalProperties": False,
        },
        # identifiers are restricted to word characters and dashes
        "job_id": {
            "id": "job_id",
            "pattern": r"^[\w-]+$",
            "type": "string"
        },
        "client_id": {
            "id": "client_id",
            "pattern": r"^[\w-]+$",
            "type": "string"
        },
        "user_id": {
            "id": "user_id",
            "pattern": r"^[\w-]+$",
            "type": "string"
        },
        "description": {
            "id": "description",
            "type": "string"
        }
    },
    "additionalProperties": False,
    # description is the only optional top-level field
    "required": [
        "job_action",
        "job_schedule",
        "job_id",
        "client_id",
        "user_id"
    ]
}
# JSON-Schema (Draft 4) for a partial job update (PATCH): every field is
# optional, unknown top-level keys are rejected, and — unlike job_schema —
# the plain id/description fields carry no pattern constraint.
job_patch_schema = {
    "id": "/",
    "type": "object",
    "properties": dict(
        {
            "job_action": {
                "id": "job_action",
                "type": "object",
                "properties": job_action_properties,
                "additionalProperties": True
            },
            "job_schedule": {
                "id": "job_schedule",
                "type": "object",
                "properties": job_schedule_properties,
                "additionalProperties": False,
            },
        },
        **{field: {"id": field, "type": "string"}
           for field in ("job_id", "client_id", "user_id", "description")}
    ),
    "additionalProperties": False
}

View File

@ -19,7 +19,12 @@ Hudson (tjh@cryptsoft.com).
========================================================================
"""
import time
import uuid
import jsonschema
import json_schemas
from freezer_api.common import exceptions
class BackupMetadataDoc:
@ -46,13 +51,6 @@ class BackupMetadataDoc:
'user_name': self.user_name,
'backup_metadata': self.data}
@staticmethod
def un_serialize(d):
return BackupMetadataDoc(
user_id=d['user_id'],
user_name=d['user_name'],
data=d['backup_metadata'])
@property
def backup_set_id(self):
return '{0}_{1}_{2}'.format(
@ -70,42 +68,55 @@ class BackupMetadataDoc:
)
class ConfigDoc:
"""
Wraps a config_file dict and adds some utility methods,
and fields
"""
def __init__(self, user_id='', user_name='', data={}):
self.user_id = user_id
self.user_name = user_name
self.data = data
# self.id = str(uuid.uuid4().hex)
def is_valid(self):
try:
assert (self.config_id is not '')
assert (self.user_id is not '')
except Exception:
return False
return True
def serialize(self):
return {'config_id': self.config_id,
'user_id': self.user_id,
'user_name': self.user_name,
'config_file': self.data}
class JobDoc:
job_doc_validator = jsonschema.Draft4Validator(
schema=json_schemas.job_schema)
job_patch_validator = jsonschema.Draft4Validator(
schema=json_schemas.job_patch_schema)
@staticmethod
def un_serialize(d):
return ConfigDoc(
user_id=d['user_id'],
user_name=d['user_name'],
data=d['config_file'])
def validate(doc):
try:
JobDoc.job_doc_validator.validate(doc)
except Exception as e:
raise exceptions.BadDataFormat(str(e).splitlines()[0])
@property
def config_set_id(self):
return {'config_id': str(uuid.uuid4().hex)}
@staticmethod
def validate_patch(doc):
try:
JobDoc.job_patch_validator.validate(doc)
except Exception as e:
raise exceptions.BadDataFormat(str(e).splitlines()[0])
@property
def config_id(self):
return str(uuid.uuid4().hex)
@staticmethod
def create_patch(doc):
# changes in user_id or job_id are not allowed
doc.pop('user_id', None)
doc.pop('job_id', None)
JobDoc.validate_patch(doc)
return doc
@staticmethod
def create(doc, user_id):
job_schedule = doc.get('job_schedule', {})
job_schedule.update({
'time_created': int(time.time()),
'time_started': -1,
'time_ended': -1
})
doc.update({
'user_id': user_id,
'job_id': uuid.uuid4().hex,
'job_schedule': job_schedule
})
JobDoc.validate(doc)
return doc
@staticmethod
def update(doc, user_id, job_id):
doc.update({
'user_id': user_id,
'job_id': job_id,
})
JobDoc.validate(doc)
return doc

View File

@ -22,7 +22,7 @@ Hudson (tjh@cryptsoft.com).
import elasticsearch
import logging
from freezer_api.common.utils import BackupMetadataDoc
from freezer_api.common.utils import ConfigDoc
from freezer_api.common.utils import JobDoc
from freezer_api.common import exceptions
@ -37,15 +37,19 @@ class TypeManager:
user_id_filter = {"term": {"user_id": user_id}}
base_filter = [user_id_filter]
match_list = [{"match": m} for m in search.get('match', [])]
if match_list:
base_filter.append({"query": {"bool": {"must": match_list}}})
match_not_list = [{"match": m} for m in search.get('match_not', [])]
base_filter.append({"query": {"bool": {"must": match_list, "must_not": match_not_list}}})
return base_filter
@staticmethod
def get_search_query(user_id, doc_id, search={}):
base_filter = TypeManager.get_base_search_filter(user_id, search)
query_filter = {"filter": {"bool": {"must": base_filter}}}
return {'query': {'filtered': query_filter}}
try:
base_filter = TypeManager.get_base_search_filter(user_id, search)
query_filter = {"filter": {"bool": {"must": base_filter}}}
return {'query': {'filtered': query_filter}}
except:
raise exceptions.StorageEngineError(
message='search operation failed: query not valid')
def get(self, user_id, doc_id):
try:
@ -53,25 +57,24 @@ class TypeManager:
doc_type=self.doc_type,
id=doc_id)
doc = res['_source']
if doc['user_id'] != user_id:
raise elasticsearch.TransportError()
except elasticsearch.TransportError:
raise exceptions.DocumentNotFound(
message='No document found with ID {0}'.format(doc_id))
except Exception as e:
raise exceptions.StorageEngineError(
message='Get operation failed: {0}'.format(e))
if doc['user_id'] != user_id:
raise exceptions.AccessForbidden("Document access forbidden")
return doc
def search(self, user_id, doc_id=None, search={}, offset=0, limit=10):
try:
query_dsl = self.get_search_query(user_id, doc_id, search)
except:
raise exceptions.StorageEngineError(
message='search operation failed: query not valid')
query_dsl = self.get_search_query(user_id, doc_id, search)
try:
res = self.es.search(index=self.index, doc_type=self.doc_type,
size=limit, from_=offset, body=query_dsl)
except elasticsearch.ConnectionError:
raise exceptions.StorageEngineError(
message='unable to connecto to db server')
except Exception as e:
raise exceptions.StorageEngineError(
message='search operation failed: {0}'.format(e))
@ -83,17 +86,14 @@ class TypeManager:
res = self.es.index(index=self.index, doc_type=self.doc_type,
body=doc, id=doc_id)
created = res['created']
version = res['_version']
except Exception as e:
raise exceptions.StorageEngineError(
message='index operation failed {0}'.format(e))
return created
return (created, version)
def delete(self, user_id, doc_id):
try:
query_dsl = self.get_search_query(user_id, doc_id)
except:
raise exceptions.StorageEngineError(
message='Delete operation failed: query not valid')
query_dsl = self.get_search_query(user_id, doc_id)
try:
self.es.delete_by_query(index=self.index,
doc_type=self.doc_type,
@ -140,7 +140,7 @@ class ClientTypeManager(TypeManager):
return {'query': {'filtered': query_filter}}
class ActionTypeManager(TypeManager):
class JobTypeManager(TypeManager):
def __init__(self, es, doc_type, index='freezer'):
TypeManager.__init__(self, es, doc_type, index=index)
@ -148,57 +148,23 @@ class ActionTypeManager(TypeManager):
def get_search_query(user_id, doc_id, search={}):
base_filter = TypeManager.get_base_search_filter(user_id, search)
if doc_id is not None:
base_filter.append({"term": {"action_id": doc_id}})
base_filter.append({"term": {"job_id": doc_id}})
query_filter = {"filter": {"bool": {"must": base_filter}}}
return {'query': {'filtered': query_filter}}
def update(self, action_id, action_update_doc):
update_doc = {"doc": action_update_doc}
def update(self, job_id, job_update_doc):
update_doc = {"doc": job_update_doc}
try:
res = self.es.update(index=self.index, doc_type=self.doc_type,
id=action_id, body=update_doc)
id=job_id, body=update_doc)
version = res['_version']
except elasticsearch.TransportError:
raise exceptions.DocumentNotFound(
message='Unable to find action to update '
'with ID {0} '.format(action_id))
except Exception as e:
message='Unable to find job to update '
'with id {0} '.format(job_id))
except Exception:
raise exceptions.StorageEngineError(
message='Unable to update action, '
'action ID: {0} '.format(action_id))
return version
class ConfigTypeManager(TypeManager):
def __init__(self, es, doc_type, index='freezer'):
TypeManager.__init__(self, es, doc_type, index=index)
@staticmethod
def get_search_query(user_id, doc_id, search={}):
base_filter = TypeManager.get_base_search_filter(user_id, search)
if doc_id is not None:
base_filter.append({"term": {"config_id": doc_id}})
query_filter = {"filter": {"bool": {"must": base_filter}}}
return {'query': {'filtered': query_filter}}
def update(self, config_id, config_update_doc):
update_doc = {'doc': config_update_doc}
try:
print config_update_doc
res = self.es.update(index=self.index,
doc_type=self.doc_type,
id=config_id,
body=update_doc)
print 'here?'
version = res['_version']
except elasticsearch.TransportError as error:
raise exceptions.DocumentNotFound(
message='Unable to find configuration file to update '
'with ID {0}'.format(config_id))
except Exception as error:
raise exceptions.StorageEngineError(
message='Unable to update configuration file, '
'config ID {0}'.format(config_id))
message='Unable to update job with id {0}'.format(job_id))
return version
@ -210,10 +176,10 @@ class ElasticSearchEngine(object):
logging.info('Using Elasticsearch host {0}'.format(hosts))
self.backup_manager = BackupTypeManager(self.es, 'backups')
self.client_manager = ClientTypeManager(self.es, 'clients')
self.action_manager = ActionTypeManager(self.es, 'actions')
self.config_manager = ConfigTypeManager(self.es, 'configs')
self.job_manager = JobTypeManager(self.es, 'jobs')
def get_backup(self, user_id, backup_id=None, offset=0, limit=10, search={}):
def get_backup(self, user_id, backup_id=None,
offset=0, limit=10, search={}):
return self.backup_manager.search(user_id,
backup_id,
search=search,
@ -231,16 +197,14 @@ class ElasticSearchEngine(object):
raise exceptions.DocumentExists(
message='Backup data already existing '
'with ID {0}'.format(backup_id))
if not self.backup_manager.insert(backup_metadata_doc.serialize()):
raise exceptions.StorageEngineError(
message='Index operation failed, '
'backup ID: {0}'.format(backup_id))
self.backup_manager.insert(backup_metadata_doc.serialize())
return backup_id
def delete_backup(self, user_id, backup_id):
return self.backup_manager.delete(user_id, backup_id)
def get_client(self, user_id, client_id=None, offset=0, limit=10, search={}):
def get_client(self, user_id, client_id=None,
offset=0, limit=10, search={}):
return self.client_manager.search(user_id,
client_id,
search=search,
@ -254,13 +218,11 @@ class ElasticSearchEngine(object):
existing = self.client_manager.search(user_id, client_id)
if existing: # len(existing) > 0
raise exceptions.DocumentExists(
message='Client already registered with ID {0}'.format(client_id))
message=('Client already registered with '
'ID {0}'.format(client_id)))
client_doc = {'client': doc,
'user_id': user_id}
if not self.client_manager.insert(client_doc):
raise exceptions.StorageEngineError(
message='Index operation failed, '
'client ID: {0}'.format(client_id))
self.client_manager.insert(client_doc)
logging.info('Client registered, client_id: {0}'.
format(client_id))
return client_id
@ -268,86 +230,51 @@ class ElasticSearchEngine(object):
def delete_client(self, user_id, client_id):
return self.client_manager.delete(user_id, client_id)
def get_action(self, user_id, action_id):
return self.action_manager.get(user_id, action_id)
def get_job(self, user_id, job_id):
return self.job_manager.get(user_id, job_id)
def search_action(self, user_id, offset=0, limit=10, search={}):
return self.action_manager.search(user_id,
search=search,
offset=offset,
limit=limit)
def search_job(self, user_id, offset=0, limit=10, search={}):
return self.job_manager.search(user_id,
search=search,
offset=offset,
limit=limit)
def add_action(self, user_id, doc):
action_id = doc.get('action_id', None)
if action_id is None:
raise exceptions.BadDataFormat(message='Missing action ID')
action_doc = {'action': doc,
'user_id': user_id}
if not self.action_manager.insert(action_doc, action_id):
raise exceptions.StorageEngineError(
message='Index operation failed, '
' action ID: {0}'.format(action_id))
logging.info('Action registered, action ID: {0}'.
format(action_id))
return action_id
def add_job(self, user_id, doc):
jobdoc = JobDoc.create(doc, user_id)
job_id = jobdoc['job_id']
self.job_manager.insert(jobdoc, job_id)
logging.info('Job registered, job id: {0}'.
format(job_id))
return job_id
def delete_action(self, user_id, action_id):
return self.action_manager.delete(user_id, action_id)
def delete_job(self, user_id, job_id):
return self.job_manager.delete(user_id, job_id)
def update_action(self, user_id, action_id, patch):
if 'action_id' in patch:
raise exceptions.BadDataFormat(
message='Action ID modification is not allowed, '
'action ID: {0}'.format(action_id))
action_doc = self.action_manager.get(user_id, action_id)
action_doc['action'].update(patch)
version = self.action_manager.update(action_id, action_doc)
logging.info('Action {0} updated to version {1}'.
format(action_id, version))
def update_job(self, user_id, job_id, patch_doc):
valid_patch = JobDoc.create_patch(patch_doc)
# check that document exists
assert (self.job_manager.get(user_id, job_id))
version = self.job_manager.update(job_id, valid_patch)
logging.info('Job {0} updated to version {1}'.
format(job_id, version))
return version
def add_config(self, user_id, user_name, doc):
config_doc = ConfigDoc(user_id, user_name, doc)
config_doc = config_doc.serialize()
config_id = config_doc['config_id']
def replace_job(self, user_id, job_id, doc):
# check that no document exists with
# same job_id and different user_id
try:
self.job_manager.get(user_id, job_id)
except exceptions.DocumentNotFound:
pass
if config_id is None:
raise exceptions.BadDataFormat(message='Missing config ID')
valid_doc = JobDoc.update(doc, user_id, job_id)
if not self.config_manager.insert(config_doc,
doc_id=config_id):
raise exceptions.StorageEngineError(
message='Index operation failed, '
' config ID: {0}'.format(config_id))
logging.info('Config registered, config ID: {0}'.
format(config_id))
return config_id
def delete_config(self, user_id, config_id):
return self.config_manager.delete(user_id, config_id)
def get_config(self, user_id, config_id=None,
offset=0, limit=10, search={}):
return self.config_manager.search(user_id,
config_id,
search=search,
offset=offset,
limit=limit)
def update_config(self, user_id, config_id, patch,
offset=0, limit=10, search={}):
if 'config_id' in patch:
raise exceptions.BadDataFormat(
message='Config ID modification is not allowed, '
' config ID: {0}'.format(config_id))
config_doc = self.config_manager.search(user_id,
config_id,
search=search,
offset=offset,
limit=limit)[0]
config_doc['config_file'].update(patch)
version = self.config_manager.update(config_id, config_doc)
logging.info('Configuration file {0} updated to version {1}'.
format(config_id, version))
(created, version) = self.job_manager.insert(valid_doc, job_id)
if created:
logging.info('Job {0} created'.format(job_id, version))
else:
logging.info('Job {0} replaced with version {1}'.
format(job_id, version))
return version

View File

@ -22,7 +22,7 @@ Hudson (tjh@cryptsoft.com).
import io
import copy
fake_data_0_backup_id = 'freezer_container_alpha_important_data_backup_8475903425_0'
fake_data_0_user_id = 'qwerty1234'
@ -170,73 +170,99 @@ fake_data_0_elasticsearch_miss = {
"took": 1
}
fake_action_0_user_id = "f4db4da085f043059441565720b217c7"
fake_action_0_action_id = "e7181e5e-2c75-43f8-92c0-c037ae5f11e4"
fake_job_0_user_id = "f4db4da085f043059441565720b217c7"
fake_job_0_job_id = "e7181e5e-2c75-43f8-92c0-c037ae5f11e4"
fake_action_0_elasticsearch_not_found = {
fake_job_0_elasticsearch_not_found = {
"_id": "e7181e5e-2c75-43f8-92c0-c037ae5f11e43",
"_index": "freezer",
"_type": "actions",
"_type": "job",
"found": False
}
fake_action_0 = {
"action_id": "e7181e5e-2c75-43f8-92c0-c037ae5f11e4",
"client_id": "mytenantid_myhostname",
"description": "test action 4",
"job": {
"action": "restore",
"backup-name": "project_mayhem_backup",
"container": "my_backup_container",
"max_cpu_priority": True,
"restore-abs-path": "/home/tylerdurden/project_mayhem",
"restore-from-host": "another_host"
},
"status": "pending",
"time_created": 1431100962,
"time_end": 0,
"time_start": 0
fake_job_0 = {
"job_action": {
"action": "backup",
"mode": "fs",
"src_file": "/home/tylerdurden/project_mayhem",
"backup_name": "project_mayhem_backup",
"container": "my_backup_container"
},
"job_schedule": {
"time_created": 1234,
"time_started": 1234,
"time_ended": 1234,
"status": "stop",
"schedule_date": "2015-06-02T16:20:00",
"schedule_interval": "2 days"
},
"job_id": "e7181e5e-2c75-43f8-92c0-c037ae5f11e4",
"client_id": "mytenantid_myhostname",
"user_id": "f4db4da085f043059441565720b217c7",
"description": "test action 4"
}
fake_action_0_doc = {
"action": fake_action_0,
"user_id": "f4db4da085f043059441565720b217c7"
}
def get_fake_job_0():
return copy.deepcopy(fake_job_0)
fake_action_0_elasticsearch_found = {
def get_fake_job_1():
return copy.deepcopy(fake_job_1)
fake_job_0_elasticsearch_found = {
"_id": "e7181e5e-2c75-43f8-92c0-c037ae5f11e4",
"_index": "freezer",
"_source": fake_action_0_doc,
"_source": fake_job_0,
"_type": "actions",
"_version": 1,
"found": True
}
fake_action_1 = {
"action_id": "1b05e367-7832-42df-850e-bc48eabee04e",
"client_id": "mytenantid_myhostname",
"description": "test action 4",
"job": {
"action": "restore",
"backup-name": "project_mayhem_backup",
"container": "my_backup_container",
"max_cpu_priority": True,
"restore-abs-path": "/home/tylerdurden/project_mayhem",
"restore-from-host": "another_host"
},
"status": "pending",
"time_created": 1431100962,
"time_end": 0,
"time_start": 0
fake_job_1 = {
"job_action": {
"action": "backup",
"mode": "fs",
"src_file": "/home/tylerdurden/project_mayhem",
"backup_name": "project_mayhem_backup",
"container": "my_backup_container",
},
"job_schedule": {
"time_created": 1234,
"time_started": 1234,
"time_ended": 0,
"status": "invalid",
"schedule_time": "2015-06-02T16:20:00"
},
"job_id": "1b05e367-7832-42df-850e-bc48eabee04e",
"client_id": "mytenantid_myhostname",
"user_id": "f4db4da085f043059441565720b217c7",
"description": "test action 4"
}
fake_action_1_doc = {
"action": fake_action_1,
"user_id": "f4db4da085f043059441565720b217c7"
}
# fake_action_1 = {
# "action_id": "1b05e367-7832-42df-850e-bc48eabee04e",
# "client_id": "mytenantid_myhostname",
# "description": "test action 4",
# "job": {
# "action": "restore",
# "backup-name": "project_mayhem_backup",
# "container": "my_backup_container",
# "max_cpu_priority": True,
# "restore-abs-path": "/home/tylerdurden/project_mayhem",
# "restore-from-host": "another_host"
# },
# "status": "pending",
# "time_created": 1431100962,
# "time_end": 0,
# "time_start": 0
# }
#
# fake_action_1_doc = {
# "action": fake_action_1,
# "user_id": "f4db4da085f043059441565720b217c7"
# }
#
#
fake_data_1_wrapped_backup_metadata = {
'backup_id': 'freezer_container_alpha_important_data_backup_125235431_1',
'user_id': 'qwerty1234',

View File

@ -1,239 +0,0 @@
import unittest
from mock import Mock, patch
import time
import random
import falcon
from common import *
from freezer_api.common.exceptions import *
from freezer_api.api.v1 import actions as v1_actions
class TestClientsCollectionResource(unittest.TestCase):
def setUp(self):
self.mock_db = Mock()
self.mock_req = Mock()
self.mock_req.get_header.return_value = fake_action_0_user_id
self.mock_req.context = {}
self.mock_req.status = falcon.HTTP_200
self.resource = v1_actions.ActionsCollectionResource(self.mock_db)
def test_on_get_return_empty_list(self):
self.mock_db.search_action.return_value = []
expected_result = {'actions': []}
self.resource.on_get(self.mock_req, self.mock_req)
result = self.mock_req.context['result']
self.assertEqual(result, expected_result)
self.assertEqual(self.mock_req.status, falcon.HTTP_200)
def test_on_get_return_correct_list(self):
self.mock_db.search_action.return_value = [fake_action_0_doc, fake_action_1_doc]
expected_result = {'actions': [fake_action_0_doc, fake_action_1_doc]}
self.resource.on_get(self.mock_req, self.mock_req)
result = self.mock_req.context['result']
self.assertEqual(result, expected_result)
self.assertEqual(self.mock_req.status, falcon.HTTP_200)
def test_on_post_raises_when_missing_body(self):
self.mock_db.add_action.return_value = fake_action_0_action_id
self.assertRaises(BadDataFormat, self.resource.on_post, self.mock_req, self.mock_req)
def test_on_post_inserts_correct_data(self):
action = fake_action_0.copy()
self.mock_req.context['doc'] = action
self.mock_db.add_action.return_value = fake_action_0_action_id
expected_result = {'action_id': fake_action_0_action_id}
self.resource.on_post(self.mock_req, self.mock_req)
self.assertEqual(self.mock_req.status, falcon.HTTP_201)
self.assertEqual(self.mock_req.context['result'], expected_result)
assigned_action_id = self.mock_req.context['doc']['action_id']
self.assertNotEqual(assigned_action_id, fake_action_0_action_id)
class TestClientsResource(unittest.TestCase):
def setUp(self):
self.mock_db = Mock()
self.mock_req = Mock()
self.mock_req.get_header.return_value = fake_action_0_user_id
self.mock_req.context = {}
self.mock_req.status = falcon.HTTP_200
self.resource = v1_actions.ActionsResource(self.mock_db)
def test_create_resource(self):
self.assertIsInstance(self.resource, v1_actions.ActionsResource)
def test_on_get_return_no_result_and_404_when_not_found(self):
self.mock_db.get_action.return_value = None
self.resource.on_get(self.mock_req, self.mock_req, fake_action_0_action_id)
self.assertNotIn('result', self.mock_req.context)
self.assertEqual(self.mock_req.status, falcon.HTTP_404)
def test_on_get_return_correct_data(self):
self.mock_db.get_action.return_value = fake_action_0
self.resource.on_get(self.mock_req, self.mock_req, fake_action_0_action_id)
result = self.mock_req.context['result']
self.assertEqual(result, fake_action_0)
self.assertEqual(self.mock_req.status, falcon.HTTP_200)
def test_on_delete_removes_proper_data(self):
self.resource.on_delete(self.mock_req, self.mock_req, fake_action_0_action_id)
result = self.mock_req.context['result']
expected_result = {'action_id': fake_action_0_action_id}
self.assertEquals(self.mock_req.status, falcon.HTTP_204)
self.assertEqual(result, expected_result)
@patch('freezer_api.api.v1.actions.time')
def test_on_patch_ok_with_some_fields(self, mock_time):
mock_time.time.return_value = int(time.time())
new_version = random.randint(0, 99)
self.mock_db.update_action.return_value = new_version
patch_doc = {'some_field': 'some_value',
'because': 'size_matters'}
self.mock_req.context['doc'] = patch_doc
expected_patch = patch_doc.copy()
expected_result = {'action_id': fake_action_0_action_id,
'patch': expected_patch,
'version': new_version}
self.resource.on_patch(self.mock_req, self.mock_req, fake_action_0_action_id)
self.mock_db.update_action.assert_called_with(
user_id=fake_action_0_user_id,
action_id=fake_action_0_action_id,
patch=patch_doc)
self.assertEqual(self.mock_req.status, falcon.HTTP_200)
result = self.mock_req.context['result']
self.assertEqual(result, expected_result)
@patch('freezer_api.api.v1.actions.time')
def test_on_patch_no_timestamp_on_unknown_status(self, mock_time):
timestamp = int(time.time())
mock_time.time.return_value = timestamp
new_version = random.randint(0, 99)
self.mock_db.update_action.return_value = new_version
patch_doc = {'some_field': 'some_value',
'status': 'happy'}
self.mock_req.context['doc'] = patch_doc
expected_patch = patch_doc.copy()
expected_result = {'action_id': fake_action_0_action_id,
'patch': expected_patch,
'version': new_version}
self.resource.on_patch(self.mock_req, self.mock_req, fake_action_0_action_id)
self.mock_db.update_action.assert_called_with(
user_id=fake_action_0_user_id,
action_id=fake_action_0_action_id,
patch=expected_patch)
self.assertEqual(self.mock_req.status, falcon.HTTP_200)
result = self.mock_req.context['result']
self.assertEqual(result, expected_result)
@patch('freezer_api.api.v1.actions.time')
def test_on_patch_adds_correct_start_time(self, mock_time):
timestamp = int(time.time())
mock_time.time.return_value = timestamp
new_version = random.randint(0, 99)
self.mock_db.update_action.return_value = new_version
patch_doc = {'some_field': 'some_value',
'status': 'started'}
self.mock_req.context['doc'] = patch_doc
expected_patch = patch_doc.copy()
expected_patch.update({"time_started": timestamp})
expected_result = {'action_id': fake_action_0_action_id,
'patch': expected_patch,
'version': new_version}
self.resource.on_patch(self.mock_req, self.mock_req, fake_action_0_action_id)
self.mock_db.update_action.assert_called_with(
user_id=fake_action_0_user_id,
action_id=fake_action_0_action_id,
patch=expected_patch)
self.assertEqual(self.mock_req.status, falcon.HTTP_200)
result = self.mock_req.context['result']
self.assertEqual(result, expected_result)
@patch('freezer_api.api.v1.actions.time')
def test_on_patch_adds_correct_end_time_on_abort(self, mock_time):
timestamp = int(time.time())
mock_time.time.return_value = timestamp
new_version = random.randint(0, 99)
self.mock_db.update_action.return_value = new_version
patch_doc = {'some_field': 'some_value',
'status': 'aborted'}
self.mock_req.context['doc'] = patch_doc
expected_patch = patch_doc.copy()
expected_patch.update({"time_ended": timestamp})
expected_result = {'action_id': fake_action_0_action_id,
'patch': expected_patch,
'version': new_version}
self.resource.on_patch(self.mock_req, self.mock_req, fake_action_0_action_id)
self.mock_db.update_action.assert_called_with(
user_id=fake_action_0_user_id,
action_id=fake_action_0_action_id,
patch=expected_patch)
self.assertEqual(self.mock_req.status, falcon.HTTP_200)
result = self.mock_req.context['result']
self.assertEqual(result, expected_result)
@patch('freezer_api.api.v1.actions.time')
def test_on_patch_adds_correct_end_time_on_success(self, mock_time):
timestamp = int(time.time())
mock_time.time.return_value = timestamp
new_version = random.randint(0, 99)
self.mock_db.update_action.return_value = new_version
patch_doc = {'some_field': 'some_value',
'status': 'success'}
self.mock_req.context['doc'] = patch_doc
expected_patch = patch_doc.copy()
expected_patch.update({"time_ended": timestamp})
expected_result = {'action_id': fake_action_0_action_id,
'patch': expected_patch,
'version': new_version}
self.resource.on_patch(self.mock_req, self.mock_req, fake_action_0_action_id)
self.mock_db.update_action.assert_called_with(
user_id=fake_action_0_user_id,
action_id=fake_action_0_action_id,
patch=expected_patch)
self.assertEqual(self.mock_req.status, falcon.HTTP_200)
result = self.mock_req.context['result']
self.assertEqual(result, expected_result)
@patch('freezer_api.api.v1.actions.time')
def test_on_patch_adds_correct_end_time_on_fail(self, mock_time):
timestamp = int(time.time())
mock_time.time.return_value = timestamp
new_version = random.randint(0, 99)
self.mock_db.update_action.return_value = new_version
patch_doc = {'some_field': 'some_value',
'status': 'fail'}
self.mock_req.context['doc'] = patch_doc
expected_patch = patch_doc.copy()
expected_patch.update({"time_ended": timestamp})
expected_result = {'action_id': fake_action_0_action_id,
'patch': expected_patch,
'version': new_version}
self.resource.on_patch(self.mock_req, self.mock_req, fake_action_0_action_id)
self.mock_db.update_action.assert_called_with(
user_id=fake_action_0_user_id,
action_id=fake_action_0_action_id,
patch=expected_patch)
self.assertEqual(self.mock_req.status, falcon.HTTP_200)
result = self.mock_req.context['result']
self.assertEqual(result, expected_result)

View File

View File

@ -43,35 +43,39 @@ class TypeManager(unittest.TestCase):
expected_q = [{'term': {'user_id': 'my_user_id'}},
{'query':
{'bool':
{'must':
{'must_not':
[],
'must':
[{'match': {'some_field': 'some text'}},
{'match': {'description': 'some other text'}}
]}}}]
]}}}]
self.assertEqual(q, expected_q)
def test_get_ok(self):
self.mock_es.get.return_value = fake_action_0_elasticsearch_found
res = self.type_manager.get(user_id=fake_action_0_user_id,
doc_id=fake_action_0_action_id)
self.assertEqual(res, fake_action_0_doc)
self.mock_es.get.return_value = fake_job_0_elasticsearch_found
res = self.type_manager.get(user_id=fake_job_0_user_id,
doc_id=fake_job_0_job_id)
self.assertEqual(res, fake_job_0)
def test_get_raise_DocumentNotFound_when_doc_not_found(self):
self.mock_es.get.side_effect = TransportError('regular test failure')
self.assertRaises(DocumentNotFound, self.type_manager.get,
user_id=fake_action_0_user_id,
doc_id=fake_action_0_action_id)
user_id=fake_job_0_user_id,
doc_id=fake_job_0_job_id)
def test_get_raise_StorageEngineError_when_db_raises(self):
self.mock_es.get.side_effect = Exception('regular test failure')
self.assertRaises(StorageEngineError, self.type_manager.get,
user_id=fake_action_0_user_id,
doc_id=fake_action_0_action_id)
user_id=fake_job_0_user_id,
doc_id=fake_job_0_job_id)
def test_get_raises_DocumentNotFound_when_user_id_not_match(self):
self.mock_es.get.return_value = fake_action_0_elasticsearch_found
self.assertRaises(DocumentNotFound, self.type_manager.get,
def test_get_raises_AccessForbidden_when_user_id_not_match(self):
self.mock_es.get.return_value = fake_job_0_elasticsearch_found
self.assertRaises(AccessForbidden, self.type_manager.get,
user_id='obluraschi',
doc_id=fake_action_0_action_id)
doc_id=fake_job_0_job_id)
def test_search_ok(self):
self.mock_es.search.return_value = fake_data_0_elasticsearch_hit
@ -79,12 +83,13 @@ class TypeManager(unittest.TestCase):
{'bool':
{'must':
[{'term': {'user_id': 'my_user_id'}},
{'query':
{'bool':
{'must':
{'query': {'bool':
{'must_not':
[],
'must':
[{'match': {'some_field': 'some text'}},
{'match': {'description': 'some other text'}}]}}}
]}}}}}
]}}}}}
my_search = {'match': [{'some_field': 'some text'},
{'description': 'some other text'}]}
res = self.type_manager.search(user_id='my_user_id', doc_id='mydocid', search=my_search, offset=7, limit=19)
@ -96,10 +101,10 @@ class TypeManager(unittest.TestCase):
self.assertRaises(StorageEngineError, self.type_manager.search, user_id='my_user_id', doc_id='mydocid')
def test_insert_ok(self):
self.mock_es.index.return_value = {'created': True}
self.mock_es.index.return_value = {'created': True, '_version': 15}
test_doc = {'test_key_412': 'test_value_412'}
res = self.type_manager.insert(doc=test_doc)
self.assertEqual(res, True)
self.assertEqual(res, (True, 15))
self.mock_es.index.assert_called_with(index='freezer', doc_type='base_doc_type', body=test_doc, id=None)
def test_insert_fails(self):
@ -135,13 +140,16 @@ class TestBackupManager(unittest.TestCase):
{'bool':
{'must':
[{'term': {'user_id': 'my_user_id'}},
{'query': {'bool': {'must': [{'match': {'backup_name': 'my_backup'}},
{'match': {'mode': 'fs'}}]}}},
{'query': {'bool': {
'must_not':
[],
'must':
[{'match': {'backup_name': 'my_backup'}},
{'match': {'mode': 'fs'}}]}}
},
{'term': {'backup_id': 'my_doc_id'}},
{'range': {'timestamp': {'gte': 1428510506}}},
{'range': {'timestamp': {'lte': 1428510506}}}
]}}}}}
{'range': {'timestamp': {'lte': 1428510506}}}]}}}}}
self.assertEqual(q, expected_q)
@ -161,63 +169,68 @@ class ClientTypeManager(unittest.TestCase):
[{'term': {'user_id': 'my_user_id'}},
{'query':
{'bool':
{'must':
{'must_not':
[],
'must':
[{'match': {'some_field': 'some text'}},
{'match': {'description': 'some other text'}}]}}},
{'term': {'client_id': 'my_doc_id'}}
]}}}}}
{'match': {'description': 'some other text'}}]}}
},
{'term': {'client_id': 'my_doc_id'}}
]}}}}}
self.assertEqual(q, expected_q)
class ActionTypeManager(unittest.TestCase):
class JobTypeManager(unittest.TestCase):
def setUp(self):
self.mock_es = Mock()
self.action_manager = elastic.ActionTypeManager(self.mock_es, 'clients')
self.job_manager = elastic.JobTypeManager(self.mock_es, 'clients')
def test_get_search_query(self):
my_search = {'match': [{'some_field': 'some text'},
{'description': 'some other text'}]}
q = self.action_manager.get_search_query('my_user_id', 'my_doc_id', search=my_search)
q = self.job_manager.get_search_query('my_user_id', 'my_doc_id', search=my_search)
expected_q = {'query': {'filtered': {'filter':
{'bool':
{'must':
[{'term': {'user_id': 'my_user_id'}},
{'query':
{'bool':
{'must':
{'must_not':
[],
'must':
[{'match': {'some_field': 'some text'}},
{'match': {'description': 'some other text'}}]}}},
{'term': {'action_id': 'my_doc_id'}}
]}}}}}
{'term': {'job_id': 'my_doc_id'}}
]}}}}}
self.assertEqual(q, expected_q)
def test_update_ok(self):
self.mock_es.update.return_value = {
u'_id': u'd6c1e00d-b9c1-4eb3-8219-1e83c02af101',
u'_index': u'freezer',
u'_type': u'actions',
u'_type': u'jobs',
u'_version': 3
}
res = self.action_manager.update(action_id=fake_action_0_action_id,
action_update_doc={'status': 'sleepy'})
res = self.job_manager.update(job_id=fake_job_0_job_id,
job_update_doc={'status': 'sleepy'})
self.assertEqual(res, 3)
self.mock_es.update.assert_called_with(index=self.action_manager.index,
doc_type=self.action_manager.doc_type,
id=fake_action_0_action_id,
self.mock_es.update.assert_called_with(index=self.job_manager.index,
doc_type=self.job_manager.doc_type,
id=fake_job_0_job_id,
body={"doc": {'status': 'sleepy'}})
def test_update_raise_DocumentNotFound_when_not_found(self):
self.mock_es.update.side_effect = TransportError('regular test failure')
self.assertRaises(DocumentNotFound, self.action_manager.update,
action_id=fake_action_0_action_id,
action_update_doc={'status': 'sleepy'})
self.assertRaises(DocumentNotFound, self.job_manager.update,
job_id=fake_job_0_job_id,
job_update_doc={'status': 'sleepy'})
def test_update_raise_StorageEngineError_when_db_raises(self):
self.mock_es.update.side_effect = Exception('regular test failure')
self.assertRaises(StorageEngineError, self.action_manager.update,
action_id=fake_action_0_action_id,
action_update_doc={'status': 'sleepy'})
self.assertRaises(StorageEngineError, self.job_manager.update,
job_id=fake_job_0_job_id,
job_update_doc={'status': 'sleepy'})
@ -318,14 +331,6 @@ class TestElasticSearchEngine_backup(unittest.TestCase):
user_name=fake_data_0_user_name,
doc=fake_data_0_backup_metadata)
def test_add_backup_raises_when_manager_insert_fails(self):
self.eng.backup_manager.search.return_value = []
self.eng.backup_manager.insert.return_value = False
self.assertRaises(StorageEngineError, self.eng.add_backup,
user_id=fake_data_0_user_id,
user_name=fake_data_0_user_name,
doc=fake_data_0_backup_metadata)
def test_delete_backup_ok(self):
self.eng.backup_manager.delete.return_value = fake_data_0_backup_id
res = self.eng.delete_backup(user_id=fake_data_0_user_id,
@ -420,13 +425,6 @@ class TestElasticSearchEngine_client(unittest.TestCase):
user_id=fake_data_0_user_id,
doc=fake_client_info_0)
def test_add_client_raises_when_manager_insert_fails_without_raise(self):
self.eng.client_manager.search.return_value = []
self.eng.client_manager.insert.return_value = False
self.assertRaises(StorageEngineError, self.eng.add_client,
user_id=fake_data_0_user_id,
doc=fake_client_info_0)
def test_delete_client_ok(self):
self.eng.client_manager.delete.return_value = fake_client_info_0['client_id']
res = self.eng.delete_client(user_id=fake_data_0_user_id,
@ -439,133 +437,135 @@ class TestElasticSearchEngine_client(unittest.TestCase):
user_id=fake_data_0_user_id,
client_id=fake_client_info_0['client_id'])
class TestElasticSearchEngine_action(unittest.TestCase):
class TestElasticSearchEngine_job(unittest.TestCase):
@patch('freezer_api.storage.elastic.logging')
@patch('freezer_api.storage.elastic.elasticsearch')
def setUp(self, mock_elasticsearch, mock_logging):
mock_elasticsearch.Elasticsearch.return_value = Mock()
self.eng = elastic.ElasticSearchEngine('http://elasticservaddr:1997')
self.eng.action_manager = Mock()
self.eng.job_manager = Mock()
def test_get_action_userid_and_action_id_return_doc(self):
self.eng.action_manager.get.return_value = fake_action_0_doc
res = self.eng.get_action(user_id=fake_client_entry_0['user_id'],
action_id=fake_client_info_0['client_id'])
self.assertEqual(res, fake_action_0_doc)
self.eng.action_manager.get.assert_called_with(
def test_get_job_userid_and_job_id_return_doc(self):
self.eng.job_manager.get.return_value = get_fake_job_0()
res = self.eng.get_job(user_id=fake_client_entry_0['user_id'],
job_id=fake_client_info_0['client_id'])
self.assertEqual(res, fake_job_0)
self.eng.job_manager.get.assert_called_with(
fake_client_entry_0['user_id'],
fake_client_info_0['client_id'])
def test_get_action_userid_and_action_id_return_none(self):
self.eng.action_manager.get.return_value = None
res = self.eng.get_action(user_id=fake_client_entry_0['user_id'],
action_id=fake_client_info_0['client_id'])
def test_get_job_userid_and_job_id_return_none(self):
self.eng.job_manager.get.return_value = None
res = self.eng.get_job(user_id=fake_client_entry_0['user_id'],
job_id=fake_client_info_0['client_id'])
self.assertEqual(res, None)
self.eng.action_manager.get.assert_called_with(
self.eng.job_manager.get.assert_called_with(
fake_client_entry_0['user_id'],
fake_client_info_0['client_id'])
def test_get_action_with_userid_and_search_return_list(self):
self.eng.action_manager.search.return_value = \
[fake_action_0_doc, fake_action_0_doc]
def test_get_job_with_userid_and_search_return_list(self):
self.eng.job_manager.search.return_value = \
[fake_job_0, fake_job_0]
my_search = {'match': [{'some_field': 'some text'},
{'description': 'some other text'}]}
res = self.eng.search_action(user_id=fake_action_0_doc['user_id'],
res = self.eng.search_job(user_id=fake_job_0['user_id'],
offset=6, limit=15,
search=my_search)
self.assertEqual(res, [fake_action_0_doc, fake_action_0_doc])
self.eng.action_manager.search.assert_called_with(
fake_action_0_doc['user_id'],
self.assertEqual(res, [fake_job_0, fake_job_0])
self.eng.job_manager.search.assert_called_with(
fake_job_0['user_id'],
search=my_search,
limit=15, offset=6)
def test_get_action_with_userid_and_search_return_empty_list(self):
self.eng.action_manager.search.return_value = []
def test_get_job_with_userid_and_search_return_empty_list(self):
self.eng.job_manager.search.return_value = []
my_search = {'match': [{'some_field': 'some text'},
{'description': 'some other text'}]}
res = self.eng.search_action(user_id=fake_action_0_doc['user_id'],
res = self.eng.search_job(user_id=fake_job_0['user_id'],
offset=6, limit=15,
search=my_search)
self.assertEqual(res, [])
self.eng.action_manager.search.assert_called_with(
fake_action_0_doc['user_id'],
self.eng.job_manager.search.assert_called_with(
fake_job_0['user_id'],
search=my_search,
limit=15, offset=6)
def test_add_action_raises_BadDataFormat_when_data_is_malformed(self):
doc = fake_action_0.copy()
doc.pop('action_id')
self.assertRaises(BadDataFormat, self.eng.add_action,
user_id=fake_action_0_doc['user_id'],
doc=doc)
@patch('freezer_api.storage.elastic.JobDoc')
def test_add_job_ok(self, mock_jobdoc):
mock_jobdoc.create.return_value = get_fake_job_0()
self.eng.job_manager.insert.return_value = (True, 1)
res = self.eng.add_job(user_id=fake_job_0_user_id,
doc=get_fake_job_0())
self.assertEqual(res, fake_job_0_job_id)
self.eng.job_manager.insert.assert_called_with(fake_job_0,
fake_job_0_job_id)
def test_add_action_ok(self):
self.eng.action_manager.insert.return_value = fake_action_0_action_id
res = self.eng.add_action(user_id=fake_action_0_user_id,
doc=fake_action_0)
self.assertEqual(res, fake_action_0_action_id)
self.eng.action_manager.insert.assert_called_with(
{'action': fake_action_0,
'user_id': fake_action_0_user_id},
fake_action_0_action_id)
def test_add_job_raises_StorageEngineError_when_manager_insert_raises(self):
self.eng.job_manager.get.return_value = None
self.eng.job_manager.insert.side_effect = StorageEngineError('regular test failure')
self.assertRaises(StorageEngineError, self.eng.add_job,
user_id=fake_job_0_user_id,
doc=get_fake_job_0())
def test_add_action_raises_StorageEngineError_when_manager_insert_raises(self):
self.eng.action_manager.get.return_value = None
self.eng.action_manager.insert.side_effect = StorageEngineError('regular test failure')
self.assertRaises(StorageEngineError, self.eng.add_action,
user_id=fake_action_0_user_id,
doc=fake_action_0)
def test_add_action_raises_StorageEngineError_when_manager_insert_fails_without_raise(self):
self.eng.action_manager.get.return_value = None
self.eng.action_manager.insert.return_value = False
self.assertRaises(StorageEngineError, self.eng.add_action,
user_id=fake_action_0_user_id,
doc=fake_action_0)
def test_delete_action_ok(self):
self.eng.action_manager.delete.return_value = fake_action_0['action_id']
res = self.eng.delete_action(user_id=fake_action_0_user_id,
action_id=fake_action_0_action_id)
self.assertEqual(res, fake_action_0_action_id)
def test_delete_job_ok(self):
self.eng.job_manager.delete.return_value = fake_job_0['job_id']
res = self.eng.delete_job(user_id=fake_job_0_user_id,
job_id=fake_job_0_job_id)
self.assertEqual(res, fake_job_0_job_id)
def test_delete_client_raises_StorageEngineError_when_es_delete_raises(self):
self.eng.action_manager.delete.side_effect = StorageEngineError()
self.assertRaises(StorageEngineError, self.eng.delete_action,
user_id=fake_action_0_user_id,
action_id=fake_action_0_action_id)
self.eng.job_manager.delete.side_effect = StorageEngineError()
self.assertRaises(StorageEngineError, self.eng.delete_job,
user_id=fake_job_0_user_id,
job_id=fake_job_0_job_id)
def test_update_client_raises_BadDataFormat_when_update_has_action_id(self):
    # NOTE(review): misnamed — this tests update_action, not a client.
    self.eng.action_manager.get.return_value = fake_action_0_doc
    # NOTE(review): the local name 'patch' shadows mock.patch imported
    # at module level; harmless here but worth renaming.
    # Patching the immutable 'action_id' field must be rejected.
    patch = {'action_id': 'butterfly_caught'}
    self.assertRaises(BadDataFormat, self.eng.update_action,
                      user_id=fake_action_0_user_id,
                      action_id=fake_action_0_action_id,
                      patch=patch)
def test_update_job_raises_DocumentNotFound_when_doc_not_exists(self):
    # Patching a job that cannot be fetched must fail with DocumentNotFound.
    self.eng.job_manager.get.side_effect = DocumentNotFound(
        'regular test failure')
    with self.assertRaises(DocumentNotFound):
        self.eng.update_job(user_id=fake_job_0_user_id,
                            job_id=fake_job_0_job_id,
                            patch_doc={'job_id': 'black_milk'})
def test_update_action_raises_DocumentNotFound_when_doc_not_exists(self):
    # Patching an action that cannot be fetched must fail with
    # DocumentNotFound.
    self.eng.action_manager.get.side_effect = DocumentNotFound(
        'regular test failure')
    with self.assertRaises(DocumentNotFound):
        self.eng.update_action(user_id=fake_action_0_user_id,
                               action_id=fake_action_0_action_id,
                               patch={'some_field': 'black_milk'})
def test_update_job_raises_DocumentNotFound_when_update_raises_DocumentNotFound(self):
    # Even when the initial fetch succeeds, a failing update call
    # must propagate DocumentNotFound.
    self.eng.job_manager.get.return_value = get_fake_job_0()
    self.eng.job_manager.update.side_effect = DocumentNotFound(
        'regular test failure')
    with self.assertRaises(DocumentNotFound):
        self.eng.update_job(user_id=fake_job_0_user_id,
                            job_id=fake_job_0_job_id,
                            patch_doc={'job_id': 'black_milk'})
def test_update_action_raises_DocumentNotFound_when_update_raises_DocumentNotFound(self):
    # A fetchable action whose update call fails must still raise
    # DocumentNotFound.
    self.eng.action_manager.get.return_value = fake_action_0_doc
    self.eng.action_manager.update.side_effect = DocumentNotFound(
        'regular test failure')
    with self.assertRaises(DocumentNotFound):
        self.eng.update_action(user_id=fake_action_0_user_id,
                               action_id=fake_action_0_action_id,
                               patch={'some_field': 'black_milk'})
def test_update_job_returns_new_doc_version(self):
    # update_job reports the version number produced by the manager.
    expected_version = 11
    self.eng.job_manager.get.return_value = get_fake_job_0()
    self.eng.job_manager.update.return_value = expected_version
    version = self.eng.update_job(user_id=fake_job_0_user_id,
                                  job_id=fake_job_0_job_id,
                                  patch_doc={'job_id': 'group_four'})
    self.assertEqual(version, expected_version)
def test_update_action_returns_new_doc_version(self):
    # update_action reports the version number produced by the manager.
    expected_version = 11
    self.eng.action_manager.get.return_value = fake_action_0_doc
    self.eng.action_manager.update.return_value = expected_version
    version = self.eng.update_action(user_id=fake_action_0_user_id,
                                     action_id=fake_action_0_action_id,
                                     patch={'some_field': 'group_four'})
    self.assertEqual(version, expected_version)
def test_replace_job_raises_AccessForbidden_when_job_manager_raises_AccessForbidden(self):
    # An ownership check failing in the manager must abort the replace,
    # regardless of what insert would return.
    self.eng.job_manager.get.side_effect = AccessForbidden(
        'regular test failure')
    self.eng.job_manager.insert.return_value = (True, 3)
    with self.assertRaises(AccessForbidden):
        self.eng.replace_job(user_id=fake_job_0_user_id,
                             job_id=fake_job_0_job_id,
                             doc=get_fake_job_0())
def test_replace_job_returns_ok_when_doc_is_new(self):
    # Replacing a missing document behaves like a create: version 1.
    self.eng.job_manager.get.side_effect = DocumentNotFound(
        'regular test failure')
    self.eng.job_manager.insert.return_value = (True, 1)
    version = self.eng.replace_job(user_id=fake_job_0_user_id,
                                   job_id=fake_job_0_job_id,
                                   doc=get_fake_job_0())
    self.assertEqual(version, 1)
def test_replace_job_returns_version_1_when_doc_is_overwritten(self):
    # NOTE(review): misnamed — the stubbed manager reports version 3 and
    # that is what is asserted; the test really checks that the new
    # version is passed through on overwrite. Consider renaming.
    self.eng.job_manager.get.return_value = get_fake_job_0()
    # insert returns (created, version): (False, 3) means "overwritten".
    self.eng.job_manager.insert.return_value = (False, 3)
    res = self.eng.replace_job(user_id=fake_job_0_user_id,
                               job_id=fake_job_0_job_id,
                               doc=get_fake_job_0())
    self.assertEqual(res, 3)

View File

@ -14,7 +14,6 @@ class TestExceptions(unittest.TestCase):
self.ex.message = 'test exception'
self.mock_req = Mock()
self.mock_req.context = {}
self.exceptions = [e() for e in exceptions.exception_handlers_catalog]
def test_FreezerAPIException(self):
e = exceptions.FreezerAPIException(message='testing')
@ -31,7 +30,6 @@ class TestExceptions(unittest.TestCase):
self.assertRaises(falcon.HTTPConflict,
e.handle, self.ex, self.mock_req, self.mock_req, None)
def test_StorageEngineError(self):
e = exceptions.StorageEngineError(message='testing')
self.assertRaises(falcon.HTTPInternalServerError,
@ -41,3 +39,8 @@ class TestExceptions(unittest.TestCase):
e = exceptions.DocumentNotFound(message='testing')
self.assertRaises(falcon.HTTPNotFound,
e.handle, self.ex, self.mock_req, self.mock_req, None)
def test_AccessForbidden(self):
    # AccessForbidden must be translated into a falcon HTTPForbidden.
    exc = exceptions.AccessForbidden(message='testing')
    with self.assertRaises(falcon.HTTPForbidden):
        exc.handle(self.ex, self.mock_req, self.mock_req, None)

127
tests/test_jobs.py Normal file
View File

@ -0,0 +1,127 @@
import unittest
from mock import Mock, patch
import random
import falcon
from common import *
from freezer_api.common.exceptions import *
from freezer_api.api.v1 import jobs as v1_jobs
class TestJobsCollectionResource(unittest.TestCase):
    """Tests for the /v1/jobs collection endpoint (list and create)."""

    def setUp(self):
        self.mock_db = Mock()
        self.mock_req = Mock()
        self.mock_req.get_header.return_value = fake_job_0_user_id
        self.mock_req.context = {}
        self.mock_req.status = falcon.HTTP_200
        self.resource = v1_jobs.JobsCollectionResource(self.mock_db)

    def test_on_get_return_empty_list(self):
        # An empty search result must still yield a well-formed payload.
        self.mock_db.search_job.return_value = []
        self.resource.on_get(self.mock_req, self.mock_req)
        self.assertEqual(self.mock_req.context['result'], {'jobs': []})
        self.assertEqual(self.mock_req.status, falcon.HTTP_200)

    def test_on_get_return_correct_list(self):
        # Whatever the db search returns is wrapped under the 'jobs' key.
        self.mock_db.search_job.return_value = [get_fake_job_0(),
                                                get_fake_job_1()]
        self.resource.on_get(self.mock_req, self.mock_req)
        self.assertEqual(self.mock_req.context['result'],
                         {'jobs': [get_fake_job_0(), get_fake_job_1()]})
        self.assertEqual(self.mock_req.status, falcon.HTTP_200)

    def test_on_post_raises_when_missing_body(self):
        # No 'doc' in the request context: creation must be rejected.
        self.mock_db.add_job.return_value = fake_job_0_job_id
        self.assertRaises(BadDataFormat, self.resource.on_post,
                          self.mock_req, self.mock_req)

    def test_on_post_inserts_correct_data(self):
        # A successful create answers 201 with the id assigned by the db.
        # (Removed stale commented-out assertions about job_id reassignment.)
        self.mock_req.context['doc'] = get_fake_job_0()
        self.mock_db.add_job.return_value = 'pjiofrdslaikfunr'
        self.resource.on_post(self.mock_req, self.mock_req)
        self.assertEqual(self.mock_req.status, falcon.HTTP_201)
        self.assertEqual(self.mock_req.context['result'],
                         {'job_id': 'pjiofrdslaikfunr'})
class TestJobsResource(unittest.TestCase):
    """Tests for the /v1/jobs/{job_id} single-document endpoint.

    Fixes: deprecated ``assertEquals`` alias replaced with ``assertEqual``;
    unused local ``expected_patch`` removed from the patch test.
    """

    def setUp(self):
        self.mock_db = Mock()
        self.mock_req = Mock()
        self.mock_req.get_header.return_value = fake_job_0_user_id
        self.mock_req.context = {}
        self.mock_req.status = falcon.HTTP_200
        self.resource = v1_jobs.JobsResource(self.mock_db)

    def test_create_resource(self):
        self.assertIsInstance(self.resource, v1_jobs.JobsResource)

    def test_on_get_return_no_result_and_404_when_not_found(self):
        # Missing job: no result in the context and a 404 status.
        self.mock_db.get_job.return_value = None
        self.resource.on_get(self.mock_req, self.mock_req, fake_job_0_job_id)
        self.assertNotIn('result', self.mock_req.context)
        self.assertEqual(self.mock_req.status, falcon.HTTP_404)

    def test_on_get_return_correct_data(self):
        self.mock_db.get_job.return_value = get_fake_job_0()
        self.resource.on_get(self.mock_req, self.mock_req, fake_job_0_job_id)
        self.assertEqual(self.mock_req.context['result'], get_fake_job_0())
        self.assertEqual(self.mock_req.status, falcon.HTTP_200)

    def test_on_delete_removes_proper_data(self):
        # Delete answers 204 and echoes the removed job id.
        self.resource.on_delete(self.mock_req, self.mock_req,
                                fake_job_0_job_id)
        self.assertEqual(self.mock_req.status, falcon.HTTP_204)
        self.assertEqual(self.mock_req.context['result'],
                         {'job_id': fake_job_0_job_id})

    def test_on_patch_ok_with_some_fields(self):
        # The patch document is forwarded verbatim to the db layer and
        # the new version is reported back.
        new_version = random.randint(0, 99)
        self.mock_db.update_job.return_value = new_version
        patch_doc = {'some_field': 'some_value',
                     'because': 'size_matters'}
        self.mock_req.context['doc'] = patch_doc
        self.resource.on_patch(self.mock_req, self.mock_req,
                               fake_job_0_job_id)
        self.mock_db.update_job.assert_called_with(
            user_id=fake_job_0_user_id,
            job_id=fake_job_0_job_id,
            patch_doc=patch_doc)
        self.assertEqual(self.mock_req.status, falcon.HTTP_200)
        self.assertEqual(self.mock_req.context['result'],
                         {'job_id': fake_job_0_job_id,
                          'version': new_version})

    def test_on_post_ok(self):
        # POST on an explicit job_id replaces the document: 201 + version.
        new_version = random.randint(0, 99)
        self.mock_db.replace_job.return_value = new_version
        self.mock_req.context['doc'] = get_fake_job_0()
        self.resource.on_post(self.mock_req, self.mock_req,
                              fake_job_0_job_id)
        self.assertEqual(self.mock_req.status, falcon.HTTP_201)
        self.assertEqual(self.mock_req.context['result'],
                         {'job_id': fake_job_0_job_id,
                          'version': new_version})

    def test_on_post_raises_when_db_replace_job_raises(self):
        # db-layer AccessForbidden must propagate out of the handler.
        self.mock_db.replace_job.side_effect = AccessForbidden(
            'regular test failure')
        self.mock_req.context['doc'] = get_fake_job_0()
        self.assertRaises(AccessForbidden, self.resource.on_post,
                          self.mock_req,
                          self.mock_req,
                          fake_job_0_job_id)

View File

@ -20,10 +20,13 @@ Hudson (tjh@cryptsoft.com).
========================================================================
"""
import unittest
from mock import Mock, patch
from freezer_api.common import utils
from freezer_api.common.exceptions import *
from common import *
DATA_backup_metadata = {
"container": "freezer_container",
@ -109,3 +112,75 @@ class TestBackupMetadataDoc(unittest.TestCase):
self.assertEqual(self.backup_metadata.backup_id, DATA_backup_id)
self.backup_metadata.data['container'] = 'different'
self.assertNotEqual(self.backup_metadata.backup_id, DATA_backup_id)
class TestJobDoc(unittest.TestCase):
def test_validate_ok_when_data_ok(self):
    # validate returns None (and raises nothing) for a well-formed doc.
    self.assertIsNone(utils.JobDoc.validate(get_fake_job_0()))
def test_validate_raises_BadDataFormat_when_doc_has_no_jobid(self):
    # A document without its 'job_id' must fail validation.
    doc = get_fake_job_0()
    del doc['job_id']
    self.assertRaises(BadDataFormat, utils.JobDoc.validate, doc)
def test_validate_raises_BadDataFormat_when_doc_has_no_userid(self):
    """A job document missing 'user_id' must fail validation.

    Bug fix: this test previously popped 'job_id', duplicating the
    no-jobid test above and leaving the user_id requirement untested.
    """
    job_doc = get_fake_job_0()
    job_doc.pop('user_id')
    self.assertRaises(BadDataFormat, utils.JobDoc.validate, job_doc)
def test_validate_raises_BadDataFormat_when_doc_has_invalid_state(self):
    # An unknown schedule status must be rejected by validate.
    bad_doc = get_fake_job_0()
    bad_doc['job_schedule']['status'] = 'not_valid'
    self.assertRaises(BadDataFormat, utils.JobDoc.validate, bad_doc)
def test_validate_patch_raises_when_doc_has_invalid_state(self):
    # validate_patch applies the same status whitelist as validate.
    bad_patch = get_fake_job_0()
    bad_patch['job_schedule']['status'] = 'value_no_allowed'
    self.assertRaises(BadDataFormat, utils.JobDoc.validate_patch, bad_patch)
def test_createpatch_pops_jobid_and_userid(self):
    # create_patch must strip the identity fields from the patch body.
    patched = utils.JobDoc.create_patch(get_fake_job_0())
    self.assertNotIn('job_id', patched)
    self.assertNotIn('user_id', patched)
def test_createpatch_raises_BadDataFormat_when_patch_has_invalid_state(self):
    # create_patch validates the resulting patch: bad status rejected.
    bad_patch = get_fake_job_0()
    bad_patch['job_schedule']['status'] = 'value_no_allowed'
    self.assertRaises(BadDataFormat, utils.JobDoc.create_patch, bad_patch)
def test_createpatch_raises_BadDataFormat_when_patch_has_invalid_event(self):
    # create_patch validates the resulting patch: bad event rejected.
    bad_patch = get_fake_job_0()
    bad_patch['job_schedule']['event'] = 'value_no_allowed'
    self.assertRaises(BadDataFormat, utils.JobDoc.create_patch, bad_patch)
@patch('freezer_api.common.utils.uuid')
@patch('freezer_api.common.utils.time')
def test_create_inserts_correct_uuid_timecreated_status(self, mock_time, mock_uuid):
    # Freeze time and uuid so the generated fields are predictable.
    mock_time.time.return_value = 1433611337
    mock_uuid.uuid4.return_value = mock_uuid
    mock_uuid.hex = 'hotforteacher'
    job_doc = get_fake_job_0()
    job_doc['job_schedule']['status'] = 'stop'
    created = utils.JobDoc.create(job_doc, 'dude')
    schedule = created['job_schedule']
    self.assertEqual(schedule['time_created'], 1433611337)
    # time_started / time_ended are initialised to the -1 sentinel.
    self.assertEqual(schedule['time_started'], -1)
    self.assertEqual(schedule['time_ended'], -1)
    # The caller-provided status survives; identity fields are filled in.
    self.assertEqual(schedule['status'], 'stop')
    self.assertEqual(created['user_id'], 'dude')
    self.assertEqual(created['job_id'], 'hotforteacher')
@patch('freezer_api.common.utils.uuid')
@patch('freezer_api.common.utils.time')
def test_create_raises_BadDataFormat_when_isvalid_fails(self, mock_time, mock_uuid):
    # Even with deterministic time/uuid, an invalid event must make
    # JobDoc.create fail validation.
    mock_time.time.return_value = 1433611337
    mock_uuid.uuid4.return_value = mock_uuid
    mock_uuid.hex = 'hotforteacher'
    bad_doc = get_fake_job_0()
    bad_doc['job_schedule']['event'] = 'not_valid'
    self.assertRaises(BadDataFormat, utils.JobDoc.create, bad_doc, 'dude')