author     Jeremy Freudberg <jeremyfreudberg@gmail.com>   2019-01-09 19:24:34 -0500
committer  Telles Nobrega <tellesnobrega@gmail.com>       2019-01-10 10:01:04 -0300
commit     69d74c1a66d1eab260caaa1017ecdbd446cf5263 (patch)
tree       09a8d8f93f43a42d88d563fefef0f8a681c3a571
parent     71efb19ebc59743f0cb82923a11083ef7c9bef5f (diff)
Some polish for APIv2
- update_keypair now only in v2 schema
- tenant_id->project_id in cluster provision steps
- tenant_id->project_id in referenced job binaries in job templates
- proper check for job template existence, to fail early (as intended)
- hadoop_version->plugin_version for query string filter
- unbreak some data source stuff (related to tenant_id->project_id)
- fix omission of show_progress from cluster GET query string whitelist
- job_id->job_template_id for v2 jobs
- add missing release note info for strict query string checking
- release notes for all the rest

Change-Id: Idea117c406b5ab9b8d85ccf8adb175053416d6ff
Story: 2004505
Task: 28822
Notes (review):
    Code-Review+2: Telles Mota Vidal Nóbrega <tenobreg@redhat.com>
    Code-Review+2: Luigi Toscano <ltoscano@redhat.com>
    Workflow+1: Luigi Toscano <ltoscano@redhat.com>
    Verified+2: Zuul
    Submitted-by: Zuul
    Submitted-at: Thu, 10 Jan 2019 18:24:33 +0000
    Reviewed-on: https://review.openstack.org/629747
    Project: openstack/sahara
    Branch: refs/heads/master
-rw-r--r--  releasenotes/notes/some-polish-api-v2-2d2e390a74b088f9.yaml              12
-rw-r--r--  releasenotes/notes/strict-validation-query-string-a6cadbf2f9c57d06.yaml   5
-rw-r--r--  sahara/api/v2/cluster_templates.py                                         8
-rw-r--r--  sahara/api/v2/clusters.py                                                 23
-rw-r--r--  sahara/api/v2/data_sources.py                                              8
-rw-r--r--  sahara/api/v2/job_templates.py                                            22
-rw-r--r--  sahara/api/v2/job_types.py                                                 9
-rw-r--r--  sahara/api/v2/jobs.py                                                      9
-rw-r--r--  sahara/api/v2/node_group_templates.py                                      8
-rw-r--r--  sahara/service/validations/clusters_schema.py                              8
10 files changed, 90 insertions(+), 22 deletions(-)
diff --git a/releasenotes/notes/some-polish-api-v2-2d2e390a74b088f9.yaml b/releasenotes/notes/some-polish-api-v2-2d2e390a74b088f9.yaml
new file mode 100644
index 0000000..b6f8a49
--- /dev/null
+++ b/releasenotes/notes/some-polish-api-v2-2d2e390a74b088f9.yaml
@@ -0,0 +1,12 @@
+---
+other:
+  - Some polishings to APIv2 have been made in an effort to bring it from
+    experimental (and therefore, evolving and unpredictable) to stable. More
+    instances of `tenant_id` have been changed to `project_id`, in the
+    cluster and job template APIs. `job_id` was changed to `job_template_id`
+    in the job API. The newly-minted query string validation feature has been
+    fixed to allow `show_progress` as a parameter on cluster GET; on a similar
+    note some APIv2 endpoints which previously could be filtered by
+    `hadoop_version` are now filtered by `plugin_version` instead. Also, the
+    schema for cluster PATCH in APIv1.1 now no longer includes the key
+    `update_keypair`; its prior inclusion was a mistake.
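For readers tracking the renames described in this note, the change is purely at the level of key names in request and response bodies. A minimal illustration in Python (the payload below is invented for this example, not real Sahara output):

# Hypothetical v1.1-style job payload; only the key names matter here.
v11_job = {
    'id': '1f2a-example',
    'tenant_id': 'abc123',
    'job_id': 'def456',
}

# The same object with the key names APIv2 now reports.
v2_job = dict(v11_job)
v2_job['project_id'] = v2_job.pop('tenant_id')
v2_job['job_template_id'] = v2_job.pop('job_id')
print(v2_job)
# {'id': '1f2a-example', 'project_id': 'abc123', 'job_template_id': 'def456'}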
diff --git a/releasenotes/notes/strict-validation-query-string-a6cadbf2f9c57d06.yaml b/releasenotes/notes/strict-validation-query-string-a6cadbf2f9c57d06.yaml
new file mode 100644
index 0000000..64eac63
--- /dev/null
+++ b/releasenotes/notes/strict-validation-query-string-a6cadbf2f9c57d06.yaml
@@ -0,0 +1,5 @@
+---
+other:
+  - In APIv2 there is now strict checking of parameters in the query string.
+    This means that unexpected values in the query string will give a 400
+    error (as opposed to previously being ignored, or causing a 500 error).
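The check itself amounts to a whitelist comparison against the parameters each endpoint declares. A minimal sketch of the idea (the helper name below is hypothetical; the real enforcement happens in Sahara's validate_request_params decorator used throughout the handlers in this patch):

def check_query_params(args, allowed):
    # Reject any query-string key the endpoint does not declare.
    unexpected = set(args) - set(allowed)
    if unexpected:
        # In the API this surfaces as an HTTP 400 instead of being
        # silently ignored or causing a 500.
        raise ValueError("unexpected query parameters: %s" % sorted(unexpected))

check_query_params({'plugin_name': 'vanilla'},
                   ['plugin_name', 'plugin_version', 'name'])   # accepted
# check_query_params({'bogus': 'x'}, ['plugin_name'])           # would raise -> 400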
diff --git a/sahara/api/v2/cluster_templates.py b/sahara/api/v2/cluster_templates.py
index bfcc895..b7ca523 100644
--- a/sahara/api/v2/cluster_templates.py
+++ b/sahara/api/v2/cluster_templates.py
@@ -29,9 +29,13 @@ rest = u.RestV2('cluster-templates', __name__)
 @v.check_exists(api.get_cluster_template, 'marker')
 @v.validate(None, v.validate_pagination_limit,
             v.validate_sorting_cluster_templates)
-@v.validate_request_params(['plugin_name', 'hadoop_version', 'name'])
+@v.validate_request_params(['plugin_name', 'plugin_version', 'name'])
 def cluster_templates_list():
-    result = api.get_cluster_templates(**u.get_request_args().to_dict())
+    request_args = u.get_request_args().to_dict()
+    if 'plugin_version' in request_args:
+        request_args['hadoop_version'] = request_args['plugin_version']
+        del request_args['plugin_version']
+    result = api.get_cluster_templates(**request_args)
     for ct in result:
         u._replace_hadoop_version_plugin_version(ct)
         u._replace_tenant_id_project_id(ct)
diff --git a/sahara/api/v2/clusters.py b/sahara/api/v2/clusters.py
index 007e441..649dd63 100644
--- a/sahara/api/v2/clusters.py
+++ b/sahara/api/v2/clusters.py
@@ -27,16 +27,28 @@ import sahara.utils.api as u
 rest = u.RestV2('clusters', __name__)
 
 
+def _replace_tenant_id_project_id_provision_steps(c):
+    if 'provision_progress' in c:
+        for step in c['provision_progress']:
+            dict.update(step, {'project_id': step['tenant_id']})
+            dict.pop(step, 'tenant_id')
+
+
 @rest.get('/clusters')
 @acl.enforce("data-processing:clusters:get_all")
 @v.check_exists(api.get_cluster, 'marker')
 @v.validate(None, v.validate_pagination_limit)
-@v.validate_request_params(['plugin_name', 'hadoop_version', 'name'])
+@v.validate_request_params(['plugin_name', 'plugin_version', 'name'])
 def clusters_list():
-    result = api.get_clusters(**u.get_request_args().to_dict())
+    request_args = u.get_request_args().to_dict()
+    if 'plugin_version' in request_args:
+        request_args['hadoop_version'] = request_args['plugin_version']
+        del request_args['plugin_version']
+    result = api.get_clusters(**request_args)
     for c in result:
         u._replace_hadoop_version_plugin_version(c)
         u._replace_tenant_id_project_id(c)
+        _replace_tenant_id_project_id_provision_steps(c)
     return u.render(res=result, name='clusters')
 
 
@@ -73,13 +85,14 @@ def clusters_scale(cluster_id, data):
         api.scale_cluster, cluster_id, data)
     u._replace_hadoop_version_plugin_version(result['cluster'])
     u._replace_tenant_id_project_id(result['cluster'])
+    _replace_tenant_id_project_id_provision_steps(result['cluster'])
     return u.render(result)
 
 
 @rest.get('/clusters/<cluster_id>')
 @acl.enforce("data-processing:clusters:get")
 @v.check_exists(api.get_cluster, 'cluster_id')
-@v.validate_request_params([])
+@v.validate_request_params(['show_progress'])
 def clusters_get(cluster_id):
     data = u.get_request_args()
     show_events = six.text_type(
@@ -88,19 +101,21 @@ def clusters_get(cluster_id):
         api.get_cluster, cluster_id, show_events)
     u._replace_hadoop_version_plugin_version(result['cluster'])
     u._replace_tenant_id_project_id(result['cluster'])
+    _replace_tenant_id_project_id_provision_steps(result['cluster'])
     return u.render(result)
 
 
 @rest.patch('/clusters/<cluster_id>')
 @acl.enforce("data-processing:clusters:modify")
 @v.check_exists(api.get_cluster, 'cluster_id')
-@v.validate(v_c_schema.CLUSTER_UPDATE_SCHEMA, v_c.check_cluster_update)
+@v.validate(v_c_schema.CLUSTER_UPDATE_SCHEMA_V2, v_c.check_cluster_update)
 @v.validate_request_params([])
 def clusters_update(cluster_id, data):
     result = u.to_wrapped_dict_no_render(
         api.update_cluster, cluster_id, data)
     u._replace_hadoop_version_plugin_version(result['cluster'])
     u._replace_tenant_id_project_id(result['cluster'])
+    _replace_tenant_id_project_id_provision_steps(result['cluster'])
     return u.render(result)
 
 
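The new helper added in this file only rewrites the nested provision steps; the top-level tenant_id is still handled by u._replace_tenant_id_project_id. A self-contained usage sketch (the cluster payload below is invented for illustration):

def _replace_tenant_id_project_id_provision_steps(c):
    # Same logic as the patch above: rename tenant_id in each provision step.
    if 'provision_progress' in c:
        for step in c['provision_progress']:
            dict.update(step, {'project_id': step['tenant_id']})
            dict.pop(step, 'tenant_id')

cluster = {'provision_progress': [
    {'step_name': 'Wait for instances to become active', 'tenant_id': 'abc123'}]}
_replace_tenant_id_project_id_provision_steps(cluster)
print(cluster['provision_progress'][0])
# {'step_name': 'Wait for instances to become active', 'project_id': 'abc123'}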
diff --git a/sahara/api/v2/data_sources.py b/sahara/api/v2/data_sources.py
index 476254a..4d56402 100644
--- a/sahara/api/v2/data_sources.py
+++ b/sahara/api/v2/data_sources.py
@@ -52,9 +52,9 @@ def data_source_register(data):
 @v.check_exists(api.get_data_source, 'data_source_id')
 @v.validate_request_params([])
 def data_source_get(data_source_id):
-    result = u.to_wrapped_dict(api.get_data_source, data_source_id)
+    result = api.get_data_source(data_source_id).to_wrapped_dict()
     u._replace_tenant_id_project_id(result['data_source'])
-    return result
+    return u.render(result)
 
 
 @rest.delete('/data-sources/<data_source_id>')
@@ -72,6 +72,6 @@ def data_source_delete(data_source_id):
 @v.validate(v_d_s_schema.DATA_SOURCE_UPDATE_SCHEMA)
 @v.validate_request_params([])
 def data_source_update(data_source_id, data):
-    result = u.to_wrapped_dict(api.data_source_update, data_source_id, data)
+    result = api.data_source_update(data_source_id, data).to_wrapped_dict()
     u._replace_tenant_id_project_id(result['data_source'])
-    return result
+    return u.render(result)
diff --git a/sahara/api/v2/job_templates.py b/sahara/api/v2/job_templates.py
index 6ac5ebc..f9d5b1a 100644
--- a/sahara/api/v2/job_templates.py
+++ b/sahara/api/v2/job_templates.py
@@ -24,9 +24,15 @@ import sahara.utils.api as u
 rest = u.RestV2('job-templates', __name__)
 
 
+def _replace_tenant_id_project_id_job_binary(jb_list):
+    for jb_obj in jb_list:
+        dict.update(jb_obj, {'project_id': jb_obj['tenant_id']})
+        dict.pop(jb_obj, 'tenant_id')
+
+
 @rest.get('/job-templates')
 @acl.enforce("data-processing:job-templates:get_all")
-@v.check_exists(api.get_job_templates, 'marker')
+@v.check_exists(api.get_job_template, 'marker')
 @v.validate(None, v.validate_pagination_limit,
             v.validate_sorting_jobs)
 @v.validate_request_params(['type', 'name'])
@@ -34,6 +40,8 @@ def job_templates_list():
     result = api.get_job_templates(**u.get_request_args().to_dict())
     for jt in result:
         u._replace_tenant_id_project_id(jt)
+        _replace_tenant_id_project_id_job_binary(jt['mains'])
+        _replace_tenant_id_project_id_job_binary(jt['libs'])
     return u.render(res=result, name='job_templates')
 
 
@@ -44,35 +52,41 @@ def job_templates_list():
 def job_templates_create(data):
     result = {'job_template': api.create_job_template(data).to_dict()}
     u._replace_tenant_id_project_id(result['job_template'])
+    _replace_tenant_id_project_id_job_binary(result['job_template']['mains'])
+    _replace_tenant_id_project_id_job_binary(result['job_template']['libs'])
     return u.render(result)
 
 
 @rest.get('/job-templates/<job_templates_id>')
 @acl.enforce("data-processing:job-templates:get")
-@v.check_exists(api.get_job_templates, id='job_templates_id')
+@v.check_exists(api.get_job_template, id='job_templates_id')
 @v.validate_request_params([])
 def job_templates_get(job_templates_id):
     result = {'job_template': api.get_job_template(
         job_templates_id).to_dict()}
     u._replace_tenant_id_project_id(result['job_template'])
+    _replace_tenant_id_project_id_job_binary(result['job_template']['mains'])
+    _replace_tenant_id_project_id_job_binary(result['job_template']['libs'])
     return u.render(result)
 
 
 @rest.patch('/job-templates/<job_templates_id>')
 @acl.enforce("data-processing:jobs:modify")
-@v.check_exists(api.get_job_templates, id='job_templates_id')
+@v.check_exists(api.get_job_template, id='job_templates_id')
 @v.validate(v_j_schema.JOB_UPDATE_SCHEMA)
 @v.validate_request_params([])
 def job_templates_update(job_templates_id, data):
     result = {'job_template': api.update_job_template(
         job_templates_id, data).to_dict()}
     u._replace_tenant_id_project_id(result['job_template'])
+    _replace_tenant_id_project_id_job_binary(result['job_template']['mains'])
+    _replace_tenant_id_project_id_job_binary(result['job_template']['libs'])
     return u.render(result)
 
 
 @rest.delete('/job-templates/<job_templates_id>')
 @acl.enforce("data-processing:jobs:delete")
-@v.check_exists(api.get_job_templates, id='job_templates_id')
+@v.check_exists(api.get_job_template, id='job_templates_id')
 @v.validate_request_params([])
 def job_templates_delete(job_templates_id):
     api.delete_job_template(job_templates_id)
diff --git a/sahara/api/v2/job_types.py b/sahara/api/v2/job_types.py
index 5c81416..c0cf914 100644
--- a/sahara/api/v2/job_types.py
+++ b/sahara/api/v2/job_types.py
@@ -24,10 +24,13 @@ rest = u.RestV2('job-types', __name__)
 
 @rest.get('/job-types')
 @acl.enforce("data-processing:job-types:get_all")
-@v.validate_request_params(['type', 'plugin_name', 'hadoop_version'])
+@v.validate_request_params(['type', 'plugin_name', 'plugin_version'])
 def job_types_get():
     # We want to use flat=False with to_dict() so that
     # the value of each arg is given as a list. This supports
     # filters of the form ?type=Pig&type=Java, etc.
-    return u.render(job_types=api.get_job_types(
-        **u.get_request_args().to_dict(flat=False)))
+    request_args = u.get_request_args().to_dict(flat=False)
+    if 'plugin_version' in request_args:
+        request_args['hadoop_version'] = request_args['plugin_version']
+        del request_args['plugin_version']
+    return u.render(job_types=api.get_job_types(**request_args))
diff --git a/sahara/api/v2/jobs.py b/sahara/api/v2/jobs.py
index 7db59c1..1ba1ad9 100644
--- a/sahara/api/v2/jobs.py
+++ b/sahara/api/v2/jobs.py
@@ -26,6 +26,11 @@ import sahara.utils.api as u
 rest = u.RestV2('jobs', __name__)
 
 
+def _replace_job_id_job_template_id(job_obj):
+    dict.update(job_obj, {'job_template_id': job_obj['job_id']})
+    dict.pop(job_obj, 'job_id')
+
+
 @rest.get('/jobs')
 @acl.enforce("data-processing:job-executions:get_all")
 @v.check_exists(api.get_job_execution, 'marker')
@@ -40,6 +45,7 @@ def jobs_list():
     for je in result:
         je.pop('oozie_job_id', force=True)
         u._replace_tenant_id_project_id(je)
+        _replace_job_id_job_template_id(je)
     return u.render(res=result, name='jobs')
 
 
@@ -53,6 +59,7 @@ def jobs_execute(data):
                 {'engine_job_id': result['job']['oozie_job_id']})
     dict.pop(result['job'], 'oozie_job_id')
     u._replace_tenant_id_project_id(result['job'])
+    _replace_job_id_job_template_id(result['job'])
     return u.render(result)
 
 
@@ -67,6 +74,7 @@ def jobs_get(job_id):
     result = {'job': api.get_job_execution(job_id, refresh_status)}
     result['job'].pop('oozie_job_id', force=True)
     u._replace_tenant_id_project_id(result['job'])
+    _replace_job_id_job_template_id(result['job'])
     return u.render(result)
 
 
@@ -80,6 +88,7 @@ def jobs_update(job_id, data):
     result = {'job': api.update_job_execution(job_id, data)}
     result['job'].pop('oozie_job_id', force=True)
     u._replace_tenant_id_project_id(result['job'])
+    _replace_job_id_job_template_id(result['job'])
     return u.render(result)
 
 
diff --git a/sahara/api/v2/node_group_templates.py b/sahara/api/v2/node_group_templates.py
index 00cc435..42498ff 100644
--- a/sahara/api/v2/node_group_templates.py
+++ b/sahara/api/v2/node_group_templates.py
@@ -30,9 +30,13 @@ rest = u.RestV2('node-group-templates', __name__)
 @v.check_exists(api.get_node_group_template, 'marker')
 @v.validate(None, v.validate_pagination_limit,
             v.validate_sorting_node_group_templates)
-@v.validate_request_params(['plugin_name', 'hadoop_version', 'name'])
+@v.validate_request_params(['plugin_name', 'plugin_version', 'name'])
 def node_group_templates_list():
-    result = api.get_node_group_templates(**u.get_request_args().to_dict())
+    request_args = u.get_request_args().to_dict()
+    if 'plugin_version' in request_args:
+        request_args['hadoop_version'] = request_args['plugin_version']
+        del request_args['plugin_version']
+    result = api.get_node_group_templates(**request_args)
     for ngt in result:
         u._replace_hadoop_version_plugin_version(ngt)
         u._replace_tenant_id_project_id(ngt)
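The plugin_version -> hadoop_version translation of query arguments is now repeated in the clusters, cluster-templates, node-group-templates and job-types list handlers above. A possible follow-up (not part of this patch) would be a shared helper along these lines:

def _translate_plugin_version(request_args):
    # Accept the v2 name in the query string while keeping the internal
    # hadoop_version name that the lower layers still expect.
    if 'plugin_version' in request_args:
        request_args['hadoop_version'] = request_args.pop('plugin_version')
    return request_args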
diff --git a/sahara/service/validations/clusters_schema.py b/sahara/service/validations/clusters_schema.py
index 77e0419..904bb31 100644
--- a/sahara/service/validations/clusters_schema.py
+++ b/sahara/service/validations/clusters_schema.py
@@ -71,9 +71,6 @@ CLUSTER_UPDATE_SCHEMA = {
71 "description": { 71 "description": {
72 "type": ["string", "null"] 72 "type": ["string", "null"]
73 }, 73 },
74 "update_keypair": {
75 "type": ["boolean", "null"]
76 },
77 "name": { 74 "name": {
78 "type": "string", 75 "type": "string",
79 "minLength": 1, 76 "minLength": 1,
@@ -99,6 +96,11 @@ CLUSTER_UPDATE_SCHEMA = {
99 "additionalProperties": False, 96 "additionalProperties": False,
100 "required": [] 97 "required": []
101} 98}
99CLUSTER_UPDATE_SCHEMA_V2 = copy.deepcopy(CLUSTER_UPDATE_SCHEMA)
100CLUSTER_UPDATE_SCHEMA_V2['properties'].update({
101 "update_keypair": {
102 "type": ["boolean", "null"]
103 }})
102 104
103CLUSTER_SCALING_SCHEMA = { 105CLUSTER_SCALING_SCHEMA = {
104 "type": "object", 106 "type": "object",