Some polish for APIv2

- update_keypair now only in v2 schema
- tenant_id->project_id in cluster provision steps
- tenant_id->project_id in referenced job binaries in job templates
- proper check for job template existence, to fail early (as intended)
- hadoop_version->plugin_version for query string filter
- unbreak data source GET and update responses (related to tenant_id->project_id)
- fix omission of show_progress from cluster GET query string whitelist
- job_id->job_template_id for v2 jobs
- add missing release note info for strict query string checking
- release notes for all the rest

Change-Id: Idea117c406b5ab9b8d85ccf8adb175053416d6ff
Story: 2004505
Task: 28822
Author: Jeremy Freudberg, 2019-01-09 19:24:34 -05:00 (committed by Telles Nobrega)
Parent: 71efb19ebc
Commit: 69d74c1a66
10 changed files with 90 additions and 22 deletions

@@ -0,0 +1,12 @@
+---
+other:
+  - Some polishings to APIv2 have been made in an effort to bring it from
+    experimental (and therefore, evolving and unpredictable) to stable. More
+    instances of `tenant_id` have been changed to `project_id`, in the
+    cluster and job template APIs. `job_id` was changed to `job_template_id`
+    in the job API. The newly-minted query string validation feature has been
+    fixed to allow `show_progress` as a parameter on cluster GET; on a similar
+    note some APIv2 endpoints which previously could be filtered by
+    `hadoop_version` are now filtered by `plugin_version` instead. Also, the
+    schema for cluster PATCH in APIv1.1 now no longer includes the key
+    `update_keypair`; its prior inclusion was a mistake.
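
To make the renames concrete, here is a minimal client-side sketch (the endpoint URL, token, and plugin values are hypothetical placeholders, not part of this change):

    import requests

    SAHARA_V2 = "http://sahara.example:8386/v2"  # hypothetical endpoint
    HEADERS = {"X-Auth-Token": "<keystone-token>"}  # placeholder token

    # v2 filters by plugin_version; hadoop_version is no longer a
    # recognized query parameter on these endpoints.
    resp = requests.get(SAHARA_V2 + "/clusters",
                        params={"plugin_name": "vanilla",
                                "plugin_version": "2.8.2"},
                        headers=HEADERS)
    for cluster in resp.json()["clusters"]:
        # v2 responses expose project_id rather than tenant_id.
        print(cluster["id"], cluster["project_id"])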

@@ -0,0 +1,5 @@
+---
+other:
+  - In APIv2 there is now strict checking of parameters in the query string.
+    This means that unexpected values in the query string will give a 400
+    error (as opposed to previously being ignored, or causing a 500 error).
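
Under strict checking, a request with an unrecognized parameter now fails fast; a hedged sketch of the behavior (endpoint and token again hypothetical):

    import requests

    resp = requests.get("http://sahara.example:8386/v2/clusters",
                        params={"bogus_filter": "x"},  # not whitelisted
                        headers={"X-Auth-Token": "<keystone-token>"})
    # Previously the stray parameter was silently ignored (or, on some
    # endpoints, caused a 500); now the API answers 400 up front.
    assert resp.status_code == 400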

@@ -29,9 +29,13 @@ rest = u.RestV2('cluster-templates', __name__)
 @v.check_exists(api.get_cluster_template, 'marker')
 @v.validate(None, v.validate_pagination_limit,
             v.validate_sorting_cluster_templates)
-@v.validate_request_params(['plugin_name', 'hadoop_version', 'name'])
+@v.validate_request_params(['plugin_name', 'plugin_version', 'name'])
 def cluster_templates_list():
-    result = api.get_cluster_templates(**u.get_request_args().to_dict())
+    request_args = u.get_request_args().to_dict()
+    if 'plugin_version' in request_args:
+        request_args['hadoop_version'] = request_args['plugin_version']
+        del request_args['plugin_version']
+    result = api.get_cluster_templates(**request_args)
     for ct in result:
         u._replace_hadoop_version_plugin_version(ct)
         u._replace_tenant_id_project_id(ct)
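
The hunk above introduces a pattern that recurs in the clusters, job-types, and node-group-templates handlers below: accept plugin_version at the REST edge, then translate it back to hadoop_version for the unchanged internal layers. Condensed into a standalone helper, the idea is simply (helper name hypothetical, not part of this change):

    def _translate_plugin_version(request_args):
        # The v2 REST surface speaks plugin_version, but the internal
        # API layer still expects hadoop_version, so rewrite in place.
        if 'plugin_version' in request_args:
            request_args['hadoop_version'] = request_args.pop('plugin_version')
        return request_args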

@@ -27,16 +27,28 @@ import sahara.utils.api as u
 rest = u.RestV2('clusters', __name__)
 
 
+def _replace_tenant_id_project_id_provision_steps(c):
+    if 'provision_progress' in c:
+        for step in c['provision_progress']:
+            dict.update(step, {'project_id': step['tenant_id']})
+            dict.pop(step, 'tenant_id')
+
+
 @rest.get('/clusters')
 @acl.enforce("data-processing:clusters:get_all")
 @v.check_exists(api.get_cluster, 'marker')
 @v.validate(None, v.validate_pagination_limit)
-@v.validate_request_params(['plugin_name', 'hadoop_version', 'name'])
+@v.validate_request_params(['plugin_name', 'plugin_version', 'name'])
 def clusters_list():
-    result = api.get_clusters(**u.get_request_args().to_dict())
+    request_args = u.get_request_args().to_dict()
+    if 'plugin_version' in request_args:
+        request_args['hadoop_version'] = request_args['plugin_version']
+        del request_args['plugin_version']
+    result = api.get_clusters(**request_args)
     for c in result:
         u._replace_hadoop_version_plugin_version(c)
         u._replace_tenant_id_project_id(c)
+        _replace_tenant_id_project_id_provision_steps(c)
     return u.render(res=result, name='clusters')
@@ -73,13 +85,14 @@ def clusters_scale(cluster_id, data):
         api.scale_cluster, cluster_id, data)
     u._replace_hadoop_version_plugin_version(result['cluster'])
     u._replace_tenant_id_project_id(result['cluster'])
+    _replace_tenant_id_project_id_provision_steps(result['cluster'])
     return u.render(result)
 
 
 @rest.get('/clusters/<cluster_id>')
 @acl.enforce("data-processing:clusters:get")
 @v.check_exists(api.get_cluster, 'cluster_id')
-@v.validate_request_params([])
+@v.validate_request_params(['show_progress'])
 def clusters_get(cluster_id):
     data = u.get_request_args()
     show_events = six.text_type(
@@ -88,19 +101,21 @@ def clusters_get(cluster_id):
         api.get_cluster, cluster_id, show_events)
     u._replace_hadoop_version_plugin_version(result['cluster'])
     u._replace_tenant_id_project_id(result['cluster'])
+    _replace_tenant_id_project_id_provision_steps(result['cluster'])
     return u.render(result)
 
 
 @rest.patch('/clusters/<cluster_id>')
 @acl.enforce("data-processing:clusters:modify")
 @v.check_exists(api.get_cluster, 'cluster_id')
-@v.validate(v_c_schema.CLUSTER_UPDATE_SCHEMA, v_c.check_cluster_update)
+@v.validate(v_c_schema.CLUSTER_UPDATE_SCHEMA_V2, v_c.check_cluster_update)
 @v.validate_request_params([])
 def clusters_update(cluster_id, data):
     result = u.to_wrapped_dict_no_render(
         api.update_cluster, cluster_id, data)
     u._replace_hadoop_version_plugin_version(result['cluster'])
     u._replace_tenant_id_project_id(result['cluster'])
+    _replace_tenant_id_project_id_provision_steps(result['cluster'])
     return u.render(result)
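
For illustration, the effect of _replace_tenant_id_project_id_provision_steps on a single provision_progress entry, with invented field values (only the key rename is the point here):

    # Internal representation of one provision step:
    step = {"tenant_id": "b7e2f1", "step_name": "Wait for instances"}

    # As exposed by the v2 clusters API after the rename:
    step = {"project_id": "b7e2f1", "step_name": "Wait for instances"}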

@@ -52,9 +52,9 @@ def data_source_register(data):
 @v.check_exists(api.get_data_source, 'data_source_id')
 @v.validate_request_params([])
 def data_source_get(data_source_id):
-    result = u.to_wrapped_dict(api.get_data_source, data_source_id)
+    result = api.get_data_source(data_source_id).to_wrapped_dict()
     u._replace_tenant_id_project_id(result['data_source'])
-    return result
+    return u.render(result)
 
 
 @rest.delete('/data-sources/<data_source_id>')
@@ -72,6 +72,6 @@ def data_source_delete(data_source_id):
 @v.validate(v_d_s_schema.DATA_SOURCE_UPDATE_SCHEMA)
 @v.validate_request_params([])
 def data_source_update(data_source_id, data):
-    result = u.to_wrapped_dict(api.data_source_update, data_source_id, data)
+    result = api.data_source_update(data_source_id, data).to_wrapped_dict()
     u._replace_tenant_id_project_id(result['data_source'])
-    return result
+    return u.render(result)

@@ -24,9 +24,15 @@ import sahara.utils.api as u
 rest = u.RestV2('job-templates', __name__)
 
 
+def _replace_tenant_id_project_id_job_binary(jb_list):
+    for jb_obj in jb_list:
+        dict.update(jb_obj, {'project_id': jb_obj['tenant_id']})
+        dict.pop(jb_obj, 'tenant_id')
+
+
 @rest.get('/job-templates')
 @acl.enforce("data-processing:job-templates:get_all")
-@v.check_exists(api.get_job_templates, 'marker')
+@v.check_exists(api.get_job_template, 'marker')
 @v.validate(None, v.validate_pagination_limit,
             v.validate_sorting_jobs)
 @v.validate_request_params(['type', 'name'])
@@ -34,6 +40,8 @@ def job_templates_list():
     result = api.get_job_templates(**u.get_request_args().to_dict())
     for jt in result:
         u._replace_tenant_id_project_id(jt)
+        _replace_tenant_id_project_id_job_binary(jt['mains'])
+        _replace_tenant_id_project_id_job_binary(jt['libs'])
     return u.render(res=result, name='job_templates')
@@ -44,35 +52,41 @@ def job_templates_list():
 def job_templates_create(data):
     result = {'job_template': api.create_job_template(data).to_dict()}
     u._replace_tenant_id_project_id(result['job_template'])
+    _replace_tenant_id_project_id_job_binary(result['job_template']['mains'])
+    _replace_tenant_id_project_id_job_binary(result['job_template']['libs'])
     return u.render(result)
 
 
 @rest.get('/job-templates/<job_templates_id>')
 @acl.enforce("data-processing:job-templates:get")
-@v.check_exists(api.get_job_templates, id='job_templates_id')
+@v.check_exists(api.get_job_template, id='job_templates_id')
 @v.validate_request_params([])
 def job_templates_get(job_templates_id):
     result = {'job_template': api.get_job_template(
         job_templates_id).to_dict()}
     u._replace_tenant_id_project_id(result['job_template'])
+    _replace_tenant_id_project_id_job_binary(result['job_template']['mains'])
+    _replace_tenant_id_project_id_job_binary(result['job_template']['libs'])
     return u.render(result)
 
 
 @rest.patch('/job-templates/<job_templates_id>')
 @acl.enforce("data-processing:jobs:modify")
-@v.check_exists(api.get_job_templates, id='job_templates_id')
+@v.check_exists(api.get_job_template, id='job_templates_id')
 @v.validate(v_j_schema.JOB_UPDATE_SCHEMA)
 @v.validate_request_params([])
 def job_templates_update(job_templates_id, data):
     result = {'job_template': api.update_job_template(
         job_templates_id, data).to_dict()}
     u._replace_tenant_id_project_id(result['job_template'])
+    _replace_tenant_id_project_id_job_binary(result['job_template']['mains'])
+    _replace_tenant_id_project_id_job_binary(result['job_template']['libs'])
     return u.render(result)
 
 
 @rest.delete('/job-templates/<job_templates_id>')
 @acl.enforce("data-processing:jobs:delete")
-@v.check_exists(api.get_job_templates, id='job_templates_id')
+@v.check_exists(api.get_job_template, id='job_templates_id')
 @v.validate_request_params([])
 def job_templates_delete(job_templates_id):
     api.delete_job_template(job_templates_id)

@@ -24,10 +24,13 @@ rest = u.RestV2('job-types', __name__)
 @rest.get('/job-types')
 @acl.enforce("data-processing:job-types:get_all")
-@v.validate_request_params(['type', 'plugin_name', 'hadoop_version'])
+@v.validate_request_params(['type', 'plugin_name', 'plugin_version'])
 def job_types_get():
     # We want to use flat=False with to_dict() so that
     # the value of each arg is given as a list. This supports
     # filters of the form ?type=Pig&type=Java, etc.
-    return u.render(job_types=api.get_job_types(
-        **u.get_request_args().to_dict(flat=False)))
+    request_args = u.get_request_args().to_dict(flat=False)
+    if 'plugin_version' in request_args:
+        request_args['hadoop_version'] = request_args['plugin_version']
+        del request_args['plugin_version']
+    return u.render(job_types=api.get_job_types(**request_args))

@@ -26,6 +26,11 @@ import sahara.utils.api as u
 rest = u.RestV2('jobs', __name__)
 
 
+def _replace_job_id_job_template_id(job_obj):
+    dict.update(job_obj, {'job_template_id': job_obj['job_id']})
+    dict.pop(job_obj, 'job_id')
+
+
 @rest.get('/jobs')
 @acl.enforce("data-processing:job-executions:get_all")
 @v.check_exists(api.get_job_execution, 'marker')
@@ -40,6 +45,7 @@ def jobs_list():
     for je in result:
         je.pop('oozie_job_id', force=True)
         u._replace_tenant_id_project_id(je)
+        _replace_job_id_job_template_id(je)
     return u.render(res=result, name='jobs')
@@ -53,6 +59,7 @@ def jobs_execute(data):
                 {'engine_job_id': result['job']['oozie_job_id']})
     dict.pop(result['job'], 'oozie_job_id')
     u._replace_tenant_id_project_id(result['job'])
+    _replace_job_id_job_template_id(result['job'])
     return u.render(result)
@@ -67,6 +74,7 @@ def jobs_get(job_id):
     result = {'job': api.get_job_execution(job_id, refresh_status)}
     result['job'].pop('oozie_job_id', force=True)
     u._replace_tenant_id_project_id(result['job'])
+    _replace_job_id_job_template_id(result['job'])
     return u.render(result)
@@ -80,6 +88,7 @@ def jobs_update(job_id, data):
     result = {'job': api.update_job_execution(job_id, data)}
     result['job'].pop('oozie_job_id', force=True)
     u._replace_tenant_id_project_id(result['job'])
+    _replace_job_id_job_template_id(result['job'])
     return u.render(result)

@@ -30,9 +30,13 @@ rest = u.RestV2('node-group-templates', __name__)
 @v.check_exists(api.get_node_group_template, 'marker')
 @v.validate(None, v.validate_pagination_limit,
             v.validate_sorting_node_group_templates)
-@v.validate_request_params(['plugin_name', 'hadoop_version', 'name'])
+@v.validate_request_params(['plugin_name', 'plugin_version', 'name'])
 def node_group_templates_list():
-    result = api.get_node_group_templates(**u.get_request_args().to_dict())
+    request_args = u.get_request_args().to_dict()
+    if 'plugin_version' in request_args:
+        request_args['hadoop_version'] = request_args['plugin_version']
+        del request_args['plugin_version']
+    result = api.get_node_group_templates(**request_args)
     for ngt in result:
         u._replace_hadoop_version_plugin_version(ngt)
         u._replace_tenant_id_project_id(ngt)

@@ -71,9 +71,6 @@ CLUSTER_UPDATE_SCHEMA = {
         "description": {
             "type": ["string", "null"]
         },
-        "update_keypair": {
-            "type": ["boolean", "null"]
-        },
         "name": {
             "type": "string",
             "minLength": 1,
@@ -99,6 +96,11 @@ CLUSTER_UPDATE_SCHEMA = {
     "additionalProperties": False,
     "required": []
 }
+CLUSTER_UPDATE_SCHEMA_V2 = copy.deepcopy(CLUSTER_UPDATE_SCHEMA)
+CLUSTER_UPDATE_SCHEMA_V2['properties'].update({
+    "update_keypair": {
+        "type": ["boolean", "null"]
+    }})
 
 CLUSTER_SCALING_SCHEMA = {
     "type": "object",