Preparing OSC for APIv2

On the way to making APIv2 stable, we need to make it available in the
OSC plugin for Sahara.

Change-Id: I84f4bc56c641caad7c04190c7a344a6773440eef
This commit is contained in:
Telles Nobrega 2018-07-12 21:02:52 -03:00 committed by Telles Nobrega
parent 2a66f9a715
commit 3f6f2d1128
35 changed files with 4308 additions and 137 deletions

View File

@ -0,0 +1,5 @@
---
features:
- |
Adding the ability for the CLI to communicate with OpenStack
Sahara using the new APIv2.

View File

@ -105,6 +105,241 @@ def wait_for_delete(manager, obj_id, sleep_time=5, timeout=3000):
return False
def get_api_version(app):
    """Return the data-processing API version configured on the app.

    :param app: the cliff application object; its ``api_version`` mapping
        holds the negotiated version per service type.
    :returns: the Sahara API version as a string (e.g. '1.1' or '2').
    """
    return app.api_version['data_processing']


def is_api_v2(app):
    """Return True when the app is configured to talk to Sahara APIv2.

    :param app: the cliff application object.
    :returns: bool — True for APIv2, False otherwise.
    """
    # Direct boolean expression instead of if/return True/return False.
    return get_api_version(app) == '2'
def _cluster_templates_configure_ng(app, node_groups, client):
    """Resolve 'name:count' node-group specs into API-ready references.

    :param app: the cliff application object (used to pick the API version).
    :param node_groups: iterable of strings shaped ``'<template name>:<count>'``.
    :param client: sahara client with a ``node_group_templates`` manager.
    :returns: tuple ``(plugin_name, plugin_version, node_groups)`` where
        ``node_groups`` is a list of dicts for the cluster-template API.
    :raises exceptions.CommandError: when the referenced node group
        templates do not all share a single plugin/version pair.
    """
    counts = dict(spec.split(':', 1) for spec in node_groups)
    resolved = []
    seen_plugins = set()
    v2 = is_api_v2(app)
    for tmpl_name, tmpl_count in counts.items():
        template = get_resource(client.node_group_templates, tmpl_name)
        resolved.append({'name': template.name,
                         'count': int(tmpl_count),
                         'node_group_template_id': template.id})
        # APIv2 renamed the version attribute from hadoop_version.
        version = template.plugin_version if v2 else template.hadoop_version
        seen_plugins.add((template.plugin_name, version))
    if len(seen_plugins) != 1:
        raise exceptions.CommandError('Node groups with the same plugins '
                                      'and versions must be specified')
    plugin, plugin_version = seen_plugins.pop()
    return plugin, plugin_version, resolved
def _get_plugin_version(app, cluster_template, client):
    """Look up a cluster template and return its plugin identification.

    :returns: tuple ``(plugin_name, plugin_version, template_id)``.
    """
    template = get_resource(client.cluster_templates, cluster_template)
    # APIv2 exposes plugin_version; APIv1 still calls it hadoop_version.
    version = (template.plugin_version if is_api_v2(app)
               else template.hadoop_version)
    return template.plugin_name, version, template.id
def create_job_templates(app, client, mains_ids, libs_ids, parsed_args):
    """Create a job template from parsed CLI arguments.

    Dispatches to the ``job_templates`` manager on APIv2 and to the legacy
    ``jobs`` manager on APIv1; the request payload is identical.

    :returns: the created job template as a dict.
    """
    payload = {
        'name': parsed_args.name,
        'type': parsed_args.type,
        'mains': mains_ids,
        'libs': libs_ids,
        'description': parsed_args.description,
        'interface': parsed_args.interface,
        'is_public': parsed_args.public,
        'is_protected': parsed_args.protected,
    }
    manager = client.job_templates if is_api_v2(app) else client.jobs
    return manager.create(**payload).to_dict()
def create_job_template_json(app, client, **template):
    """Create a job template from a raw JSON-derived mapping.

    :returns: the created job template as a dict.
    """
    # v2: job_templates manager; v1: legacy jobs manager.
    manager = client.job_templates if is_api_v2(app) else client.jobs
    return manager.create(**template).to_dict()
def list_job_templates(app, client, search_opts):
    """List job templates, using the manager that matches the API version.

    :returns: list of job template resources.
    """
    manager = client.job_templates if is_api_v2(app) else client.jobs
    return manager.list(search_opts=search_opts)
def get_job_templates_resources(app, client, parsed_args):
    """Fetch a single job template by name or id.

    :returns: the job template as a dict.
    """
    manager = client.job_templates if is_api_v2(app) else client.jobs
    return get_resource(manager, parsed_args.job_template).to_dict()
def delete_job_templates(app, client, jt):
    """Delete one job template identified by name or id."""
    manager = client.job_templates if is_api_v2(app) else client.jobs
    manager.delete(get_resource_id(manager, jt))
def get_job_template_id(app, client, parsed_args):
    """Resolve the id of a job template given by name or id.

    :returns: the job template id as a string.
    """
    manager = client.job_templates if is_api_v2(app) else client.jobs
    return get_resource_id(manager, parsed_args.job_template)
def update_job_templates(app, client, jt_id, update_data):
    """Update a job template and return the updated resource body.

    The response attribute differs between versions: ``job_template`` on
    APIv2 versus ``job`` on APIv1.
    """
    if is_api_v2(app):
        return client.job_templates.update(jt_id, **update_data).job_template
    return client.jobs.update(jt_id, **update_data).job
def create_cluster_template(app, client, plugin, plugin_version,
                            parsed_args, configs, shares, node_groups):
    """Create a cluster template, keying the version field per API version.

    :returns: the created cluster template as a dict.
    """
    payload = dict(
        name=parsed_args.name,
        plugin_name=plugin,
        description=parsed_args.description,
        node_groups=node_groups,
        use_autoconfig=parsed_args.autoconfig,
        cluster_configs=configs,
        shares=shares,
        is_public=parsed_args.public,
        is_protected=parsed_args.protected,
        domain_name=parsed_args.domain_name)
    # APIv2 renamed hadoop_version to plugin_version in the request body.
    version_key = 'plugin_version' if is_api_v2(app) else 'hadoop_version'
    payload[version_key] = plugin_version
    return client.cluster_templates.create(**payload).to_dict()
def update_cluster_template(app, client, plugin, plugin_version,
                            parsed_args, configs, shares, node_groups, ct_id):
    """Update a cluster template, dropping unset (None) fields first.

    :returns: the updated cluster template as a dict.
    """
    payload = dict(
        name=parsed_args.name,
        plugin_name=plugin,
        description=parsed_args.description,
        node_groups=node_groups,
        use_autoconfig=parsed_args.use_autoconfig,
        cluster_configs=configs,
        shares=shares,
        is_public=parsed_args.is_public,
        is_protected=parsed_args.is_protected,
        domain_name=parsed_args.domain_name)
    # APIv2 renamed hadoop_version to plugin_version in the request body.
    version_key = 'plugin_version' if is_api_v2(app) else 'hadoop_version'
    payload[version_key] = plugin_version
    # Only send fields the user actually set on the command line.
    update_fields = create_dict_from_kwargs(**payload)
    return client.cluster_templates.update(ct_id, **update_fields).to_dict()
def create_cluster(client, app, parsed_args, plugin, plugin_version,
                   template_id, image_id, net_id):
    """Create a cluster, keying the version field per API version.

    :returns: the create response as a dict (a single cluster, or a
        ``clusters`` list when ``parsed_args.count`` > 1).
    """
    payload = dict(
        name=parsed_args.name,
        plugin_name=plugin,
        cluster_template_id=template_id,
        default_image_id=image_id,
        description=parsed_args.description,
        is_transient=parsed_args.transient,
        user_keypair_id=parsed_args.user_keypair,
        net_id=net_id,
        count=parsed_args.count,
        is_public=parsed_args.public,
        is_protected=parsed_args.protected)
    # APIv2 renamed hadoop_version to plugin_version in the request body.
    version_key = 'plugin_version' if is_api_v2(app) else 'hadoop_version'
    payload[version_key] = plugin_version
    return client.clusters.create(**payload).to_dict()
def create_job(client, app, jt_id, cluster_id, input_id, output_id,
               job_configs, parsed_args):
    """Launch a job (v2) or job execution (v1) from a template.

    The template reference key differs: ``job_template_id`` on APIv2,
    ``job_id`` on APIv1.

    :returns: the created job/job execution as a dict.
    """
    payload = dict(cluster_id=cluster_id,
                   input_id=input_id,
                   output_id=output_id,
                   interface=parsed_args.interface,
                   configs=job_configs,
                   is_public=parsed_args.public,
                   is_protected=parsed_args.protected)
    if is_api_v2(app):
        payload['job_template_id'] = jt_id
        return client.jobs.create(**payload).to_dict()
    payload['job_id'] = jt_id
    return client.job_executions.create(**payload).to_dict()
def create_job_json(client, app, **template):
    """Launch a job from a raw JSON-derived mapping.

    :returns: the created job (v2) or job execution (v1) as a dict.
    """
    manager = client.jobs if is_api_v2(app) else client.job_executions
    return manager.create(**template).to_dict()
def update_job(client, app, parsed_args, update_dict):
    """Update a job and return the updated resource body.

    The response attribute differs between versions: ``job`` on APIv2
    versus ``job_execution`` on APIv1.
    """
    if is_api_v2(app):
        return client.jobs.update(parsed_args.job, **update_dict).job
    return client.job_executions.update(
        parsed_args.job, **update_dict).job_execution
def create_node_group_templates(client, app, parsed_args, flavor_id, configs,
shares):
if app.api_version['data_processing'] == '2':

View File

@ -33,13 +33,13 @@ def _format_node_groups_list(node_groups):
['%s:%s' % (ng['name'], ng['count']) for ng in node_groups])
def _format_ct_output(data):
def _format_ct_output(app, data):
data['plugin_version'] = data.pop('hadoop_version')
data['node_groups'] = _format_node_groups_list(data['node_groups'])
data['anti_affinity'] = osc_utils.format_list(data['anti_affinity'])
def _configure_node_groups(node_groups, client):
def _configure_node_groups(app, node_groups, client):
node_groups_list = dict(
map(lambda x: x.split(':', 1), node_groups))
@ -140,10 +140,7 @@ class CreateClusterTemplate(command.ShowOne):
)
return parser
def take_action(self, parsed_args):
self.log.debug("take_action(%s)", parsed_args)
client = self.app.client_manager.data_processing
def _take_action(self, client, parsed_args):
if parsed_args.json:
blob = osc_utils.read_blob_file_contents(parsed_args.json)
try:
@ -184,24 +181,24 @@ class CreateClusterTemplate(command.ShowOne):
'An error occurred when reading '
'shares from file %s: %s' % (parsed_args.shares, e))
plugin, plugin_version, node_groups = _configure_node_groups(
parsed_args.node_groups, client)
plugin, plugin_version, node_groups = (
utils._cluster_templates_configure_ng(self.app,
parsed_args.node_groups,
client))
data = utils.create_cluster_template(self.app, client, plugin,
plugin_version,
parsed_args, configs, shares,
node_groups)
data = client.cluster_templates.create(
name=parsed_args.name,
plugin_name=plugin,
hadoop_version=plugin_version,
description=parsed_args.description,
node_groups=node_groups,
use_autoconfig=parsed_args.autoconfig,
cluster_configs=configs,
shares=shares,
is_public=parsed_args.public,
is_protected=parsed_args.protected,
domain_name=parsed_args.domain_name
).to_dict()
return data
_format_ct_output(data)
def take_action(self, parsed_args):
self.log.debug("take_action(%s)", parsed_args)
client = self.app.client_manager.data_processing
data = self._take_action(client, parsed_args)
_format_ct_output(self.app, data)
data = utils.prepare_data(data, CT_FIELDS)
return self.dict2columns(data)
@ -249,7 +246,10 @@ class ListClusterTemplates(command.Lister):
if parsed_args.plugin:
search_opts['plugin_name'] = parsed_args.plugin
if parsed_args.plugin_version:
search_opts['hadoop_version'] = parsed_args.plugin_version
if utils.is_api_v2(self.app):
search_opts['plugin_version'] = parsed_args.plugin_version
else:
search_opts['hadoop_version'] = parsed_args.plugin_version
data = client.cluster_templates.list(search_opts=search_opts)
@ -301,7 +301,7 @@ class ShowClusterTemplate(command.ShowOne):
data = utils.get_resource(
client.cluster_templates, parsed_args.cluster_template).to_dict()
_format_ct_output(data)
_format_ct_output(self.app, data)
data = utils.prepare_data(data, CT_FIELDS)
return self.dict2columns(data)
@ -442,13 +442,7 @@ class UpdateClusterTemplate(command.ShowOne):
use_autoconfig=None)
return parser
def take_action(self, parsed_args):
self.log.debug("take_action(%s)", parsed_args)
client = self.app.client_manager.data_processing
ct_id = utils.get_resource_id(
client.cluster_templates, parsed_args.cluster_template)
def _take_action(self, client, parsed_args, ct_id):
if parsed_args.json:
blob = osc_utils.read_blob_file_contents(parsed_args.json)
try:
@ -462,8 +456,9 @@ class UpdateClusterTemplate(command.ShowOne):
else:
plugin, plugin_version, node_groups = None, None, None
if parsed_args.node_groups:
plugin, plugin_version, node_groups = _configure_node_groups(
parsed_args.node_groups, client)
plugin, plugin_version, node_groups = (
utils._cluster_templates_configure_ng(
self.app, parsed_args.node_groups, client))
configs = None
if parsed_args.configs:
@ -485,24 +480,23 @@ class UpdateClusterTemplate(command.ShowOne):
'An error occurred when reading '
'shares from file %s: %s' % (parsed_args.shares, e))
update_dict = utils.create_dict_from_kwargs(
name=parsed_args.name,
plugin_name=plugin,
hadoop_version=plugin_version,
description=parsed_args.description,
node_groups=node_groups,
use_autoconfig=parsed_args.use_autoconfig,
cluster_configs=configs,
shares=shares,
is_public=parsed_args.is_public,
is_protected=parsed_args.is_protected,
domain_name=parsed_args.domain_name
)
data = utils.update_cluster_template(self.app, client, plugin,
plugin_version, parsed_args,
configs, shares, node_groups,
ct_id)
data = client.cluster_templates.update(
ct_id, **update_dict).to_dict()
return data
_format_ct_output(data)
def take_action(self, parsed_args):
self.log.debug("take_action(%s)", parsed_args)
client = self.app.client_manager.data_processing
ct_id = utils.get_resource_id(
client.cluster_templates, parsed_args.cluster_template)
data = self._take_action(client, parsed_args, ct_id)
_format_ct_output(self.app, data)
data = utils.prepare_data(data, CT_FIELDS)
return self.dict2columns(data)
@ -541,10 +535,7 @@ class ImportClusterTemplate(command.ShowOne):
)
return parser
def take_action(self, parsed_args):
self.log.debug("take_action(%s)", parsed_args)
client = self.app.client_manager.data_processing
def _take_action(self, client, parsed_args):
if (not parsed_args.node_groups):
raise exceptions.CommandError('--node_groups should be specified')
@ -569,8 +560,9 @@ class ImportClusterTemplate(command.ShowOne):
template['cluster_template']['net_id'] = (
template['cluster_template'].pop('neutron_management_network'))
plugin, plugin_version, node_groups = _configure_node_groups(
parsed_args.node_groups, client)
plugin, plugin_version, node_groups = (
utils._cluster_templates_configure_ng_configure_node_groups(
self.app, parsed_args.node_groups, client))
if (('plugin_version' in template['cluster_template'] and
template['cluster_template']['plugin_version'] !=
plugin_version) or
@ -584,7 +576,15 @@ class ImportClusterTemplate(command.ShowOne):
data = client.cluster_templates.create(
**template['cluster_template']).to_dict()
_format_ct_output(data)
return data
def take_action(self, parsed_args):
self.log.debug("take_action(%s)", parsed_args)
client = self.app.client_manager.data_processing
data = self._take_action(client, parsed_args)
_format_ct_output(self.app, data)
data = utils.prepare_data(data, CT_FIELDS)
return self.dict2columns(data)

View File

@ -35,7 +35,7 @@ def _format_node_groups_list(node_groups):
['%s:%s' % (ng['name'], ng['count']) for ng in node_groups])
def _format_cluster_output(data):
def _format_cluster_output(app, data):
data['plugin_version'] = data.pop('hadoop_version')
data['image'] = data.pop('default_image_id')
data['node_groups'] = _format_node_groups_list(data['node_groups'])
@ -54,11 +54,6 @@ def _prepare_health_checks(data):
return additional_data, additional_fields
def _get_plugin_version(cluster_template, client):
ct = utils.get_resource(client.cluster_templates, cluster_template)
return ct.plugin_name, ct.hadoop_version, ct.id
class CreateCluster(command.ShowOne):
"""Creates cluster"""
@ -140,9 +135,7 @@ class CreateCluster(command.ShowOne):
return parser
def take_action(self, parsed_args):
self.log.debug("take_action(%s)", parsed_args)
client = self.app.client_manager.data_processing
def _take_action(self, client, parsed_args):
network_client = self.app.client_manager.network
if parsed_args.json:
@ -169,8 +162,8 @@ class CreateCluster(command.ShowOne):
'should be specified or json template should be provided '
'with --json argument')
plugin, plugin_version, template_id = _get_plugin_version(
parsed_args.cluster_template, client)
plugin, plugin_version, template_id = utils._get_plugin_version(
self.app, parsed_args.cluster_template, client)
image_id = utils.get_resource_id(client.images, parsed_args.image)
@ -178,20 +171,17 @@ class CreateCluster(command.ShowOne):
parsed_args.neutron_network, ignore_missing=False).id if
parsed_args.neutron_network else None)
data = client.clusters.create(
name=parsed_args.name,
plugin_name=plugin,
hadoop_version=plugin_version,
cluster_template_id=template_id,
default_image_id=image_id,
description=parsed_args.description,
is_transient=parsed_args.transient,
user_keypair_id=parsed_args.user_keypair,
net_id=net_id,
count=parsed_args.count,
is_public=parsed_args.public,
is_protected=parsed_args.protected
).to_dict()
data = utils.create_cluster(client, self.app, parsed_args, plugin,
plugin_version, template_id, image_id,
net_id)
return data
def take_action(self, parsed_args):
self.log.debug("take_action(%s)", parsed_args)
client = self.app.client_manager.data_processing
data = self._take_action(client, parsed_args)
if parsed_args.count and parsed_args.count > 1:
clusters = [
utils.get_resource(client.clusters, id)
@ -217,7 +207,7 @@ class CreateCluster(command.ShowOne):
'Error occurred during cluster creation: %s',
data['id'])
data = client.clusters.get(data['id']).to_dict()
_format_cluster_output(data)
_format_cluster_output(self.app, data)
data = utils.prepare_data(data, CLUSTER_FIELDS)
return self.dict2columns(data)
@ -277,12 +267,13 @@ class ListClusters(command.Lister):
column_headers = utils.prepare_column_headers(
columns, {'hadoop_version': 'plugin_version',
'default_image_id': 'image'})
else:
columns = ('name', 'id', 'plugin_name', 'hadoop_version', 'status')
columns = ('name', 'id', 'plugin_name', 'hadoop_version',
'status')
column_headers = utils.prepare_column_headers(
columns, {'hadoop_version': 'plugin_version',
'default_image_id': 'image'})
return (
column_headers,
(osc_utils.get_item_properties(
@ -326,10 +317,7 @@ class ShowCluster(command.ShowOne):
)
return parser
def take_action(self, parsed_args):
self.log.debug("take_action(%s)", parsed_args)
client = self.app.client_manager.data_processing
def _take_action(self, client, parsed_args):
kwargs = {}
if parsed_args.show_progress or parsed_args.full_dump_events:
kwargs['show_progress'] = True
@ -344,8 +332,9 @@ class ShowCluster(command.ShowOne):
with open(file_name, 'w') as file:
jsonutils.dump(provision_steps, file, indent=4)
sys.stdout.write('Event log dump saved to file: %s\n' % file_name)
return data, provision_steps
_format_cluster_output(data)
def _show_cluster_info(self, data, provision_steps, parsed_args):
fields = []
if parsed_args.verification:
ver_data, fields = _prepare_health_checks(data)
@ -370,6 +359,17 @@ class ShowCluster(command.ShowOne):
return data
def take_action(self, parsed_args):
self.log.debug("take_action(%s)", parsed_args)
client = self.app.client_manager.data_processing
data, provision_steps = self._take_action(client, parsed_args)
_format_cluster_output(self.app, data)
data = self._show_cluster_info(data, provision_steps, parsed_args)
return data
class DeleteCluster(command.Command):
"""Deletes cluster"""
@ -477,10 +477,7 @@ class UpdateCluster(command.ShowOne):
return parser
def take_action(self, parsed_args):
self.log.debug("take_action(%s)", parsed_args)
client = self.app.client_manager.data_processing
def _take_action(self, client, parsed_args):
cluster_id = utils.get_resource_id(
client.clusters, parsed_args.cluster)
@ -502,8 +499,15 @@ class UpdateCluster(command.ShowOne):
shares=shares
)
data = client.clusters.update(cluster_id, **update_dict).cluster
return data
_format_cluster_output(data)
def take_action(self, parsed_args):
self.log.debug("take_action(%s)", parsed_args)
client = self.app.client_manager.data_processing
data = self._take_action(client, parsed_args)
_format_cluster_output(self.app, data)
data = utils.prepare_data(data, CLUSTER_FIELDS)
return self.dict2columns(data)
@ -545,10 +549,7 @@ class ScaleCluster(command.ShowOne):
return parser
def take_action(self, parsed_args):
self.log.debug("take_action(%s)", parsed_args)
client = self.app.client_manager.data_processing
def _take_action(self, client, parsed_args):
cluster = utils.get_resource(
client.clusters, parsed_args.cluster)
@ -603,7 +604,15 @@ class ScaleCluster(command.ShowOne):
cluster.id)
data = client.clusters.get(cluster.id).cluster
_format_cluster_output(data)
return data
def take_action(self, parsed_args):
self.log.debug("take_action(%s)", parsed_args)
client = self.app.client_manager.data_processing
data = self._take_action(client, parsed_args)
_format_cluster_output(self.app, data)
data = utils.prepare_data(data, CLUSTER_FIELDS)
return self.dict2columns(data)

View File

@ -111,7 +111,8 @@ class CreateJobTemplate(command.ShowOne):
raise exceptions.CommandError(
'An error occurred when reading '
'template from file %s: %s' % (parsed_args.json, e))
data = client.jobs.create(**template).to_dict()
data = utils.create_job_template_json(self.app,
client, **template)
else:
if parsed_args.interface:
blob = osc_utils.read_blob_file_contents(parsed_args.json)
@ -127,11 +128,8 @@ class CreateJobTemplate(command.ShowOne):
libs_ids = [utils.get_resource_id(client.job_binaries, m) for m
in parsed_args.libs] if parsed_args.libs else None
data = client.jobs.create(
name=parsed_args.name, type=parsed_args.type, mains=mains_ids,
libs=libs_ids, description=parsed_args.description,
interface=parsed_args.interface, is_public=parsed_args.public,
is_protected=parsed_args.protected).to_dict()
data = utils.create_job_templates(self.app, client, mains_ids,
libs_ids, parsed_args)
_format_job_template_output(data)
data = utils.prepare_data(data, JOB_TEMPLATE_FIELDS)
@ -172,7 +170,7 @@ class ListJobTemplates(command.Lister):
client = self.app.client_manager.data_processing
search_opts = {'type': parsed_args.type} if parsed_args.type else {}
data = client.jobs.list(search_opts=search_opts)
data = utils.list_job_templates(self.app, client, search_opts)
if parsed_args.name:
data = utils.get_by_name_substring(data, parsed_args.name)
@ -214,8 +212,7 @@ class ShowJobTemplate(command.ShowOne):
self.log.debug("take_action(%s)", parsed_args)
client = self.app.client_manager.data_processing
data = utils.get_resource(
client.jobs, parsed_args.job_template).to_dict()
data = utils.get_job_templates_resources(self.app, client, parsed_args)
_format_job_template_output(data)
data = utils.prepare_data(data, JOB_TEMPLATE_FIELDS)
@ -243,8 +240,7 @@ class DeleteJobTemplate(command.Command):
self.log.debug("take_action(%s)", parsed_args)
client = self.app.client_manager.data_processing
for jt in parsed_args.job_template:
jt_id = utils.get_resource_id(client.jobs, jt)
client.jobs.delete(jt_id)
utils.delete_job_templates(self.app, client, jt)
sys.stdout.write(
'Job template "{jt}" has been removed '
'successfully.\n'.format(jt=jt))
@ -309,8 +305,7 @@ class UpdateJobTemplate(command.ShowOne):
self.log.debug("take_action(%s)", parsed_args)
client = self.app.client_manager.data_processing
jt_id = utils.get_resource_id(
client.jobs, parsed_args.job_template)
jt_id = utils.get_job_template_id(self.app, client, parsed_args)
update_data = utils.create_dict_from_kwargs(
name=parsed_args.name,
@ -319,7 +314,7 @@ class UpdateJobTemplate(command.ShowOne):
is_protected=parsed_args.is_protected
)
data = client.jobs.update(jt_id, **update_data).job
data = utils.update_job_templates(self.app, client, jt_id, update_data)
_format_job_template_output(data)
data = utils.prepare_data(data, JOB_TEMPLATE_FIELDS)

View File

@ -31,7 +31,7 @@ JOB_STATUS_CHOICES = ['done-with-error', 'failed', 'killed', 'pending',
'running', 'succeeded', 'to-be-killed']
def _format_job_output(data):
def _format_job_output(app, data):
data['status'] = data['info']['status']
del data['info']
data['job_template_id'] = data.pop('job_id')
@ -116,9 +116,7 @@ class ExecuteJob(command.ShowOne):
)
return parser
def take_action(self, parsed_args):
self.log.debug("take_action(%s)", parsed_args)
client = self.app.client_manager.data_processing
def _take_action(self, client, parsed_args):
if parsed_args.json:
blob = osc_utils.read_blob_file_contents(parsed_args.json)
@ -132,7 +130,7 @@ class ExecuteJob(command.ShowOne):
if 'job_configs' in template:
template['configs'] = template.pop('job_configs')
data = client.job_executions.create(**template).to_dict()
data = utils.create_job_json(client, self.app, template)
else:
if not parsed_args.cluster or not parsed_args.job_template:
raise exceptions.CommandError(
@ -170,8 +168,7 @@ class ExecuteJob(command.ShowOne):
job_configs['params'] = dict(
map(lambda x: x.split(':', 1), parsed_args.params))
jt_id = utils.get_resource_id(
client.jobs, parsed_args.job_template)
jt_id = utils.get_job_template_id(self.app, client, parsed_args)
cluster_id = utils.get_resource_id(
client.clusters, parsed_args.cluster)
if parsed_args.input not in [None, "", "None"]:
@ -185,17 +182,22 @@ class ExecuteJob(command.ShowOne):
else:
output_id = None
data = client.job_executions.create(
job_id=jt_id, cluster_id=cluster_id, input_id=input_id,
output_id=output_id, interface=parsed_args.interface,
configs=job_configs, is_public=parsed_args.public,
is_protected=parsed_args.protected).to_dict()
data = utils.create_job(client, self.app, jt_id, cluster_id,
input_id, output_id, job_configs,
parsed_args)
sys.stdout.write(
'Job "{job}" has been started successfully.\n'.format(
job=data['id']))
_format_job_output(data)
return data
def take_action(self, parsed_args):
self.log.debug("take_action(%s)", parsed_args)
client = self.app.client_manager.data_processing
data = self._take_action(client, parsed_args)
_format_job_output(self.app, data)
data = utils.prepare_data(data, JOB_FIELDS)
return self.dict2columns(data)
@ -228,6 +230,7 @@ class ListJobs(command.Lister):
client = self.app.client_manager.data_processing
data = client.job_executions.list()
for job in data:
job.status = job.info['status']
@ -275,7 +278,7 @@ class ShowJob(command.ShowOne):
data = client.job_executions.get(parsed_args.job).to_dict()
_format_job_output(data)
_format_job_output(self.app, data)
data = utils.prepare_data(data, JOB_FIELDS)
return self.dict2columns(data)
@ -308,12 +311,16 @@ class DeleteJob(command.Command):
client = self.app.client_manager.data_processing
for job_id in parsed_args.job:
client.job_executions.delete(job_id)
sys.stdout.write(
'Job "{job}" deletion has been started.\n'.format(job=job_id))
if parsed_args.wait:
for job_id in parsed_args.job:
if not utils.wait_for_delete(client.job_executions, job_id):
wait_for_delete = utils.wait_for_delete(
client.job_executions, job_id)
if not wait_for_delete:
self.log.error(
'Error occurred during job deleting: %s' %
job_id)
@ -367,18 +374,22 @@ class UpdateJob(command.ShowOne):
return parser
def take_action(self, parsed_args):
self.log.debug("take_action(%s)", parsed_args)
client = self.app.client_manager.data_processing
def _take_action(self, client, parsed_args):
update_dict = utils.create_dict_from_kwargs(
is_public=parsed_args.is_public,
is_protected=parsed_args.is_protected)
data = client.job_executions.update(
parsed_args.job, **update_dict).job_execution
data = utils.update_job(client, self.app, parsed_args, update_dict)
_format_job_output(data)
return data
def take_action(self, parsed_args):
self.log.debug("take_action(%s)", parsed_args)
client = self.app.client_manager.data_processing
data = self._take_action(client, parsed_args)
_format_job_output(self.app, data)
data = utils.prepare_data(data, JOB_FIELDS)
return self.dict2columns(data)

View File

@ -0,0 +1,149 @@
# Copyright (c) 2015 Mirantis Inc.
#
# Licensed under the Apache License, Version 2.0 (the "License");
# you may not use this file except in compliance with the License.
# You may obtain a copy of the License at
#
# http://www.apache.org/licenses/LICENSE-2.0
#
# Unless required by applicable law or agreed to in writing, software
# distributed under the License is distributed on an "AS IS" BASIS,
# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or
# implied.
# See the License for the specific language governing permissions and
# limitations under the License.
from osc_lib import utils as osc_utils
from oslo_log import log as logging
from saharaclient.osc import utils
from saharaclient.osc.v1 import cluster_templates as ct_v1
def _format_ct_output(app, data):
    """Normalize cluster-template fields in-place for display (APIv2).

    Unlike the v1 formatter there is no hadoop_version rename: APIv2
    already returns plugin_version. ``app`` is accepted to keep the
    signature aligned with the v1 formatter.
    """
    for field, formatter in (('node_groups', ct_v1._format_node_groups_list),
                             ('anti_affinity', osc_utils.format_list)):
        data[field] = formatter(data[field])
class CreateClusterTemplate(ct_v1.CreateClusterTemplate):
    """Creates cluster template"""

    log = logging.getLogger(__name__ + ".CreateClusterTemplate")

    def take_action(self, parsed_args):
        self.log.debug("take_action(%s)", parsed_args)
        dp_client = self.app.client_manager.data_processing
        # Argument parsing and the create call live in the shared v1 helper;
        # only the output formatting is v2-specific.
        raw = self._take_action(dp_client, parsed_args)
        _format_ct_output(self.app, raw)
        return self.dict2columns(utils.prepare_data(raw, ct_v1.CT_FIELDS))
class ListClusterTemplates(ct_v1.ListClusterTemplates):
    """Lists cluster templates"""

    log = logging.getLogger(__name__ + ".ListClusterTemplates")

    def take_action(self, parsed_args):
        self.log.debug("take_action(%s)", parsed_args)
        dp_client = self.app.client_manager.data_processing

        search_opts = {}
        if parsed_args.plugin:
            search_opts['plugin_name'] = parsed_args.plugin
        if parsed_args.plugin_version:
            # APIv2 filters on plugin_version, not hadoop_version.
            search_opts['plugin_version'] = parsed_args.plugin_version

        templates = dp_client.cluster_templates.list(search_opts=search_opts)
        if parsed_args.name:
            templates = utils.get_by_name_substring(templates,
                                                    parsed_args.name)

        columns = ('name', 'id', 'plugin_name', 'plugin_version')
        if parsed_args.long:
            columns += ('node_groups', 'description')
        column_headers = utils.prepare_column_headers(columns)

        rows = (osc_utils.get_item_properties(
            template,
            columns,
            formatters={'node_groups': ct_v1._format_node_groups_list}
        ) for template in templates)
        return (column_headers, rows)
class ShowClusterTemplate(ct_v1.ShowClusterTemplate):
    """Display cluster template details"""

    log = logging.getLogger(__name__ + ".ShowClusterTemplate")

    def take_action(self, parsed_args):
        self.log.debug("take_action(%s)", parsed_args)
        dp_client = self.app.client_manager.data_processing
        template = utils.get_resource(dp_client.cluster_templates,
                                      parsed_args.cluster_template)
        data = template.to_dict()
        _format_ct_output(self.app, data)
        return self.dict2columns(utils.prepare_data(data, ct_v1.CT_FIELDS))
class DeleteClusterTemplate(ct_v1.DeleteClusterTemplate):
    """Deletes cluster template"""

    # Deletion behavior is inherited unchanged from the v1 command; only
    # the logger name differs so log records identify the v2 code path.
    log = logging.getLogger(__name__ + ".DeleteClusterTemplate")
class UpdateClusterTemplate(ct_v1.UpdateClusterTemplate):
    """Updates cluster template"""

    log = logging.getLogger(__name__ + ".UpdateClusterTemplate")

    def take_action(self, parsed_args):
        self.log.debug("take_action(%s)", parsed_args)
        dp_client = self.app.client_manager.data_processing
        template_id = utils.get_resource_id(dp_client.cluster_templates,
                                            parsed_args.cluster_template)
        # Update logic lives in the shared v1 helper; only the output
        # formatting is v2-specific.
        data = self._take_action(dp_client, parsed_args, template_id)
        _format_ct_output(self.app, data)
        return self.dict2columns(utils.prepare_data(data, ct_v1.CT_FIELDS))
class ImportClusterTemplate(ct_v1.ImportClusterTemplate):
    """Imports cluster template"""

    log = logging.getLogger(__name__ + ".ImportClusterTemplate")

    def take_action(self, parsed_args):
        self.log.debug("take_action(%s)", parsed_args)
        dp_client = self.app.client_manager.data_processing
        # The JSON import and create call live in the shared v1 helper.
        data = self._take_action(dp_client, parsed_args)
        _format_ct_output(self.app, data)
        return self.dict2columns(utils.prepare_data(data, ct_v1.CT_FIELDS))
class ExportClusterTemplate(ct_v1.ExportClusterTemplate):
    """Export cluster template to JSON"""

    # Export behavior is inherited unchanged from the v1 command; only
    # the logger name differs so log records identify the v2 code path.
    log = logging.getLogger(__name__ + ".ExportClusterTemplate")

View File

@ -0,0 +1,172 @@
# Copyright (c) 2015 Mirantis Inc.
#
# Licensed under the Apache License, Version 2.0 (the "License");
# you may not use this file except in compliance with the License.
# You may obtain a copy of the License at
#
# http://www.apache.org/licenses/LICENSE-2.0
#
# Unless required by applicable law or agreed to in writing, software
# distributed under the License is distributed on an "AS IS" BASIS,
# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or
# implied.
# See the License for the specific language governing permissions and
# limitations under the License.
from osc_lib import utils as osc_utils
from oslo_log import log as logging
from saharaclient.osc import utils
from saharaclient.osc.v1 import clusters as c_v1
def _format_cluster_output(app, data):
    """Normalize cluster fields in-place for display (APIv2).

    Unlike the v1 formatter there is no hadoop_version rename: APIv2
    already returns plugin_version. ``app`` is accepted to keep the
    signature aligned with the v1 formatter.
    """
    data['image'] = data.pop('default_image_id')
    for field, formatter in (('node_groups', c_v1._format_node_groups_list),
                             ('anti_affinity', osc_utils.format_list)):
        data[field] = formatter(data[field])
class CreateCluster(c_v1.CreateCluster):
    """Creates cluster"""

    log = logging.getLogger(__name__ + ".CreateCluster")

    def take_action(self, parsed_args):
        self.log.debug("take_action(%s)", parsed_args)
        client = self.app.client_manager.data_processing
        data = self._take_action(client, parsed_args)
        if parsed_args.count and parsed_args.count > 1:
            # Batch creation: the v2 response carries a 'clusters' list
            # whose entries wrap the cluster body under a 'cluster' key.
            clusters = []
            for cluster in data['clusters']:
                clusters.append(
                    utils.get_resource(client.clusters,
                                       cluster['cluster']['id']))

            if parsed_args.wait:
                for cluster in clusters:
                    if not osc_utils.wait_for_status(
                            client.clusters.get, cluster.id):
                        # Bug fix: log the id of the cluster that failed.
                        # The batch response dict has no 'id' key, so the
                        # previous data['id'] raised KeyError here.
                        self.log.error(
                            'Error occurred during cluster creation: %s',
                            cluster.id)

            # Present a simple name -> id mapping for multiple clusters.
            data = {}
            for cluster in clusters:
                data[cluster.name] = cluster.id
        else:
            if parsed_args.wait:
                if not osc_utils.wait_for_status(
                        client.clusters.get, data['id']):
                    self.log.error(
                        'Error occurred during cluster creation: %s',
                        data['id'])
                # Refresh: pick up the post-wait state of the cluster.
                data = client.clusters.get(data['id']).to_dict()
            _format_cluster_output(self.app, data)
            data = utils.prepare_data(data, c_v1.CLUSTER_FIELDS)
        return self.dict2columns(data)
class ListClusters(c_v1.ListClusters):
    """Lists clusters"""

    log = logging.getLogger(__name__ + ".ListClusters")

    def take_action(self, parsed_args):
        self.log.debug("take_action(%s)", parsed_args)
        dp_client = self.app.client_manager.data_processing

        search_opts = {}
        if parsed_args.plugin:
            search_opts['plugin_name'] = parsed_args.plugin
        if parsed_args.plugin_version:
            # APIv2 filters on plugin_version, not hadoop_version.
            search_opts['plugin_version'] = parsed_args.plugin_version

        clusters = dp_client.clusters.list(search_opts=search_opts)
        if parsed_args.name:
            clusters = utils.get_by_name_substring(clusters, parsed_args.name)

        columns = ('name', 'id', 'plugin_name', 'plugin_version', 'status')
        if parsed_args.long:
            columns += ('description', 'default_image_id')
        column_headers = utils.prepare_column_headers(
            columns, {'default_image_id': 'image'})

        rows = (osc_utils.get_item_properties(cluster, columns)
                for cluster in clusters)
        return (column_headers, rows)
class ShowCluster(c_v1.ShowCluster):
    """Display cluster details"""
    log = logging.getLogger(__name__ + ".ShowCluster")

    def take_action(self, parsed_args):
        self.log.debug("take_action(%s)", parsed_args)
        client = self.app.client_manager.data_processing
        # _take_action (inherited from the v1 command) returns the
        # cluster dict plus its provisioning steps.
        data, provision_steps = self._take_action(client, parsed_args)
        _format_cluster_output(self.app, data)
        data = self._show_cluster_info(data, provision_steps, parsed_args)
        return data
class DeleteCluster(c_v1.DeleteCluster):
    """Deletes cluster"""
    # v2 passthrough: behavior is inherited from the v1 command; only
    # the logger is rebound to this module.
    log = logging.getLogger(__name__ + ".DeleteCluster")
class UpdateCluster(c_v1.UpdateCluster):
    """Updates cluster"""
    log = logging.getLogger(__name__ + ".UpdateCluster")

    def take_action(self, parsed_args):
        self.log.debug("take_action(%s)", parsed_args)
        client = self.app.client_manager.data_processing
        # The inherited _take_action performs the update API call.
        data = self._take_action(client, parsed_args)
        _format_cluster_output(self.app, data)
        data = utils.prepare_data(data, c_v1.CLUSTER_FIELDS)
        return self.dict2columns(data)
class ScaleCluster(c_v1.ScaleCluster):
    """Scales cluster"""
    log = logging.getLogger(__name__ + ".ScaleCluster")

    def take_action(self, parsed_args):
        self.log.debug("take_action(%s)", parsed_args)
        client = self.app.client_manager.data_processing
        # The inherited _take_action performs the scale API call.
        data = self._take_action(client, parsed_args)
        _format_cluster_output(self.app, data)
        data = utils.prepare_data(data, c_v1.CLUSTER_FIELDS)
        return self.dict2columns(data)
class VerificationUpdateCluster(c_v1.VerificationUpdateCluster):
    """Updates cluster verifications"""
    # v2 passthrough: behavior is inherited from the v1 command.
    log = logging.getLogger(__name__ + ".VerificationUpdateCluster")

View File

@ -0,0 +1,48 @@
# Copyright (c) 2015 Mirantis Inc.
#
# Licensed under the Apache License, Version 2.0 (the "License");
# you may not use this file except in compliance with the License.
# You may obtain a copy of the License at
#
# http://www.apache.org/licenses/LICENSE-2.0
#
# Unless required by applicable law or agreed to in writing, software
# distributed under the License is distributed on an "AS IS" BASIS,
# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or
# implied.
# See the License for the specific language governing permissions and
# limitations under the License.
from oslo_log import log as logging
from saharaclient.osc.v1 import data_sources as ds_v1
class CreateDataSource(ds_v1.CreateDataSource):
    """Creates data source"""
    # v2 passthrough: behavior is inherited from the v1 command.
    log = logging.getLogger(__name__ + ".CreateDataSource")
class ListDataSources(ds_v1.ListDataSources):
    """Lists data sources"""
    # v2 passthrough: behavior is inherited from the v1 command.
    log = logging.getLogger(__name__ + ".ListDataSources")
class ShowDataSource(ds_v1.ShowDataSource):
    """Display data source details"""
    # v2 passthrough: behavior is inherited from the v1 command.
    log = logging.getLogger(__name__ + ".ShowDataSource")
class DeleteDataSource(ds_v1.DeleteDataSource):
    """Delete data source"""
    # v2 passthrough: behavior is inherited from the v1 command.
    log = logging.getLogger(__name__ + ".DeleteDataSource")
class UpdateDataSource(ds_v1.UpdateDataSource):
    """Update data source"""
    # v2 passthrough: behavior is inherited from the v1 command.
    log = logging.getLogger(__name__ + ".UpdateDataSource")

View File

@ -0,0 +1,62 @@
# Copyright (c) 2015 Mirantis Inc.
#
# Licensed under the Apache License, Version 2.0 (the "License");
# you may not use this file except in compliance with the License.
# You may obtain a copy of the License at
#
# http://www.apache.org/licenses/LICENSE-2.0
#
# Unless required by applicable law or agreed to in writing, software
# distributed under the License is distributed on an "AS IS" BASIS,
# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or
# implied.
# See the License for the specific language governing permissions and
# limitations under the License.
from oslo_log import log as logging
from saharaclient.osc.v1 import images as images_v1
IMAGE_FIELDS = ['name', 'id', 'username', 'tags', 'status', 'description']
class ListImages(images_v1.ListImages):
    """Lists registered images"""
    # v2 passthrough: behavior is inherited from the v1 command.
    log = logging.getLogger(__name__ + ".ListImages")
class ShowImage(images_v1.ShowImage):
    """Display image details"""
    # v2 passthrough: behavior is inherited from the v1 command.
    log = logging.getLogger(__name__ + ".ShowImage")
class RegisterImage(images_v1.RegisterImage):
    """Register an image"""
    # v2 passthrough: behavior is inherited from the v1 command.
    log = logging.getLogger(__name__ + ".RegisterImage")
class UnregisterImage(images_v1.UnregisterImage):
    """Unregister image(s)"""
    # BUG FIX: the logger was named ".RegisterImage" (copy-paste), so
    # log records from this command were attributed to the wrong class.
    log = logging.getLogger(__name__ + ".UnregisterImage")
class SetImageTags(images_v1.SetImageTags):
    """Set image tags (Replace current image tags with provided ones)"""
    # BUG FIX: the logger was named ".AddImageTags" (copy-paste), so
    # log records from this command were attributed to the wrong class.
    log = logging.getLogger(__name__ + ".SetImageTags")
class AddImageTags(images_v1.AddImageTags):
    """Add image tags"""
    # v2 passthrough: behavior is inherited from the v1 command.
    log = logging.getLogger(__name__ + ".AddImageTags")
class RemoveImageTags(images_v1.RemoveImageTags):
    """Remove image tags"""
    # v2 passthrough: behavior is inherited from the v1 command.
    log = logging.getLogger(__name__ + ".RemoveImageTags")

View File

@ -0,0 +1,212 @@
# Copyright (c) 2015 Mirantis Inc.
#
# Licensed under the Apache License, Version 2.0 (the "License");
# you may not use this file except in compliance with the License.
# You may obtain a copy of the License at
#
# http://www.apache.org/licenses/LICENSE-2.0
#
# Unless required by applicable law or agreed to in writing, software
# distributed under the License is distributed on an "AS IS" BASIS,
# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or
# implied.
# See the License for the specific language governing permissions and
# limitations under the License.
from osc_lib.command import command
from osc_lib import exceptions
from osc_lib import utils as osc_utils
from oslo_log import log as logging
from oslo_serialization import jsonutils
from saharaclient.osc import utils
from saharaclient.osc.v1 import job_binaries as jb_v1
class CreateJobBinary(command.ShowOne):
    """Creates job binary"""

    log = logging.getLogger(__name__ + ".CreateJobBinary")

    def get_parser(self, prog_name):
        """Build the argument parser for ``job binary create``."""
        parser = super(CreateJobBinary, self).get_parser(prog_name)
        parser.add_argument(
            '--name',
            metavar="<name>",
            help="Name of the job binary [REQUIRED if JSON is not provided]",
        )
        creation_type = parser.add_mutually_exclusive_group()
        creation_type.add_argument(
            '--url',
            metavar='<url>',
            help='URL for the job binary [REQUIRED if JSON and file are '
                 'not provided]'
        )
        parser.add_argument(
            '--description',
            metavar="<description>",
            help="Description of the job binary"
        )
        # Swift-style username and S3 access key are mutually exclusive
        # spellings of the same credential slot.
        username = parser.add_mutually_exclusive_group()
        username.add_argument(
            '--username',
            metavar='<username>',
            help='Username for accessing the job binary URL',
        )
        username.add_argument(
            '--access-key',
            metavar='<accesskey>',
            help='S3 access key for accessing the job binary URL',
        )
        # Likewise password / S3 secret key, given inline or prompted
        # for interactively.
        password = parser.add_mutually_exclusive_group()
        password.add_argument(
            '--password',
            metavar='<password>',
            help='Password for accessing the job binary URL',
        )
        password.add_argument(
            '--secret-key',
            metavar='<secretkey>',
            help='S3 secret key for accessing the job binary URL',
        )
        password.add_argument(
            '--password-prompt',
            dest="password_prompt",
            action="store_true",
            help='Prompt interactively for password',
        )
        password.add_argument(
            '--secret-key-prompt',
            dest="secret_key_prompt",
            action="store_true",
            help='Prompt interactively for S3 secret key',
        )
        parser.add_argument(
            '--s3-endpoint',
            metavar='<endpoint>',
            # BUG FIX: the help text had an unbalanced parenthesis.
            help='S3 endpoint for accessing the job binary URL (ignored if '
                 'binary not in S3)',
        )
        parser.add_argument(
            '--public',
            action='store_true',
            default=False,
            help='Make the job binary public',
        )
        parser.add_argument(
            '--protected',
            action='store_true',
            default=False,
            help='Make the job binary protected',
        )
        parser.add_argument(
            '--json',
            metavar='<filename>',
            help='JSON representation of the job binary. Other '
                 'arguments will not be taken into account if this one is '
                 'provided'
        )
        return parser

    def take_action(self, parsed_args):
        """Create the job binary, from a JSON file or from CLI flags.

        Raises CommandError on inconsistent credentials (username
        without password or vice versa), a missing URL, or an S3 URL
        without its endpoint.
        """
        self.log.debug("take_action(%s)", parsed_args)
        client = self.app.client_manager.data_processing
        if parsed_args.json:
            # A JSON file completely describes the binary; every other
            # option is ignored.
            blob = osc_utils.read_blob_file_contents(parsed_args.json)
            try:
                template = jsonutils.loads(blob)
            except ValueError as e:
                raise exceptions.CommandError(
                    'An error occurred when reading '
                    'template from file %s: %s' % (parsed_args.json, e))
            data = client.job_binaries.create(**template).to_dict()
        else:
            if parsed_args.password_prompt:
                parsed_args.password = osc_utils.get_password(
                    self.app.stdin, confirm=False)
            if parsed_args.secret_key_prompt:
                parsed_args.secret_key = osc_utils.get_password(
                    self.app.stdin, confirm=False)
            # Fold the S3 credential aliases into the generic slots.
            if not parsed_args.password:
                parsed_args.password = parsed_args.secret_key
            if not parsed_args.username:
                parsed_args.username = parsed_args.access_key
            if parsed_args.password and not parsed_args.username:
                raise exceptions.CommandError(
                    'Username via --username, or S3 access key via '
                    '--access-key should be provided with password')
            if parsed_args.username and not parsed_args.password:
                raise exceptions.CommandError(
                    'Password should be provided via --password or '
                    '--secret-key, or entered interactively with '
                    '--password-prompt or --secret-key-prompt')
            if parsed_args.password and parsed_args.username:
                if not parsed_args.url:
                    raise exceptions.CommandError(
                        'URL must be provided via --url')
                # URLs starting with 's3' carry S3-shaped credentials.
                if parsed_args.url.startswith('s3'):
                    if not parsed_args.s3_endpoint:
                        raise exceptions.CommandError(
                            'S3 job binaries need an endpoint provided via '
                            '--s3-endpoint')
                    extra = {
                        'accesskey': parsed_args.username,
                        'secretkey': parsed_args.password,
                        'endpoint': parsed_args.s3_endpoint,
                    }
                else:
                    extra = {
                        'user': parsed_args.username,
                        'password': parsed_args.password
                    }
            else:
                extra = None
            data = client.job_binaries.create(
                name=parsed_args.name, url=parsed_args.url,
                description=parsed_args.description, extra=extra,
                is_public=parsed_args.public,
                is_protected=parsed_args.protected).to_dict()
        data = utils.prepare_data(data, jb_v1.JOB_BINARY_FIELDS)
        return self.dict2columns(data)
class ListJobBinaries(jb_v1.ListJobBinaries):
    """Lists job binaries"""
    # v2 passthrough: behavior is inherited from the v1 command.
    log = logging.getLogger(__name__ + ".ListJobBinaries")
class ShowJobBinary(jb_v1.ShowJobBinary):
    """Display job binary details"""
    # v2 passthrough: behavior is inherited from the v1 command.
    log = logging.getLogger(__name__ + ".ShowJobBinary")
class DeleteJobBinary(jb_v1.DeleteJobBinary):
    """Deletes job binary"""
    # v2 passthrough: behavior is inherited from the v1 command.
    log = logging.getLogger(__name__ + ".DeleteJobBinary")
class UpdateJobBinary(jb_v1.UpdateJobBinary):
    """Updates job binary"""
    # v2 passthrough: behavior is inherited from the v1 command.
    log = logging.getLogger(__name__ + ".UpdateJobBinary")
class DownloadJobBinary(jb_v1.DownloadJobBinary):
    """Downloads job binary"""
    # v2 passthrough: behavior is inherited from the v1 command.
    log = logging.getLogger(__name__ + ".DownloadJobBinary")

View File

@ -0,0 +1,48 @@
# Copyright (c) 2015 Mirantis Inc.
#
# Licensed under the Apache License, Version 2.0 (the "License");
# you may not use this file except in compliance with the License.
# You may obtain a copy of the License at
#
# http://www.apache.org/licenses/LICENSE-2.0
#
# Unless required by applicable law or agreed to in writing, software
# distributed under the License is distributed on an "AS IS" BASIS,
# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or
# implied.
# See the License for the specific language governing permissions and
# limitations under the License.
from oslo_log import log as logging
from saharaclient.osc.v1 import job_templates as jt_v1
class CreateJobTemplate(jt_v1.CreateJobTemplate):
    """Creates job template"""
    # v2 passthrough: behavior is inherited from the v1 command.
    log = logging.getLogger(__name__ + ".CreateJobTemplate")
class ListJobTemplates(jt_v1.ListJobTemplates):
    """Lists job templates"""
    # v2 passthrough: behavior is inherited from the v1 command.
    log = logging.getLogger(__name__ + ".ListJobTemplates")
class ShowJobTemplate(jt_v1.ShowJobTemplate):
    """Display job template details"""
    # v2 passthrough: behavior is inherited from the v1 command.
    log = logging.getLogger(__name__ + ".ShowJobTemplate")
class DeleteJobTemplate(jt_v1.DeleteJobTemplate):
    """Deletes job template"""
    # v2 passthrough: behavior is inherited from the v1 command.
    log = logging.getLogger(__name__ + ".DeleteJobTemplate")
class UpdateJobTemplate(jt_v1.UpdateJobTemplate):
    """Updates job template"""
    # v2 passthrough: behavior is inherited from the v1 command.
    log = logging.getLogger(__name__ + ".UpdateJobTemplate")

View File

@ -0,0 +1,54 @@
# Copyright (c) 2015 Mirantis Inc.
#
# Licensed under the Apache License, Version 2.0 (the "License");
# you may not use this file except in compliance with the License.
# You may obtain a copy of the License at
#
# http://www.apache.org/licenses/LICENSE-2.0
#
# Unless required by applicable law or agreed to in writing, software
# distributed under the License is distributed on an "AS IS" BASIS,
# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or
# implied.
# See the License for the specific language governing permissions and
# limitations under the License.
from os import path
import sys
from oslo_log import log as logging
from oslo_serialization import jsonutils
from saharaclient.osc.v1 import job_types as jt_v1
class ListJobTypes(jt_v1.ListJobTypes):
    """Lists job types supported by plugins"""
    # v2 passthrough: behavior is inherited from the v1 command.
    log = logging.getLogger(__name__ + ".ListJobTypes")
class GetJobTypeConfigs(jt_v1.GetJobTypeConfigs):
    """Get job type configs"""

    log = logging.getLogger(__name__ + ".GetJobTypeConfigs")

    def take_action(self, parsed_args):
        """Fetch config hints for a job type and save them as JSON.

        Writes to ``parsed_args.file`` (defaults to the job type name)
        and refuses to overwrite an existing file.
        """
        self.log.debug("take_action(%s)", parsed_args)
        client = self.app.client_manager.data_processing
        if not parsed_args.file:
            parsed_args.file = parsed_args.job_type

        # In APIv2, job-type configs are exposed via job_templates.
        data = client.job_templates.get_configs(parsed_args.job_type).to_dict()

        if path.exists(parsed_args.file):
            # Use lazy %-style logging args instead of eager
            # interpolation with the % operator.
            self.log.error('File "%s" already exists. Choose another one with '
                           '--file argument.', parsed_args.file)
        else:
            with open(parsed_args.file, 'w') as f:
                jsonutils.dump(data, f, indent=4)
            # BUG FIX: the implicit string concatenation was missing a
            # space, producing e.g. '... saved in "x.json"file'.
            sys.stdout.write(
                '"%(type)s" job configs were saved in "%(file)s" '
                'file' % {'type': parsed_args.job_type,
                          'file': parsed_args.file})

142
saharaclient/osc/v2/jobs.py Normal file
View File

@ -0,0 +1,142 @@
# Copyright (c) 2015 Mirantis Inc.
#
# Licensed under the Apache License, Version 2.0 (the "License");
# you may not use this file except in compliance with the License.
# You may obtain a copy of the License at
#
# http://www.apache.org/licenses/LICENSE-2.0
#
# Unless required by applicable law or agreed to in writing, software
# distributed under the License is distributed on an "AS IS" BASIS,
# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or
# implied.
# See the License for the specific language governing permissions and
# limitations under the License.
import sys
from osc_lib import utils as osc_utils
from oslo_log import log as logging
from saharaclient.osc import utils
from saharaclient.osc.v1 import jobs as jobs_v1
def _format_job_output(app, data):
data['status'] = data['info']['status']
del data['info']
class ExecuteJob(jobs_v1.ExecuteJob):
    """Executes job"""
    log = logging.getLogger(__name__ + ".ExecuteJob")

    def take_action(self, parsed_args):
        self.log.debug("take_action(%s)", parsed_args)
        client = self.app.client_manager.data_processing
        # _take_action (inherited from v1) builds and submits the job.
        data = self._take_action(client, parsed_args)
        _format_job_output(self.app, data)
        data = utils.prepare_data(data, jobs_v1.JOB_FIELDS)
        return self.dict2columns(data)
class ListJobs(jobs_v1.ListJobs):
    """Lists jobs"""

    log = logging.getLogger(__name__ + ".ListJobs")

    def take_action(self, parsed_args):
        """List jobs, optionally filtered by status."""
        self.log.debug("take_action(%s)", parsed_args)
        client = self.app.client_manager.data_processing

        jobs = client.jobs.list()
        # Expose the nested execution status as a plain attribute so
        # get_item_properties can pick it up.
        for job in jobs:
            job.status = job.info['status']

        if parsed_args.status:
            wanted = parsed_args.status.replace('-', '').upper()
            jobs = [job for job in jobs if job.info['status'] == wanted]

        # --long appends the timing columns to the short set.
        columns = ('id', 'cluster id', 'job template id', 'status')
        if parsed_args.long:
            columns += ('start time', 'end time')
        column_headers = utils.prepare_column_headers(columns)

        return (
            column_headers,
            (osc_utils.get_item_properties(job, columns) for job in jobs)
        )
class ShowJob(jobs_v1.ShowJob):
    """Display job details"""
    log = logging.getLogger(__name__ + ".ShowJob")

    def take_action(self, parsed_args):
        self.log.debug("take_action(%s)", parsed_args)
        client = self.app.client_manager.data_processing
        # APIv2 exposes executions through client.jobs directly.
        data = client.jobs.get(parsed_args.job).to_dict()
        _format_job_output(self.app, data)
        data = utils.prepare_data(data, jobs_v1.JOB_FIELDS)
        return self.dict2columns(data)
class DeleteJob(jobs_v1.DeleteJob):
    """Deletes job"""

    log = logging.getLogger(__name__ + ".DeleteJob")

    def take_action(self, parsed_args):
        """Start deletion of the given job(s); with --wait, block until
        each one is gone and report the outcome."""
        self.log.debug("take_action(%s)", parsed_args)
        client = self.app.client_manager.data_processing
        for job_id in parsed_args.job:
            client.jobs.delete(job_id)
            sys.stdout.write(
                'Job "{job}" deletion has been started.\n'.format(job=job_id))

        if parsed_args.wait:
            for job_id in parsed_args.job:
                # Renamed local: it previously shadowed the
                # utils.wait_for_delete helper it was assigned from.
                deleted = utils.wait_for_delete(client.jobs, job_id)
                if not deleted:
                    # Lazy %-style args: let logging interpolate.
                    self.log.error(
                        'Error occurred during job deleting: %s', job_id)
                else:
                    sys.stdout.write(
                        'Job "{job}" has been removed successfully.\n'.format(
                            job=job_id))
class UpdateJob(jobs_v1.UpdateJob):
    """Updates job"""
    log = logging.getLogger(__name__ + ".UpdateJob")

    def take_action(self, parsed_args):
        self.log.debug("take_action(%s)", parsed_args)
        client = self.app.client_manager.data_processing
        # The inherited _take_action performs the update API call.
        data = self._take_action(client, parsed_args)
        _format_job_output(self.app, data)
        data = utils.prepare_data(data, jobs_v1.JOB_FIELDS)
        return self.dict2columns(data)

View File

@ -0,0 +1,40 @@
# Copyright (c) 2015 Mirantis Inc.
#
# Licensed under the Apache License, Version 2.0 (the "License");
# you may not use this file except in compliance with the License.
# You may obtain a copy of the License at
#
# http://www.apache.org/licenses/LICENSE-2.0
#
# Unless required by applicable law or agreed to in writing, software
# distributed under the License is distributed on an "AS IS" BASIS,
# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or
# implied.
# See the License for the specific language governing permissions and
# limitations under the License.
from oslo_log import log as logging
from saharaclient.osc.v1 import plugins as p_v1
class ListPlugins(p_v1.ListPlugins):
    """Lists plugins"""
    # v2 passthrough: behavior is inherited from the v1 command.
    log = logging.getLogger(__name__ + ".ListPlugins")
class ShowPlugin(p_v1.ShowPlugin):
    """Display plugin details"""
    # v2 passthrough: behavior is inherited from the v1 command.
    log = logging.getLogger(__name__ + ".ShowPlugin")
class GetPluginConfigs(p_v1.GetPluginConfigs):
    """Get plugin configs"""
    # v2 passthrough: behavior is inherited from the v1 command.
    log = logging.getLogger(__name__ + ".GetPluginConfigs")
class UpdatePlugin(p_v1.UpdatePlugin):
    # NOTE(review): unlike its siblings this class has no docstring —
    # cliff commands surface __doc__ as CLI help text; confirm the v1
    # help is still shown for this command.
    log = logging.getLogger(__name__ + ".UpdatePlugin")

View File

@ -53,6 +53,7 @@ class TestClusterTemplates(fakes.TestDataProcessing):
self.app.client_manager.data_processing.node_group_templates)
self.ct_mock.reset_mock()
self.ngt_mock.reset_mock()
self.app.api_version['data_processing'] = '1'
class TestCreateClusterTemplate(TestClusterTemplates):
@ -63,6 +64,7 @@ class TestCreateClusterTemplate(TestClusterTemplates):
None, CT_INFO)
self.ngt_mock.find_unique.return_value = api_ngt.NodeGroupTemplate(
None, CT_INFO['node_groups'][0])
self.app.api_version['data_processing'] = '1.1'
# Command to test
self.cmd = osc_ct.CreateClusterTemplate(self.app, None)

View File

@ -91,6 +91,7 @@ class TestClusters(fakes.TestDataProcessing):
self.ngt_mock.reset_mock()
self.ct_mock.reset_mock()
self.img_mock.reset_mock()
self.app.api_version['data_processing'] = '1'
class TestCreateCluster(TestClusters):

View File

@ -33,6 +33,7 @@ class TestDataSources(fakes.TestDataProcessing):
self.ds_mock = (
self.app.client_manager.data_processing.data_sources)
self.ds_mock.reset_mock()
self.app.api_version['data_processing'] = '1'
class TestCreateDataSource(TestDataSources):

View File

@ -32,6 +32,7 @@ class TestImages(fakes.TestDataProcessing):
self.image_mock = (
self.app.client_manager.data_processing.images)
self.image_mock.reset_mock()
self.app.api_version['data_processing'] = '1'
class TestListImages(TestImages):

View File

@ -37,6 +37,7 @@ class TestJobBinaries(fakes.TestDataProcessing):
super(TestJobBinaries, self).setUp()
self.jb_mock = self.app.client_manager.data_processing.job_binaries
self.jb_mock.reset_mock()
self.app.api_version['data_processing'] = '1'
class TestCreateJobBinary(TestJobBinaries):

View File

@ -48,6 +48,7 @@ class TestJobTemplates(fakes.TestDataProcessing):
super(TestJobTemplates, self).setUp()
self.job_mock = self.app.client_manager.data_processing.jobs
self.job_mock.reset_mock()
self.app.api_version['data_processing'] = '1'
class TestCreateJobTemplate(TestJobTemplates):

View File

@ -47,6 +47,7 @@ class TestJobTypes(fakes.TestDataProcessing):
self.jt_mock = self.app.client_manager.data_processing.job_types
self.jt_mock.reset_mock()
self.job_mock.reset_mock()
self.app.api_version['data_processing'] = '1'
class TestListJobTemplates(TestJobTypes):

View File

@ -57,6 +57,7 @@ class TestJobs(fakes.TestDataProcessing):
super(TestJobs, self).setUp()
self.je_mock = self.app.client_manager.data_processing.job_executions
self.je_mock.reset_mock()
self.app.api_version['data_processing'] = '1'
class TestExecuteJob(TestJobs):

View File

@ -38,6 +38,7 @@ class TestPlugins(fakes.TestDataProcessing):
super(TestPlugins, self).setUp()
self.plugins_mock = self.app.client_manager.data_processing.plugins
self.plugins_mock.reset_mock()
self.app.api_version['data_processing'] = '1'
class TestListPlugins(TestPlugins):

View File

@ -0,0 +1,336 @@
# Copyright (c) 2015 Mirantis Inc.
#
# Licensed under the Apache License, Version 2.0 (the "License");
# you may not use this file except in compliance with the License.
# You may obtain a copy of the License at
#
# http://www.apache.org/licenses/LICENSE-2.0
#
# Unless required by applicable law or agreed to in writing, software
# distributed under the License is distributed on an "AS IS" BASIS,
# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or
# implied.
# See the License for the specific language governing permissions and
# limitations under the License.
from osc_lib.tests import utils as osc_utils
from saharaclient.api import cluster_templates as api_ct
from saharaclient.api import node_group_templates as api_ngt
from saharaclient.osc.v2 import cluster_templates as osc_ct
from saharaclient.tests.unit.osc.v1 import test_cluster_templates as tct_v1
# Canonical APIv2 cluster-template payload shared by the tests below.
# NOTE: v2 uses the 'plugin_version' key where v1 used 'hadoop_version'.
CT_INFO = {
    "description": "Cluster template for tests",
    "use_autoconfig": True,
    "is_default": False,
    "node_groups": [
        {
            "count": 2,
            "id": "d29631fc-0fad-434b-80aa-7a3e9526f57c",
            "name": "fakeng",
            "plugin_name": 'fake',
            "plugin_version": '0.1'
        }
    ],
    "plugin_version": "0.1",
    "is_public": False,
    "plugin_name": "fake",
    "id": "0647061f-ab98-4c89-84e0-30738ea55750",
    "anti_affinity": [],
    "name": "template",
    "is_protected": False,
    "domain_name": 'domain.org.'
}
class TestClusterTemplates(tct_v1.TestClusterTemplates):
    """Base class for APIv2 cluster-template command tests."""

    def setUp(self):
        super(TestClusterTemplates, self).setUp()
        # Force the client into APIv2 mode for every test in this tree.
        self.app.api_version['data_processing'] = '2'
        dp = self.app.client_manager.data_processing
        self.ct_mock = dp.cluster_templates
        self.ngt_mock = dp.node_group_templates
        self.ct_mock.reset_mock()
        self.ngt_mock.reset_mock()
class TestCreateClusterTemplate(TestClusterTemplates):
    # Exercises `cluster template create` against APIv2 (payloads carry
    # 'plugin_version', not v1's 'hadoop_version').
    # TODO(apavlov): check for creation with --json
    def setUp(self):
        super(TestCreateClusterTemplate, self).setUp()
        self.ct_mock.create.return_value = api_ct.ClusterTemplate(
            None, CT_INFO)
        self.ngt_mock.find_unique.return_value = api_ngt.NodeGroupTemplate(
            None, CT_INFO['node_groups'][0])
        # Command to test
        self.cmd = osc_ct.CreateClusterTemplate(self.app, None)

    def test_ct_create_minimum_options(self):
        arglist = ['--name', 'template', '--node-groups', 'fakeng:2']
        verifylist = [('name', 'template'),
                      ('node_groups', ['fakeng:2'])]
        parsed_args = self.check_parser(self.cmd, arglist, verifylist)
        self.cmd.take_action(parsed_args)
        # Check that correct arguments were passed
        self.ct_mock.create.assert_called_once_with(
            description=None, plugin_version='0.1', is_protected=False,
            is_public=False, name='template', node_groups=[
                {'count': 2, 'name': 'fakeng',
                 'node_group_template_id':
                     'd29631fc-0fad-434b-80aa-7a3e9526f57c'}],
            plugin_name='fake', use_autoconfig=False, shares=None,
            cluster_configs=None, domain_name=None)

    def test_ct_create_all_options(self):
        arglist = ['--name', 'template', '--node-groups', 'fakeng:2',
                   '--anti-affinity', 'datanode',
                   '--description', 'descr',
                   '--autoconfig', '--public', '--protected',
                   '--domain-name', 'domain.org.']
        verifylist = [('name', 'template'),
                      ('node_groups', ['fakeng:2']),
                      ('description', 'descr'), ('autoconfig', True),
                      ('public', True), ('protected', True),
                      ('domain_name', 'domain.org.')]
        parsed_args = self.check_parser(self.cmd, arglist, verifylist)
        columns, data = self.cmd.take_action(parsed_args)
        # Check that correct arguments were passed
        self.ct_mock.create.assert_called_once_with(
            description='descr', plugin_version='0.1', is_protected=True,
            is_public=True, name='template', node_groups=[
                {'count': 2, 'name': 'fakeng',
                 'node_group_template_id':
                     'd29631fc-0fad-434b-80aa-7a3e9526f57c'}],
            plugin_name='fake', use_autoconfig=True, shares=None,
            cluster_configs=None, domain_name='domain.org.')
        # Check that columns are correct
        expected_columns = ('Anti affinity', 'Description',
                            'Domain name', 'Id', 'Is default',
                            'Is protected', 'Is public', 'Name', 'Node groups',
                            'Plugin name', 'Plugin version', 'Use autoconfig')
        self.assertEqual(expected_columns, columns)
        # Check that data is correct
        expected_data = ('', 'Cluster template for tests', 'domain.org.',
                         '0647061f-ab98-4c89-84e0-30738ea55750', False, False,
                         False, 'template', 'fakeng:2', 'fake', '0.1', True)
        self.assertEqual(expected_data, data)
class TestListClusterTemplates(TestClusterTemplates):
    # Exercises `cluster template list` (short, --long, and filtered).
    def setUp(self):
        super(TestListClusterTemplates, self).setUp()
        self.ct_mock.list.return_value = [api_ct.ClusterTemplate(
            None, CT_INFO)]
        # Command to test
        self.cmd = osc_ct.ListClusterTemplates(self.app, None)

    def test_ct_list_no_options(self):
        arglist = []
        verifylist = []
        parsed_args = self.check_parser(self.cmd, arglist, verifylist)
        columns, data = self.cmd.take_action(parsed_args)
        # Check that columns are correct
        expected_columns = ['Name', 'Id', 'Plugin name', 'Plugin version']
        self.assertEqual(expected_columns, columns)
        # Check that data is correct
        expected_data = [('template', '0647061f-ab98-4c89-84e0-30738ea55750',
                          'fake', '0.1')]
        self.assertEqual(expected_data, list(data))

    def test_ct_list_long(self):
        arglist = ['--long']
        verifylist = [('long', True)]
        parsed_args = self.check_parser(self.cmd, arglist, verifylist)
        columns, data = self.cmd.take_action(parsed_args)
        # Check that columns are correct
        expected_columns = ['Name', 'Id', 'Plugin name', 'Plugin version',
                            'Node groups', 'Description']
        self.assertEqual(expected_columns, columns)
        # Check that data is correct
        expected_data = [('template', '0647061f-ab98-4c89-84e0-30738ea55750',
                          'fake', '0.1', 'fakeng:2',
                          'Cluster template for tests')]
        self.assertEqual(expected_data, list(data))

    def test_ct_list_extra_search_opts(self):
        arglist = ['--plugin', 'fake', '--plugin-version', '0.1', '--name',
                   'templ']
        verifylist = [('plugin', 'fake'), ('plugin_version', '0.1'),
                      ('name', 'templ')]
        parsed_args = self.check_parser(self.cmd, arglist, verifylist)
        columns, data = self.cmd.take_action(parsed_args)
        # Check that columns are correct
        expected_columns = ['Name', 'Id', 'Plugin name', 'Plugin version']
        self.assertEqual(expected_columns, columns)
        # Check that data is correct
        expected_data = [('template', '0647061f-ab98-4c89-84e0-30738ea55750',
                          'fake', '0.1')]
        self.assertEqual(expected_data, list(data))
class TestShowClusterTemplate(TestClusterTemplates):
    # Exercises `cluster template show` (lookup is by unique name).
    def setUp(self):
        super(TestShowClusterTemplate, self).setUp()
        self.ct_mock.find_unique.return_value = api_ct.ClusterTemplate(
            None, CT_INFO)
        # Command to test
        self.cmd = osc_ct.ShowClusterTemplate(self.app, None)

    def test_ct_show(self):
        arglist = ['template']
        verifylist = [('cluster_template', 'template')]
        parsed_args = self.check_parser(self.cmd, arglist, verifylist)
        columns, data = self.cmd.take_action(parsed_args)
        # Check that correct arguments were passed
        self.ct_mock.find_unique.assert_called_once_with(name='template')
        # Check that columns are correct
        expected_columns = ('Anti affinity', 'Description',
                            'Domain name', 'Id', 'Is default',
                            'Is protected', 'Is public', 'Name', 'Node groups',
                            'Plugin name', 'Plugin version', 'Use autoconfig')
        self.assertEqual(expected_columns, columns)
        # Check that data is correct
        expected_data = (
            '', 'Cluster template for tests', 'domain.org.',
            '0647061f-ab98-4c89-84e0-30738ea55750', False, False, False,
            'template', 'fakeng:2', 'fake', '0.1', True)
        self.assertEqual(expected_data, data)
class TestDeleteClusterTemplate(TestClusterTemplates):
    # Exercises `cluster template delete` (name resolved to id first).
    def setUp(self):
        super(TestDeleteClusterTemplate, self).setUp()
        self.ct_mock.find_unique.return_value = api_ct.ClusterTemplate(
            None, CT_INFO)
        # Command to test
        self.cmd = osc_ct.DeleteClusterTemplate(self.app, None)

    def test_ct_delete(self):
        arglist = ['template']
        verifylist = [('cluster_template', ['template'])]
        parsed_args = self.check_parser(self.cmd, arglist, verifylist)
        self.cmd.take_action(parsed_args)
        # Check that correct arguments were passed
        self.ct_mock.delete.assert_called_once_with(
            '0647061f-ab98-4c89-84e0-30738ea55750')
class TestUpdateClusterTemplate(TestClusterTemplates):
# TODO(apavlov): check for update with --json
def setUp(self):
super(TestUpdateClusterTemplate, self).setUp()
self.ct_mock.update.return_value = api_ct.ClusterTemplate(
None, CT_INFO)
self.ct_mock.find_unique.return_value = api_ct.ClusterTemplate(
None, CT_INFO)
self.ngt_mock.find_unique.return_value = api_ngt.NodeGroupTemplate(
None, CT_INFO['node_groups'][0])
# Command to test
self.cmd = osc_ct.UpdateClusterTemplate(self.app, None)
def test_ct_update_no_options(self):
arglist = []
verifylist = []
self.assertRaises(osc_utils.ParserException, self.check_parser,
self.cmd, arglist, verifylist)
def test_ct_update_nothing_updated(self):
arglist = ['template']
verifylist = [('cluster_template', 'template')]
parsed_args = self.check_parser(self.cmd, arglist, verifylist)
self.cmd.take_action(parsed_args)
self.ct_mock.update.assert_called_once_with(
'0647061f-ab98-4c89-84e0-30738ea55750')
def test_ct_update_all_options(self):
arglist = ['template', '--name', 'template', '--node-groups',
'fakeng:2', '--anti-affinity', 'datanode',
'--description', 'descr', '--autoconfig-enable',
'--public', '--protected', '--domain-name', 'domain.org.']
verifylist = [('cluster_template', 'template'), ('name', 'template'),
('node_groups', ['fakeng:2']),
('description', 'descr'), ('use_autoconfig', True),
('is_public', True), ('is_protected', True),
('domain_name', 'domain.org.')]
parsed_args = self.check_parser(self.cmd, arglist, verifylist)
columns, data = self.cmd.take_action(parsed_args)
# Check that correct arguments were passed
self.ct_mock.update.assert_called_once_with(
'0647061f-ab98-4c89-84e0-30738ea55750', description='descr',
plugin_version='0.1', is_protected=True, is_public=True,
name='template',
node_groups=[
{'count': 2, 'name': 'fakeng',
'node_group_template_id':
'd29631fc-0fad-434b-80aa-7a3e9526f57c'}],
plugin_name='fake', use_autoconfig=True, domain_name='domain.org.')
# Check that columns are correct
expected_columns = ('Anti affinity', 'Description',
'Domain name', 'Id', 'Is default',
'Is protected', 'Is public', 'Name', 'Node groups',
'Plugin name', 'Plugin version', 'Use autoconfig')
self.assertEqual(expected_columns, columns)
# Check that data is correct
expected_data = ('', 'Cluster template for tests', 'domain.org.',
'0647061f-ab98-4c89-84e0-30738ea55750', False, False,
False, 'template', 'fakeng:2', 'fake', '0.1', True)
self.assertEqual(expected_data, data)
def test_ct_update_private_unprotected(self):
    """--private/--unprotected map to is_public=False / is_protected=False."""
    args = ['template', '--private', '--unprotected']
    checks = [('cluster_template', 'template'),
              ('is_protected', False), ('is_public', False)]
    parsed_args = self.check_parser(self.cmd, args, checks)
    self.cmd.take_action(parsed_args)
    self.ct_mock.update.assert_called_once_with(
        '0647061f-ab98-4c89-84e0-30738ea55750', is_protected=False,
        is_public=False)

View File

@ -0,0 +1,549 @@
# Copyright (c) 2015 Mirantis Inc.
#
# Licensed under the Apache License, Version 2.0 (the "License");
# you may not use this file except in compliance with the License.
# You may obtain a copy of the License at
#
# http://www.apache.org/licenses/LICENSE-2.0
#
# Unless required by applicable law or agreed to in writing, software
# distributed under the License is distributed on an "AS IS" BASIS,
# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or
# implied.
# See the License for the specific language governing permissions and
# limitations under the License.
import mock
from osc_lib.tests import utils as osc_utils
from saharaclient.api import cluster_templates as api_ct
from saharaclient.api import clusters as api_cl
from saharaclient.api import images as api_img
from saharaclient.api import node_group_templates as api_ngt
from saharaclient.osc.v2 import clusters as osc_cl
from saharaclient.tests.unit.osc.v1 import test_clusters as tc_v1
# Canonical cluster payload returned by the mocked clusters manager in
# these tests.  Note the APIv2 field name "plugin_version" (APIv1 test
# fixtures used "hadoop_version" instead); the nested "verification"
# entry feeds the --verification / --show code paths.
CLUSTER_INFO = {
    "description": "Cluster template for tests",
    "use_autoconfig": True,
    "is_default": False,
    "node_groups": [
        {
            "count": 2,
            "id": "ng_id",
            "name": "fakeng",
            "plugin_name": 'fake',
            "plugin_version": '0.1',
            "node_group_template_id": 'ngt_id'
        }
    ],
    "plugin_version": "0.1",
    "is_public": False,
    "plugin_name": "fake",
    "id": "cluster_id",
    "anti_affinity": [],
    "name": "fake",
    "is_protected": False,
    "cluster_template_id": "ct_id",
    "neutron_management_network": "net_id",
    "user_keypair_id": "test",
    "status": 'Active',
    "default_image_id": "img_id",
    'verification': {
        'status': 'GREEN',
        'id': 'ver_id',
        'cluster_id': 'cluster_id',
        'checks': [
            {
                'status': 'GREEN',
                'name': 'Some check'
            }
        ]
    }
}
# Minimal cluster-template fixture returned by the mocked
# cluster_templates manager; the tests in this module only assert on
# the template's "id" (and the plugin/version pair it carries).
CT_INFO = {
    "plugin_name": "fake",
    "plugin_version": "0.1",
    # Bug fix: the name previously contained a stray leading double
    # quote ('"template') — a fixture typo; no test asserts on it.
    "name": 'template',
    "id": "ct_id"
}
# Minimal node-group-template fixture for the mocked
# node_group_templates manager (used by the scale tests).
NGT_INFO = {
    'id': 'ngt_id',
    'name': 'fakeng'
}
class TestClusters(tc_v1.TestClusters):
    """Base fixture: re-run the cluster tests with data-processing APIv2.

    Inherits the v1 setup, then pins the app's API version to '2' and
    grabs fresh handles on the mocked saharaclient managers.
    """

    def setUp(self):
        super(TestClusters, self).setUp()
        # Switch the OSC app onto the new data-processing APIv2.
        self.app.api_version['data_processing'] = '2'
        dp = self.app.client_manager.data_processing
        self.cl_mock = dp.clusters
        self.ngt_mock = dp.node_group_templates
        self.ct_mock = dp.cluster_templates
        self.img_mock = dp.images
        # Start every test from pristine mocks.
        for mocked in (self.cl_mock, self.ngt_mock,
                       self.ct_mock, self.img_mock):
            mocked.reset_mock()
class TestCreateCluster(TestClusters):
    """Tests for `openstack dataprocessing cluster create` with APIv2.

    NOTE(review): the create payload is asserted to carry
    ``plugin_version`` — the APIv2 field name — rather than APIv1's
    ``hadoop_version``.
    """

    # TODO(apavlov): check for creation with --json
    def setUp(self):
        super(TestCreateCluster, self).setUp()
        # Deterministic fixture objects for every manager the command hits.
        self.cl_mock.create.return_value = api_cl.Cluster(
            None, CLUSTER_INFO)
        self.cl_mock.find_unique.return_value = api_cl.Cluster(
            None, CLUSTER_INFO)
        self.ct_mock.find_unique.return_value = api_ct.ClusterTemplate(
            None, CT_INFO)
        self.img_mock.find_unique.return_value = api_img.Image(
            None, {'id': 'img_id'})
        self.net_mock = self.app.client_manager.network
        self.net_mock.find_network.return_value = mock.Mock(id='net_id')
        self.net_mock.reset_mock()
        # Command to test
        self.cmd = osc_cl.CreateCluster(self.app, None)

    def test_cluster_create_minimum_options(self):
        """Name/template/image only: every optional kwarg takes a default."""
        arglist = ['--name', 'fake', '--cluster-template', 'template',
                   '--image', 'ubuntu']
        verifylist = [('name', 'fake'), ('cluster_template', 'template'),
                      ('image', 'ubuntu')]
        parsed_args = self.check_parser(self.cmd, arglist, verifylist)
        self.cmd.take_action(parsed_args)
        # Check that correct arguments were passed
        self.cl_mock.create.assert_called_once_with(
            cluster_template_id='ct_id', count=None, default_image_id='img_id',
            description=None, plugin_version='0.1', is_protected=False,
            is_public=False, is_transient=False, name='fake', net_id=None,
            plugin_name='fake', user_keypair_id=None)

    def test_cluster_create_all_options(self):
        """All flags set: kwargs fully populated and one cluster rendered."""
        arglist = ['--name', 'fake', '--cluster-template', 'template',
                   '--image', 'ubuntu', '--user-keypair', 'test',
                   '--neutron-network', 'net', '--description', 'descr',
                   '--transient', '--public', '--protected']
        verifylist = [('name', 'fake'), ('cluster_template', 'template'),
                      ('image', 'ubuntu'), ('user_keypair', 'test'),
                      ('neutron_network', 'net'), ('description', 'descr'),
                      ('transient', True), ('public', True),
                      ('protected', True)]
        parsed_args = self.check_parser(self.cmd, arglist, verifylist)
        columns, data = self.cmd.take_action(parsed_args)
        # Check that correct arguments were passed
        self.cl_mock.create.assert_called_once_with(
            cluster_template_id='ct_id', count=None, default_image_id='img_id',
            description='descr', plugin_version='0.1', is_protected=True,
            is_public=True, is_transient=True, name='fake', net_id='net_id',
            plugin_name='fake', user_keypair_id='test')
        # Check that columns are correct
        expected_columns = ('Anti affinity', 'Cluster template id',
                            'Description', 'Id', 'Image',
                            'Is protected', 'Is public', 'Name',
                            'Neutron management network', 'Node groups',
                            'Plugin name', 'Plugin version', 'Status',
                            'Use autoconfig', 'User keypair id')
        self.assertEqual(expected_columns, columns)
        # Check that data is correct
        expected_data = ('', 'ct_id', 'Cluster template for tests',
                         'cluster_id', 'img_id', False, False, 'fake',
                         'net_id', 'fakeng:2', 'fake', '0.1', 'Active', True,
                         'test')
        self.assertEqual(expected_data, data)

    def test_cluster_create_with_count(self):
        """--count N sends count=N; output collapses to name -> cluster id."""
        clusters_mock = mock.Mock()
        clusters_mock.to_dict.return_value = {
            'clusters': [{'cluster': {'id': 'cluster1_id'}},
                         {'cluster': {'id': 'cluster2_id'}}]
        }
        self.cl_mock.create.return_value = clusters_mock
        arglist = ['--name', 'fake', '--cluster-template', 'template',
                   '--image', 'ubuntu', '--count', '2']
        verifylist = [('name', 'fake'), ('cluster_template', 'template'),
                      ('image', 'ubuntu'), ('count', 2)]
        parsed_args = self.check_parser(self.cmd, arglist, verifylist)
        columns, data = self.cmd.take_action(parsed_args)
        # Check that correct arguments were passed
        self.cl_mock.create.assert_called_once_with(
            cluster_template_id='ct_id', count=2, default_image_id='img_id',
            description=None, plugin_version='0.1', is_protected=False,
            is_public=False, is_transient=False, name='fake', net_id=None,
            plugin_name='fake', user_keypair_id=None)
        # Check that columns are correct
        expected_columns = ('fake',)
        self.assertEqual(expected_columns, columns)
        # Check that data is correct
        # (the id comes from find_unique's CLUSTER_INFO fixture)
        expected_data = ('cluster_id',)
        self.assertEqual(expected_data, data)
class TestListClusters(TestClusters):
    """Tests for `openstack dataprocessing cluster list` with APIv2."""

    def setUp(self):
        super(TestListClusters, self).setUp()
        self.cl_mock.list.return_value = [api_cl.Cluster(
            None, CLUSTER_INFO)]
        # Command to test
        self.cmd = osc_cl.ListClusters(self.app, None)

    def test_clusters_list_no_options(self):
        """Default listing shows the short column set."""
        arglist = []
        verifylist = []
        parsed_args = self.check_parser(self.cmd, arglist, verifylist)
        columns, data = self.cmd.take_action(parsed_args)
        # Check that columns are correct
        expected_columns = ['Name', 'Id', 'Plugin name', 'Plugin version',
                            'Status']
        self.assertEqual(expected_columns, columns)
        # Check that data is correct
        expected_data = [('fake', 'cluster_id', 'fake', '0.1', 'Active')]
        self.assertEqual(expected_data, list(data))

    def test_clusters_list_long(self):
        """--long appends Description and Image columns."""
        arglist = ['--long']
        verifylist = [('long', True)]
        parsed_args = self.check_parser(self.cmd, arglist, verifylist)
        columns, data = self.cmd.take_action(parsed_args)
        # Check that columns are correct
        expected_columns = ['Name', 'Id', 'Plugin name', 'Plugin version',
                            'Status', 'Description', 'Image']
        self.assertEqual(expected_columns, columns)
        # Check that data is correct
        expected_data = [('fake', 'cluster_id', 'fake', '0.1', 'Active',
                          'Cluster template for tests', 'img_id')]
        self.assertEqual(expected_data, list(data))

    def test_clusters_list_extra_search_opts(self):
        """--plugin/--plugin-version/--name filters still yield the fixture."""
        arglist = ['--plugin', 'fake', '--plugin-version', '0.1', '--name',
                   'fake']
        verifylist = [('plugin', 'fake'), ('plugin_version', '0.1'),
                      ('name', 'fake')]
        parsed_args = self.check_parser(self.cmd, arglist, verifylist)
        columns, data = self.cmd.take_action(parsed_args)
        # Check that columns are correct
        expected_columns = ['Name', 'Id', 'Plugin name', 'Plugin version',
                            'Status']
        self.assertEqual(expected_columns, columns)
        # Check that data is correct
        expected_data = [('fake', 'cluster_id', 'fake', '0.1', 'Active')]
        self.assertEqual(expected_data, list(data))
class TestShowCluster(TestClusters):
    """Tests for `openstack dataprocessing cluster show` with APIv2."""

    def setUp(self):
        super(TestShowCluster, self).setUp()
        self.cl_mock.find_unique.return_value = api_cl.Cluster(
            None, CLUSTER_INFO)
        # Command to test
        self.cmd = osc_cl.ShowCluster(self.app, None)

    def test_cluster_show(self):
        """Plain show: cluster looked up by name, full detail rendered."""
        arglist = ['fake']
        verifylist = [('cluster', 'fake')]
        parsed_args = self.check_parser(self.cmd, arglist, verifylist)
        columns, data = self.cmd.take_action(parsed_args)
        # Check that correct arguments were passed
        self.cl_mock.find_unique.assert_called_once_with(name='fake')
        # Check that columns are correct
        expected_columns = ('Anti affinity', 'Cluster template id',
                            'Description', 'Id', 'Image',
                            'Is protected', 'Is public', 'Name',
                            'Neutron management network', 'Node groups',
                            'Plugin name', 'Plugin version', 'Status',
                            'Use autoconfig', 'User keypair id')
        self.assertEqual(expected_columns, columns)
        # Check that data is correct
        expected_data = ('', 'ct_id', 'Cluster template for tests',
                         'cluster_id', 'img_id', False, False, 'fake',
                         'net_id', 'fakeng:2', 'fake', '0.1', 'Active', True,
                         'test')
        self.assertEqual(expected_data, data)

    def test_cluster_show_verification(self):
        """--verification adds health-check and verification-status columns
        built from the fixture's nested 'verification' entry."""
        arglist = ['fake', '--verification']
        verifylist = [('cluster', 'fake'), ('verification', True)]
        parsed_args = self.check_parser(self.cmd, arglist, verifylist)
        columns, data = self.cmd.take_action(parsed_args)
        # Check that correct arguments were passed
        self.cl_mock.find_unique.assert_called_once_with(name='fake')
        # Check that columns are correct
        expected_columns = ('Anti affinity', 'Cluster template id',
                            'Description', 'Health check (some check)', 'Id',
                            'Image', 'Is protected', 'Is public', 'Name',
                            'Neutron management network', 'Node groups',
                            'Plugin name', 'Plugin version', 'Status',
                            'Use autoconfig', 'User keypair id',
                            'Verification status')
        self.assertEqual(expected_columns, columns)
        # Check that data is correct
        expected_data = ('', 'ct_id', 'Cluster template for tests', 'GREEN',
                         'cluster_id', 'img_id', False, False, 'fake',
                         'net_id', 'fakeng:2', 'fake', '0.1', 'Active', True,
                         'test', 'GREEN')
        self.assertEqual(expected_data, data)
class TestDeleteCluster(TestClusters):
    """Tests for `openstack dataprocessing cluster delete` with APIv2."""

    def setUp(self):
        super(TestDeleteCluster, self).setUp()
        self.cl_mock.find_unique.return_value = api_cl.Cluster(
            None, CLUSTER_INFO)
        # Command to test
        self.cmd = osc_cl.DeleteCluster(self.app, None)

    def test_cluster_delete(self):
        """Deleting by name resolves the cluster and deletes by its id."""
        parsed_args = self.check_parser(
            self.cmd, ['fake'], [('cluster', ['fake'])])
        self.cmd.take_action(parsed_args)
        self.cl_mock.delete.assert_called_once_with('cluster_id')
class TestUpdateCluster(TestClusters):
    """Tests for `openstack dataprocessing cluster update` with APIv2."""

    def setUp(self):
        super(TestUpdateCluster, self).setUp()
        self.cl_mock.update.return_value = mock.Mock(
            cluster=CLUSTER_INFO.copy())
        self.cl_mock.find_unique.return_value = api_cl.Cluster(
            None, CLUSTER_INFO)
        # Command to test
        self.cmd = osc_cl.UpdateCluster(self.app, None)

    def test_cluster_update_no_options(self):
        """The cluster positional argument is mandatory."""
        arglist = []
        verifylist = []
        self.assertRaises(osc_utils.ParserException, self.check_parser,
                          self.cmd, arglist, verifylist)

    def test_cluster_update_nothing_updated(self):
        """Only the cluster named: update() receives just the id."""
        arglist = ['fake']
        verifylist = [('cluster', 'fake')]
        parsed_args = self.check_parser(self.cmd, arglist, verifylist)
        self.cmd.take_action(parsed_args)
        # Check that correct arguments were passed
        self.cl_mock.update.assert_called_once_with('cluster_id')

    def test_cluster_update_all_options(self):
        """All mutable fields set: kwargs forwarded and detail rendered."""
        arglist = ['fake', '--name', 'fake', '--description', 'descr',
                   '--public', '--protected']
        verifylist = [('cluster', 'fake'), ('name', 'fake'),
                      ('description', 'descr'), ('is_public', True),
                      ('is_protected', True)]
        parsed_args = self.check_parser(self.cmd, arglist, verifylist)
        columns, data = self.cmd.take_action(parsed_args)
        # Check that correct arguments were passed
        self.cl_mock.update.assert_called_once_with(
            'cluster_id', description='descr', is_protected=True,
            is_public=True, name='fake')
        # Check that columns are correct
        expected_columns = ('Anti affinity', 'Cluster template id',
                            'Description', 'Id', 'Image',
                            'Is protected', 'Is public', 'Name',
                            'Neutron management network', 'Node groups',
                            'Plugin name', 'Plugin version', 'Status',
                            'Use autoconfig', 'User keypair id')
        self.assertEqual(expected_columns, columns)
        # Check that data is correct
        expected_data = ('', 'ct_id', 'Cluster template for tests',
                         'cluster_id', 'img_id', False, False, 'fake',
                         'net_id', 'fakeng:2', 'fake', '0.1', 'Active', True,
                         'test')
        self.assertEqual(expected_data, data)

    def test_cluster_update_private_unprotected(self):
        """--private/--unprotected map to is_public/is_protected False."""
        arglist = ['fake', '--private', '--unprotected']
        verifylist = [('cluster', 'fake'), ('is_public', False),
                      ('is_protected', False)]
        parsed_args = self.check_parser(self.cmd, arglist, verifylist)
        columns, data = self.cmd.take_action(parsed_args)
        # Check that correct arguments were passed
        self.cl_mock.update.assert_called_once_with(
            'cluster_id', is_protected=False, is_public=False)
class TestScaleCluster(TestClusters):
    """Tests for `openstack dataprocessing cluster scale` with APIv2."""

    def setUp(self):
        super(TestScaleCluster, self).setUp()
        self.cl_mock.scale.return_value = mock.Mock(
            cluster=CLUSTER_INFO.copy())
        self.cl_mock.find_unique.return_value = api_cl.Cluster(
            None, CLUSTER_INFO)
        # Command to test
        self.cmd = osc_cl.ScaleCluster(self.app, None)

    def test_cluster_scale_no_options(self):
        """The cluster positional argument is mandatory."""
        arglist = []
        verifylist = []
        self.assertRaises(osc_utils.ParserException, self.check_parser,
                          self.cmd, arglist, verifylist)

    def test_cluster_scale_resize(self):
        """A group already in the cluster ('fakeng') goes into
        resize_node_groups."""
        self.ngt_mock.find_unique.return_value = api_ngt.NodeGroupTemplate(
            None, NGT_INFO)
        arglist = ['fake', '--instances', 'fakeng:1']
        verifylist = [('cluster', 'fake'),
                      ('instances', ['fakeng:1'])]
        parsed_args = self.check_parser(self.cmd, arglist, verifylist)
        columns, data = self.cmd.take_action(parsed_args)
        # Check that correct arguments were passed
        self.cl_mock.scale.assert_called_once_with(
            'cluster_id',
            {'resize_node_groups': [
                {'count': 1,
                 'name': 'fakeng'}]}
        )
        # Check that columns are correct
        expected_columns = ('Anti affinity', 'Cluster template id',
                            'Description', 'Id', 'Image',
                            'Is protected', 'Is public', 'Name',
                            'Neutron management network', 'Node groups',
                            'Plugin name', 'Plugin version', 'Status',
                            'Use autoconfig', 'User keypair id')
        self.assertEqual(expected_columns, columns)
        # Check that data is correct
        expected_data = ('', 'ct_id', 'Cluster template for tests',
                         'cluster_id', 'img_id', False, False, 'fake',
                         'net_id', 'fakeng:2', 'fake', '0.1', 'Active', True,
                         'test')
        self.assertEqual(expected_data, data)

    def test_cluster_scale_add_ng(self):
        """A group name not in the cluster ('new') goes into
        add_node_groups with its template id."""
        new_ng = {'name': 'new', 'id': 'new_id'}
        self.ngt_mock.find_unique.return_value = api_ngt.NodeGroupTemplate(
            None, new_ng)
        arglist = ['fake', '--instances', 'new:1']
        verifylist = [('cluster', 'fake'), ('instances', ['new:1'])]
        parsed_args = self.check_parser(self.cmd, arglist, verifylist)
        self.cmd.take_action(parsed_args)
        # Check that correct arguments were passed
        self.cl_mock.scale.assert_called_once_with(
            'cluster_id',
            {'add_node_groups': [
                {'count': 1,
                 'node_group_template_id': 'new_id',
                 'name': 'new'}
            ]})
class TestVerificationUpdateCluster(TestClusters):
    """Tests for cluster verification show/start with APIv2."""

    def setUp(self):
        super(TestVerificationUpdateCluster, self).setUp()
        self.cl_mock.find_unique.return_value = api_cl.Cluster(
            None, CLUSTER_INFO)
        self.cl_mock.verification_update.return_value = api_cl.Cluster(
            None, CLUSTER_INFO)
        # Command to test
        self.cmd = osc_cl.VerificationUpdateCluster(self.app, None)

    def test_verification_show(self):
        """--show renders the fixture's per-check and overall statuses."""
        arglist = ['fake', '--show']
        verifylist = [('cluster', 'fake'), ('show', True)]
        parsed_args = self.check_parser(self.cmd, arglist, verifylist)
        columns, data = self.cmd.take_action(parsed_args)
        # Check that correct arguments were passed
        self.cl_mock.find_unique.assert_called_once_with(name='fake')
        # Check that columns are correct
        expected_columns = ('Health check (some check)', 'Verification status')
        self.assertEqual(expected_columns, columns)
        # Check that data is correct
        expected_data = ('GREEN', 'GREEN')
        self.assertEqual(expected_data, data)

    def test_verification_start(self):
        """--start maps to verification_update(cluster_id, 'START')."""
        arglist = ['fake', '--start']
        verifylist = [('cluster', 'fake'), ('status', 'START')]
        parsed_args = self.check_parser(self.cmd, arglist, verifylist)
        self.cmd.take_action(parsed_args)
        # Check that correct arguments were passed
        self.cl_mock.verification_update.assert_called_once_with(
            'cluster_id', 'START')

View File

@ -0,0 +1,309 @@
# Copyright (c) 2015 Mirantis Inc.
#
# Licensed under the Apache License, Version 2.0 (the "License");
# you may not use this file except in compliance with the License.
# You may obtain a copy of the License at
#
# http://www.apache.org/licenses/LICENSE-2.0
#
# Unless required by applicable law or agreed to in writing, software
# distributed under the License is distributed on an "AS IS" BASIS,
# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or
# implied.
# See the License for the specific language governing permissions and
# limitations under the License.
import mock
from osc_lib.tests import utils as osc_utils
from saharaclient.api import data_sources as api_ds
from saharaclient.osc.v1 import data_sources as osc_ds
from saharaclient.tests.unit.osc.v1 import test_data_sources as tds_v1
# Data-source fixture shared by the mocked data_sources manager
# throughout this module.
DS_INFO = {'id': 'id', 'name': 'source', 'type': 'swift',
           'url': 'swift://container.sahara/object',
           'description': 'Data Source for tests',
           'is_public': True, 'is_protected': True}
class TestDataSources(tds_v1.TestDataSources):
    """Base fixture: re-run the v1 data-source tests with APIv2 selected."""

    def setUp(self):
        super(TestDataSources, self).setUp()
        # Switch the OSC app onto the new data-processing APIv2.
        self.app.api_version['data_processing'] = '2'
        self.ds_mock = self.app.client_manager.data_processing.data_sources
        self.ds_mock.reset_mock()
class TestCreateDataSource(TestDataSources):
    """Tests for `openstack dataprocessing data source create`.

    NOTE(review): this v2 module exercises the command class imported
    from saharaclient.osc.v1 (see module imports) — confirm that reuse
    is intended until a dedicated v2 implementation exists.
    """

    def setUp(self):
        super(TestCreateDataSource, self).setUp()
        self.ds_mock.create.return_value = api_ds.DataSources(
            None, DS_INFO)
        # Command to test
        self.cmd = osc_ds.CreateDataSource(self.app, None)

    def test_data_sources_create_no_options(self):
        """Name/type/url are required; bare invocation must not parse."""
        arglist = []
        verifylist = []
        self.assertRaises(osc_utils.ParserException, self.check_parser,
                          self.cmd, arglist, verifylist)

    def test_data_sources_create_required_options(self):
        """Minimum flags: optional kwargs default (incl. s3_credentials=None)."""
        arglist = ['source', '--type', 'swift', '--url',
                   'swift://container.sahara/object']
        verifylist = [('name', 'source'), ('type', 'swift'),
                      ('url', 'swift://container.sahara/object')]
        parsed_args = self.check_parser(self.cmd, arglist, verifylist)
        columns, data = self.cmd.take_action(parsed_args)
        # Check that data source was created with correct arguments
        called_args = {'credential_pass': None, 'credential_user': None,
                       'data_source_type': 'swift', 'name': 'source',
                       'description': '',
                       'url': 'swift://container.sahara/object',
                       'is_public': False, 'is_protected': False,
                       's3_credentials': None}
        self.ds_mock.create.assert_called_once_with(**called_args)
        # Check that columns are correct
        expected_columns = ('Description', 'Id', 'Is protected', 'Is public',
                            'Name', 'Type', 'Url')
        self.assertEqual(expected_columns, columns)
        # Check that data is correct
        expected_data = ('Data Source for tests', 'id', True, True, 'source',
                         'swift', 'swift://container.sahara/object')
        self.assertEqual(expected_data, data)

    def test_data_sources_create_all_options(self):
        """All flags set: credentials/description/visibility forwarded."""
        arglist = ['source', '--type', 'swift', '--url',
                   'swift://container.sahara/object', '--username', 'user',
                   '--password', 'pass', '--description',
                   'Data Source for tests', '--public', '--protected']
        verifylist = [('name', 'source'), ('type', 'swift'),
                      ('url', 'swift://container.sahara/object'),
                      ('username', 'user'), ('password', 'pass'),
                      ('description', 'Data Source for tests'),
                      ('public', True), ('protected', True)]
        parsed_args = self.check_parser(self.cmd, arglist, verifylist)
        columns, data = self.cmd.take_action(parsed_args)
        # Check that data source was created with correct arguments
        called_args = {'credential_pass': 'pass', 'credential_user': 'user',
                       'data_source_type': 'swift', 'name': 'source',
                       'description': 'Data Source for tests',
                       'url': 'swift://container.sahara/object',
                       'is_protected': True, 'is_public': True,
                       's3_credentials': None}
        self.ds_mock.create.assert_called_once_with(**called_args)
        # Check that columns are correct
        expected_columns = ('Description', 'Id', 'Is protected', 'Is public',
                            'Name', 'Type', 'Url')
        self.assertEqual(expected_columns, columns)
        # Check that data is correct
        expected_data = ('Data Source for tests', 'id', True, True, 'source',
                         'swift', 'swift://container.sahara/object')
        self.assertEqual(expected_data, data)
class TestListDataSources(TestDataSources):
    """Tests for `openstack dataprocessing data source list`."""

    def setUp(self):
        super(TestListDataSources, self).setUp()
        self.ds_mock.list.return_value = [api_ds.DataSources(
            None, DS_INFO)]
        # Command to test
        self.cmd = osc_ds.ListDataSources(self.app, None)

    def test_data_sources_list_no_options(self):
        """Default listing shows Name/Id/Type only."""
        arglist = []
        verifylist = []
        parsed_args = self.check_parser(self.cmd, arglist, verifylist)
        columns, data = self.cmd.take_action(parsed_args)
        # Check that columns are correct
        expected_columns = ['Name', 'Id', 'Type']
        self.assertEqual(expected_columns, columns)
        # Check that data is correct
        expected_data = [('source', 'id', 'swift')]
        self.assertEqual(expected_data, list(data))

    def test_data_sources_list_long(self):
        """--long adds Url/Description/visibility columns."""
        arglist = ['--long']
        verifylist = [('long', True)]
        parsed_args = self.check_parser(self.cmd, arglist, verifylist)
        columns, data = self.cmd.take_action(parsed_args)
        # Check that columns are correct
        expected_columns = ['Name', 'Id', 'Type', 'Url', 'Description',
                            'Is public', 'Is protected']
        self.assertEqual(expected_columns, columns)
        # Check that data is correct
        expected_data = [('source', 'id', 'swift',
                          'swift://container.sahara/object',
                          'Data Source for tests', True, True)]
        self.assertEqual(expected_data, list(data))
class TestShowDataSource(TestDataSources):
    """Tests for `openstack dataprocessing data source show`."""

    def setUp(self):
        super(TestShowDataSource, self).setUp()
        self.ds_mock.find_unique.return_value = api_ds.DataSources(
            None, DS_INFO)
        # Command to test
        self.cmd = osc_ds.ShowDataSource(self.app, None)

    def test_data_sources_show(self):
        """Show resolves the source by name and renders full detail."""
        arglist = ['source']
        verifylist = [('data_source', 'source')]
        parsed_args = self.check_parser(self.cmd, arglist, verifylist)
        columns, data = self.cmd.take_action(parsed_args)
        # Check that correct arguments was passed
        self.ds_mock.find_unique.assert_called_once_with(name='source')
        # Check that columns are correct
        expected_columns = ('Description', 'Id', 'Is protected', 'Is public',
                            'Name', 'Type', 'Url')
        self.assertEqual(expected_columns, columns)
        # Check that data is correct
        expected_data = ['Data Source for tests', 'id', True, True, 'source',
                         'swift', 'swift://container.sahara/object']
        self.assertEqual(expected_data, list(data))
class TestDeleteDataSource(TestDataSources):
    """Tests for `openstack dataprocessing data source delete`."""

    def setUp(self):
        super(TestDeleteDataSource, self).setUp()
        self.ds_mock.find_unique.return_value = api_ds.DataSources(
            None, DS_INFO)
        # Command to test
        self.cmd = osc_ds.DeleteDataSource(self.app, None)

    def test_data_sources_delete(self):
        """Deleting by name resolves the source and deletes by its id."""
        parsed_args = self.check_parser(
            self.cmd, ['source'], [('data_source', ['source'])])
        self.cmd.take_action(parsed_args)
        self.ds_mock.delete.assert_called_once_with('id')
class TestUpdateDataSource(TestDataSources):
    """Tests for `openstack dataprocessing data source update`."""

    def setUp(self):
        super(TestUpdateDataSource, self).setUp()
        self.ds_mock.find_unique.return_value = api_ds.DataSources(
            None, DS_INFO)
        self.ds_mock.update.return_value = mock.Mock(
            data_source=DS_INFO)
        # Command to test
        self.cmd = osc_ds.UpdateDataSource(self.app, None)

    def test_data_sources_update_no_options(self):
        """The data_source positional argument is mandatory."""
        arglist = []
        verifylist = []
        self.assertRaises(osc_utils.ParserException, self.check_parser,
                          self.cmd, arglist, verifylist)

    def test_data_sources_update_nothing_updated(self):
        """Only the source named: update() gets the id and an empty dict."""
        arglist = ['source']
        verifylist = [('data_source', 'source')]
        parsed_args = self.check_parser(self.cmd, arglist, verifylist)
        self.cmd.take_action(parsed_args)
        self.ds_mock.update.assert_called_once_with('id', {})

    def test_data_sources_update_required_options(self):
        """Same as above but also verifies the rendered output."""
        arglist = ['source']
        verifylist = [('data_source', 'source')]
        parsed_args = self.check_parser(self.cmd, arglist, verifylist)
        columns, data = self.cmd.take_action(parsed_args)
        # Check that data source was created with correct arguments
        self.ds_mock.update.assert_called_once_with('id', {})
        # Check that columns are correct
        expected_columns = ('Description', 'Id', 'Is protected', 'Is public',
                            'Name', 'Type', 'Url')
        self.assertEqual(expected_columns, columns)
        # Check that data is correct
        expected_data = ('Data Source for tests', 'id', True, True, 'source',
                         'swift', 'swift://container.sahara/object')
        self.assertEqual(expected_data, data)

    def test_data_sources_update_all_options(self):
        """All flags set: username/password are folded into a nested
        'credentials' dict in the update payload."""
        arglist = ['source', '--name', 'source', '--type', 'swift', '--url',
                   'swift://container.sahara/object', '--username', 'user',
                   '--password', 'pass', '--description',
                   'Data Source for tests', '--public', '--protected']
        verifylist = [('data_source', 'source'), ('name', 'source'),
                      ('type', 'swift'),
                      ('url', 'swift://container.sahara/object'),
                      ('username', 'user'), ('password', 'pass'),
                      ('description', 'Data Source for tests'),
                      ('is_public', True), ('is_protected', True)]
        parsed_args = self.check_parser(self.cmd, arglist, verifylist)
        columns, data = self.cmd.take_action(parsed_args)
        # Check that data source was created with correct arguments
        self.ds_mock.update.assert_called_once_with(
            'id', {'name': 'source', 'url': 'swift://container.sahara/object',
                   'is_protected': True,
                   'credentials': {'password': 'pass', 'user': 'user'},
                   'is_public': True, 'type': 'swift',
                   'description': 'Data Source for tests'})
        # Check that columns are correct
        expected_columns = ('Description', 'Id', 'Is protected', 'Is public',
                            'Name', 'Type', 'Url')
        self.assertEqual(expected_columns, columns)
        # Check that data is correct
        expected_data = ('Data Source for tests', 'id', True, True, 'source',
                         'swift', 'swift://container.sahara/object')
        self.assertEqual(expected_data, data)

    def test_data_sources_update_private_unprotected(self):
        """--private/--unprotected map to is_public/is_protected False."""
        arglist = ['source', '--private', '--unprotected']
        verifylist = [('data_source', 'source'), ('is_public', False),
                      ('is_protected', False)]
        parsed_args = self.check_parser(self.cmd, arglist, verifylist)
        self.cmd.take_action(parsed_args)
        # Check that data source was created with correct arguments
        self.ds_mock.update.assert_called_once_with(
            'id', {'is_public': False, 'is_protected': False})

View File

@ -0,0 +1,367 @@
# Copyright (c) 2015 Mirantis Inc.
#
# Licensed under the Apache License, Version 2.0 (the "License");
# you may not use this file except in compliance with the License.
# You may obtain a copy of the License at
#
# http://www.apache.org/licenses/LICENSE-2.0
#
# Unless required by applicable law or agreed to in writing, software
# distributed under the License is distributed on an "AS IS" BASIS,
# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or
# implied.
# See the License for the specific language governing permissions and
# limitations under the License.
import mock
from osc_lib.tests import utils as osc_utils
from saharaclient.api import images as api_images
from saharaclient.osc.v1 import images as osc_images
from saharaclient.tests.unit.osc.v1 import test_images as images_v1
# Registered-image fixture shared by the mocked images manager; the
# tags list renders sorted and comma-joined ('0.1, fake') in output.
IMAGE_INFO = {'id': 'id', 'name': 'image', 'username': 'ubuntu',
              'status': "Active", 'tags': ['fake', '0.1'],
              'description': 'Image for tests'}
class TestImages(images_v1.TestImages):
    """Base fixture: re-run the v1 image tests with APIv2 selected."""

    def setUp(self):
        super(TestImages, self).setUp()
        # Switch the OSC app onto the new data-processing APIv2.
        self.app.api_version['data_processing'] = '2'
        self.image_mock = self.app.client_manager.data_processing.images
        self.image_mock.reset_mock()
class TestListImages(TestImages):
    """Tests for `openstack dataprocessing image list`.

    NOTE(review): this v2 module exercises the command class imported
    from saharaclient.osc.v1 (see module imports) — confirm intended.
    """

    def setUp(self):
        super(TestListImages, self).setUp()
        self.image_mock.list.return_value = [api_images.Image(
            None, IMAGE_INFO)]
        # Command to test
        self.cmd = osc_images.ListImages(self.app, None)

    def test_images_list_no_options(self):
        """Default listing shows Name/Id/Username/Tags."""
        arglist = []
        verifylist = []
        parsed_args = self.check_parser(self.cmd, arglist, verifylist)
        columns, data = self.cmd.take_action(parsed_args)
        # Check that columns are correct
        expected_columns = ['Name', 'Id', 'Username', 'Tags']
        self.assertEqual(expected_columns, columns)
        # Check that data is correct
        expected_data = [('image', 'id', 'ubuntu', '0.1, fake')]
        self.assertEqual(expected_data, list(data))

    def test_images_list_long(self):
        """--long adds Status and Description columns."""
        arglist = ['--long']
        verifylist = [('long', True)]
        parsed_args = self.check_parser(self.cmd, arglist, verifylist)
        columns, data = self.cmd.take_action(parsed_args)
        # Check that columns are correct
        expected_columns = ['Name', 'Id', 'Username', 'Tags', 'Status',
                            'Description']
        self.assertEqual(expected_columns, columns)
        # Check that data is correct
        expected_data = [('image', 'id', 'ubuntu', '0.1, fake', 'Active',
                          'Image for tests')]
        self.assertEqual(expected_data, list(data))

    def test_images_list_successful_selection(self):
        """Only tags reach the server (search_opts); name/username are
        applied client-side to the returned list."""
        arglist = ['--name', 'image', '--tags', 'fake', '--username', 'ubuntu']
        verifylist = [('name', 'image'), ('tags', ['fake']),
                      ('username', 'ubuntu')]
        parsed_args = self.check_parser(self.cmd, arglist, verifylist)
        columns, data = self.cmd.take_action(parsed_args)
        # Check that correct arguments were passed
        self.image_mock.list.assert_called_once_with(
            search_opts={'tags': ['fake']})
        # Check that columns are correct
        expected_columns = ['Name', 'Id', 'Username', 'Tags']
        self.assertEqual(expected_columns, columns)
        # Check that data is correct
        expected_data = [('image', 'id', 'ubuntu', '0.1, fake')]
        self.assertEqual(expected_data, list(data))

    def test_images_list_with_name_nothing_selected(self):
        """A non-matching --name filters out the only fixture image."""
        arglist = ['--name', 'img']
        verifylist = [('name', 'img')]
        parsed_args = self.check_parser(self.cmd, arglist, verifylist)
        columns, data = self.cmd.take_action(parsed_args)
        # Check that columns are correct
        expected_columns = ['Name', 'Id', 'Username', 'Tags']
        self.assertEqual(expected_columns, columns)
        # Check that data is correct
        expected_data = []
        self.assertEqual(expected_data, list(data))

    def test_images_list_with_username_nothing_selected(self):
        """A non-matching --username filters out the only fixture image."""
        arglist = ['--username', 'fedora']
        verifylist = [('username', 'fedora')]
        parsed_args = self.check_parser(self.cmd, arglist, verifylist)
        columns, data = self.cmd.take_action(parsed_args)
        # Check that columns are correct
        expected_columns = ['Name', 'Id', 'Username', 'Tags']
        self.assertEqual(expected_columns, columns)
        # Check that data is correct
        expected_data = []
        self.assertEqual(expected_data, list(data))
class TestShowImage(TestImages):
    """Tests for `openstack dataprocessing image show`."""

    def setUp(self):
        super(TestShowImage, self).setUp()
        self.image_mock.find_unique.return_value = api_images.Image(
            None, IMAGE_INFO)
        # Command to test
        self.cmd = osc_images.ShowImage(self.app, None)

    def test_image_show_no_options(self):
        """The image positional argument is mandatory."""
        arglist = []
        verifylist = []
        self.assertRaises(osc_utils.ParserException, self.check_parser,
                          self.cmd, arglist, verifylist)

    def test_image_show(self):
        """Show resolves by name and renders the full image detail."""
        arglist = ['image']
        verifylist = [('image', 'image')]
        parsed_args = self.check_parser(self.cmd, arglist, verifylist)
        columns, data = self.cmd.take_action(parsed_args)
        # Check that correct arguments were passed
        self.image_mock.find_unique.assert_called_once_with(name='image')
        # Check that columns are correct
        expected_columns = ('Description', 'Id', 'Name', 'Status', 'Tags',
                            'Username')
        self.assertEqual(expected_columns, columns)
        # Check that data is correct
        expected_data = ['Image for tests', 'id', 'image', 'Active',
                         '0.1, fake', 'ubuntu']
        self.assertEqual(expected_data, list(data))
class TestRegisterImage(TestImages):
    """Tests for `openstack dataprocessing image register`."""

    def setUp(self):
        super(TestRegisterImage, self).setUp()
        self.image_mock.update_image.return_value = mock.Mock(
            image=IMAGE_INFO.copy())
        # The command also resolves the image via the glance client mock.
        self.app.client_manager.image = mock.Mock()
        self.image_client = self.app.client_manager.image.images
        self.image_client.get.return_value = mock.Mock(id='id')
        # Command to test
        self.cmd = osc_images.RegisterImage(self.app, None)

    def test_image_register_without_username(self):
        """--username is required for registration."""
        arglist = ['id']
        verifylist = [('image', 'id')]
        self.assertRaises(osc_utils.ParserException, self.check_parser,
                          self.cmd, arglist, verifylist)

    def test_image_register_required_options(self):
        """Image + username only: description defaults to None."""
        arglist = ['id', '--username', 'ubuntu']
        verifylist = [('image', 'id'), ('username', 'ubuntu')]
        parsed_args = self.check_parser(self.cmd, arglist, verifylist)
        columns, data = self.cmd.take_action(parsed_args)
        # Check that correct arguments were passed
        self.image_mock.update_image.assert_called_once_with(
            'id', desc=None, user_name='ubuntu')
        # Check that columns are correct
        expected_columns = ('Description', 'Id', 'Name', 'Status', 'Tags',
                            'Username')
        self.assertEqual(expected_columns, columns)
        # Check that data is correct
        expected_data = ['Image for tests', 'id', 'image', 'Active',
                         '0.1, fake', 'ubuntu']
        self.assertEqual(expected_data, list(data))

    def test_image_register_all_options(self):
        """--description is forwarded as the 'desc' kwarg."""
        arglist = ['id', '--username', 'ubuntu', '--description', 'descr']
        verifylist = [('image', 'id'), ('username', 'ubuntu'),
                      ('description', 'descr')]
        parsed_args = self.check_parser(self.cmd, arglist, verifylist)
        self.cmd.take_action(parsed_args)
        # Check that correct arguments were passed
        self.image_mock.update_image.assert_called_once_with(
            'id', desc='descr', user_name='ubuntu')
class TestUnregisterImage(TestImages):
    """Tests for the image unregister command."""

    def setUp(self):
        super(TestUnregisterImage, self).setUp()
        self.image_mock.find_unique.return_value = api_images.Image(
            None, IMAGE_INFO)
        # Command to test
        self.cmd = osc_images.UnregisterImage(self.app, None)

    def test_image_unregister_no_options(self):
        """At least one image must be given."""
        self.assertRaises(osc_utils.ParserException, self.check_parser,
                          self.cmd, [], [])

    def test_image_unregister(self):
        parsed_args = self.check_parser(self.cmd, ['image'],
                                        [('image', ['image'])])
        self.cmd.take_action(parsed_args)

        # The image is looked up by name, then unregistered by its id
        self.image_mock.find_unique.assert_called_once_with(name='image')
        self.image_mock.unregister_image.assert_called_once_with('id')
class TestSetImageTags(TestImages):
    """Tests for the image tags set command."""

    def setUp(self):
        super(TestSetImageTags, self).setUp()
        # Start from an image without tags so the "set" defines them anew
        image_info = IMAGE_INFO.copy()
        image_info['tags'] = []
        self.image_mock.find_unique.return_value = api_images.Image(
            None, image_info)
        self.image_mock.update_tags.return_value = api_images.Image(
            None, image_info)

        # Command to test
        self.cmd = osc_images.SetImageTags(self.app, None)

    def test_image_tags_set_without_tags(self):
        """--tags is mandatory for the set command."""
        arglist = ['id']
        verifylist = [('image', 'id')]

        self.assertRaises(osc_utils.ParserException, self.check_parser,
                          self.cmd, arglist, verifylist)

    def test_image_tags_set(self):
        arglist = ['image', '--tags', 'fake', '0.1']
        verifylist = [('image', 'image'), ('tags', ['fake', '0.1'])]

        parsed_args = self.check_parser(self.cmd, arglist, verifylist)
        columns, data = self.cmd.take_action(parsed_args)

        # Check that correct arguments were passed
        self.image_mock.find_unique.assert_called_with(name='image')
        self.image_mock.update_tags.assert_called_once_with(
            'id', ['fake', '0.1'])
class TestAddImageTags(TestImages):
    """Tests for the image tags add command."""

    def setUp(self):
        super(TestAddImageTags, self).setUp()
        # The mocked image starts with no tags
        image_info = IMAGE_INFO.copy()
        image_info['tags'] = []
        self.image_mock.update_tags.return_value = api_images.Image(
            None, image_info)
        self.image_mock.find_unique.return_value = api_images.Image(
            None, image_info)

        # Command to test
        self.cmd = osc_images.AddImageTags(self.app, None)

    def test_image_tags_add_without_tags(self):
        """--tags is mandatory for the add command."""
        arglist = ['id']
        verifylist = [('image', 'id')]

        self.assertRaises(osc_utils.ParserException, self.check_parser,
                          self.cmd, arglist, verifylist)

    def test_image_tags_add(self):
        arglist = ['image', '--tags', 'fake']
        verifylist = [('image', 'image'), ('tags', ['fake'])]

        parsed_args = self.check_parser(self.cmd, arglist, verifylist)
        self.cmd.take_action(parsed_args)

        # Check that correct arguments were passed
        self.image_mock.find_unique.assert_called_with(name='image')
        self.image_mock.update_tags.assert_called_once_with(
            'id', ['fake'])
class TestRemoveImageTags(TestImages):
    """Tests for the image tags remove command."""

    def setUp(self):
        super(TestRemoveImageTags, self).setUp()
        self.image_mock.update_tags.return_value = api_images.Image(
            None, IMAGE_INFO)
        self.image_mock.find_unique.return_value = api_images.Image(
            None, IMAGE_INFO)

        # Command to test
        self.cmd = osc_images.RemoveImageTags(self.app, None)

    def test_image_tags_remove_both_options(self):
        """--all and --tags are mutually exclusive."""
        arglist = ['id', '--all', '--tags', 'fake']
        verifylist = [('image', 'id'), ('all', True), ('tags', ['fake'])]

        self.assertRaises(osc_utils.ParserException, self.check_parser,
                          self.cmd, arglist, verifylist)

    def test_image_tags_remove(self):
        arglist = ['image', '--tags', 'fake']
        verifylist = [('image', 'image'), ('tags', ['fake'])]

        parsed_args = self.check_parser(self.cmd, arglist, verifylist)
        self.cmd.take_action(parsed_args)

        # Check that correct arguments were passed
        self.image_mock.find_unique.assert_called_with(name='image')
        # Removing 'fake' leaves only the remaining '0.1' tag
        self.image_mock.update_tags.assert_called_once_with(
            'id', ['0.1'])

    def test_image_tags_remove_all(self):
        arglist = ['image', '--all']
        verifylist = [('image', 'image'), ('all', True)]

        parsed_args = self.check_parser(self.cmd, arglist, verifylist)
        self.cmd.take_action(parsed_args)

        # Check that correct arguments were passed
        self.image_mock.find_unique.assert_called_with(name='image')
        # --all strips every tag
        self.image_mock.update_tags.assert_called_once_with(
            'id', [])

View File

@ -0,0 +1,326 @@
# Copyright (c) 2015 Mirantis Inc.
#
# Licensed under the Apache License, Version 2.0 (the "License");
# you may not use this file except in compliance with the License.
# You may obtain a copy of the License at
#
# http://www.apache.org/licenses/LICENSE-2.0
#
# Unless required by applicable law or agreed to in writing, software
# distributed under the License is distributed on an "AS IS" BASIS,
# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or
# implied.
# See the License for the specific language governing permissions and
# limitations under the License.
import mock
from osc_lib.tests import utils as osc_u
import testtools
from saharaclient.api import job_binaries as api_jb
from saharaclient.osc.v1 import job_binaries as osc_jb
from saharaclient.tests.unit.osc.v1 import test_job_binaries as tjb_v1
# Canned job-binary payload used as the return value of the mocked
# saharaclient job_binaries manager throughout these tests.
JOB_BINARY_INFO = {
    "name": 'job-binary',
    "description": 'descr',
    "id": 'jb_id',
    "is_protected": False,
    "is_public": False,
    "url": 'swift://cont/test'
}
class TestJobBinaries(tjb_v1.TestJobBinaries):
    """Re-run the job-binary OSC tests against data processing APIv2."""

    def setUp(self):
        super(TestJobBinaries, self).setUp()
        # Force the plugin onto the APIv2 code path
        self.app.api_version['data_processing'] = '2'
        dp_client = self.app.client_manager.data_processing
        self.jb_mock = dp_client.job_binaries
        self.jb_mock.reset_mock()
class TestCreateJobBinary(TestJobBinaries):
    """Tests for job binary creation under APIv2."""
    # TODO(apavlov): check for creation with --json

    def setUp(self):
        super(TestCreateJobBinary, self).setUp()
        self.jb_mock.create.return_value = api_jb.JobBinaries(
            None, JOB_BINARY_INFO)
        self.jbi_mock = (self.app.client_manager.
                         data_processing.job_binary_internals)
        self.jbi_mock.create.return_value = mock.Mock(id='jbi_id')
        self.jbi_mock.reset_mock()

        # Command to test
        self.cmd = osc_jb.CreateJobBinary(self.app, None)

    def test_job_binary_create_swift_public_protected(self):
        arglist = ['--name', 'job-binary', '--url', 'swift://cont/test',
                   '--username', 'user', '--password', 'pass', '--public',
                   '--protected']
        verifylist = [('name', 'job-binary'), ('url', 'swift://cont/test'),
                      ('username', 'user'), ('password', 'pass'),
                      ('public', True), ('protected', True)]

        parsed_args = self.check_parser(self.cmd, arglist, verifylist)
        columns, data = self.cmd.take_action(parsed_args)

        # Check that correct arguments were passed; the swift credentials
        # must be folded into the 'extra' dict
        self.jb_mock.create.assert_called_once_with(
            description=None, extra={'password': 'pass', 'user': 'user'},
            is_protected=True, is_public=True, name='job-binary',
            url='swift://cont/test')

        # Check that columns are correct
        expected_columns = ('Description', 'Id', 'Is protected', 'Is public',
                            'Name', 'Url')
        self.assertEqual(expected_columns, columns)

        # Check that data is correct
        expected_data = ('descr', 'jb_id', False, False, 'job-binary',
                         'swift://cont/test')
        self.assertEqual(expected_data, data)

    def test_job_binary_create_mutual_exclusion(self):
        """S3 credentials cannot be combined with a swift password."""
        arglist = ['job-binary', '--name', 'job-binary', '--access-key', 'ak',
                   '--secret-key', 'sk', '--url', 's3://abc/def',
                   '--password', 'pw']

        with testtools.ExpectedException(osc_u.ParserException):
            self.check_parser(self.cmd, arglist, mock.Mock())
class TestListJobBinaries(TestJobBinaries):
    """Tests for listing job binaries under APIv2."""

    def setUp(self):
        super(TestListJobBinaries, self).setUp()
        self.jb_mock.list.return_value = [api_jb.JobBinaries(
            None, JOB_BINARY_INFO)]

        # Command to test
        self.cmd = osc_jb.ListJobBinaries(self.app, None)

    def test_job_binary_list_no_options(self):
        arglist = []
        verifylist = []

        parsed_args = self.check_parser(self.cmd, arglist, verifylist)
        columns, data = self.cmd.take_action(parsed_args)

        # Check that columns are correct
        expected_columns = ['Name', 'Id', 'Url']
        self.assertEqual(expected_columns, columns)

        # Check that data is correct
        expected_data = [('job-binary', 'jb_id', 'swift://cont/test')]
        self.assertEqual(expected_data, list(data))

    def test_job_binary_list_long(self):
        """--long adds description and visibility columns."""
        arglist = ['--long']
        verifylist = [('long', True)]

        parsed_args = self.check_parser(self.cmd, arglist, verifylist)
        columns, data = self.cmd.take_action(parsed_args)

        # Check that columns are correct
        expected_columns = ['Name', 'Id', 'Url', 'Description', 'Is public',
                            'Is protected']
        self.assertEqual(expected_columns, columns)

        # Check that data is correct
        expected_data = [('job-binary', 'jb_id', 'swift://cont/test', 'descr',
                          False, False)]
        self.assertEqual(expected_data, list(data))

    def test_job_binary_list_extra_search_opts(self):
        arglist = ['--name', 'bin']
        verifylist = [('name', 'bin')]

        parsed_args = self.check_parser(self.cmd, arglist, verifylist)
        columns, data = self.cmd.take_action(parsed_args)

        # Check that columns are correct
        expected_columns = ['Name', 'Id', 'Url']
        self.assertEqual(expected_columns, columns)

        # Check that data is correct
        expected_data = [('job-binary', 'jb_id', 'swift://cont/test')]
        self.assertEqual(expected_data, list(data))
class TestShowJobBinary(TestJobBinaries):
    """Tests for showing a single job binary under APIv2."""

    def setUp(self):
        super(TestShowJobBinary, self).setUp()
        self.jb_mock.find_unique.return_value = api_jb.JobBinaries(
            None, JOB_BINARY_INFO)
        # Command to test
        self.cmd = osc_jb.ShowJobBinary(self.app, None)

    def test_job_binary_show(self):
        parsed_args = self.check_parser(
            self.cmd, ['job-binary'], [('job_binary', 'job-binary')])
        columns, data = self.cmd.take_action(parsed_args)

        # The binary is resolved by name
        self.jb_mock.find_unique.assert_called_once_with(name='job-binary')

        self.assertEqual(('Description', 'Id', 'Is protected', 'Is public',
                          'Name', 'Url'), columns)
        self.assertEqual(('descr', 'jb_id', False, False, 'job-binary',
                          'swift://cont/test'), data)
class TestDeleteJobBinary(TestJobBinaries):
    """Tests for deleting a job binary under APIv2."""

    def setUp(self):
        super(TestDeleteJobBinary, self).setUp()
        self.jb_mock.find_unique.return_value = api_jb.JobBinaries(
            None, JOB_BINARY_INFO)
        # Command to test
        self.cmd = osc_jb.DeleteJobBinary(self.app, None)

    def test_job_binary_delete(self):
        parsed_args = self.check_parser(
            self.cmd, ['job-binary'], [('job_binary', ['job-binary'])])
        self.cmd.take_action(parsed_args)

        # Deletion happens via the resolved id
        self.jb_mock.delete.assert_called_once_with('jb_id')
class TestUpdateJobBinary(TestJobBinaries):
    """Tests for updating a job binary under APIv2."""

    def setUp(self):
        super(TestUpdateJobBinary, self).setUp()
        self.jb_mock.find_unique.return_value = api_jb.JobBinaries(
            None, JOB_BINARY_INFO)
        self.jb_mock.update.return_value = api_jb.JobBinaries(
            None, JOB_BINARY_INFO)

        # Command to test
        self.cmd = osc_jb.UpdateJobBinary(self.app, None)

    def test_job_binary_update_all_options(self):
        arglist = ['job-binary', '--name', 'job-binary', '--description',
                   'descr', '--username', 'user', '--password', 'pass',
                   '--public', '--protected']
        verifylist = [('job_binary', 'job-binary'), ('name', 'job-binary'),
                      ('description', 'descr'), ('username', 'user'),
                      ('password', 'pass'), ('is_public', True),
                      ('is_protected', True)]

        parsed_args = self.check_parser(self.cmd, arglist, verifylist)
        columns, data = self.cmd.take_action(parsed_args)

        # Check that correct arguments were passed
        self.jb_mock.update.assert_called_once_with(
            'jb_id',
            {'is_public': True, 'description': 'descr', 'is_protected': True,
             'name': 'job-binary',
             'extra': {'password': 'pass', 'user': 'user'}})

        # Check that columns are correct
        expected_columns = ('Description', 'Id', 'Is protected', 'Is public',
                            'Name', 'Url')
        self.assertEqual(expected_columns, columns)

        # Check that data is correct
        expected_data = ('descr', 'jb_id', False, False, 'job-binary',
                         'swift://cont/test')
        self.assertEqual(expected_data, data)

    def test_job_binary_update_private_unprotected(self):
        arglist = ['job-binary', '--private', '--unprotected']
        verifylist = [('job_binary', 'job-binary'), ('is_public', False),
                      ('is_protected', False)]

        parsed_args = self.check_parser(self.cmd, arglist, verifylist)
        columns, data = self.cmd.take_action(parsed_args)

        # Check that correct arguments were passed
        self.jb_mock.update.assert_called_once_with(
            'jb_id', {'is_public': False, 'is_protected': False})

    def test_job_binary_update_nothing_updated(self):
        """An update with no options sends an empty patch dict."""
        arglist = ['job-binary']
        verifylist = [('job_binary', 'job-binary')]

        parsed_args = self.check_parser(self.cmd, arglist, verifylist)
        columns, data = self.cmd.take_action(parsed_args)

        # Check that correct arguments were passed
        self.jb_mock.update.assert_called_once_with(
            'jb_id', {})

    def test_job_binary_update_mutual_exclusion(self):
        """S3 credentials cannot be combined with a swift password."""
        arglist = ['job-binary', '--name', 'job-binary', '--access-key', 'ak',
                   '--secret-key', 'sk', '--url', 's3://abc/def',
                   '--password', 'pw']

        with testtools.ExpectedException(osc_u.ParserException):
            self.check_parser(self.cmd, arglist, mock.Mock())
class TestDownloadJobBinary(TestJobBinaries):
    """Tests for downloading a job binary's data under APIv2."""

    def setUp(self):
        super(TestDownloadJobBinary, self).setUp()
        self.jb_mock.get_file.return_value = 'data'
        self.jb_mock.find_unique.return_value = api_jb.JobBinaries(
            None, JOB_BINARY_INFO)

        # Command to test
        self.cmd = osc_jb.DownloadJobBinary(self.app, None)

    def test_download_job_binary_default_file(self):
        """Without --file the binary's own name is used as the filename."""
        m_open = mock.mock_open()
        with mock.patch('six.moves.builtins.open', m_open, create=True):
            arglist = ['job-binary']
            verifylist = [('job_binary', 'job-binary')]

            parsed_args = self.check_parser(self.cmd, arglist, verifylist)
            self.cmd.take_action(parsed_args)

            # Check that correct arguments was passed
            self.jb_mock.get_file.assert_called_once_with(
                'jb_id')

            # Check that data will be saved to the right file
            self.assertEqual('job-binary', m_open.call_args[0][0])

    def test_download_job_binary_specified_file(self):
        m_open = mock.mock_open()
        with mock.patch('six.moves.builtins.open', m_open):
            arglist = ['job-binary', '--file', 'test']
            verifylist = [('job_binary', 'job-binary'), ('file', 'test')]

            parsed_args = self.check_parser(self.cmd, arglist, verifylist)
            self.cmd.take_action(parsed_args)

            # Check that correct arguments was passed
            self.jb_mock.get_file.assert_called_once_with(
                'jb_id')

            # Check that data will be saved to the right file
            self.assertEqual('test', m_open.call_args[0][0])

View File

@ -0,0 +1,292 @@
# Copyright (c) 2015 Mirantis Inc.
#
# Licensed under the Apache License, Version 2.0 (the "License");
# you may not use this file except in compliance with the License.
# You may obtain a copy of the License at
#
# http://www.apache.org/licenses/LICENSE-2.0
#
# Unless required by applicable law or agreed to in writing, software
# distributed under the License is distributed on an "AS IS" BASIS,
# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or
# implied.
# See the License for the specific language governing permissions and
# limitations under the License.
import mock
from osc_lib.tests import utils as osc_utils
from saharaclient.api.v2 import job_templates as api_j
from saharaclient.osc.v2 import job_templates as osc_j
from saharaclient.tests.unit.osc.v1 import test_job_templates as tjt_v1
# Canned job-template payload returned by the mocked job_templates
# manager; includes one main and one lib binary reference.
JOB_TEMPLATE_INFO = {
    "is_public": False,
    "id": "job_id",
    "name": "pig-job",
    "description": "Job for test",
    "interface": [],
    "libs": [
        {
            "id": "lib_id",
            "name": "lib"
        }
    ],
    "type": "Pig",
    "is_protected": False,
    "mains": [
        {
            "id": "main_id",
            "name": "main"
        }
    ]
}
class TestJobTemplates(tjt_v1.TestJobTemplates):
    """Re-run the job-template OSC tests against data processing APIv2."""

    def setUp(self):
        super(TestJobTemplates, self).setUp()
        # Force the plugin onto the APIv2 code path
        self.app.api_version['data_processing'] = '2'
        dp_client = self.app.client_manager.data_processing
        self.job_mock = dp_client.job_templates
        self.job_mock.reset_mock()
class TestCreateJobTemplate(TestJobTemplates):
    """Tests for job template creation under APIv2."""
    # TODO(apavlov): check for creation with --interface

    def setUp(self):
        super(TestCreateJobTemplate, self).setUp()
        self.job_mock.create.return_value = api_j.JobTemplate(
            None, JOB_TEMPLATE_INFO)
        # Any main/lib name resolves to the same mocked binary id
        self.jb_mock = self.app.client_manager.data_processing.job_binaries
        self.jb_mock.find_unique.return_value = mock.Mock(id='jb_id')
        self.jb_mock.reset_mock()

        # Command to test
        self.cmd = osc_j.CreateJobTemplate(self.app, None)

    def test_job_template_create_minimum_options(self):
        arglist = ['--name', 'pig-job', '--type', 'Pig']
        verifylist = [('name', 'pig-job'), ('type', 'Pig')]

        parsed_args = self.check_parser(self.cmd, arglist, verifylist)
        self.cmd.take_action(parsed_args)

        # Check that correct arguments were passed
        self.job_mock.create.assert_called_once_with(
            description=None, interface=None, is_protected=False,
            is_public=False, libs=None, mains=None, name='pig-job', type='Pig')

    def test_job_template_create_all_options(self):
        arglist = ['--name', 'pig-job', '--type', 'Pig', '--mains', 'main',
                   '--libs', 'lib', '--description', 'descr', '--public',
                   '--protected']
        verifylist = [('name', 'pig-job'), ('type', 'Pig'),
                      ('mains', ['main']), ('libs', ['lib']),
                      ('description', 'descr'), ('public', True),
                      ('protected', True)]

        parsed_args = self.check_parser(self.cmd, arglist, verifylist)
        columns, data = self.cmd.take_action(parsed_args)

        # Check that correct arguments were passed; main/lib names were
        # resolved to binary ids through the mocked job_binaries manager
        self.job_mock.create.assert_called_once_with(
            description='descr', interface=None, is_protected=True,
            is_public=True, libs=['jb_id'], mains=['jb_id'], name='pig-job',
            type='Pig')

        # Check that columns are correct
        expected_columns = ('Description', 'Id', 'Is protected', 'Is public',
                            'Libs', 'Mains', 'Name', 'Type')
        self.assertEqual(expected_columns, columns)

        # Check that data is correct
        expected_data = ('Job for test', 'job_id', False, False, 'lib:lib_id',
                         'main:main_id', 'pig-job', 'Pig')
        self.assertEqual(expected_data, data)
class TestListJobTemplates(TestJobTemplates):
    """Tests for listing job templates under APIv2."""

    def setUp(self):
        super(TestListJobTemplates, self).setUp()
        self.job_mock.list.return_value = [api_j.JobTemplate(
            None, JOB_TEMPLATE_INFO)]

        # Command to test
        self.cmd = osc_j.ListJobTemplates(self.app, None)

    def test_job_templates_list_no_options(self):
        arglist = []
        verifylist = []

        parsed_args = self.check_parser(self.cmd, arglist, verifylist)
        columns, data = self.cmd.take_action(parsed_args)

        # Check that columns are correct
        expected_columns = ['Name', 'Id', 'Type']
        self.assertEqual(expected_columns, columns)

        # Check that data is correct
        expected_data = [('pig-job', 'job_id', 'Pig')]
        self.assertEqual(expected_data, list(data))

    def test_job_template_list_long(self):
        """--long adds description and visibility columns."""
        arglist = ['--long']
        verifylist = [('long', True)]

        parsed_args = self.check_parser(self.cmd, arglist, verifylist)
        columns, data = self.cmd.take_action(parsed_args)

        # Check that columns are correct
        expected_columns = ['Name', 'Id', 'Type', 'Description', 'Is public',
                            'Is protected']
        self.assertEqual(expected_columns, columns)

        # Check that data is correct
        expected_data = [('pig-job', 'job_id', 'Pig', 'Job for test',
                          False, False)]
        self.assertEqual(expected_data, list(data))

    def test_job_template_list_extra_search_opts(self):
        arglist = ['--type', 'Pig', '--name', 'pig']
        verifylist = [('type', 'Pig'), ('name', 'pig')]

        parsed_args = self.check_parser(self.cmd, arglist, verifylist)
        columns, data = self.cmd.take_action(parsed_args)

        # Check that columns are correct
        expected_columns = ['Name', 'Id', 'Type']
        self.assertEqual(expected_columns, columns)

        # Check that data is correct
        expected_data = [('pig-job', 'job_id', 'Pig')]
        self.assertEqual(expected_data, list(data))
class TestShowJobTemplate(TestJobTemplates):
    """Tests for showing a single job template under APIv2."""

    def setUp(self):
        super(TestShowJobTemplate, self).setUp()
        self.job_mock.find_unique.return_value = api_j.JobTemplate(
            None, JOB_TEMPLATE_INFO)
        # Command to test
        self.cmd = osc_j.ShowJobTemplate(self.app, None)

    def test_job_template_show(self):
        parsed_args = self.check_parser(
            self.cmd, ['pig-job'], [('job_template', 'pig-job')])
        columns, data = self.cmd.take_action(parsed_args)

        # The template is resolved by name
        self.job_mock.find_unique.assert_called_once_with(name='pig-job')

        self.assertEqual(('Description', 'Id', 'Is protected', 'Is public',
                          'Libs', 'Mains', 'Name', 'Type'), columns)
        self.assertEqual(('Job for test', 'job_id', False, False,
                          'lib:lib_id', 'main:main_id', 'pig-job', 'Pig'),
                         data)
class TestDeleteJobTemplate(TestJobTemplates):
    """Tests for deleting a job template under APIv2."""

    def setUp(self):
        super(TestDeleteJobTemplate, self).setUp()
        self.job_mock.find_unique.return_value = api_j.JobTemplate(
            None, JOB_TEMPLATE_INFO)
        # Command to test
        self.cmd = osc_j.DeleteJobTemplate(self.app, None)

    def test_job_template_delete(self):
        parsed_args = self.check_parser(
            self.cmd, ['pig-job'], [('job_template', ['pig-job'])])
        self.cmd.take_action(parsed_args)

        # Deletion happens via the resolved id
        self.job_mock.delete.assert_called_once_with('job_id')
class TestUpdateJobTemplate(TestJobTemplates):
    """Tests for updating a job template under APIv2."""

    def setUp(self):
        super(TestUpdateJobTemplate, self).setUp()
        self.job_mock.find_unique.return_value = api_j.JobTemplate(
            None, JOB_TEMPLATE_INFO)
        self.job_mock.update.return_value = mock.Mock(
            job_template=JOB_TEMPLATE_INFO.copy())

        # Command to test
        self.cmd = osc_j.UpdateJobTemplate(self.app, None)

    def test_job_template_update_no_options(self):
        """The job template argument is mandatory."""
        arglist = []
        verifylist = []

        self.assertRaises(osc_utils.ParserException, self.check_parser,
                          self.cmd, arglist, verifylist)

    def test_job_template_update_nothing_updated(self):
        """An update with no options sends no update fields."""
        arglist = ['pig-job']
        verifylist = [('job_template', 'pig-job')]

        parsed_args = self.check_parser(self.cmd, arglist, verifylist)
        self.cmd.take_action(parsed_args)

        # Check that correct arguments were passed
        self.job_mock.update.assert_called_once_with('job_id')

    def test_job_template_update_all_options(self):
        arglist = ['pig-job', '--name', 'pig-job', '--description', 'descr',
                   '--public', '--protected']
        verifylist = [('job_template', 'pig-job'), ('name', 'pig-job'),
                      ('description', 'descr'), ('is_public', True),
                      ('is_protected', True)]

        parsed_args = self.check_parser(self.cmd, arglist, verifylist)
        columns, data = self.cmd.take_action(parsed_args)

        # Check that correct arguments were passed
        self.job_mock.update.assert_called_once_with(
            'job_id', description='descr', is_protected=True, is_public=True,
            name='pig-job')

        # Check that columns are correct
        expected_columns = ('Description', 'Id', 'Is protected', 'Is public',
                            'Libs', 'Mains', 'Name', 'Type')
        self.assertEqual(expected_columns, columns)

        # Check that data is correct
        expected_data = ('Job for test', 'job_id', False, False, 'lib:lib_id',
                         'main:main_id', 'pig-job', 'Pig')
        self.assertEqual(expected_data, data)

    def test_job_template_update_private_unprotected(self):
        arglist = ['pig-job', '--private', '--unprotected']
        verifylist = [('job_template', 'pig-job'), ('is_public', False),
                      ('is_protected', False)]

        parsed_args = self.check_parser(self.cmd, arglist, verifylist)
        self.cmd.take_action(parsed_args)

        # Check that correct arguments were passed
        self.job_mock.update.assert_called_once_with(
            'job_id', is_protected=False, is_public=False)

View File

@ -0,0 +1,148 @@
# Copyright (c) 2015 Mirantis Inc.
#
# Licensed under the Apache License, Version 2.0 (the "License");
# you may not use this file except in compliance with the License.
# You may obtain a copy of the License at
#
# http://www.apache.org/licenses/LICENSE-2.0
#
# Unless required by applicable law or agreed to in writing, software
# distributed under the License is distributed on an "AS IS" BASIS,
# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or
# implied.
# See the License for the specific language governing permissions and
# limitations under the License.
import mock
from saharaclient.api import job_types as api_jt
from saharaclient.api.v2 import job_templates as api_job_templates
from saharaclient.osc.v2 import job_types as osc_jt
from saharaclient.tests.unit.osc.v1 import test_job_types as tjt_v1
# Canned job-type payload: the 'Pig' type supported by two plugins with
# their respective versions; used as mocked manager return data.
JOB_TYPE_INFO = {
    "name": 'Pig',
    "plugins": [
        {
            'versions': {
                '0.1': {},
                '0.2': {}
            },
            'name': 'fake'
        },
        {
            'versions': {
                '6.2.2': {}
            },
            'name': 'wod'
        }
    ]
}
class TestJobTypes(tjt_v1.TestJobTypes):
    """Re-run the job-type OSC tests against data processing APIv2."""

    def setUp(self):
        super(TestJobTypes, self).setUp()
        # Force the plugin onto the APIv2 code path
        self.app.api_version['data_processing'] = '2'
        dp_client = self.app.client_manager.data_processing
        self.job_template_mock = dp_client.job_templates
        self.jt_mock = dp_client.job_types
        self.jt_mock.reset_mock()
        self.job_template_mock.reset_mock()
# NOTE(review): despite its name this class tests job-*type* listing, not
# job templates — consider renaming to TestListJobTypes.
class TestListJobTemplates(TestJobTypes):
    """Tests for listing job types under APIv2."""

    def setUp(self):
        super(TestListJobTemplates, self).setUp()
        self.jt_mock.list.return_value = [api_jt.JobType(None, JOB_TYPE_INFO)]

        # Command to test
        self.cmd = osc_jt.ListJobTypes(self.app, None)

    def test_job_types_list_no_options(self):
        arglist = []
        verifylist = []

        parsed_args = self.check_parser(self.cmd, arglist, verifylist)
        columns, data = self.cmd.take_action(parsed_args)

        # Check that columns are correct
        expected_columns = ['Name', 'Plugins']
        self.assertEqual(expected_columns, columns)

        # Check that data is correct; plugin versions are flattened into a
        # single display string
        expected_data = [('Pig', 'fake(0.1, 0.2), wod(6.2.2)')]
        self.assertEqual(expected_data, list(data))

    def test_job_types_list_extra_search_opts(self):
        arglist = ['--type', 'Pig', '--plugin', 'fake', '--plugin-version',
                   '0.1']
        verifylist = [('type', 'Pig'), ('plugin', 'fake'),
                      ('plugin_version', '0.1')]

        parsed_args = self.check_parser(self.cmd, arglist, verifylist)
        columns, data = self.cmd.take_action(parsed_args)

        # Check that columns are correct
        expected_columns = ['Name', 'Plugins']
        self.assertEqual(expected_columns, columns)

        # Check that data is correct
        expected_data = [('Pig', 'fake(0.1, 0.2), wod(6.2.2)')]
        self.assertEqual(expected_data, list(data))
class TestGetJobTypeConfigs(TestJobTypes):
    """Tests for dumping a job type's configs to a file under APIv2.

    In v2 the configs come from the job_templates manager rather than the
    jobs manager.
    """

    def setUp(self):
        super(TestGetJobTypeConfigs, self).setUp()
        self.job_template_mock.get_configs.return_value = (
            api_job_templates.JobTemplate(None, JOB_TYPE_INFO))

        # Command to test
        self.cmd = osc_jt.GetJobTypeConfigs(self.app, None)

    @mock.patch('oslo_serialization.jsonutils.dump')
    def test_get_job_type_configs_default_file(self, p_dump):
        """Without --file the job type name is used as the filename."""
        m_open = mock.mock_open()
        with mock.patch('six.moves.builtins.open', m_open, create=True):
            arglist = ['Pig']
            verifylist = [('job_type', 'Pig')]

            parsed_args = self.check_parser(self.cmd, arglist, verifylist)
            self.cmd.take_action(parsed_args)

            # Check that correct arguments was passed
            self.job_template_mock.get_configs.assert_called_once_with(
                'Pig')

            args_to_dump = p_dump.call_args[0]
            # Check that the right data will be saved
            self.assertEqual(JOB_TYPE_INFO, args_to_dump[0])
            # Check that data will be saved to the right file
            self.assertEqual('Pig', m_open.call_args[0][0])

    @mock.patch('oslo_serialization.jsonutils.dump')
    def test_get_job_type_configs_specified_file(self, p_dump):
        m_open = mock.mock_open()
        # NOTE(review): unlike the default-file test above this patch omits
        # create=True — harmless since builtins.open exists, but inconsistent
        with mock.patch('six.moves.builtins.open', m_open):
            arglist = ['Pig', '--file', 'testfile']
            verifylist = [('job_type', 'Pig'), ('file', 'testfile')]

            parsed_args = self.check_parser(self.cmd, arglist, verifylist)
            self.cmd.take_action(parsed_args)

            # Check that correct arguments was passed
            self.job_template_mock.get_configs.assert_called_once_with(
                'Pig')

            args_to_dump = p_dump.call_args[0]
            # Check that the right data will be saved
            self.assertEqual(JOB_TYPE_INFO, args_to_dump[0])
            # Check that data will be saved to the right file
            self.assertEqual('testfile', m_open.call_args[0][0])

View File

@ -0,0 +1,362 @@
# Copyright (c) 2018 Red Hat Inc.
#
# Licensed under the Apache License, Version 2.0 (the "License");
# you may not use this file except in compliance with the License.
# You may obtain a copy of the License at
#
# http://www.apache.org/licenses/LICENSE-2.0
#
# Unless required by applicable law or agreed to in writing, software
# distributed under the License is distributed on an "AS IS" BASIS,
# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or
# implied.
# See the License for the specific language governing permissions and
# limitations under the License.
import mock
from osc_lib.tests import utils as osc_utils
from saharaclient.api.v2 import jobs as api_j
from saharaclient.osc.v2 import jobs as osc_j
from saharaclient.tests.unit.osc.v1 import test_jobs as tj_v1
# Canned APIv2 job payload (v2 renames job executions to jobs and
# job_execution_id to job_template_id) returned by the mocked jobs manager.
JOB_INFO = {
    "is_public": False,
    "id": "j_id",
    "interface": [],
    "is_protected": False,
    "input_id": 'input_id',
    "output_id": 'output_id',
    "job_template_id": "job_template_id",
    "cluster_id": 'cluster_id',
    "start_time": "start",
    "end_time": "end",
    "engine_job_id": "engine_job_id",
    "info": {
        "status": 'SUCCEEDED'
    },
    "job_configs": {
        "configs": {
            "config1": "1",
            "config2": "2"
        },
        "args": [
            "arg1",
            "arg2"
        ],
        "params": {
            "param2": "value2",
            "param1": "value1"
        }
    }
}
class TestJobs(tj_v1.TestJobs):
    """Re-run the jobs OSC tests against data processing APIv2."""

    def setUp(self):
        super(TestJobs, self).setUp()
        # Force the plugin onto the APIv2 code path
        self.app.api_version['data_processing'] = '2'
        dp_client = self.app.client_manager.data_processing
        self.j_mock = dp_client.jobs
        self.j_mock.reset_mock()
class TestExecuteJob(TestJobs):
# TODO(apavlov): check for execution with --interface, --configs, --json
def setUp(self):
super(TestExecuteJob, self).setUp()
self.j_mock.create.return_value = api_j.Job(None, JOB_INFO)
self.ds_mock = self.app.client_manager.data_processing.data_sources
self.ds_mock.find_unique.return_value = mock.Mock(id='ds_id')
self.c_mock = self.app.client_manager.data_processing.clusters
self.c_mock.find_unique.return_value = mock.Mock(id='cluster_id')
self.jt_mock = self.app.client_manager.data_processing.job_templates
self.jt_mock.find_unique.return_value = mock.Mock(id='job_template_id')
self.ds_mock.reset_mock()
self.c_mock.reset_mock()
self.jt_mock.reset_mock()
# Command to test
self.cmd = osc_j.ExecuteJob(self.app, None)
def test_job_execute_minimum_options(self):
arglist = ['--job-template', 'job-template', '--cluster', 'cluster']
verifylist = [('job_template', 'job-template'), ('cluster', 'cluster')]
parsed_args = self.check_parser(self.cmd, arglist, verifylist)
self.cmd.take_action(parsed_args)
# Check that correct arguments were passed
self.j_mock.create.assert_called_once_with(
cluster_id='cluster_id', configs={}, input_id=None,
interface=None, is_protected=False, is_public=False,
job_template_id='job_template_id', output_id=None)
def test_job_execute_with_input_output_option(self):
arglist = ['--job-template', 'job-template', '--cluster', 'cluster',
'--input', 'input', '--output', 'output']
verifylist = [('job_template', 'job-template'), ('cluster', 'cluster'),
('input', 'input'), ('output', 'output')]
parsed_args = self.check_parser(self.cmd, arglist, verifylist)
self.cmd.take_action(parsed_args)
self.j_mock.create.assert_called_once_with(
cluster_id='cluster_id', configs={}, input_id='ds_id',
interface=None, is_protected=False, is_public=False,
job_template_id='job_template_id', output_id='ds_id')
# without option --output
arglist = ['--job-template', 'job-template', '--cluster', 'cluster',
'--input', 'input']
verifylist = [('job_template', 'job-template'), ('cluster', 'cluster'),
('input', 'input')]
parsed_args = self.check_parser(self.cmd, arglist, verifylist)
self.cmd.take_action(parsed_args)
self.j_mock.create.assert_called_with(
cluster_id='cluster_id', configs={}, input_id='ds_id',
interface=None, is_protected=False, is_public=False,
job_template_id='job_template_id', output_id=None)
# without options --output and --input
arglist = ['--job-template', 'job-template', '--cluster', 'cluster']
verifylist = [('job_template', 'job-template'), ('cluster', 'cluster')]
parsed_args = self.check_parser(self.cmd, arglist, verifylist)
self.cmd.take_action(parsed_args)
self.j_mock.create.assert_called_with(
cluster_id='cluster_id', configs={}, input_id=None,
interface=None, is_protected=False, is_public=False,
job_template_id='job_template_id', output_id=None)
def test_job_execute_all_options(self):
    """Every supported flag is forwarded to jobs.create correctly."""
    arglist = ['--job-template', 'job-template', '--cluster', 'cluster',
               '--input', 'input', '--output', 'output', '--params',
               'param1:value1', 'param2:value2', '--args', 'arg1', 'arg2',
               '--configs', 'config1:1', 'config2:2', '--public',
               '--protected']
    verifylist = [('job_template', 'job-template'), ('cluster', 'cluster'),
                  ('input', 'input'), ('output', 'output'),
                  ('params', ['param1:value1', 'param2:value2']),
                  ('args', ['arg1', 'arg2']),
                  ('configs', ['config1:1', 'config2:2']),
                  ('public', True),
                  ('protected', True)]
    parsed = self.check_parser(self.cmd, arglist, verifylist)
    cols, data = self.cmd.take_action(parsed)

    # params/configs strings are split into dicts, args stay a list.
    self.j_mock.create.assert_called_once_with(
        cluster_id='cluster_id',
        configs={'configs': {'config1': '1', 'config2': '2'},
                 'args': ['arg1', 'arg2'],
                 'params': {'param2': 'value2', 'param1': 'value1'}},
        input_id='ds_id', interface=None, is_protected=True,
        is_public=True, job_template_id='job_template_id',
        output_id='ds_id')

    # The show-style output mirrors the JOB_INFO fixture.
    self.assertEqual(
        ('Cluster id', 'End time', 'Engine job id', 'Id',
         'Input id', 'Is protected', 'Is public',
         'Job template id', 'Output id', 'Start time',
         'Status'), cols)
    self.assertEqual(
        ('cluster_id', 'end', 'engine_job_id', 'j_id',
         'input_id', False, False, 'job_template_id',
         'output_id', 'start', 'SUCCEEDED'), data)
class TestListJobs(TestJobs):
    """Tests for the ``dataprocessing job list`` command (APIv2)."""

    def setUp(self):
        super(TestListJobs, self).setUp()
        self.j_mock.list.return_value = [api_j.Job(None, JOB_INFO)]
        # Command to test
        self.cmd = osc_j.ListJobs(self.app, None)

    def test_jobs_list_no_options(self):
        parsed = self.check_parser(self.cmd, [], [])
        cols, rows = self.cmd.take_action(parsed)
        # Default view shows only the short column set.
        self.assertEqual(
            ['Id', 'Cluster id', 'Job template id', 'Status'], cols)
        self.assertEqual(
            [('j_id', 'cluster_id', 'job_template_id', 'SUCCEEDED')],
            list(rows))

    def test_jobs_list_long(self):
        parsed = self.check_parser(self.cmd, ['--long'], [('long', True)])
        cols, rows = self.cmd.take_action(parsed)
        # --long appends the start/end time columns.
        self.assertEqual(
            ['Id', 'Cluster id', 'Job template id', 'Status',
             'Start time', 'End time'], cols)
        self.assertEqual(
            [('j_id', 'cluster_id', 'job_template_id', 'SUCCEEDED',
              'start', 'end')], list(rows))

    def test_jobs_list_extra_search_opts(self):
        parsed = self.check_parser(self.cmd, ['--status', 'succeeded'],
                                   [('status', 'succeeded')])
        cols, rows = self.cmd.take_action(parsed)
        # Filtering by status keeps the short column set.
        self.assertEqual(
            ['Id', 'Cluster id', 'Job template id', 'Status'], cols)
        self.assertEqual(
            [('j_id', 'cluster_id', 'job_template_id', 'SUCCEEDED')],
            list(rows))
class TestShowJob(TestJobs):
    """Tests for the ``dataprocessing job show`` command (APIv2)."""

    def setUp(self):
        super(TestShowJob, self).setUp()
        self.j_mock.get.return_value = api_j.Job(None, JOB_INFO)
        # Command to test
        self.cmd = osc_j.ShowJob(self.app, None)

    def test_job_show(self):
        parsed = self.check_parser(self.cmd, ['job_id'],
                                   [('job', 'job_id')])
        cols, data = self.cmd.take_action(parsed)
        # The job id from the command line goes straight to the client.
        self.j_mock.get.assert_called_once_with('job_id')
        # Columns and values mirror the JOB_INFO fixture.
        self.assertEqual(
            ('Cluster id', 'End time', 'Engine job id', 'Id',
             'Input id', 'Is protected', 'Is public',
             'Job template id', 'Output id', 'Start time',
             'Status'), cols)
        self.assertEqual(
            ('cluster_id', 'end', 'engine_job_id', 'j_id',
             'input_id', False, False, 'job_template_id',
             'output_id', 'start', 'SUCCEEDED'), data)
class TestDeleteJob(TestJobs):
    """Tests for the ``dataprocessing job delete`` command (APIv2)."""

    def setUp(self):
        super(TestDeleteJob, self).setUp()
        self.j_mock.get.return_value = api_j.Job(None, JOB_INFO)
        # Command to test
        self.cmd = osc_j.DeleteJob(self.app, None)

    def test_job_delete(self):
        parsed = self.check_parser(self.cmd, ['job_id'],
                                   [('job', ['job_id'])])
        self.cmd.take_action(parsed)
        # Deletion is delegated to the client with the given job id.
        self.j_mock.delete.assert_called_once_with('job_id')
class TestUpdateJob(TestJobs):
    """Tests for the ``dataprocessing job update`` command (APIv2)."""

    def setUp(self):
        super(TestUpdateJob, self).setUp()
        self.j_mock.get.return_value = api_j.Job(None, JOB_INFO)
        self.j_mock.update.return_value = mock.Mock(job=JOB_INFO.copy())
        # Command to test
        self.cmd = osc_j.UpdateJob(self.app, None)

    def test_job_update_no_options(self):
        # A job id is mandatory; parsing must fail without one.
        self.assertRaises(osc_utils.ParserException, self.check_parser,
                          self.cmd, [], [])

    def test_job_update_nothing_updated(self):
        parsed = self.check_parser(self.cmd, ['job_id'],
                                   [('job', 'job_id')])
        self.cmd.take_action(parsed)
        # No flags given: update is called with the id alone.
        self.j_mock.update.assert_called_once_with('job_id')

    def test_job_update_public_protected(self):
        parsed = self.check_parser(
            self.cmd, ['job_id', '--public', '--protected'],
            [('job', 'job_id'), ('is_public', True),
             ('is_protected', True)])
        cols, data = self.cmd.take_action(parsed)
        # Both visibility flags are forwarded to the client.
        self.j_mock.update.assert_called_once_with(
            'job_id', is_protected=True, is_public=True)
        # The show-style output mirrors the JOB_INFO fixture.
        self.assertEqual(
            ('Cluster id', 'End time', 'Engine job id', 'Id',
             'Input id', 'Is protected', 'Is public',
             'Job template id', 'Output id', 'Start time',
             'Status'), cols)
        self.assertEqual(
            ('cluster_id', 'end', 'engine_job_id', 'j_id',
             'input_id', False, False, 'job_template_id',
             'output_id', 'start', 'SUCCEEDED'), data)

    def test_job_update_private_unprotected(self):
        parsed = self.check_parser(
            self.cmd, ['job_id', '--private', '--unprotected'],
            [('job', 'job_id'), ('is_public', False),
             ('is_protected', False)])
        self.cmd.take_action(parsed)
        # The negative flags map to explicit False values.
        self.j_mock.update.assert_called_once_with(
            'job_id', is_protected=False, is_public=False)

View File

@ -57,10 +57,10 @@ NGT_INFO = {
class TestNodeGroupTemplates(fakes.TestDataProcessing):
def setUp(self):
super(TestNodeGroupTemplates, self).setUp()
self.app.api_version['data_processing'] = '2'
self.ngt_mock = (
self.app.client_manager.data_processing.node_group_templates)
self.ngt_mock.reset_mock()
self.app.api_version['data_processing'] = '2'
class TestCreateNodeGroupTemplate(TestNodeGroupTemplates):

View File

@ -0,0 +1,233 @@
# Copyright (c) 2015 Mirantis Inc.
#
# Licensed under the Apache License, Version 2.0 (the "License");
# you may not use this file except in compliance with the License.
# You may obtain a copy of the License at
#
# http://www.apache.org/licenses/LICENSE-2.0
#
# Unless required by applicable law or agreed to in writing, software
# distributed under the License is distributed on an "AS IS" BASIS,
# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or
# implied.
# See the License for the specific language governing permissions and
# limitations under the License.
import mock
from oslo_serialization import jsonutils as json
from saharaclient.api import plugins as api_plugins
from saharaclient.osc.v1 import plugins as osc_plugins
from saharaclient.tests.unit.osc.v1 import fakes
# Canned plugin payload shared by all plugin-command tests below.
# Key order is preserved from the original definition.
PLUGIN_INFO = {
    'name': 'fake',
    'title': 'Fake Plugin',
    'versions': ['0.1', '0.2'],
    'description': 'Plugin for tests',
    'required_image_tags': ['fake', '0.1'],
    'node_processes': {
        'HDFS': ['datanode', 'namenode'],
        'MapReduce': ['jobtracker', 'tasktracker'],
    },
    'plugin_labels': {'enabled': {'status': True}},
    'version_labels': {'0.1': {'enabled': {'status': True}}},
}
class TestPlugins(fakes.TestDataProcessing):
    """Shared fixture for the APIv2 plugin command tests."""

    def setUp(self):
        super(TestPlugins, self).setUp()
        # Force the APIv2 code paths in the commands under test.
        self.app.api_version['data_processing'] = '2'
        self.plugins_mock = self.app.client_manager.data_processing.plugins
        self.plugins_mock.reset_mock()
class TestListPlugins(TestPlugins):
    """Tests for the ``dataprocessing plugin list`` command (APIv2)."""

    def setUp(self):
        super(TestListPlugins, self).setUp()
        self.plugins_mock.list.return_value = [api_plugins.Plugin(
            None, PLUGIN_INFO)]
        # Command to test
        self.cmd = osc_plugins.ListPlugins(self.app, None)

    def test_plugins_list_no_options(self):
        parsed = self.check_parser(self.cmd, [], [])
        cols, rows = self.cmd.take_action(parsed)
        # Short view: name plus the comma-joined version list.
        self.assertEqual(['Name', 'Versions'], cols)
        self.assertEqual([('fake', '0.1, 0.2')], list(rows))

    def test_plugins_list_long(self):
        parsed = self.check_parser(self.cmd, ['--long'], [('long', True)])
        cols, rows = self.cmd.take_action(parsed)
        # --long adds the title and description columns.
        self.assertEqual(['Name', 'Title', 'Versions', 'Description'],
                         cols)
        self.assertEqual([('fake', 'Fake Plugin', '0.1, 0.2',
                           'Plugin for tests')], list(rows))
class TestShowPlugin(TestPlugins):
    """Tests for the ``dataprocessing plugin show`` command (APIv2)."""

    def setUp(self):
        super(TestShowPlugin, self).setUp()
        # Both client calls used by the command return the same fixture.
        self.plugins_mock.get.return_value = api_plugins.Plugin(
            None, PLUGIN_INFO)
        self.plugins_mock.get_version_details.return_value = (
            api_plugins.Plugin(None, PLUGIN_INFO))
        # Command to test
        self.cmd = osc_plugins.ShowPlugin(self.app, None)

    def test_plugin_show(self):
        """Plugin-level show uses plugins.get and label columns."""
        arglist = ['fake']
        verifylist = [('plugin', 'fake')]
        parsed_args = self.check_parser(self.cmd, arglist, verifylist)
        columns, data = self.cmd.take_action(parsed_args)
        # Check that correct arguments were passed
        self.plugins_mock.get.assert_called_once_with('fake')
        # Check that columns are correct
        expected_columns = ('Description', 'Name', 'Title', 'Versions', '',
                            'Plugin version 0.1: enabled', 'Plugin: enabled')
        self.assertEqual(expected_columns, columns)
        # Check that data is correct
        expected_data = ('Plugin for tests', 'fake', 'Fake Plugin',
                         '0.1, 0.2', '', True, True)
        self.assertEqual(expected_data, data)

    def test_plugin_version_show(self):
        """Version-level show calls get_version_details, not get."""
        arglist = ['fake', '--plugin-version', '0.1']
        verifylist = [('plugin', 'fake'), ('plugin_version', '0.1')]
        parsed_args = self.check_parser(self.cmd, arglist, verifylist)
        columns, data = self.cmd.take_action(parsed_args)
        # Check that correct arguments were passed
        self.plugins_mock.get_version_details.assert_called_once_with(
            'fake', '0.1')
        # Check that columns are correct
        expected_columns = ('Description', 'Name', 'Required image tags',
                            'Title', '', 'Plugin version 0.1: enabled',
                            'Plugin: enabled', '', 'Service:', '', 'HDFS',
                            'MapReduce')
        self.assertEqual(expected_columns, columns)
        # Check that data is correct
        expected_data = ('Plugin for tests', 'fake', '0.1, fake',
                         'Fake Plugin', '', True, True, '',
                         'Available processes:', '',
                         'datanode, namenode', 'jobtracker, tasktracker')
        self.assertEqual(expected_data, data)
class TestGetPluginConfigs(TestPlugins):
    """Tests for the ``dataprocessing plugin configs get`` command (APIv2)."""

    def setUp(self):
        super(TestGetPluginConfigs, self).setUp()
        self.plugins_mock.get_version_details.return_value = (
            api_plugins.Plugin(None, PLUGIN_INFO))
        # Command to test
        self.cmd = osc_plugins.GetPluginConfigs(self.app, None)

    @mock.patch('oslo_serialization.jsonutils.dump')
    def test_get_plugin_configs_default_file(self, p_dump):
        """Without --file the output name defaults to <plugin>-<version>."""
        # mock_open captures the file handle; json.dump is patched so we
        # can inspect what would have been written without touching disk.
        m_open = mock.mock_open()
        with mock.patch('six.moves.builtins.open', m_open, create=True):
            arglist = ['fake', '0.1']
            verifylist = [('plugin', 'fake'), ('plugin_version', '0.1')]
            parsed_args = self.check_parser(self.cmd, arglist, verifylist)
            self.cmd.take_action(parsed_args)

            # Check that correct arguments were passed
            self.plugins_mock.get_version_details.assert_called_once_with(
                'fake', '0.1')

            args_to_dump = p_dump.call_args[0]
            # Check that the right data will be saved
            self.assertEqual(PLUGIN_INFO, args_to_dump[0])
            # Check that data will be saved to the right file
            self.assertEqual('fake-0.1', m_open.call_args[0][0])

    @mock.patch('oslo_serialization.jsonutils.dump')
    def test_get_plugin_configs_specified_file(self, p_dump):
        """--file overrides the default output file name."""
        m_open = mock.mock_open()
        with mock.patch('six.moves.builtins.open', m_open):
            arglist = ['fake', '0.1', '--file', 'testfile']
            verifylist = [('plugin', 'fake'), ('plugin_version', '0.1'),
                          ('file', 'testfile')]
            parsed_args = self.check_parser(self.cmd, arglist, verifylist)
            self.cmd.take_action(parsed_args)

            # Check that correct arguments were passed
            self.plugins_mock.get_version_details.assert_called_once_with(
                'fake', '0.1')

            args_to_dump = p_dump.call_args[0]
            # Check that the right data will be saved
            self.assertEqual(PLUGIN_INFO, args_to_dump[0])
            # Check that data will be saved to the right file
            self.assertEqual('testfile', m_open.call_args[0][0])
class TestUpdatePlugin(TestPlugins):
    """Tests for the ``dataprocessing plugin update`` command (APIv2)."""

    def setUp(self):
        super(TestUpdatePlugin, self).setUp()
        self.plugins_mock.update.return_value = api_plugins.Plugin(
            None, PLUGIN_INFO)
        # Command to test
        self.cmd = osc_plugins.UpdatePlugin(self.app, None)

    @mock.patch('osc_lib.utils.read_blob_file_contents')
    def test_plugin_update(self, read):
        labels_update = {'plugin_labels': {'enabled': {'status': True}}}
        read.return_value = json.dumps(labels_update)
        parsed = self.check_parser(
            self.cmd, ['fake', 'update.json'],
            [('plugin', 'fake'), ('json', 'update.json')])
        cols, data = self.cmd.take_action(parsed)
        # The JSON file content is decoded and sent as the update body.
        self.plugins_mock.update.assert_called_once_with(
            'fake', {'plugin_labels': {'enabled': {'status': True}}})
        # The show-style output mirrors the PLUGIN_INFO fixture.
        self.assertEqual(
            ('Description', 'Name', 'Title', 'Versions', '',
             'Plugin version 0.1: enabled', 'Plugin: enabled'), cols)
        self.assertEqual(
            ('Plugin for tests', 'fake', 'Fake Plugin',
             '0.1, 0.2', '', True, True), data)

View File

@ -105,5 +105,62 @@ openstack.data_processing.v2 =
dataprocessing_node_group_template_import = saharaclient.osc.v2.node_group_templates:ImportNodeGroupTemplate
dataprocessing_node_group_template_export = saharaclient.osc.v2.node_group_templates:ExportNodeGroupTemplate
dataprocessing_plugin_list = saharaclient.osc.v2.plugins:ListPlugins
dataprocessing_plugin_show = saharaclient.osc.v2.plugins:ShowPlugin
dataprocessing_plugin_configs_get = saharaclient.osc.v2.plugins:GetPluginConfigs
dataprocessing_plugin_update = saharaclient.osc.v2.plugins:UpdatePlugin
dataprocessing_data_source_create = saharaclient.osc.v2.data_sources:CreateDataSource
dataprocessing_data_source_list = saharaclient.osc.v2.data_sources:ListDataSources
dataprocessing_data_source_show = saharaclient.osc.v2.data_sources:ShowDataSource
dataprocessing_data_source_delete = saharaclient.osc.v2.data_sources:DeleteDataSource
dataprocessing_data_source_update = saharaclient.osc.v2.data_sources:UpdateDataSource
dataprocessing_image_list = saharaclient.osc.v2.images:ListImages
dataprocessing_image_show = saharaclient.osc.v2.images:ShowImage
dataprocessing_image_register = saharaclient.osc.v2.images:RegisterImage
dataprocessing_image_unregister = saharaclient.osc.v2.images:UnregisterImage
dataprocessing_image_tags_add = saharaclient.osc.v2.images:AddImageTags
dataprocessing_image_tags_remove = saharaclient.osc.v2.images:RemoveImageTags
dataprocessing_image_tags_set = saharaclient.osc.v2.images:SetImageTags
dataprocessing_cluster_template_create = saharaclient.osc.v2.cluster_templates:CreateClusterTemplate
dataprocessing_cluster_template_list = saharaclient.osc.v2.cluster_templates:ListClusterTemplates
dataprocessing_cluster_template_show = saharaclient.osc.v2.cluster_templates:ShowClusterTemplate
dataprocessing_cluster_template_update = saharaclient.osc.v2.cluster_templates:UpdateClusterTemplate
dataprocessing_cluster_template_delete = saharaclient.osc.v2.cluster_templates:DeleteClusterTemplate
dataprocessing_cluster_template_import = saharaclient.osc.v2.cluster_templates:ImportClusterTemplate
dataprocessing_cluster_template_export = saharaclient.osc.v2.cluster_templates:ExportClusterTemplate
dataprocessing_cluster_create = saharaclient.osc.v2.clusters:CreateCluster
dataprocessing_cluster_list = saharaclient.osc.v2.clusters:ListClusters
dataprocessing_cluster_show = saharaclient.osc.v2.clusters:ShowCluster
dataprocessing_cluster_update = saharaclient.osc.v2.clusters:UpdateCluster
dataprocessing_cluster_delete = saharaclient.osc.v2.clusters:DeleteCluster
dataprocessing_cluster_scale = saharaclient.osc.v2.clusters:ScaleCluster
dataprocessing_cluster_verification = saharaclient.osc.v2.clusters:VerificationUpdateCluster
dataprocessing_job_template_create = saharaclient.osc.v2.job_templates:CreateJobTemplate
dataprocessing_job_template_list = saharaclient.osc.v2.job_templates:ListJobTemplates
dataprocessing_job_template_show = saharaclient.osc.v2.job_templates:ShowJobTemplate
dataprocessing_job_template_update = saharaclient.osc.v2.job_templates:UpdateJobTemplate
dataprocessing_job_template_delete = saharaclient.osc.v2.job_templates:DeleteJobTemplate
dataprocessing_job_type_list = saharaclient.osc.v2.job_types:ListJobTypes
dataprocessing_job_type_configs_get = saharaclient.osc.v2.job_types:GetJobTypeConfigs
dataprocessing_job_execute = saharaclient.osc.v2.jobs:ExecuteJob
dataprocessing_job_list = saharaclient.osc.v2.jobs:ListJobs
dataprocessing_job_show = saharaclient.osc.v2.jobs:ShowJob
dataprocessing_job_update = saharaclient.osc.v2.jobs:UpdateJob
dataprocessing_job_delete = saharaclient.osc.v2.jobs:DeleteJob
dataprocessing_job_binary_create = saharaclient.osc.v2.job_binaries:CreateJobBinary
dataprocessing_job_binary_list = saharaclient.osc.v2.job_binaries:ListJobBinaries
dataprocessing_job_binary_show = saharaclient.osc.v2.job_binaries:ShowJobBinary
dataprocessing_job_binary_update = saharaclient.osc.v2.job_binaries:UpdateJobBinary
dataprocessing_job_binary_delete = saharaclient.osc.v2.job_binaries:DeleteJobBinary
dataprocessing_job_binary_download = saharaclient.osc.v2.job_binaries:DownloadJobBinary
[wheel]
universal = 1