+ {% blocktrans %}The Cluster Template object should specify Node Group Templates that will be used to build a Cluster.
+ You can add Node Groups using Node Group Templates on a "Node Groups" tab.{% endblocktrans %}
+
+
+ {% blocktrans %}You may set cluster scoped configurations on corresponding tabs.{% endblocktrans %}
+
+
+ {% blocktrans %}The Cluster Template object may specify a list of processes in anti-affinity group.
+ That means these processes may not be launched more than once on a single host.{% endblocktrans %}
+
+ {% blocktrans %}Image Registry is used to provide additional information about images for Data Processing.{% endblocktrans %}
+
+
+ {% blocktrans %}Specified User Name will be used by Data Processing to apply configs and manage processes on instances.{% endblocktrans %}
+
+
+ {% blocktrans %}Tags are used for filtering images suitable for each plugin and each Data Processing version.
+ To add required tags, select a plugin and a Data Processing version and click "Add plugin tags" button.{% endblocktrans %}
+
+
+ {% blocktrans %}You may also add any custom tag.{% endblocktrans %}
+
+
+ {% blocktrans %}Unnecessary tags may be removed by clicking a cross near tag's name.{% endblocktrans %}
+
+
\ No newline at end of file
diff --git a/sahara_dashboard/content/data_processing/data_image_registry/templates/data_processing.data_image_registry/_list_tags.html b/sahara_dashboard/content/data_processing/data_image_registry/templates/data_processing.data_image_registry/_list_tags.html
new file mode 100644
index 00000000..4359c9d7
--- /dev/null
+++ b/sahara_dashboard/content/data_processing/data_image_registry/templates/data_processing.data_image_registry/_list_tags.html
@@ -0,0 +1,5 @@
+
+ {% for tag in image.tags %}
+
{{ tag }}
+ {% endfor %}
+
\ No newline at end of file
diff --git a/sahara_dashboard/content/data_processing/data_image_registry/templates/data_processing.data_image_registry/_register_image.html b/sahara_dashboard/content/data_processing/data_image_registry/templates/data_processing.data_image_registry/_register_image.html
new file mode 100644
index 00000000..70460f4b
--- /dev/null
+++ b/sahara_dashboard/content/data_processing/data_image_registry/templates/data_processing.data_image_registry/_register_image.html
@@ -0,0 +1,26 @@
+{% extends "horizon/common/_modal_form.html" %}
+
+
+{% load i18n %}
+
+{% block form_id %}register_image_form{% endblock %}
+{% block form_action %}{% url 'horizon:project:data_processing.data_image_registry:register' %}{% endblock %}
+
+{% block modal-header %}{% trans "Register Image" %}{% endblock %}
+
+{% block modal-body %}
+
+
+
+
+ {% include 'project/data_processing.data_image_registry/_help.html' %}
+
{% trans "Register tags required for the Plugin with specified Data Processing Version" %}
+
+ {% trans "Plugin" %}
+ {% trans "Version" %}
+
+
+
+
+
+
+ {% for plugin, version_dict in plugins.items %}
+
+
+
+ {% endfor %}
+
+
+
+
+
+
+
+
+
+
+
+
+
+
+
+
+
diff --git a/sahara_dashboard/content/data_processing/data_image_registry/templates/data_processing.data_image_registry/edit_tags.html b/sahara_dashboard/content/data_processing/data_image_registry/templates/data_processing.data_image_registry/edit_tags.html
new file mode 100644
index 00000000..28b616ee
--- /dev/null
+++ b/sahara_dashboard/content/data_processing/data_image_registry/templates/data_processing.data_image_registry/edit_tags.html
@@ -0,0 +1,7 @@
+{% extends 'base.html' %}
+{% load i18n %}
+{% block title %}{% trans "Edit Image Tags" %}{% endblock %}
+
+{% block main %}
+ {% include 'project/data_processing.data_image_registry/_edit_tags.html' %}
+{% endblock %}
diff --git a/sahara_dashboard/content/data_processing/data_image_registry/templates/data_processing.data_image_registry/image_registry.html b/sahara_dashboard/content/data_processing/data_image_registry/templates/data_processing.data_image_registry/image_registry.html
new file mode 100644
index 00000000..5d0acc74
--- /dev/null
+++ b/sahara_dashboard/content/data_processing/data_image_registry/templates/data_processing.data_image_registry/image_registry.html
@@ -0,0 +1,24 @@
+{% extends 'base.html' %}
+{% load i18n %}
+{% block title %}{% trans "Data Processing" %}{% endblock %}
+
+{% block main %}
+
+
+ {{ image_registry_table.render }}
+
+
+
+{% endblock %}
diff --git a/sahara_dashboard/content/data_processing/data_image_registry/templates/data_processing.data_image_registry/register_image.html b/sahara_dashboard/content/data_processing/data_image_registry/templates/data_processing.data_image_registry/register_image.html
new file mode 100644
index 00000000..c78d065c
--- /dev/null
+++ b/sahara_dashboard/content/data_processing/data_image_registry/templates/data_processing.data_image_registry/register_image.html
@@ -0,0 +1,7 @@
+{% extends 'base.html' %}
+{% load i18n %}
+{% block title %}{% trans "Register Image" %}{% endblock %}
+
+{% block main %}
+ {% include 'project/data_processing.data_image_registry/_register_image.html' %}
+{% endblock %}
diff --git a/sahara_dashboard/content/data_processing/data_image_registry/tests.py b/sahara_dashboard/content/data_processing/data_image_registry/tests.py
new file mode 100644
index 00000000..a0b40f82
--- /dev/null
+++ b/sahara_dashboard/content/data_processing/data_image_registry/tests.py
@@ -0,0 +1,131 @@
+# Licensed under the Apache License, Version 2.0 (the "License"); you may
+# not use this file except in compliance with the License. You may obtain
+# a copy of the License at
+#
+# http://www.apache.org/licenses/LICENSE-2.0
+#
+# Unless required by applicable law or agreed to in writing, software
+# distributed under the License is distributed on an "AS IS" BASIS, WITHOUT
+# WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. See the
+# License for the specific language governing permissions and limitations
+# under the License.
+
+from django.core.urlresolvers import reverse
+from django import http
+
+from mox3.mox import IsA # noqa
+
+from openstack_dashboard import api as dash_api
+from openstack_dashboard.test import helpers as test
+
+from sahara_dashboard import api
+
+INDEX_URL = reverse(
+ 'horizon:project:data_processing.data_image_registry:index')
+REGISTER_URL = reverse(
+ 'horizon:project:data_processing.data_image_registry:register')
+
+
+class DataProcessingImageRegistryTests(test.TestCase):
+ @test.create_stubs({api.sahara: ('image_list',)})
+ def test_index(self):
+ api.sahara.image_list(IsA(http.HttpRequest)) \
+ .AndReturn(self.images.list())
+ self.mox.ReplayAll()
+
+ res = self.client.get(INDEX_URL)
+
+ self.assertTemplateUsed(
+ res,
+ 'project/data_processing.data_image_registry/image_registry.html')
+ self.assertContains(res, 'Image Registry')
+ self.assertContains(res, 'Image')
+ self.assertContains(res, 'Tags')
+
+ @test.create_stubs({api.sahara: ('image_get',
+ 'image_update',
+ 'image_tags_update',
+ 'image_list'),
+ dash_api.glance: ('image_list_detailed',)})
+ def test_register(self):
+ image = self.images.first()
+ image_id = image.id
+ test_username = 'myusername'
+ test_description = 'mydescription'
+ api.sahara.image_get(IsA(http.HttpRequest),
+ image_id).MultipleTimes().AndReturn(image)
+ dash_api.glance.image_list_detailed(IsA(http.HttpRequest),
+ filters={'owner': self.user.id,
+ 'status': 'active'}) \
+ .AndReturn((self.images.list(), False, False))
+ api.sahara.image_update(IsA(http.HttpRequest),
+ image_id,
+ test_username,
+ test_description) \
+ .AndReturn(True)
+ api.sahara.image_tags_update(IsA(http.HttpRequest),
+ image_id,
+ {}) \
+ .AndReturn(True)
+ api.sahara.image_list(IsA(http.HttpRequest)) \
+ .AndReturn([])
+ self.mox.ReplayAll()
+
+ res = self.client.post(
+ REGISTER_URL,
+ {'image_id': image_id,
+ 'user_name': test_username,
+ 'description': test_description,
+ 'tags_list': '{}'})
+
+ self.assertNoFormErrors(res)
+ self.assertRedirectsNoFollow(res, INDEX_URL)
+ self.assertMessageCount(success=1)
+
+ @test.create_stubs({api.sahara: ('image_list',
+ 'image_unregister')})
+ def test_unregister(self):
+ image = self.images.first()
+ api.sahara.image_list(IsA(http.HttpRequest)) \
+ .AndReturn(self.images.list())
+ api.sahara.image_unregister(IsA(http.HttpRequest), image.id)
+ self.mox.ReplayAll()
+
+ form_data = {'action': 'image_registry__delete__%s' % image.id}
+ res = self.client.post(INDEX_URL, form_data)
+
+ self.assertNoFormErrors(res)
+ self.assertRedirectsNoFollow(res, INDEX_URL)
+ self.assertMessageCount(success=1)
+
+ @test.create_stubs({api.sahara: ('image_get',
+ 'image_update',
+ 'image_tags_update')})
+ def test_edit_tags(self):
+ image = self.registered_images.first()
+ api.sahara.image_get(IsA(http.HttpRequest),
+ image.id).MultipleTimes().AndReturn(image)
+ api.sahara.image_update(IsA(http.HttpRequest),
+ image.id,
+ image.username,
+ image.description) \
+ .AndReturn(True)
+ api.sahara.image_tags_update(IsA(http.HttpRequest),
+ image.id,
+ {"0": "mytag"}) \
+ .AndReturn(True)
+ self.mox.ReplayAll()
+
+ edit_tags_url = reverse(
+ 'horizon:project:data_processing.data_image_registry:edit_tags',
+ args=[image.id])
+ res = self.client.post(
+ edit_tags_url,
+ {'image_id': image.id,
+ 'user_name': image.username,
+ 'description': image.description,
+ 'tags_list': '{"0": "mytag"}'})
+
+ self.assertNoFormErrors(res)
+ self.assertRedirectsNoFollow(res, INDEX_URL)
+ self.assertMessageCount(success=1)
diff --git a/sahara_dashboard/content/data_processing/data_image_registry/urls.py b/sahara_dashboard/content/data_processing/data_image_registry/urls.py
new file mode 100644
index 00000000..5f62977c
--- /dev/null
+++ b/sahara_dashboard/content/data_processing/data_image_registry/urls.py
@@ -0,0 +1,33 @@
+# Licensed under the Apache License, Version 2.0 (the "License");
+# you may not use this file except in compliance with the License.
+# You may obtain a copy of the License at
+#
+# http://www.apache.org/licenses/LICENSE-2.0
+#
+# Unless required by applicable law or agreed to in writing, software
+# distributed under the License is distributed on an "AS IS" BASIS,
+# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or
+# implied.
+# See the License for the specific language governing permissions and
+# limitations under the License.
+
+
+from django.conf.urls import patterns
+from django.conf.urls import url
+
+import sahara_dashboard.content. \
+ data_processing.data_image_registry.views as views
+
+
+urlpatterns = patterns('',
+ url(r'^$', views.ImageRegistryView.as_view(),
+ name='index'),
+ url(r'^$', views.ImageRegistryView.as_view(),
+ name='image_registry'),
+ url(r'^edit_tags/(?P<image_id>[^/]+)/$',
+ views.EditTagsView.as_view(),
+ name='edit_tags'),
+ url(r'^register/$',
+ views.RegisterImageView.as_view(),
+ name='register'),
+ )
diff --git a/sahara_dashboard/content/data_processing/data_image_registry/views.py b/sahara_dashboard/content/data_processing/data_image_registry/views.py
new file mode 100644
index 00000000..2d26a022
--- /dev/null
+++ b/sahara_dashboard/content/data_processing/data_image_registry/views.py
@@ -0,0 +1,129 @@
+# Licensed under the Apache License, Version 2.0 (the "License");
+# you may not use this file except in compliance with the License.
+# You may obtain a copy of the License at
+#
+# http://www.apache.org/licenses/LICENSE-2.0
+#
+# Unless required by applicable law or agreed to in writing, software
+# distributed under the License is distributed on an "AS IS" BASIS,
+# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or
+# implied.
+# See the License for the specific language governing permissions and
+# limitations under the License.
+
+import json
+import logging
+
+from django.core.urlresolvers import reverse_lazy
+from django.utils.translation import ugettext_lazy as _
+
+from horizon import exceptions
+from horizon import forms
+from horizon import tables
+from horizon.utils import memoized
+
+from sahara_dashboard.api import sahara as saharaclient
+from sahara_dashboard.content. \
+ data_processing.data_image_registry.forms import EditTagsForm
+from sahara_dashboard.content. \
+ data_processing.data_image_registry.forms import RegisterImageForm
+from sahara_dashboard.content. \
+ data_processing.data_image_registry.tables import ImageRegistryTable
+
+
+LOG = logging.getLogger(__name__)
+
+
+class ImageRegistryView(tables.DataTableView):
+ table_class = ImageRegistryTable
+ template_name = (
+ 'project/data_processing.data_image_registry/image_registry.html')
+ page_title = _("Image Registry")
+
+ def get_data(self):
+ try:
+ images = saharaclient.image_list(self.request)
+ except Exception:
+ images = []
+ msg = _('Unable to retrieve image list')
+ exceptions.handle(self.request, msg)
+ return images
+
+
+def update_context_with_plugin_tags(request, context):
+ try:
+ plugins = saharaclient.plugin_list(request)
+ except Exception:
+ plugins = []
+ msg = _("Unable to process plugin tags")
+ exceptions.handle(request, msg)
+
+ plugins_object = dict()
+ for plugin in plugins:
+ plugins_object[plugin.name] = dict()
+ for version in plugin.versions:
+ try:
+ details = saharaclient. \
+ plugin_get_version_details(request,
+ plugin.name,
+ version)
+ plugins_object[plugin.name][version] = (
+ details.required_image_tags)
+ except Exception:
+ msg = _("Unable to process plugin tags")
+ exceptions.handle(request, msg)
+
+ context["plugins"] = plugins_object
+
+
+class EditTagsView(forms.ModalFormView):
+ form_class = EditTagsForm
+ template_name = (
+ 'project/data_processing.data_image_registry/edit_tags.html')
+ success_url = reverse_lazy(
+ 'horizon:project:data_processing.data_image_registry:index')
+ page_title = _("Edit Image Tags")
+
+ def get_context_data(self, **kwargs):
+ context = super(EditTagsView, self).get_context_data(**kwargs)
+ context['image'] = self.get_object()
+ update_context_with_plugin_tags(self.request, context)
+ return context
+
+ @memoized.memoized_method
+ def get_object(self):
+ try:
+ image = saharaclient.image_get(self.request,
+ self.kwargs["image_id"])
+ except Exception:
+ image = None
+ msg = _("Unable to fetch the image details")
+ exceptions.handle(self.request, msg)
+ return image
+
+ def get_initial(self):
+ image = self.get_object()
+
+ return {"image_id": image.id,
+ "tags_list": json.dumps(image.tags),
+ "user_name": image.username,
+ "description": image.description}
+
+
+class RegisterImageView(forms.ModalFormView):
+ form_class = RegisterImageForm
+ template_name = (
+ 'project/data_processing.data_image_registry/register_image.html')
+ success_url = reverse_lazy(
+ 'horizon:project:data_processing.data_image_registry:index')
+ page_title = _("Register Image")
+
+ def get_context_data(self, **kwargs):
+ context = super(RegisterImageView, self).get_context_data(**kwargs)
+ update_context_with_plugin_tags(self.request, context)
+ return context
+
+ def get_initial(self):
+ # need this initialization to allow registration
+ # of images without tags
+ return {"tags_list": json.dumps([])}
diff --git a/sahara_dashboard/content/data_processing/data_plugins/__init__.py b/sahara_dashboard/content/data_processing/data_plugins/__init__.py
new file mode 100644
index 00000000..e69de29b
diff --git a/sahara_dashboard/content/data_processing/data_plugins/panel.py b/sahara_dashboard/content/data_processing/data_plugins/panel.py
new file mode 100644
index 00000000..72cf7f61
--- /dev/null
+++ b/sahara_dashboard/content/data_processing/data_plugins/panel.py
@@ -0,0 +1,28 @@
+# Licensed under the Apache License, Version 2.0 (the "License");
+# you may not use this file except in compliance with the License.
+# You may obtain a copy of the License at
+#
+# http://www.apache.org/licenses/LICENSE-2.0
+#
+# Unless required by applicable law or agreed to in writing, software
+# distributed under the License is distributed on an "AS IS" BASIS,
+# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or
+# implied.
+# See the License for the specific language governing permissions and
+# limitations under the License.
+
+from django.utils.translation import ugettext_lazy as _
+
+import horizon
+
+from openstack_dashboard.dashboards.project import dashboard
+
+
+class PluginsPanel(horizon.Panel):
+ name = _("Plugins")
+ slug = 'data_processing.data_plugins'
+ permissions = (('openstack.services.data-processing',
+ 'openstack.services.data_processing'),)
+
+
+dashboard.Project.register(PluginsPanel)
diff --git a/sahara_dashboard/content/data_processing/data_plugins/tables.py b/sahara_dashboard/content/data_processing/data_plugins/tables.py
new file mode 100644
index 00000000..15e01160
--- /dev/null
+++ b/sahara_dashboard/content/data_processing/data_plugins/tables.py
@@ -0,0 +1,40 @@
+# Licensed under the Apache License, Version 2.0 (the "License");
+# you may not use this file except in compliance with the License.
+# You may obtain a copy of the License at
+#
+# http://www.apache.org/licenses/LICENSE-2.0
+#
+# Unless required by applicable law or agreed to in writing, software
+# distributed under the License is distributed on an "AS IS" BASIS,
+# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or
+# implied.
+# See the License for the specific language governing permissions and
+# limitations under the License.
+
+import logging
+
+from django.template import defaultfilters as filters
+from django.utils.translation import ugettext_lazy as _
+
+from horizon import tables
+
+LOG = logging.getLogger(__name__)
+
+
+class PluginsTable(tables.DataTable):
+ title = tables.Column("title",
+ verbose_name=_("Title"),
+ link=("horizon:project:data_processing."
+ "data_plugins:details"))
+
+ versions = tables.Column("versions",
+ verbose_name=_("Supported Versions"),
+ wrap_list=True,
+ filters=(filters.unordered_list,))
+
+ description = tables.Column("description",
+ verbose_name=_("Description"))
+
+ class Meta(object):
+ name = "plugins"
+ verbose_name = _("Plugins")
diff --git a/sahara_dashboard/content/data_processing/data_plugins/tabs.py b/sahara_dashboard/content/data_processing/data_plugins/tabs.py
new file mode 100644
index 00000000..e309d072
--- /dev/null
+++ b/sahara_dashboard/content/data_processing/data_plugins/tabs.py
@@ -0,0 +1,46 @@
+# Licensed under the Apache License, Version 2.0 (the "License");
+# you may not use this file except in compliance with the License.
+# You may obtain a copy of the License at
+#
+# http://www.apache.org/licenses/LICENSE-2.0
+#
+# Unless required by applicable law or agreed to in writing, software
+# distributed under the License is distributed on an "AS IS" BASIS,
+# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or
+# implied.
+# See the License for the specific language governing permissions and
+# limitations under the License.
+
+import logging
+
+from django.utils.translation import ugettext_lazy as _
+
+from horizon import exceptions
+from horizon import tabs
+from sahara_dashboard.api import sahara as saharaclient
+
+LOG = logging.getLogger(__name__)
+
+
+class DetailsTab(tabs.Tab):
+ name = _("Details")
+ slug = "plugin_details_tab"
+ template_name = ("project/data_processing.data_plugins/_details.html")
+
+ def get_context_data(self, request):
+ plugin_id = self.tab_group.kwargs['plugin_id']
+ plugin = None
+ try:
+ plugin = saharaclient.plugin_get(request, plugin_id)
+ except Exception as e:
+ LOG.error("Unable to get plugin with plugin_id %s (%s)" %
+ (plugin_id, str(e)))
+ exceptions.handle(self.tab_group.request,
+ _('Unable to retrieve plugin.'))
+ return {"plugin": plugin}
+
+
+class PluginDetailsTabs(tabs.TabGroup):
+ slug = "cluster_details"
+ tabs = (DetailsTab,)
+ sticky = True
diff --git a/sahara_dashboard/content/data_processing/data_plugins/templates/data_processing.data_plugins/_details.html b/sahara_dashboard/content/data_processing/data_plugins/templates/data_processing.data_plugins/_details.html
new file mode 100644
index 00000000..1082fbf4
--- /dev/null
+++ b/sahara_dashboard/content/data_processing/data_plugins/templates/data_processing.data_plugins/_details.html
@@ -0,0 +1,20 @@
+{% load i18n %}
+
+
+
+
{% trans "Name" %}
+
{{ plugin.name }}
+
{% trans "Title" %}
+
{{ plugin.title }}
+
{% trans "Description" %}
+
{{ plugin.description }}
+
{% trans "Supported Versions" %}
+
+
+ {% for version in plugin.versions %}
+
{{ version }}
+ {% endfor %}
+
+
+
+
diff --git a/sahara_dashboard/content/data_processing/data_plugins/templates/data_processing.data_plugins/plugins.html b/sahara_dashboard/content/data_processing/data_plugins/templates/data_processing.data_plugins/plugins.html
new file mode 100644
index 00000000..ac74f8fa
--- /dev/null
+++ b/sahara_dashboard/content/data_processing/data_plugins/templates/data_processing.data_plugins/plugins.html
@@ -0,0 +1,11 @@
+{% extends 'base.html' %}
+{% load i18n %}
+{% block title %}{% trans "Data Processing" %}{% endblock %}
+
+{% block main %}
+
+
+ {{ plugins_table.render }}
+
+
+{% endblock %}
diff --git a/sahara_dashboard/content/data_processing/data_plugins/tests.py b/sahara_dashboard/content/data_processing/data_plugins/tests.py
new file mode 100644
index 00000000..14091d74
--- /dev/null
+++ b/sahara_dashboard/content/data_processing/data_plugins/tests.py
@@ -0,0 +1,49 @@
+# Licensed under the Apache License, Version 2.0 (the "License"); you may
+# not use this file except in compliance with the License. You may obtain
+# a copy of the License at
+#
+# http://www.apache.org/licenses/LICENSE-2.0
+#
+# Unless required by applicable law or agreed to in writing, software
+# distributed under the License is distributed on an "AS IS" BASIS, WITHOUT
+# WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. See the
+# License for the specific language governing permissions and limitations
+# under the License.
+
+from django.core.urlresolvers import reverse
+from django import http
+
+from mox3.mox import IsA # noqa
+from openstack_dashboard.test import helpers as test
+import six
+
+from sahara_dashboard import api
+
+
+INDEX_URL = reverse(
+ 'horizon:project:data_processing.data_plugins:index')
+DETAILS_URL = reverse(
+ 'horizon:project:data_processing.data_plugins:details', args=['id'])
+
+
+class DataProcessingPluginsTests(test.TestCase):
+ @test.create_stubs({api.sahara: ('plugin_list',)})
+ def test_index(self):
+ api.sahara.plugin_list(IsA(http.HttpRequest)) \
+ .AndReturn(self.plugins.list())
+ self.mox.ReplayAll()
+ res = self.client.get(INDEX_URL)
+ self.assertTemplateUsed(
+ res, 'project/data_processing.data_plugins/plugins.html')
+ self.assertContains(res, 'vanilla')
+ self.assertContains(res, 'plugin')
+
+ @test.create_stubs({api.sahara: ('plugin_get',)})
+ def test_details(self):
+ api.sahara.plugin_get(IsA(http.HttpRequest), IsA(six.text_type)) \
+ .AndReturn(self.plugins.list()[0])
+ self.mox.ReplayAll()
+ res = self.client.get(DETAILS_URL)
+ self.assertTemplateUsed(res, 'horizon/common/_detail.html')
+ self.assertContains(res, 'vanilla')
+ self.assertContains(res, 'plugin')
diff --git a/sahara_dashboard/content/data_processing/data_plugins/urls.py b/sahara_dashboard/content/data_processing/data_plugins/urls.py
new file mode 100644
index 00000000..60251b83
--- /dev/null
+++ b/sahara_dashboard/content/data_processing/data_plugins/urls.py
@@ -0,0 +1,25 @@
+# Licensed under the Apache License, Version 2.0 (the "License");
+# you may not use this file except in compliance with the License.
+# You may obtain a copy of the License at
+#
+# http://www.apache.org/licenses/LICENSE-2.0
+#
+# Unless required by applicable law or agreed to in writing, software
+# distributed under the License is distributed on an "AS IS" BASIS,
+# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or
+# implied.
+# See the License for the specific language governing permissions and
+# limitations under the License.
+
+from django.conf.urls import patterns
+from django.conf.urls import url
+
+from sahara_dashboard.content.\
+ data_processing.data_plugins import views
+
+
+urlpatterns = patterns('',
+ url(r'^$', views.PluginsView.as_view(), name='index'),
+ url(r'^(?P<plugin_id>[^/]+)$',
+ views.PluginDetailsView.as_view(), name='details'),
+ )
diff --git a/sahara_dashboard/content/data_processing/data_plugins/views.py b/sahara_dashboard/content/data_processing/data_plugins/views.py
new file mode 100644
index 00000000..eabf7e79
--- /dev/null
+++ b/sahara_dashboard/content/data_processing/data_plugins/views.py
@@ -0,0 +1,49 @@
+# Licensed under the Apache License, Version 2.0 (the "License");
+# you may not use this file except in compliance with the License.
+# You may obtain a copy of the License at
+#
+# http://www.apache.org/licenses/LICENSE-2.0
+#
+# Unless required by applicable law or agreed to in writing, software
+# distributed under the License is distributed on an "AS IS" BASIS,
+# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or
+# implied.
+# See the License for the specific language governing permissions and
+# limitations under the License.
+
+import logging
+
+from django.utils.translation import ugettext_lazy as _
+
+from horizon import exceptions
+from horizon import tables
+from horizon import tabs
+
+from sahara_dashboard.api import sahara as saharaclient
+import sahara_dashboard.content.data_processing. \
+ data_plugins.tables as p_tables
+import sahara_dashboard.content.data_processing. \
+ data_plugins.tabs as p_tabs
+
+LOG = logging.getLogger(__name__)
+
+
+class PluginsView(tables.DataTableView):
+ table_class = p_tables.PluginsTable
+ template_name = 'project/data_processing.data_plugins/plugins.html'
+ page_title = _("Data Processing Plugins")
+
+ def get_data(self):
+ try:
+ plugins = saharaclient.plugin_list(self.request)
+ except Exception:
+ plugins = []
+ msg = _('Unable to retrieve data processing plugins.')
+ exceptions.handle(self.request, msg)
+ return plugins
+
+
+class PluginDetailsView(tabs.TabView):
+ tab_group_class = p_tabs.PluginDetailsTabs
+ template_name = 'horizon/common/_detail.html'
+ page_title = _("Data Processing Plugin Details")
diff --git a/sahara_dashboard/content/data_processing/data_sources/__init__.py b/sahara_dashboard/content/data_processing/data_sources/__init__.py
new file mode 100644
index 00000000..e69de29b
diff --git a/sahara_dashboard/content/data_processing/data_sources/panel.py b/sahara_dashboard/content/data_processing/data_sources/panel.py
new file mode 100644
index 00000000..7a265ef4
--- /dev/null
+++ b/sahara_dashboard/content/data_processing/data_sources/panel.py
@@ -0,0 +1,28 @@
+# Licensed under the Apache License, Version 2.0 (the "License");
+# you may not use this file except in compliance with the License.
+# You may obtain a copy of the License at
+#
+# http://www.apache.org/licenses/LICENSE-2.0
+#
+# Unless required by applicable law or agreed to in writing, software
+# distributed under the License is distributed on an "AS IS" BASIS,
+# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or
+# implied.
+# See the License for the specific language governing permissions and
+# limitations under the License.
+
+from django.utils.translation import ugettext_lazy as _
+
+import horizon
+
+from openstack_dashboard.dashboards.project import dashboard
+
+
+class DataSourcesPanel(horizon.Panel):
+ name = _("Data Sources")
+ slug = 'data_processing.data_sources'
+ permissions = (('openstack.services.data-processing',
+ 'openstack.services.data_processing'),)
+
+
+dashboard.Project.register(DataSourcesPanel)
diff --git a/sahara_dashboard/content/data_processing/data_sources/tables.py b/sahara_dashboard/content/data_processing/data_sources/tables.py
new file mode 100644
index 00000000..bf636ef0
--- /dev/null
+++ b/sahara_dashboard/content/data_processing/data_sources/tables.py
@@ -0,0 +1,78 @@
+# Licensed under the Apache License, Version 2.0 (the "License");
+# you may not use this file except in compliance with the License.
+# You may obtain a copy of the License at
+#
+# http://www.apache.org/licenses/LICENSE-2.0
+#
+# Unless required by applicable law or agreed to in writing, software
+# distributed under the License is distributed on an "AS IS" BASIS,
+# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or
+# implied.
+# See the License for the specific language governing permissions and
+# limitations under the License.
+
+import logging
+
+from django.utils.translation import ugettext_lazy as _
+from django.utils.translation import ungettext_lazy
+
+from horizon import tables
+
+from sahara_dashboard.api import sahara as saharaclient
+
+LOG = logging.getLogger(__name__)
+
+
+class CreateDataSource(tables.LinkAction):
+ name = "create data source"
+ verbose_name = _("Create Data Source")
+ url = "horizon:project:data_processing.data_sources:create-data-source"
+ classes = ("ajax-modal",)
+ icon = "plus"
+
+
+class DeleteDataSource(tables.DeleteAction):
+ @staticmethod
+ def action_present(count):
+ return ungettext_lazy(
+ u"Delete Data Source",
+ u"Delete Data Sources",
+ count
+ )
+
+ @staticmethod
+ def action_past(count):
+ return ungettext_lazy(
+ u"Deleted Data Source",
+ u"Deleted Data Sources",
+ count
+ )
+
+ def delete(self, request, obj_id):
+ saharaclient.data_source_delete(request, obj_id)
+
+
+class EditDataSource(tables.LinkAction):
+ name = "edit data source"
+ verbose_name = _("Edit Data Source")
+ url = "horizon:project:data_processing.data_sources:edit-data-source"
+ classes = ("ajax-modal",)
+
+
+class DataSourcesTable(tables.DataTable):
+ name = tables.Column("name",
+ verbose_name=_("Name"),
+ link=("horizon:project:data_processing."
+ "data_sources:details"))
+ type = tables.Column("type",
+ verbose_name=_("Type"))
+ description = tables.Column("description",
+ verbose_name=_("Description"))
+
+ class Meta(object):
+ name = "data_sources"
+ verbose_name = _("Data Sources")
+ table_actions = (CreateDataSource,
+ DeleteDataSource)
+ row_actions = (DeleteDataSource,
+ EditDataSource,)
diff --git a/sahara_dashboard/content/data_processing/data_sources/tabs.py b/sahara_dashboard/content/data_processing/data_sources/tabs.py
new file mode 100644
index 00000000..0034f9e1
--- /dev/null
+++ b/sahara_dashboard/content/data_processing/data_sources/tabs.py
@@ -0,0 +1,44 @@
+# Licensed under the Apache License, Version 2.0 (the "License");
+# you may not use this file except in compliance with the License.
+# You may obtain a copy of the License at
+#
+# http://www.apache.org/licenses/LICENSE-2.0
+#
+# Unless required by applicable law or agreed to in writing, software
+# distributed under the License is distributed on an "AS IS" BASIS,
+# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or
+# implied.
+# See the License for the specific language governing permissions and
+# limitations under the License.
+
+import logging
+
+from django.utils.translation import ugettext_lazy as _
+
+from horizon import tabs
+
+from sahara_dashboard.api import sahara as saharaclient
+
+LOG = logging.getLogger(__name__)
+
+
+class GeneralTab(tabs.Tab):
+ name = _("General Info")
+ slug = "data_source_details_tab"
+ template_name = ("project/data_processing.data_sources/_details.html")
+
+ def get_context_data(self, request):
+ data_source_id = self.tab_group.kwargs['data_source_id']
+ try:
+ data_source = saharaclient.data_source_get(request, data_source_id)
+ except Exception as e:
+ data_source = {}
+ LOG.error("Unable to fetch data source details: %s" % str(e))
+
+ return {"data_source": data_source}
+
+
+class DataSourceDetailsTabs(tabs.TabGroup):
+ slug = "data_source_details"
+ tabs = (GeneralTab,)
+ sticky = True
diff --git a/sahara_dashboard/content/data_processing/data_sources/templates/data_processing.data_sources/_create_data_source_help.html b/sahara_dashboard/content/data_processing/data_sources/templates/data_processing.data_sources/_create_data_source_help.html
new file mode 100644
index 00000000..5f1610cd
--- /dev/null
+++ b/sahara_dashboard/content/data_processing/data_sources/templates/data_processing.data_sources/_create_data_source_help.html
@@ -0,0 +1,15 @@
+{% load i18n horizon %}
+
+<p>
+    {% blocktrans %}Create a Data Source with a specified name.{% endblocktrans %}
+</p>
+<p>
+    {% blocktrans %}Select the type of your Data Source.{% endblocktrans %}
+</p>
+<p>
+    {% blocktrans %}You may need to enter the username and password for your Data Source.{% endblocktrans %}
+</p>
+<p>
+    {% blocktrans %}You may also enter an optional description for your Data Source.{% endblocktrans %}
+</p>
+
\ No newline at end of file
diff --git a/sahara_dashboard/content/data_processing/data_sources/templates/data_processing.data_sources/_details.html b/sahara_dashboard/content/data_processing/data_sources/templates/data_processing.data_sources/_details.html
new file mode 100644
index 00000000..f06be545
--- /dev/null
+++ b/sahara_dashboard/content/data_processing/data_sources/templates/data_processing.data_sources/_details.html
@@ -0,0 +1,18 @@
+{% load i18n sizeformat %}
+
+<div class="detail">
+  <dl class="dl-horizontal">
+    <dt>{% trans "Name" %}</dt>
+    <dd>{{ data_source.name }}</dd>
+    <dt>{% trans "ID" %}</dt>
+    <dd>{{ data_source.id }}</dd>
+    <dt>{% trans "Type" %}</dt>
+    <dd>{{ data_source.type }}</dd>
+    <dt>{% trans "URL" %}</dt>
+    <dd>{{ data_source.url }}</dd>
+    <dt>{% trans "Description" %}</dt>
+    <dd>{{ data_source.description|default:_("None") }}</dd>
+    <dt>{% trans "Create time" %}</dt>
+    <dd>{{ data_source.created_at }}</dd>
+  </dl>
+</div>
diff --git a/sahara_dashboard/content/data_processing/data_sources/templates/data_processing.data_sources/create.html b/sahara_dashboard/content/data_processing/data_sources/templates/data_processing.data_sources/create.html
new file mode 100644
index 00000000..00a7d0ac
--- /dev/null
+++ b/sahara_dashboard/content/data_processing/data_sources/templates/data_processing.data_sources/create.html
@@ -0,0 +1,7 @@
+{% extends 'base.html' %}
+{% load i18n %}
+{% block title %}{% trans "Create Data Source" %}{% endblock %}
+
+{% block main %}
+ {% include 'horizon/common/_workflow.html' %}
+{% endblock %}
diff --git a/sahara_dashboard/content/data_processing/data_sources/templates/data_processing.data_sources/data_sources.html b/sahara_dashboard/content/data_processing/data_sources/templates/data_processing.data_sources/data_sources.html
new file mode 100644
index 00000000..21b43bce
--- /dev/null
+++ b/sahara_dashboard/content/data_processing/data_sources/templates/data_processing.data_sources/data_sources.html
@@ -0,0 +1,11 @@
+{% extends 'base.html' %}
+{% load i18n %}
+{% block title %}{% trans "Data Processing" %}{% endblock %}
+
+{% block main %}
+
+
+ {{ data_sources_table.render }}
+
+
+{% endblock %}
diff --git a/sahara_dashboard/content/data_processing/data_sources/tests.py b/sahara_dashboard/content/data_processing/data_sources/tests.py
new file mode 100644
index 00000000..0a090e1a
--- /dev/null
+++ b/sahara_dashboard/content/data_processing/data_sources/tests.py
@@ -0,0 +1,124 @@
+# Licensed under the Apache License, Version 2.0 (the "License"); you may
+# not use this file except in compliance with the License. You may obtain
+# a copy of the License at
+#
+# http://www.apache.org/licenses/LICENSE-2.0
+#
+# Unless required by applicable law or agreed to in writing, software
+# distributed under the License is distributed on an "AS IS" BASIS, WITHOUT
+# WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. See the
+# License for the specific language governing permissions and limitations
+# under the License.
+
+from django.core.urlresolvers import reverse
+from django import http
+
+from mox3.mox import IsA # noqa
+from openstack_dashboard.test import helpers as test
+import six
+
+from sahara_dashboard import api
+
+INDEX_URL = reverse('horizon:project:data_processing.data_sources:index')
+DETAILS_URL = reverse(
+ 'horizon:project:data_processing.data_sources:details', args=['id'])
+CREATE_URL = reverse(
+ 'horizon:project:data_processing.data_sources:create-data-source')
+EDIT_URL = reverse(
+ 'horizon:project:data_processing.data_sources:edit-data-source',
+ args=['id'])
+
+
+class DataProcessingDataSourceTests(test.TestCase):
+ @test.create_stubs({api.sahara: ('data_source_list',)})
+ def test_index(self):
+ api.sahara.data_source_list(IsA(http.HttpRequest)) \
+ .AndReturn(self.data_sources.list())
+ self.mox.ReplayAll()
+ res = self.client.get(INDEX_URL)
+ self.assertTemplateUsed(
+ res, 'project/data_processing.data_sources/data_sources.html')
+ self.assertContains(res, 'Data Sources')
+ self.assertContains(res, 'Name')
+ self.assertContains(res, 'sampleOutput')
+ self.assertContains(res, 'sampleOutput2')
+
+ @test.create_stubs({api.sahara: ('data_source_get',)})
+ def test_details(self):
+ api.sahara.data_source_get(IsA(http.HttpRequest), IsA(six.text_type)) \
+ .MultipleTimes().AndReturn(self.data_sources.first())
+ self.mox.ReplayAll()
+ res = self.client.get(DETAILS_URL)
+ self.assertTemplateUsed(res, 'horizon/common/_detail.html')
+ self.assertContains(res, 'sampleOutput')
+
+ @test.create_stubs({api.sahara: ('data_source_list',
+ 'data_source_delete')})
+ def test_delete(self):
+ data_source = self.data_sources.first()
+ api.sahara.data_source_list(IsA(http.HttpRequest)) \
+ .AndReturn(self.data_sources.list())
+ api.sahara.data_source_delete(IsA(http.HttpRequest), data_source.id)
+ self.mox.ReplayAll()
+
+ form_data = {'action': 'data_sources__delete__%s' % data_source.id}
+ res = self.client.post(INDEX_URL, form_data)
+
+ self.assertNoFormErrors(res)
+ self.assertRedirectsNoFollow(res, INDEX_URL)
+ self.assertMessageCount(success=1)
+
+ @test.create_stubs({api.sahara: ('data_source_create',)})
+ def test_create(self):
+ data_source = self.data_sources.first()
+ api.sahara.data_source_create(IsA(http.HttpRequest),
+ data_source.name,
+ data_source.description,
+ data_source.type,
+ data_source.url,
+ "",
+ "") \
+ .AndReturn(self.data_sources.first())
+ self.mox.ReplayAll()
+ form_data = {
+ 'data_source_url': data_source.url,
+ 'data_source_name': data_source.name,
+ 'data_source_description': data_source.description,
+ 'data_source_type': data_source.type
+ }
+ res = self.client.post(CREATE_URL, form_data)
+ self.assertNoFormErrors(res)
+ self.assertRedirectsNoFollow(res, INDEX_URL)
+ self.assertMessageCount(success=1)
+
+ @test.create_stubs({api.sahara: ('data_source_update',
+ 'data_source_get',)})
+ def test_edit(self):
+ data_source = self.data_sources.first()
+ api_data = {
+ 'url': data_source.url,
+ 'credentials': {'user': '', 'pass': ''},
+ 'type': data_source.type,
+ 'name': data_source.name,
+ 'description': data_source.description
+ }
+ api.sahara.data_source_get(IsA(http.HttpRequest),
+ IsA(six.text_type)) \
+ .AndReturn(self.data_sources.first())
+ api.sahara.data_source_update(IsA(http.HttpRequest),
+ IsA(six.text_type),
+ api_data) \
+ .AndReturn(self.data_sources.first())
+ self.mox.ReplayAll()
+
+ form_data = {
+ 'data_source_url': data_source.url,
+ 'data_source_name': data_source.name,
+ 'data_source_description': data_source.description,
+ 'data_source_type': data_source.type
+ }
+ res = self.client.post(EDIT_URL, form_data)
+
+ self.assertNoFormErrors(res)
+ self.assertRedirectsNoFollow(res, INDEX_URL)
+ self.assertMessageCount(success=1)
diff --git a/sahara_dashboard/content/data_processing/data_sources/urls.py b/sahara_dashboard/content/data_processing/data_sources/urls.py
new file mode 100644
index 00000000..33bda039
--- /dev/null
+++ b/sahara_dashboard/content/data_processing/data_sources/urls.py
@@ -0,0 +1,35 @@
+# Licensed under the Apache License, Version 2.0 (the "License");
+# you may not use this file except in compliance with the License.
+# You may obtain a copy of the License at
+#
+# http://www.apache.org/licenses/LICENSE-2.0
+#
+# Unless required by applicable law or agreed to in writing, software
+# distributed under the License is distributed on an "AS IS" BASIS,
+# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or
+# implied.
+# See the License for the specific language governing permissions and
+# limitations under the License.
+
+
+from django.conf.urls import patterns
+from django.conf.urls import url
+
+import sahara_dashboard.content.data_processing. \
+ data_sources.views as views
+
+
+urlpatterns = patterns('',
+ url(r'^$', views.DataSourcesView.as_view(),
+ name='index'),
+ url(r'^$', views.DataSourcesView.as_view(),
+ name='data-sources'),
+ url(r'^create-data-source$',
+ views.CreateDataSourceView.as_view(),
+ name='create-data-source'),
+ url(r'^(?P[^/]+)/edit$',
+ views.EditDataSourceView.as_view(),
+ name='edit-data-source'),
+ url(r'^(?P[^/]+)$',
+ views.DataSourceDetailsView.as_view(),
+ name='details'))
diff --git a/sahara_dashboard/content/data_processing/data_sources/views.py b/sahara_dashboard/content/data_processing/data_sources/views.py
new file mode 100644
index 00000000..3a5ac9f3
--- /dev/null
+++ b/sahara_dashboard/content/data_processing/data_sources/views.py
@@ -0,0 +1,99 @@
+# Licensed under the Apache License, Version 2.0 (the "License");
+# you may not use this file except in compliance with the License.
+# You may obtain a copy of the License at
+#
+# http://www.apache.org/licenses/LICENSE-2.0
+#
+# Unless required by applicable law or agreed to in writing, software
+# distributed under the License is distributed on an "AS IS" BASIS,
+# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or
+# implied.
+# See the License for the specific language governing permissions and
+# limitations under the License.
+
+import logging
+
+from django.utils.translation import ugettext_lazy as _
+
+from horizon import exceptions
+from horizon import tables
+from horizon import tabs
+from horizon.utils import memoized
+from horizon.utils.urlresolvers import reverse # noqa
+from horizon import workflows
+
+from sahara_dashboard.api import sahara as saharaclient
+
+import sahara_dashboard.content.data_processing. \
+ data_sources.tables as ds_tables
+import sahara_dashboard.content.data_processing. \
+ data_sources.tabs as _tabs
+import sahara_dashboard.content.data_processing. \
+ data_sources.workflows.create as create_flow
+import sahara_dashboard.content.data_processing. \
+ data_sources.workflows.edit as edit_flow
+
+LOG = logging.getLogger(__name__)
+
+
+class DataSourcesView(tables.DataTableView):
+ table_class = ds_tables.DataSourcesTable
+ template_name = 'project/data_processing.data_sources/data_sources.html'
+ page_title = _("Data Sources")
+
+ def get_data(self):
+ try:
+ data_sources = saharaclient.data_source_list(self.request)
+ except Exception:
+ data_sources = []
+ exceptions.handle(self.request,
+ _("Unable to fetch data sources."))
+ return data_sources
+
+
+class CreateDataSourceView(workflows.WorkflowView):
+ workflow_class = create_flow.CreateDataSource
+ success_url = \
+ "horizon:project:data_processing.data-sources:create-data-source"
+ classes = ("ajax-modal",)
+ template_name = "project/data_processing.data_sources/create.html"
+ page_title = _("Create Data Source")
+
+
+class EditDataSourceView(CreateDataSourceView):
+ workflow_class = edit_flow.EditDataSource
+ page_title = _("Edit Data Source")
+
+ def get_context_data(self, **kwargs):
+ context = super(EditDataSourceView, self) \
+ .get_context_data(**kwargs)
+
+ context["data_source_id"] = kwargs["data_source_id"]
+ return context
+
+ def get_initial(self):
+ initial = super(EditDataSourceView, self).get_initial()
+ initial['data_source_id'] = self.kwargs['data_source_id']
+ return initial
+
+
+class DataSourceDetailsView(tabs.TabView):
+ tab_group_class = _tabs.DataSourceDetailsTabs
+ template_name = 'horizon/common/_detail.html'
+ page_title = "{{ data_source.name|default:data_source.id }}"
+
+ @memoized.memoized_method
+ def get_object(self):
+ ds_id = self.kwargs["data_source_id"]
+ try:
+ return saharaclient.data_source_get(self.request, ds_id)
+ except Exception:
+ msg = _('Unable to retrieve details for data source "%s".') % ds_id
+ redirect = reverse(
+ "horizon:project:data_processing.data_sources:data-sources")
+ exceptions.handle(self.request, msg, redirect=redirect)
+
+ def get_context_data(self, **kwargs):
+ context = super(DataSourceDetailsView, self).get_context_data(**kwargs)
+ context['data_source'] = self.get_object()
+ return context
diff --git a/sahara_dashboard/content/data_processing/data_sources/workflows/__init__.py b/sahara_dashboard/content/data_processing/data_sources/workflows/__init__.py
new file mode 100644
index 00000000..e69de29b
diff --git a/sahara_dashboard/content/data_processing/data_sources/workflows/create.py b/sahara_dashboard/content/data_processing/data_sources/workflows/create.py
new file mode 100644
index 00000000..ffd8fb9c
--- /dev/null
+++ b/sahara_dashboard/content/data_processing/data_sources/workflows/create.py
@@ -0,0 +1,121 @@
+# Licensed under the Apache License, Version 2.0 (the "License");
+# you may not use this file except in compliance with the License.
+# You may obtain a copy of the License at
+#
+# http://www.apache.org/licenses/LICENSE-2.0
+#
+# Unless required by applicable law or agreed to in writing, software
+# distributed under the License is distributed on an "AS IS" BASIS,
+# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or
+# implied.
+# See the License for the specific language governing permissions and
+# limitations under the License.
+
+import logging
+
+from django.utils.translation import ugettext_lazy as _
+
+from horizon import exceptions
+from horizon import forms
+from horizon import workflows
+
+from sahara_dashboard.api import sahara as saharaclient
+from sahara_dashboard.content.data_processing \
+ .utils import helpers
+
+LOG = logging.getLogger(__name__)
+
+
+class GeneralConfigAction(workflows.Action):
+ data_source_name = forms.CharField(label=_("Name"))
+
+ data_source_type = forms.ChoiceField(
+ label=_("Data Source Type"),
+ choices=[("swift", "Swift"), ("hdfs", "HDFS"), ("maprfs", "MapR FS")],
+ widget=forms.Select(attrs={
+ "class": "switchable",
+ "data-slug": "ds_type"
+ }))
+
+ data_source_url = forms.CharField(label=_("URL"))
+
+ data_source_credential_user = forms.CharField(
+ label=_("Source username"),
+ required=False,
+ widget=forms.TextInput(attrs={
+ "class": "switched",
+ "data-switch-on": "ds_type",
+ "data-ds_type-swift": _("Source username")
+ }))
+
+ data_source_credential_pass = forms.CharField(
+ widget=forms.PasswordInput(attrs={
+ 'class': 'switched',
+ 'data-switch-on': 'ds_type',
+ 'data-ds_type-swift': _("Source password"),
+ 'autocomplete': 'off'
+ }),
+ label=_("Source password"),
+ required=False)
+
+ data_source_description = forms.CharField(
+ label=_("Description"),
+ required=False,
+ widget=forms.Textarea(attrs={'rows': 4}))
+
+ def __init__(self, request, *args, **kwargs):
+ super(GeneralConfigAction, self).__init__(request, *args, **kwargs)
+
+ class Meta(object):
+ name = _("Create Data Source")
+ help_text_template = ("project/data_processing.data_sources/"
+ "_create_data_source_help.html")
+
+
+class GeneralConfig(workflows.Step):
+ action_class = GeneralConfigAction
+
+ def contribute(self, data, context):
+ for k, v in data.items():
+ context["general_" + k] = v
+
+ context["source_url"] = context["general_data_source_url"]
+
+ if context["general_data_source_type"] == "swift":
+ if not context["general_data_source_url"].startswith("swift://"):
+ context["source_url"] = "swift://{0}".format(
+ context["general_data_source_url"])
+
+ return context
+
+
+class CreateDataSource(workflows.Workflow):
+ slug = "create_data_source"
+ name = _("Create Data Source")
+ finalize_button_name = _("Create")
+ success_message = _("Data source created")
+ failure_message = _("Could not create data source")
+ success_url = "horizon:project:data_processing.data_sources:index"
+ default_steps = (GeneralConfig, )
+
+ def handle(self, request, context):
+ try:
+ self.object = saharaclient.data_source_create(
+ request,
+ context["general_data_source_name"],
+ context["general_data_source_description"],
+ context["general_data_source_type"],
+ context["source_url"],
+ context.get("general_data_source_credential_user", None),
+ context.get("general_data_source_credential_pass", None))
+
+ hlps = helpers.Helpers(request)
+ if hlps.is_from_guide():
+ request.session["guide_datasource_id"] = self.object.id
+ request.session["guide_datasource_name"] = self.object.name
+ self.success_url = (
+ "horizon:project:data_processing.wizard:jobex_guide")
+ return True
+ except Exception:
+ exceptions.handle(request)
+ return False
diff --git a/sahara_dashboard/content/data_processing/data_sources/workflows/edit.py b/sahara_dashboard/content/data_processing/data_sources/workflows/edit.py
new file mode 100644
index 00000000..4a11e90d
--- /dev/null
+++ b/sahara_dashboard/content/data_processing/data_sources/workflows/edit.py
@@ -0,0 +1,79 @@
+# Licensed under the Apache License, Version 2.0 (the "License");
+# you may not use this file except in compliance with the License.
+# You may obtain a copy of the License at
+#
+# http://www.apache.org/licenses/LICENSE-2.0
+#
+# Unless required by applicable law or agreed to in writing, software
+# distributed under the License is distributed on an "AS IS" BASIS,
+# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or
+# implied.
+# See the License for the specific language governing permissions and
+# limitations under the License.
+
+import logging
+
+from django.utils.translation import ugettext_lazy as _
+
+from horizon import exceptions
+
+from sahara_dashboard.api import sahara as saharaclient
+from sahara_dashboard.content.data_processing \
+ .data_sources.workflows import create
+
+LOG = logging.getLogger(__name__)
+
+
+class EditDataSource(create.CreateDataSource):
+ slug = "edit_data_source"
+ name = _("Edit Data Source")
+ finalize_button_name = _("Update")
+ success_message = _("Data source updated")
+ failure_message = _("Could not update data source")
+ success_url = "horizon:project:data_processing.data_sources:index"
+ default_steps = (create.GeneralConfig,)
+
+ FIELD_MAP = {
+ "data_source_name": "name",
+ "data_source_type": "type",
+ "data_source_description": "description",
+ "data_source_url": "url",
+ "data_source_credential_user": None,
+ "data_source_credential_pass": None,
+ }
+
+ def __init__(self, request, context_seed, entry_point, *args, **kwargs):
+ self.data_source_id = context_seed["data_source_id"]
+ data_source = saharaclient.data_source_get(request,
+ self.data_source_id)
+ super(EditDataSource, self).__init__(request, context_seed,
+ entry_point, *args, **kwargs)
+ for step in self.steps:
+ if isinstance(step, create.GeneralConfig):
+ fields = step.action.fields
+ for field in fields:
+ if self.FIELD_MAP[field]:
+ fields[field].initial = getattr(data_source,
+ self.FIELD_MAP[field],
+ None)
+
+ def handle(self, request, context):
+ try:
+ update_data = {
+ "name": context["general_data_source_name"],
+ "description": context["general_data_source_description"],
+ "type": context["general_data_source_type"],
+ "url": context["source_url"],
+ "credentials": {
+ "user": context.get("general_data_source_credential_user",
+ None),
+ "pass": context.get("general_data_source_credential_pass",
+ None)
+ }
+ }
+ return saharaclient.data_source_update(request,
+ self.data_source_id,
+ update_data)
+ except Exception:
+ exceptions.handle(request)
+ return False
diff --git a/sahara_dashboard/content/data_processing/job_binaries/__init__.py b/sahara_dashboard/content/data_processing/job_binaries/__init__.py
new file mode 100644
index 00000000..e69de29b
diff --git a/sahara_dashboard/content/data_processing/job_binaries/forms.py b/sahara_dashboard/content/data_processing/job_binaries/forms.py
new file mode 100644
index 00000000..a0a012b4
--- /dev/null
+++ b/sahara_dashboard/content/data_processing/job_binaries/forms.py
@@ -0,0 +1,311 @@
+# Licensed under the Apache License, Version 2.0 (the "License");
+# you may not use this file except in compliance with the License.
+# You may obtain a copy of the License at
+#
+# http://www.apache.org/licenses/LICENSE-2.0
+#
+# Unless required by applicable law or agreed to in writing, software
+# distributed under the License is distributed on an "AS IS" BASIS,
+# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or
+# implied.
+# See the License for the specific language governing permissions and
+# limitations under the License.
+
+import logging
+import uuid
+
+from django.forms import widgets
+from django import template
+from django.template import defaultfilters
+from django.utils.encoding import force_text
+from django.utils.safestring import mark_safe
+from django.utils.translation import ugettext_lazy as _
+
+from horizon import exceptions
+from horizon import forms
+from horizon import messages
+
+from sahara_dashboard.api import sahara as saharaclient
+
+LOG = logging.getLogger(__name__)
+
+
+class LabeledInput(widgets.TextInput):
+ def render(self, name, values, attrs=None):
+ input = super(LabeledInput, self).render(name, values, attrs)
+        label = "<span id='%s'>%s</span>" %\
+ ("id_%s_label" % name,
+ "swift://")
+ result = "%s%s" % (label, input)
+ return mark_safe(result)
+
+
+class JobBinaryCreateForm(forms.SelfHandlingForm):
+ NEW_SCRIPT = "newscript"
+ UPLOAD_BIN = "uploadfile"
+ action_url = ('horizon:project:data_processing.'
+ 'job_binaries:create-job-binary')
+
+ def __init__(self, request, *args, **kwargs):
+ super(JobBinaryCreateForm, self).__init__(request, *args, **kwargs)
+
+ self.help_text_template = ("project/data_processing.job_binaries/"
+ "_create_job_binary_help.html")
+
+ self.fields["job_binary_name"] = forms.CharField(label=_("Name"))
+
+ self.fields["job_binary_type"] = forms.ChoiceField(
+ label=_("Storage type"),
+ widget=forms.Select(
+ attrs={
+ 'class': 'switchable',
+ 'data-slug': 'jb_type'
+ }))
+
+ self.fields["job_binary_url"] = forms.CharField(
+ label=_("URL"),
+ required=False,
+ widget=LabeledInput(
+ attrs={
+ 'class': 'switched',
+ 'data-switch-on': 'jb_type',
+ 'data-jb_type-swift': _('URL')
+ }))
+
+ self.fields["job_binary_internal"] = forms.ChoiceField(
+ label=_("Internal binary"),
+ required=False,
+ widget=forms.Select(
+ attrs={
+ 'class': 'switched switchable',
+ 'data-slug': 'jb_internal',
+ 'data-switch-on': 'jb_type',
+ 'data-jb_type-internal-db': _('Internal Binary')
+ }))
+
+ self.fields["job_binary_file"] = forms.FileField(
+ label=_("Upload File"),
+ required=False,
+ widget=forms.ClearableFileInput(
+ attrs={
+ 'class': 'switched',
+ 'data-switch-on': 'jb_internal',
+ 'data-jb_internal-uploadfile': _("Upload File")
+ }))
+
+ self.fields["job_binary_script_name"] = forms.CharField(
+ label=_("Script name"),
+ required=False,
+ widget=forms.TextInput(
+ attrs={
+ 'class': 'switched',
+ 'data-switch-on': 'jb_internal',
+ 'data-jb_internal-newscript': _("Script name")
+ }))
+
+ self.fields["job_binary_script"] = forms.CharField(
+ label=_("Script text"),
+ required=False,
+ widget=forms.Textarea(
+ attrs={
+ 'rows': 4,
+ 'class': 'switched',
+ 'data-switch-on': 'jb_internal',
+ 'data-jb_internal-newscript': _("Script text")
+ }))
+
+ self.fields["job_binary_username"] = forms.CharField(
+ label=_("Username"),
+ required=False,
+ widget=forms.TextInput(
+ attrs={
+ 'class': 'switched',
+ 'data-switch-on': 'jb_type',
+ 'data-jb_type-swift': _('Username')
+ }))
+
+ self.fields["job_binary_password"] = forms.CharField(
+ label=_("Password"),
+ required=False,
+ widget=forms.PasswordInput(
+ attrs={
+ 'autocomplete': 'off',
+ 'class': 'switched',
+ 'data-switch-on': 'jb_type',
+ 'data-jb_type-swift': _('Password')
+ }))
+
+ self.fields["job_binary_description"] = (
+ forms.CharField(label=_("Description"),
+ required=False,
+ widget=forms.Textarea()))
+
+ self.fields["job_binary_type"].choices =\
+ [("internal-db", "Internal database"),
+ ("swift", "Swift")]
+
+ self.fields["job_binary_internal"].choices =\
+ self.populate_job_binary_internal_choices(request)
+
+ self.load_form_values()
+
+ def load_form_values(self):
+ if "job_binary" in self.initial:
+ jb = self.initial["job_binary"]
+ for field in self.fields:
+ if self.FIELD_MAP[field]:
+ if field == "job_binary_url":
+ url = getattr(jb, self.FIELD_MAP[field], None)
+ (type, loc) = url.split("://")
+ self.fields['job_binary_type'].initial = type
+ self.fields[field].initial = loc
+ else:
+ self.fields[field].initial = (
+ getattr(jb, self.FIELD_MAP[field], None))
+
+ def populate_job_binary_internal_choices(self, request):
+ try:
+ job_binaries = saharaclient.job_binary_internal_list(request)
+ except Exception:
+ exceptions.handle(request,
+ _("Failed to get list of internal binaries."))
+ job_binaries = []
+
+ choices = [(job_binary.id, job_binary.name)
+ for job_binary in job_binaries]
+ choices.insert(0, (self.NEW_SCRIPT, '*Create a script'))
+ choices.insert(0, (self.UPLOAD_BIN, '*Upload a new file'))
+
+ return choices
+
+ def handle(self, request, context):
+ try:
+ extra = {}
+ bin_url = "%s://%s" % (context["job_binary_type"],
+ context["job_binary_url"])
+ if(context["job_binary_type"] == "internal-db"):
+ bin_url = self.handle_internal(request, context)
+ elif(context["job_binary_type"] == "swift"):
+ extra = self.handle_swift(request, context)
+
+ bin_object = saharaclient.job_binary_create(
+ request,
+ context["job_binary_name"],
+ bin_url,
+ context["job_binary_description"],
+ extra)
+ messages.success(request, "Successfully created job binary")
+ return bin_object
+ except Exception:
+ exceptions.handle(request,
+ _("Unable to create job binary"))
+ return False
+
+ def get_help_text(self, extra_context=None):
+ text = ""
+ extra_context = extra_context or {}
+ if self.help_text_template:
+ tmpl = template.loader.get_template(self.help_text_template)
+ context = template.RequestContext(self.request, extra_context)
+ text += tmpl.render(context)
+ else:
+ text += defaultfilters.linebreaks(force_text(self.help_text))
+ return defaultfilters.safe(text)
+
+ class Meta(object):
+ name = _("Create Job Binary")
+ help_text_template = ("project/data_processing.job_binaries/"
+ "_create_job_binary_help.html")
+
+ def handle_internal(self, request, context):
+ result = ""
+
+ bin_id = context["job_binary_internal"]
+ if(bin_id == self.UPLOAD_BIN):
+ try:
+ result = saharaclient.job_binary_internal_create(
+ request,
+ self.get_unique_binary_name(
+ request, request.FILES["job_binary_file"].name),
+ request.FILES["job_binary_file"].read())
+ bin_id = result.id
+ except Exception:
+ exceptions.handle(request,
+ _("Unable to upload job binary"))
+ return None
+ elif(bin_id == self.NEW_SCRIPT):
+ try:
+ result = saharaclient.job_binary_internal_create(
+ request,
+ self.get_unique_binary_name(
+ request, context["job_binary_script_name"]),
+ context["job_binary_script"])
+ bin_id = result.id
+ except Exception:
+ exceptions.handle(request,
+ _("Unable to create job binary"))
+ return None
+
+ return "internal-db://%s" % bin_id
+
+ def handle_swift(self, request, context):
+ username = context["job_binary_username"]
+ password = context["job_binary_password"]
+
+ extra = {
+ "user": username,
+ "password": password
+ }
+ return extra
+
+ def get_unique_binary_name(self, request, base_name):
+ try:
+ internals = saharaclient.job_binary_internal_list(request)
+ except Exception:
+ internals = []
+ exceptions.handle(request,
+ _("Failed to fetch internal binary list"))
+ names = [internal.name for internal in internals]
+ if base_name in names:
+ return "%s_%s" % (base_name, uuid.uuid1())
+ return base_name
+
+
+class JobBinaryEditForm(JobBinaryCreateForm):
+ FIELD_MAP = {
+ 'job_binary_description': 'description',
+ 'job_binary_file': None,
+ 'job_binary_internal': None,
+ 'job_binary_name': 'name',
+ 'job_binary_password': None,
+ 'job_binary_script': None,
+ 'job_binary_script_name': None,
+ 'job_binary_type': None,
+ 'job_binary_url': 'url',
+ 'job_binary_username': None,
+ }
+
+ def handle(self, request, context):
+ try:
+ extra = {}
+ bin_url = "%s://%s" % (context["job_binary_type"],
+ context["job_binary_url"])
+ if (context["job_binary_type"] == "swift"):
+ extra = self.handle_swift(request, context)
+
+ update_data = {
+ "name": context["job_binary_name"],
+ "description": context["job_binary_description"],
+ "extra": extra,
+ "url": bin_url,
+ }
+
+ bin_object = saharaclient.job_binary_update(
+ request, self.initial["job_binary"].id, update_data)
+
+ messages.success(request, "Successfully updated job binary")
+ return bin_object
+ except Exception:
+ exceptions.handle(request,
+ _("Unable to update job binary"))
+ return False
diff --git a/sahara_dashboard/content/data_processing/job_binaries/panel.py b/sahara_dashboard/content/data_processing/job_binaries/panel.py
new file mode 100644
index 00000000..69eb98ef
--- /dev/null
+++ b/sahara_dashboard/content/data_processing/job_binaries/panel.py
@@ -0,0 +1,28 @@
+# Licensed under the Apache License, Version 2.0 (the "License");
+# you may not use this file except in compliance with the License.
+# You may obtain a copy of the License at
+#
+# http://www.apache.org/licenses/LICENSE-2.0
+#
+# Unless required by applicable law or agreed to in writing, software
+# distributed under the License is distributed on an "AS IS" BASIS,
+# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or
+# implied.
+# See the License for the specific language governing permissions and
+# limitations under the License.
+
+from django.utils.translation import ugettext_lazy as _
+
+import horizon
+
+from openstack_dashboard.dashboards.project import dashboard
+
+
+class JobBinariesPanel(horizon.Panel):
+ name = _("Job Binaries")
+ slug = 'data_processing.job_binaries'
+ permissions = (('openstack.services.data-processing',
+ 'openstack.services.data_processing'),)
+
+
+dashboard.Project.register(JobBinariesPanel)
diff --git a/sahara_dashboard/content/data_processing/job_binaries/tables.py b/sahara_dashboard/content/data_processing/job_binaries/tables.py
new file mode 100644
index 00000000..8a30fbe8
--- /dev/null
+++ b/sahara_dashboard/content/data_processing/job_binaries/tables.py
@@ -0,0 +1,98 @@
+# Licensed under the Apache License, Version 2.0 (the "License");
+# you may not use this file except in compliance with the License.
+# You may obtain a copy of the License at
+#
+# http://www.apache.org/licenses/LICENSE-2.0
+#
+# Unless required by applicable law or agreed to in writing, software
+# distributed under the License is distributed on an "AS IS" BASIS,
+# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or
+# implied.
+# See the License for the specific language governing permissions and
+# limitations under the License.
+
+import logging
+
+from django.utils.translation import ugettext_lazy as _
+from django.utils.translation import ungettext_lazy
+
+from horizon import tables
+
+from sahara_dashboard.api import sahara as saharaclient
+
+from saharaclient.api import base as api_base
+
+
+LOG = logging.getLogger(__name__)
+
+
+class CreateJobBinary(tables.LinkAction):
+ name = "create job binary"
+ verbose_name = _("Create Job Binary")
+ url = "horizon:project:data_processing.job_binaries:create-job-binary"
+ classes = ("ajax-modal",)
+ icon = "plus"
+
+
+class DeleteJobBinary(tables.DeleteAction):
+ @staticmethod
+ def action_present(count):
+ return ungettext_lazy(
+ u"Delete Job Binary",
+ u"Delete Job Binaries",
+ count
+ )
+
+ @staticmethod
+ def action_past(count):
+ return ungettext_lazy(
+ u"Deleted Job Binary",
+ u"Deleted Job Binaries",
+ count
+ )
+
+ def delete(self, request, obj_id):
+ jb = saharaclient.job_binary_get(request, obj_id)
+ (jb_type, jb_internal_id) = jb.url.split("://")
+ if jb_type == "internal-db":
+ try:
+ saharaclient.job_binary_internal_delete(request,
+ jb_internal_id)
+ except api_base.APIException:
+ # nothing to do for job-binary-internal if
+ # it does not exist.
+ pass
+
+ saharaclient.job_binary_delete(request, obj_id)
+
+
+class DownloadJobBinary(tables.LinkAction):
+ name = "download job binary"
+ verbose_name = _("Download Job Binary")
+ url = "horizon:project:data_processing.job_binaries:download"
+ classes = ("btn-edit",)
+
+
+class EditJobBinary(tables.LinkAction):
+ name = "edit job binary"
+ verbose_name = _("Edit Job Binary")
+ url = "horizon:project:data_processing.job_binaries:edit-job-binary"
+ classes = ("btn-edit", "ajax-modal",)
+
+
+class JobBinariesTable(tables.DataTable):
+ name = tables.Column(
+ "name",
+ verbose_name=_("Name"),
+ link="horizon:project:data_processing.job_binaries:details")
+ type = tables.Column("url",
+ verbose_name=_("Url"))
+ description = tables.Column("description",
+ verbose_name=_("Description"))
+
+ class Meta(object):
+ name = "job_binaries"
+ verbose_name = _("Job Binaries")
+ table_actions = (CreateJobBinary,
+ DeleteJobBinary)
+ row_actions = (DeleteJobBinary, DownloadJobBinary, EditJobBinary)
diff --git a/sahara_dashboard/content/data_processing/job_binaries/tabs.py b/sahara_dashboard/content/data_processing/job_binaries/tabs.py
new file mode 100644
index 00000000..bc462b86
--- /dev/null
+++ b/sahara_dashboard/content/data_processing/job_binaries/tabs.py
@@ -0,0 +1,43 @@
+# Licensed under the Apache License, Version 2.0 (the "License");
+# you may not use this file except in compliance with the License.
+# You may obtain a copy of the License at
+#
+# http://www.apache.org/licenses/LICENSE-2.0
+#
+# Unless required by applicable law or agreed to in writing, software
+# distributed under the License is distributed on an "AS IS" BASIS,
+# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or
+# implied.
+# See the License for the specific language governing permissions and
+# limitations under the License.
+
+import logging
+
+from django.utils.translation import ugettext_lazy as _
+
+from horizon import tabs
+
+from sahara_dashboard.api import sahara as saharaclient
+
+LOG = logging.getLogger(__name__)
+
+
+class JobBinaryDetailsTab(tabs.Tab):
+    name = _("General Info")
+    slug = "job_binaries_details_tab"
+    template_name = ("project/data_processing.job_binaries/_details.html")
+
+    def get_context_data(self, request):
+        job_binary_id = self.tab_group.kwargs['job_binary_id']
+        try:
+            job_binary = saharaclient.job_binary_get(request, job_binary_id)
+        except Exception as e:
+            job_binary = {}
+            LOG.error("Unable to fetch job binary details: %s", e)
+        return {"job_binary": job_binary}
+
+
+class JobBinaryDetailsTabs(tabs.TabGroup):
+ slug = "job_binary_details"
+ tabs = (JobBinaryDetailsTab,)
+ sticky = True
diff --git a/sahara_dashboard/content/data_processing/job_binaries/templates/data_processing.job_binaries/_create.html b/sahara_dashboard/content/data_processing/job_binaries/templates/data_processing.job_binaries/_create.html
new file mode 100644
index 00000000..888c001a
--- /dev/null
+++ b/sahara_dashboard/content/data_processing/job_binaries/templates/data_processing.job_binaries/_create.html
@@ -0,0 +1,26 @@
+{% extends "horizon/common/_modal_form.html" %}
+
+
+{% load i18n %}
+
+{% block form_id %}create-job-binary{% endblock %}
+{% block form_action %}{{ submit_url }}{% endblock %}
+{% block form_attrs %}enctype="multipart/form-data"{% endblock %}
+
+{% block modal-header %}{{ page_title }}{% endblock %}
+
+{% block modal-body %}
+
+ {% blocktrans %}Important: The name that you give your job binary will be the name used in your job execution.
+ If your binary requires a particular name or extension (ie: ".jar"), be sure to include it here.{% endblocktrans %}
+
+
+ {% blocktrans %}Select the storage type for your job binary.{% endblocktrans %}
+
{% for group, vals in job_execution.job_configs.iteritems %}
+
{% blocktrans %}{{ group }}:{% endblocktrans %}
+ {% if group == "args" %}
+
{% for val in vals %}
{{ val }}
{% endfor %}
+ {% else %}
+
{% for key, val in vals.iteritems %}
{{ key }} = {{ val }}
{% endfor %}
+ {% endif %}
+
+ {% endfor %}
+
+
+
diff --git a/sahara_dashboard/content/data_processing/job_executions/templates/data_processing.job_executions/job_executions.html b/sahara_dashboard/content/data_processing/job_executions/templates/data_processing.job_executions/job_executions.html
new file mode 100644
index 00000000..436624cf
--- /dev/null
+++ b/sahara_dashboard/content/data_processing/job_executions/templates/data_processing.job_executions/job_executions.html
@@ -0,0 +1,63 @@
+{% extends 'base.html' %}
+{% load i18n %}
+{% block title %}{% trans "Data Processing" %}{% endblock %}
+
+{% block main %}
+
+
+ {{ job_executions_table.render }}
+
+
+
+
+{% endblock %}
diff --git a/sahara_dashboard/content/data_processing/job_executions/tests.py b/sahara_dashboard/content/data_processing/job_executions/tests.py
new file mode 100644
index 00000000..ae439361
--- /dev/null
+++ b/sahara_dashboard/content/data_processing/job_executions/tests.py
@@ -0,0 +1,68 @@
+# Licensed under the Apache License, Version 2.0 (the "License"); you may
+# not use this file except in compliance with the License. You may obtain
+# a copy of the License at
+#
+# http://www.apache.org/licenses/LICENSE-2.0
+#
+# Unless required by applicable law or agreed to in writing, software
+# distributed under the License is distributed on an "AS IS" BASIS, WITHOUT
+# WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. See the
+# License for the specific language governing permissions and limitations
+# under the License.
+
+from django.core.urlresolvers import reverse
+from django import http
+
+from mox3.mox import IsA # noqa
+from openstack_dashboard.test import helpers as test
+import six
+
+from sahara_dashboard import api
+
+
+INDEX_URL = reverse('horizon:project:data_processing.job_executions:index')
+DETAILS_URL = reverse(
+ 'horizon:project:data_processing.job_executions:details', args=['id'])
+
+
+class DataProcessingJobExecutionTests(test.TestCase):
+ @test.create_stubs({api.sahara: ('job_execution_list',)})
+ def test_index(self):
+ api.sahara.job_execution_list(IsA(http.HttpRequest), {}) \
+ .AndReturn(self.job_executions.list())
+ self.mox.ReplayAll()
+ res = self.client.get(INDEX_URL)
+ self.assertEqual(
+ "cluster-1",
+ res.context_data["job_executions_table"].data[0].cluster_name)
+ self.assertEqual(
+ "job-1",
+ res.context_data["job_executions_table"].data[0].job_name)
+ self.assertTemplateUsed(
+ res, 'project/data_processing.job_executions/job_executions.html')
+ self.assertContains(res, 'Jobs')
+
+ @test.create_stubs({api.sahara: ('job_execution_get',)})
+ def test_details(self):
+ api.sahara.job_execution_get(IsA(http.HttpRequest), IsA(six.text_type)) \
+ .MultipleTimes().AndReturn(self.job_executions.first())
+ self.mox.ReplayAll()
+ res = self.client.get(DETAILS_URL)
+ self.assertTemplateUsed(res, 'horizon/common/_detail.html')
+ self.assertContains(res, 'RUNNING')
+
+ @test.create_stubs({api.sahara: ('job_execution_list',
+ 'job_execution_delete')})
+ def test_delete(self):
+ job_exec = self.job_executions.first()
+ api.sahara.job_execution_list(IsA(http.HttpRequest), {}) \
+ .AndReturn(self.job_executions.list())
+ api.sahara.job_execution_delete(IsA(http.HttpRequest), job_exec.id)
+ self.mox.ReplayAll()
+
+ form_data = {'action': 'job_executions__delete__%s' % job_exec.id}
+ res = self.client.post(INDEX_URL, form_data)
+
+ self.assertNoFormErrors(res)
+ self.assertRedirectsNoFollow(res, INDEX_URL)
+ self.assertMessageCount(success=1)
diff --git a/sahara_dashboard/content/data_processing/job_executions/urls.py b/sahara_dashboard/content/data_processing/job_executions/urls.py
new file mode 100644
index 00000000..0feffe03
--- /dev/null
+++ b/sahara_dashboard/content/data_processing/job_executions/urls.py
@@ -0,0 +1,35 @@
+# Licensed under the Apache License, Version 2.0 (the "License");
+# you may not use this file except in compliance with the License.
+# You may obtain a copy of the License at
+#
+# http://www.apache.org/licenses/LICENSE-2.0
+#
+# Unless required by applicable law or agreed to in writing, software
+# distributed under the License is distributed on an "AS IS" BASIS,
+# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or
+# implied.
+# See the License for the specific language governing permissions and
+# limitations under the License.
+
+
+from django.conf.urls import patterns
+from django.conf.urls import url
+
+import sahara_dashboard.content.data_processing. \
+ job_executions.views as views
+from sahara_dashboard.content.data_processing. \
+ jobs import views as job_views
+
+
+urlpatterns = patterns('',
+ url(r'^$', views.JobExecutionsView.as_view(),
+ name='index'),
+ url(r'^$', views.JobExecutionsView.as_view(),
+ name='job-executions'),
+ url(r'^launch-job$',
+ job_views.LaunchJobView.as_view()),
+ url(r'^launch-job-new-cluster$',
+ job_views.LaunchJobNewClusterView.as_view()),
+ url(r'^(?P[^/]+)$',
+ views.JobExecutionDetailsView.as_view(),
+ name='details'))
diff --git a/sahara_dashboard/content/data_processing/job_executions/views.py b/sahara_dashboard/content/data_processing/job_executions/views.py
new file mode 100644
index 00000000..671abb10
--- /dev/null
+++ b/sahara_dashboard/content/data_processing/job_executions/views.py
@@ -0,0 +1,83 @@
+# Licensed under the Apache License, Version 2.0 (the "License");
+# you may not use this file except in compliance with the License.
+# You may obtain a copy of the License at
+#
+# http://www.apache.org/licenses/LICENSE-2.0
+#
+# Unless required by applicable law or agreed to in writing, software
+# distributed under the License is distributed on an "AS IS" BASIS,
+# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or
+# implied.
+# See the License for the specific language governing permissions and
+# limitations under the License.
+
+import logging
+
+from django.utils.translation import ugettext_lazy as _
+
+from horizon import exceptions
+from horizon import tables
+from horizon import tabs
+from horizon.utils import memoized
+from horizon.utils.urlresolvers import reverse # noqa
+
+from sahara_dashboard.api import sahara as saharaclient
+
+from sahara_dashboard.content.data_processing.job_executions \
+ import tables as je_tables
+import sahara_dashboard.content.data_processing. \
+ job_executions.tabs as _tabs
+
+LOG = logging.getLogger(__name__)
+
+
+class JobExecutionsView(tables.DataTableView):
+    SEARCH_MAPPING = {"cluster": "cluster.name",
+                      "job": "job.name"}
+
+    table_class = je_tables.JobExecutionsTable
+    template_name = (
+        'project/data_processing.job_executions/job_executions.html')
+    page_title = _("Jobs")
+
+    def get_data(self):
+        try:
+            search_opts = {}
+            finfo = self.get_server_filter_info(self.request)
+            if finfo['value'] and finfo['field']:
+                if finfo['field'] in self.SEARCH_MAPPING:
+                    # Handle special cases for cluster and job
+                    # since they are in different database tables.
+                    search_opts = {
+                        self.SEARCH_MAPPING[finfo['field']]: finfo['value']}
+                else:
+                    search_opts = {finfo['field']: finfo['value']}
+            jobs = saharaclient.job_execution_list(self.request, search_opts)
+        except Exception:
+            jobs = []
+            exceptions.handle(self.request,
+                              _("Unable to fetch job executions."))
+        return jobs
+
+
+class JobExecutionDetailsView(tabs.TabView):
+ tab_group_class = _tabs.JobExecutionDetailsTabs
+ template_name = 'horizon/common/_detail.html'
+ page_title = "{{ job_execution.name|default:job_execution.id }}"
+
+ @memoized.memoized_method
+ def get_object(self):
+ jex_id = self.kwargs["job_execution_id"]
+ try:
+ return saharaclient.job_execution_get(self.request, jex_id)
+ except Exception:
+ msg = _('Unable to retrieve details for job "%s".') % jex_id
+ redirect = reverse("horizon:project:data_processing."
+ "job_executions:job-executions")
+ exceptions.handle(self.request, msg, redirect=redirect)
+
+ def get_context_data(self, **kwargs):
+ context = super(JobExecutionDetailsView, self)\
+ .get_context_data(**kwargs)
+ context['job_execution'] = self.get_object()
+ return context
diff --git a/sahara_dashboard/content/data_processing/jobs/__init__.py b/sahara_dashboard/content/data_processing/jobs/__init__.py
new file mode 100644
index 00000000..e69de29b
diff --git a/sahara_dashboard/content/data_processing/jobs/panel.py b/sahara_dashboard/content/data_processing/jobs/panel.py
new file mode 100644
index 00000000..87085fad
--- /dev/null
+++ b/sahara_dashboard/content/data_processing/jobs/panel.py
@@ -0,0 +1,28 @@
+# Licensed under the Apache License, Version 2.0 (the "License");
+# you may not use this file except in compliance with the License.
+# You may obtain a copy of the License at
+#
+# http://www.apache.org/licenses/LICENSE-2.0
+#
+# Unless required by applicable law or agreed to in writing, software
+# distributed under the License is distributed on an "AS IS" BASIS,
+# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or
+# implied.
+# See the License for the specific language governing permissions and
+# limitations under the License.
+
+from django.utils.translation import ugettext_lazy as _
+
+import horizon
+
+from openstack_dashboard.dashboards.project import dashboard
+
+
+class JobsPanel(horizon.Panel):
+ name = _("Job Templates")
+ slug = 'data_processing.jobs'
+ permissions = (('openstack.services.data-processing',
+ 'openstack.services.data_processing'),)
+
+
+dashboard.Project.register(JobsPanel)
diff --git a/sahara_dashboard/content/data_processing/jobs/tables.py b/sahara_dashboard/content/data_processing/jobs/tables.py
new file mode 100644
index 00000000..94687482
--- /dev/null
+++ b/sahara_dashboard/content/data_processing/jobs/tables.py
@@ -0,0 +1,116 @@
+# Licensed under the Apache License, Version 2.0 (the "License");
+# you may not use this file except in compliance with the License.
+# You may obtain a copy of the License at
+#
+# http://www.apache.org/licenses/LICENSE-2.0
+#
+# Unless required by applicable law or agreed to in writing, software
+# distributed under the License is distributed on an "AS IS" BASIS,
+# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or
+# implied.
+# See the License for the specific language governing permissions and
+# limitations under the License.
+
+import logging
+
+from django.core import urlresolvers
+from django.utils import http
+from django.utils.translation import ugettext_lazy as _
+from django.utils.translation import ungettext_lazy
+
+from horizon import tables
+
+from sahara_dashboard.api import sahara as saharaclient
+
+LOG = logging.getLogger(__name__)
+
+
+class JobsFilterAction(tables.FilterAction):
+ filter_type = "server"
+ filter_choices = (('name', _("Name"), True),
+ ('type', _("Type"), True),
+ ('description', _("Description"), True))
+
+
+class CreateJob(tables.LinkAction):
+ name = "create job"
+ verbose_name = _("Create Job Template")
+ url = "horizon:project:data_processing.jobs:create-job"
+ classes = ("ajax-modal", "create_job_class")
+ icon = "plus"
+
+
+class DeleteJob(tables.DeleteAction):
+ @staticmethod
+ def action_present(count):
+ return ungettext_lazy(
+ u"Delete Job Template",
+ u"Delete Job Templates",
+ count
+ )
+
+ @staticmethod
+ def action_past(count):
+ return ungettext_lazy(
+ u"Deleted Job Template",
+ u"Deleted Jobs Templates",
+ count
+ )
+
+ def delete(self, request, obj_id):
+ saharaclient.job_delete(request, obj_id)
+
+
+class LaunchJobExistingCluster(tables.LinkAction):
+ name = "launch-job-existing"
+ verbose_name = _("Launch On Existing Cluster")
+ url = "horizon:project:data_processing.jobs:launch-job"
+ classes = ('ajax-modal', 'btn-launch')
+
+ def get_link_url(self, datum):
+ base_url = urlresolvers.reverse(self.url)
+
+ params = http.urlencode({"job_id": datum.id})
+ return "?".join([base_url, params])
+
+
+class LaunchJobNewCluster(tables.LinkAction):
+ name = "launch-job-new"
+ verbose_name = _("Launch On New Cluster")
+ url = "horizon:project:data_processing.jobs:launch-job-new-cluster"
+ classes = ('ajax-modal', 'btn-launch')
+
+ def get_link_url(self, datum):
+ base_url = urlresolvers.reverse(self.url)
+
+ params = http.urlencode({"job_id": datum.id})
+ return "?".join([base_url, params])
+
+
+class ChoosePlugin(tables.LinkAction):
+ name = "launch-job-new"
+ verbose_name = _("Launch On New Cluster")
+ url = "horizon:project:data_processing.jobs:choose-plugin"
+ classes = ('ajax-modal', 'btn-launch')
+
+ def get_link_url(self, datum):
+ base_url = urlresolvers.reverse(self.url)
+
+ params = http.urlencode({"job_id": datum.id})
+ return "?".join([base_url, params])
+
+
+class JobsTable(tables.DataTable):
+ name = tables.Column("name",
+ verbose_name=_("Name"),
+ link="horizon:project:data_processing.jobs:details")
+ type = tables.Column("type",
+ verbose_name=_("Type"))
+ description = tables.Column("description",
+ verbose_name=_("Description"))
+
+ class Meta(object):
+ name = "jobs"
+ verbose_name = _("Job Templates")
+ table_actions = (CreateJob, DeleteJob, JobsFilterAction,)
+ row_actions = (LaunchJobExistingCluster, ChoosePlugin, DeleteJob,)
diff --git a/sahara_dashboard/content/data_processing/jobs/tabs.py b/sahara_dashboard/content/data_processing/jobs/tabs.py
new file mode 100644
index 00000000..25fe1395
--- /dev/null
+++ b/sahara_dashboard/content/data_processing/jobs/tabs.py
@@ -0,0 +1,43 @@
+# Licensed under the Apache License, Version 2.0 (the "License");
+# you may not use this file except in compliance with the License.
+# You may obtain a copy of the License at
+#
+# http://www.apache.org/licenses/LICENSE-2.0
+#
+# Unless required by applicable law or agreed to in writing, software
+# distributed under the License is distributed on an "AS IS" BASIS,
+# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or
+# implied.
+# See the License for the specific language governing permissions and
+# limitations under the License.
+
+import logging
+
+from django.utils.translation import ugettext_lazy as _
+
+from horizon import tabs
+
+from sahara_dashboard.api import sahara as saharaclient
+
+LOG = logging.getLogger(__name__)
+
+
+class GeneralTab(tabs.Tab):
+    name = _("General Info")
+    slug = "job_details_tab"
+    template_name = ("project/data_processing.jobs/_details.html")
+
+    def get_context_data(self, request):
+        job_id = self.tab_group.kwargs['job_id']
+        try:
+            job = saharaclient.job_get(request, job_id)
+        except Exception as e:
+            job = {}
+            LOG.error("Unable to fetch job template details: %s", e)
+        return {"job": job}
+
+
+class JobDetailsTabs(tabs.TabGroup):
+ slug = "job_details"
+ tabs = (GeneralTab,)
+ sticky = True
diff --git a/sahara_dashboard/content/data_processing/jobs/templates/data_processing.jobs/_create_job_help.html b/sahara_dashboard/content/data_processing/jobs/templates/data_processing.jobs/_create_job_help.html
new file mode 100644
index 00000000..a9cead21
--- /dev/null
+++ b/sahara_dashboard/content/data_processing/jobs/templates/data_processing.jobs/_create_job_help.html
@@ -0,0 +1,31 @@
+{% load i18n horizon %}
+
+
+ {% blocktrans %}Create a job template with a specified name.{% endblocktrans %}
+
+
+ {% blocktrans %}Select the type of your job:{% endblocktrans %}
+
+
{% blocktrans %}Pig{% endblocktrans %}
+
{% blocktrans %}Hive{% endblocktrans %}
+
{% blocktrans %}Spark{% endblocktrans %}
+
{% blocktrans %}Storm{% endblocktrans %}
+
{% blocktrans %}MapReduce{% endblocktrans %}
+
{% blocktrans %}Java Action{% endblocktrans %}
+
{% blocktrans %}Shell Action{% endblocktrans %}
+
+
+
+ {% blocktrans %}Choose or create your main binary. Additional libraries can be added from the "Libs" tab.{% endblocktrans %}
+
+
+ {% blocktrans %}For Spark and Shell jobs, only a main is required, "libs" are optional.{% endblocktrans %}
+
+
+ {% blocktrans %}For MapReduce or Java Action jobs, "mains" are not applicable. You are required to add one
+ or more "libs" for these jobs.{% endblocktrans %}
+
+
+ {% blocktrans %}You may also enter an optional description for your job template.{% endblocktrans %}
+
+ {% blocktrans %}Add libraries to your job template.{% endblocktrans %}
+
+
+ {% blocktrans %}Choose from the list of binaries and click "choose" to add the library to your job template. This can be repeated for additional libraries.{% endblocktrans %}
+
+
+ {% blocktrans %}For Shell Action jobs, any required files beyond the main script may be added as "libraries".{% endblocktrans %}
+
+
\ No newline at end of file
diff --git a/sahara_dashboard/content/data_processing/jobs/templates/data_processing.jobs/_details.html b/sahara_dashboard/content/data_processing/jobs/templates/data_processing.jobs/_details.html
new file mode 100644
index 00000000..723e6495
--- /dev/null
+++ b/sahara_dashboard/content/data_processing/jobs/templates/data_processing.jobs/_details.html
@@ -0,0 +1,30 @@
+{% load i18n sizeformat %}
+
+
diff --git a/sahara_dashboard/content/data_processing/jobs/tests.py b/sahara_dashboard/content/data_processing/jobs/tests.py
new file mode 100644
index 00000000..3855ea23
--- /dev/null
+++ b/sahara_dashboard/content/data_processing/jobs/tests.py
@@ -0,0 +1,200 @@
+# Licensed under the Apache License, Version 2.0 (the "License"); you may
+# not use this file except in compliance with the License. You may obtain
+# a copy of the License at
+#
+# http://www.apache.org/licenses/LICENSE-2.0
+#
+# Unless required by applicable law or agreed to in writing, software
+# distributed under the License is distributed on an "AS IS" BASIS, WITHOUT
+# WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. See the
+# License for the specific language governing permissions and limitations
+# under the License.
+
+from django.core.urlresolvers import reverse
+from django import http
+
+from mox3.mox import IsA # noqa
+from openstack_dashboard.test import helpers as test
+import six
+
+from sahara_dashboard import api
+
+
+INDEX_URL = reverse('horizon:project:data_processing.jobs:index')
+DETAILS_URL = reverse(
+ 'horizon:project:data_processing.jobs:details', args=['id'])
+
+
+class DataProcessingJobTests(test.TestCase):
+ @test.create_stubs({api.sahara: ('job_list',)})
+ def test_index(self):
+ api.sahara.job_list(IsA(http.HttpRequest), {}) \
+ .AndReturn(self.jobs.list())
+ self.mox.ReplayAll()
+ res = self.client.get(INDEX_URL)
+ self.assertTemplateUsed(res,
+ 'project/data_processing.jobs/jobs.html')
+ self.assertContains(res, 'Job Templates')
+ self.assertContains(res, 'Name')
+
+ @test.create_stubs({api.sahara: ('job_get',)})
+ def test_details(self):
+ api.sahara.job_get(IsA(http.HttpRequest), IsA(six.text_type)) \
+ .MultipleTimes().AndReturn(self.jobs.first())
+ self.mox.ReplayAll()
+ res = self.client.get(DETAILS_URL)
+ self.assertTemplateUsed(res, 'horizon/common/_detail.html')
+ self.assertContains(res, 'pigjob')
+
+ @test.create_stubs({api.sahara: ('job_binary_list',
+ 'job_create',
+ 'job_types_list')})
+ def test_create(self):
+ api.sahara.job_binary_list(IsA(http.HttpRequest)).AndReturn([])
+ api.sahara.job_binary_list(IsA(http.HttpRequest)).AndReturn([])
+ api.sahara.job_create(IsA(http.HttpRequest),
+ 'test', 'Pig', [], [], 'test create',
+ interface=[])
+ api.sahara.job_types_list(IsA(http.HttpRequest)) \
+ .AndReturn(self.job_types.list())
+ self.mox.ReplayAll()
+ form_data = {'job_name': 'test',
+ 'job_type': 'pig',
+ 'lib_binaries': [],
+ 'lib_ids': '[]',
+ 'job_description': 'test create',
+ 'hidden_arguments_field': [],
+ 'argument_ids': '[]'}
+ url = reverse('horizon:project:data_processing.jobs:create-job')
+ res = self.client.post(url, form_data)
+
+ self.assertNoFormErrors(res)
+
+ @test.create_stubs({api.sahara: ('job_binary_list',
+ 'job_create',
+ 'job_types_list')})
+ def test_create_with_interface(self):
+ api.sahara.job_binary_list(IsA(http.HttpRequest)).AndReturn([])
+ api.sahara.job_binary_list(IsA(http.HttpRequest)).AndReturn([])
+ api.sahara.job_create(IsA(http.HttpRequest),
+ 'test_interface', 'Pig', [], [], 'test create',
+ interface=[
+ {
+ "name": "argument",
+ "description": None,
+ "mapping_type": "args",
+ "location": "0",
+ "value_type": "number",
+ "required": True,
+ "default": None
+ },
+ {
+ "name": "config",
+ "description": "Really great config",
+ "mapping_type": "configs",
+ "location": "edp.important.config",
+ "value_type": "string",
+ "required": False,
+ "default": "A value"
+ }])
+ api.sahara.job_types_list(IsA(http.HttpRequest)) \
+ .AndReturn(self.job_types.list())
+ self.mox.ReplayAll()
+ form_data = {'job_name': 'test_interface',
+ 'job_type': 'pig',
+ 'lib_binaries': [],
+ 'lib_ids': '[]',
+ 'job_description': 'test create',
+ 'hidden_arguments_field': [],
+ 'argument_ids': '["0", "1"]',
+ 'argument_id_0': '0',
+ 'argument_name_0': 'argument',
+ 'argument_description_0': '',
+ 'argument_mapping_type_0': 'args',
+ 'argument_location_0': '0',
+ 'argument_value_type_0': 'number',
+ 'argument_required_0': True,
+ 'argument_default_value_0': '',
+ 'argument_id_1': '1',
+ 'argument_name_1': 'config',
+ 'argument_description_1': 'Really great config',
+ 'argument_mapping_type_1': 'configs',
+ 'argument_location_1': 'edp.important.config',
+ 'argument_value_type_1': 'string',
+ 'argument_default_value_1': 'A value'}
+ url = reverse('horizon:project:data_processing.jobs:create-job')
+ res = self.client.post(url, form_data)
+
+ self.assertNoFormErrors(res)
+
+ @test.create_stubs({api.sahara: ('job_list',
+ 'job_delete')})
+ def test_delete(self):
+ job = self.jobs.first()
+ api.sahara.job_list(IsA(http.HttpRequest), {}) \
+ .AndReturn(self.jobs.list())
+ api.sahara.job_delete(IsA(http.HttpRequest), job.id)
+ self.mox.ReplayAll()
+
+ form_data = {'action': 'jobs__delete__%s' % job.id}
+ res = self.client.post(INDEX_URL, form_data)
+
+ self.assertNoFormErrors(res)
+ self.assertRedirectsNoFollow(res, INDEX_URL)
+ self.assertMessageCount(success=1)
+
+ @test.create_stubs({api.sahara: ('job_execution_create',
+ 'job_get',
+ 'job_get_configs',
+ 'job_list',
+ 'cluster_list',
+ 'data_source_list')})
+ def test_launch(self):
+ job = self.jobs.first()
+ job_execution = self.job_executions.first()
+ cluster = self.clusters.first()
+ input_ds = self.data_sources.first()
+ output_ds = self.data_sources.first()
+ api.sahara.job_get(IsA(http.HttpRequest), IsA(six.text_type)) \
+ .AndReturn(job)
+ api.sahara.job_get_configs(IsA(http.HttpRequest), job.type) \
+ .AndReturn(job)
+ api.sahara.cluster_list(IsA(http.HttpRequest)) \
+ .AndReturn(self.clusters.list())
+ api.sahara.data_source_list(IsA(http.HttpRequest)) \
+ .MultipleTimes().AndReturn(self.data_sources.list())
+ api.sahara.job_list(IsA(http.HttpRequest)) \
+ .AndReturn(self.jobs.list())
+ api.sahara.job_get(IsA(http.HttpRequest), IsA(six.text_type)) \
+ .AndReturn(job)
+ api.sahara.job_execution_create(IsA(http.HttpRequest),
+ IsA(six.text_type),
+ IsA(six.text_type),
+ IsA(six.text_type),
+ IsA(six.text_type),
+ IsA(dict),
+ IsA(dict)).AndReturn(job_execution)
+ self.mox.ReplayAll()
+
+ url = reverse('horizon:project:data_processing.jobs:launch-job')
+ form_data = {
+ 'job': self.jobs.first().id,
+ 'cluster': cluster.id,
+ 'job_input': input_ds.id,
+ 'job_output': output_ds.id,
+ 'config': {},
+ 'argument_ids': '{}',
+ 'adapt_oozie': 'on',
+ 'adapt_swift_spark': 'on',
+ 'hbase_common_lib': 'on',
+ 'java_opts': '',
+ 'job_args_array': [[], []],
+ 'job_configs': [{}, {}],
+ 'job_params': [{}, {}],
+ 'job_type': 'Pig',
+ 'streaming_mapper': '',
+ 'streaming_reducer': ''
+ }
+
+ res = self.client.post(url, form_data)
+ self.assertNoFormErrors(res)
diff --git a/sahara_dashboard/content/data_processing/jobs/urls.py b/sahara_dashboard/content/data_processing/jobs/urls.py
new file mode 100644
index 00000000..2858a6f4
--- /dev/null
+++ b/sahara_dashboard/content/data_processing/jobs/urls.py
@@ -0,0 +1,41 @@
+# Licensed under the Apache License, Version 2.0 (the "License");
+# you may not use this file except in compliance with the License.
+# You may obtain a copy of the License at
+#
+# http://www.apache.org/licenses/LICENSE-2.0
+#
+# Unless required by applicable law or agreed to in writing, software
+# distributed under the License is distributed on an "AS IS" BASIS,
+# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or
+# implied.
+# See the License for the specific language governing permissions and
+# limitations under the License.
+
+
+from django.conf.urls import patterns
+from django.conf.urls import url
+
+import sahara_dashboard.content.data_processing. \
+ jobs.views as views
+
+
+urlpatterns = patterns('',
+ url(r'^$', views.JobsView.as_view(),
+ name='index'),
+ url(r'^$', views.JobsView.as_view(),
+ name='jobs'),
+ url(r'^create-job$',
+ views.CreateJobView.as_view(),
+ name='create-job'),
+ url(r'^launch-job$',
+ views.LaunchJobView.as_view(),
+ name='launch-job'),
+ url(r'^launch-job-new-cluster$',
+ views.LaunchJobNewClusterView.as_view(),
+ name='launch-job-new-cluster'),
+ url(r'^choose-plugin$',
+ views.ChoosePluginView.as_view(),
+ name='choose-plugin'),
+ url(r'^(?P[^/]+)$',
+ views.JobDetailsView.as_view(),
+ name='details'))
diff --git a/sahara_dashboard/content/data_processing/jobs/views.py b/sahara_dashboard/content/data_processing/jobs/views.py
new file mode 100644
index 00000000..032bbf8c
--- /dev/null
+++ b/sahara_dashboard/content/data_processing/jobs/views.py
@@ -0,0 +1,135 @@
+# Licensed under the Apache License, Version 2.0 (the "License");
+# you may not use this file except in compliance with the License.
+# You may obtain a copy of the License at
+#
+# http://www.apache.org/licenses/LICENSE-2.0
+#
+# Unless required by applicable law or agreed to in writing, software
+# distributed under the License is distributed on an "AS IS" BASIS,
+# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or
+# implied.
+# See the License for the specific language governing permissions and
+# limitations under the License.
+
+import json
+import logging
+
+from django import http
+from django.utils.translation import ugettext_lazy as _
+
+from horizon import exceptions
+from horizon import tables
+from horizon import tabs
+from horizon.utils import memoized
+from horizon.utils.urlresolvers import reverse # noqa
+from horizon import workflows
+
+from sahara_dashboard.api import sahara as saharaclient
+
+import sahara_dashboard.content.data_processing.jobs.tables \
+ as _tables
+import sahara_dashboard.content.data_processing.jobs.tabs \
+ as _tabs
+import sahara_dashboard.content.data_processing.jobs. \
+ workflows.create as create_flow
+import sahara_dashboard.content.data_processing.jobs. \
+ workflows.launch as launch_flow
+
+LOG = logging.getLogger(__name__)
+
+
+class JobsView(tables.DataTableView):
+    table_class = _tables.JobsTable
+    template_name = 'project/data_processing.jobs/jobs.html'
+    page_title = _("Job Templates")
+
+    def get_data(self):
+        try:
+            search_opts = {}
+            finfo = self.get_server_filter_info(self.request)
+            if finfo['value'] and finfo['field']:
+                search_opts = {finfo['field']: finfo['value']}
+            jobs = saharaclient.job_list(self.request, search_opts)
+        except Exception:
+            jobs = []
+            exceptions.handle(self.request,
+                              _("Unable to fetch jobs."))
+
+        jobs = sorted(jobs, key=lambda job: job.created_at)
+        return jobs
+
+
+class CreateJobView(workflows.WorkflowView):
+ workflow_class = create_flow.CreateJob
+ success_url = "horizon:project:data_processing.jobs:create-job"
+ classes = ("ajax-modal",)
+ template_name = "project/data_processing.jobs/create.html"
+ page_title = _("Create Job Template")
+
+
+class JobDetailsView(tabs.TabView):
+ tab_group_class = _tabs.JobDetailsTabs
+ template_name = 'horizon/common/_detail.html'
+ page_title = "{{ job.name|default:job.id }}"
+
+ @memoized.memoized_method
+ def get_object(self):
+ j_id = self.kwargs["job_id"]
+ try:
+ return saharaclient.job_get(self.request, j_id)
+ except Exception:
+ msg = _('Unable to retrieve details for job template "%s".') % j_id
+ redirect = reverse(
+ "horizon:project:data_processing.jobs:jobs")
+ exceptions.handle(self.request, msg, redirect=redirect)
+
+ def get_context_data(self, **kwargs):
+ context = super(JobDetailsView, self).get_context_data(**kwargs)
+ context['job'] = self.get_object()
+ return context
+
+
+class LaunchJobView(workflows.WorkflowView):
+    workflow_class = launch_flow.LaunchJob
+    success_url = "horizon:project:data_processing.jobs"
+    classes = ("ajax-modal",)
+    template_name = "project/data_processing.jobs/launch.html"
+    page_title = _("Launch Job")
+
+    def get(self, request, *args, **kwargs):
+        if request.is_ajax():
+            if request.REQUEST.get("json", None):
+                job_id = request.REQUEST.get("job_id")
+                job_type = saharaclient.job_get(request, job_id).type
+                return http.HttpResponse(json.dumps({"job_type": job_type}),
+                                         content_type='application/json')
+        return super(LaunchJobView, self).get(request, *args, **kwargs)
+
+    def get_context_data(self, **kwargs):
+        context = super(LaunchJobView, self).get_context_data(**kwargs)
+        return context
+
+
+class LaunchJobNewClusterView(workflows.WorkflowView):
+ workflow_class = launch_flow.LaunchJobNewCluster
+ success_url = "horizon:project:data_processing.jobs"
+ classes = ("ajax-modal",)
+ template_name = "project/data_processing.jobs/launch.html"
+ page_title = _("Launch Job")
+
+ def get_context_data(self, **kwargs):
+ context = super(LaunchJobNewClusterView, self).\
+ get_context_data(**kwargs)
+ return context
+
+
+class ChoosePluginView(workflows.WorkflowView):
+ workflow_class = launch_flow.ChosePluginVersion
+ success_url = "horizon:project:data_processing.jobs"
+ classes = ("ajax-modal",)
+ template_name = "project/data_processing.jobs/launch.html"
+ page_title = _("Launch Job")
+
+ def get_context_data(self, **kwargs):
+ context = super(ChoosePluginView, self).get_context_data(**kwargs)
+ return context
diff --git a/sahara_dashboard/content/data_processing/jobs/workflows/__init__.py b/sahara_dashboard/content/data_processing/jobs/workflows/__init__.py
new file mode 100644
index 00000000..e69de29b
diff --git a/sahara_dashboard/content/data_processing/jobs/workflows/create.py b/sahara_dashboard/content/data_processing/jobs/workflows/create.py
new file mode 100644
index 00000000..364fc58c
--- /dev/null
+++ b/sahara_dashboard/content/data_processing/jobs/workflows/create.py
@@ -0,0 +1,281 @@
+# Licensed under the Apache License, Version 2.0 (the "License");
+# you may not use this file except in compliance with the License.
+# You may obtain a copy of the License at
+#
+# http://www.apache.org/licenses/LICENSE-2.0
+#
+# Unless required by applicable law or agreed to in writing, software
+# distributed under the License is distributed on an "AS IS" BASIS,
+# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or
+# implied.
+# See the License for the specific language governing permissions and
+# limitations under the License.
+
+import json
+import logging
+
+from django.utils.translation import ugettext_lazy as _
+
+from horizon import exceptions
+from horizon import forms
+from horizon.forms import fields
+from horizon import workflows
+
+from sahara_dashboard.content.data_processing \
+ .utils import helpers
+import sahara_dashboard.content.data_processing \
+ .utils.workflow_helpers as whelpers
+from sahara_dashboard.api import sahara as saharaclient
+
+
+LOG = logging.getLogger(__name__)
+
+JOB_BINARY_CREATE_URL = ("horizon:project:data_processing.job_binaries"
+ ":create-job-binary")
+
+
+class AdditionalLibsAction(workflows.Action):
+
+ lib_binaries = forms.DynamicChoiceField(
+ label=_("Choose libraries"),
+ required=False,
+ add_item_link=JOB_BINARY_CREATE_URL,
+ widget=forms.Select(
+ attrs={
+ 'class': 'switched',
+ 'data-switch-on': 'jobtype',
+ 'data-jobtype-pig': _("Choose libraries"),
+ 'data-jobtype-hive': _("Choose libraries"),
+ 'data-jobtype-shell': _("Choose additional files"),
+ 'data-jobtype-spark': _("Choose libraries"),
+ 'data-jobtype-java': _("Choose libraries"),
+ 'data-jobtype-mapreduce.streaming': _("Choose libraries")
+ }))
+
+ lib_ids = forms.CharField(
+ required=False,
+ widget=forms.HiddenInput())
+
+ def populate_lib_binaries_choices(self, request, context):
+ job_binaries = saharaclient.job_binary_list(request)
+
+ choices = [(job_binary.id, job_binary.name)
+ for job_binary in job_binaries]
+ choices.insert(0, ('', _("-- not selected --")))
+
+ return choices
+
+ class Meta(object):
+ name = _("Libs")
+ help_text_template = (
+ "project/data_processing.jobs/_create_job_libs_help.html")
+
+
+class GeneralConfigAction(workflows.Action):
+ job_name = forms.CharField(label=_("Name"))
+
+ job_type = forms.ChoiceField(label=_("Job Type"),
+ widget=forms.Select(attrs={
+ 'class': 'switchable',
+ 'data-slug': 'jobtype'
+ }))
+
+ main_binary = forms.DynamicChoiceField(
+ label=_("Choose a main binary"),
+ required=False,
+ help_text=_("Choose the binary which "
+ "should be used in this Job."),
+ add_item_link=JOB_BINARY_CREATE_URL,
+ widget=fields.DynamicSelectWidget(
+ attrs={
+ 'class': 'switched',
+ 'data-switch-on': 'jobtype',
+ 'data-jobtype-pig': _("Choose a main binary"),
+ 'data-jobtype-hive': _("Choose a main binary"),
+ 'data-jobtype-shell': _("Choose a shell script"),
+ 'data-jobtype-spark': _("Choose a main binary"),
+ 'data-jobtype-storm': _("Choose a main binary"),
+ 'data-jobtype-mapreduce.streaming': _("Choose a main binary")
+ }))
+
+ job_description = forms.CharField(label=_("Description"),
+ required=False,
+ widget=forms.Textarea(attrs={'rows': 4}))
+
+ def __init__(self, request, context, *args, **kwargs):
+ super(GeneralConfigAction,
+ self).__init__(request, context, *args, **kwargs)
+ if request.REQUEST.get("guide_job_type"):
+ self.fields["job_type"].initial = (
+ request.REQUEST.get("guide_job_type").lower())
+
+ def populate_job_type_choices(self, request, context):
+ choices = []
+ choices_list = saharaclient.job_types_list(request)
+
+ for choice in choices_list:
+ job_type = choice.name.lower()
+ if job_type in helpers.JOB_TYPE_MAP:
+ choices.append((job_type, helpers.JOB_TYPE_MAP[job_type][0]))
+ return choices
+
+ def populate_main_binary_choices(self, request, context):
+ job_binaries = saharaclient.job_binary_list(request)
+
+ choices = [(job_binary.id, job_binary.name)
+ for job_binary in job_binaries]
+ choices.insert(0, ('', _("-- not selected --")))
+ return choices
+
+    def clean(self):
+        cleaned_data = super(workflows.Action, self).clean()
+        job_type = cleaned_data.get("job_type", "")
+        # NOTE: choice values are lowercased in populate_job_type_choices,
+        # so compare lowercase here; "Java"/"MapReduce" would never match.
+        if job_type in ["java", "mapreduce"]:
+            cleaned_data['main_binary'] = None
+        return cleaned_data
+
+ class Meta(object):
+ name = _("Create Job Template")
+ help_text_template = (
+ "project/data_processing.jobs/_create_job_help.html")
+
+
+class ConfigureInterfaceArgumentsAction(workflows.Action):
+ hidden_arguments_field = forms.CharField(
+ required=False,
+ widget=forms.HiddenInput(attrs={"class": "hidden_arguments_field"}))
+ argument_ids = forms.CharField(
+ required=False,
+ widget=forms.HiddenInput())
+
+ def __init__(self, request, *args, **kwargs):
+ super(ConfigureInterfaceArgumentsAction, self).__init__(
+ request, *args, **kwargs)
+ request_source = None
+ if 'argument_ids' in request.POST:
+ request_source = request.POST
+ elif 'argument_ids' in request.REQUEST:
+ request_source = request.REQUEST
+ if request_source:
+ self.arguments = []
+ for id in json.loads(request_source['argument_ids']):
+ fields = {
+ "name": "argument_name_" + str(id),
+ "description": "argument_description_" + str(id),
+ "mapping_type": "argument_mapping_type_" + str(id),
+ "location": "argument_location_" + str(id),
+ "value_type": "argument_value_type_" + str(id),
+ "default_value": "argument_default_value_" + str(id)}
+ argument = {k: request_source[v]
+ for k, v in fields.items()}
+ required_field = "argument_required_" + str(id)
+ fields.update({"required": required_field})
+ argument.update(
+ {"required": required_field in request_source})
+ self.arguments.append(argument)
+
+ whelpers.build_interface_argument_fields(self, **fields)
+
+ def clean(self):
+ cleaned_data = super(ConfigureInterfaceArgumentsAction, self).clean()
+ return cleaned_data
+
+ class Meta(object):
+ name = _("Interface Arguments")
+
+
+class ConfigureArguments(workflows.Step):
+ action_class = ConfigureInterfaceArgumentsAction
+ contributes = ("hidden_arguments_field", )
+ template_name = ("project/data_processing.jobs/"
+ "job_interface_arguments_template.html")
+
+ def contribute(self, data, context):
+ for k, v in data.items():
+ context[k] = v
+ return context
+
+
+class GeneralConfig(workflows.Step):
+ action_class = GeneralConfigAction
+ contributes = ("job_name", "job_type", "job_description", "main_binary")
+
+ def contribute(self, data, context):
+ for k, v in data.items():
+ if k == "job_type":
+ context[k] = helpers.JOB_TYPE_MAP[v][1]
+ else:
+ context[k] = v
+ return context
+
+
+class ConfigureLibs(workflows.Step):
+ action_class = AdditionalLibsAction
+ template_name = "project/data_processing.jobs/library_template.html"
+
+ def contribute(self, data, context):
+ chosen_libs = json.loads(data.get("lib_ids", '[]'))
+ for index, library in enumerate(chosen_libs):
+ context["lib_%s" % index] = library
+ return context
+
+
+class CreateJob(workflows.Workflow):
+ slug = "create_job"
+ name = _("Create Job Template")
+ finalize_button_name = _("Create")
+ success_message = _("Job created")
+ failure_message = _("Could not create job template")
+ success_url = "horizon:project:data_processing.jobs:index"
+ default_steps = (GeneralConfig, ConfigureLibs, ConfigureArguments)
+
+ def handle(self, request, context):
+ main_locations = []
+ lib_locations = []
+
+ for k in context.keys():
+ if k.startswith('lib_'):
+ lib_locations.append(context.get(k))
+
+ if context.get("main_binary", None):
+ main_locations.append(context["main_binary"])
+
+ argument_ids = json.loads(context['argument_ids'])
+ interface = [
+ {
+ "name": context['argument_name_' + str(arg_id)],
+ "description": (context['argument_description_' + str(arg_id)]
+ or None),
+ "mapping_type": context['argument_mapping_type_'
+ + str(arg_id)],
+ "location": context['argument_location_' + str(arg_id)],
+ "value_type": context['argument_value_type_' + str(arg_id)],
+ "required": context['argument_required_' + str(arg_id)],
+ "default": (context['argument_default_value_' + str(arg_id)]
+ or None)
+ } for arg_id in argument_ids
+ ]
+
+ try:
+ job = saharaclient.job_create(
+ request,
+ context["job_name"],
+ context["job_type"],
+ main_locations,
+ lib_locations,
+ context["job_description"],
+ interface=interface)
+
+ hlps = helpers.Helpers(request)
+ if hlps.is_from_guide():
+ request.session["guide_job_id"] = job.id
+ request.session["guide_job_type"] = context["job_type"]
+ request.session["guide_job_name"] = context["job_name"]
+ self.success_url = (
+ "horizon:project:data_processing.wizard:jobex_guide")
+ return True
+ except Exception:
+ exceptions.handle(request)
+ return False
diff --git a/sahara_dashboard/content/data_processing/jobs/workflows/launch.py b/sahara_dashboard/content/data_processing/jobs/workflows/launch.py
new file mode 100644
index 00000000..3b648422
--- /dev/null
+++ b/sahara_dashboard/content/data_processing/jobs/workflows/launch.py
@@ -0,0 +1,609 @@
+# Licensed under the Apache License, Version 2.0 (the "License");
+# you may not use this file except in compliance with the License.
+# You may obtain a copy of the License at
+#
+# http://www.apache.org/licenses/LICENSE-2.0
+#
+# Unless required by applicable law or agreed to in writing, software
+# distributed under the License is distributed on an "AS IS" BASIS,
+# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or
+# implied.
+# See the License for the specific language governing permissions and
+# limitations under the License.
+
+import json
+import logging
+
+from django.utils.translation import ugettext_lazy as _
+import six
+
+from horizon import exceptions
+from horizon import forms
+from horizon import workflows
+
+from sahara_dashboard.api import sahara as saharaclient
+import sahara_dashboard.content.data_processing. \
+ cluster_templates.workflows.create as t_flows
+import sahara_dashboard.content.data_processing. \
+ clusters.workflows.create as c_flow
+import sahara_dashboard.content.data_processing. \
+ utils.workflow_helpers as whelpers
+
+
+LOG = logging.getLogger(__name__)
+
+DATA_SOURCE_CREATE_URL = ("horizon:project:data_processing.data_sources"
+ ":create-data-source")
+
+
+class JobExecutionGeneralConfigAction(workflows.Action):
+ job_input = forms.DynamicChoiceField(
+ label=_("Input"),
+ initial=(None, "None"),
+ add_item_link=DATA_SOURCE_CREATE_URL,
+ required=False)
+
+ job_output = forms.DynamicChoiceField(
+ label=_("Output"),
+ initial=(None, "None"),
+ add_item_link=DATA_SOURCE_CREATE_URL,
+ required=False)
+
+ def __init__(self, request, *args, **kwargs):
+ super(JobExecutionGeneralConfigAction, self).__init__(request,
+ *args,
+ **kwargs)
+
+ if request.REQUEST.get("job_id", None) is None:
+ self.fields["job"] = forms.ChoiceField(
+ label=_("Job"))
+ self.fields["job"].choices = self.populate_job_choices(request)
+ else:
+ self.fields["job"] = forms.CharField(
+ widget=forms.HiddenInput(),
+ initial=request.REQUEST.get("job_id", None))
+
+ def populate_job_input_choices(self, request, context):
+ return self.get_data_source_choices(request, context)
+
+ def populate_job_output_choices(self, request, context):
+ return self.get_data_source_choices(request, context)
+
+ def get_data_source_choices(self, request, context):
+ try:
+ data_sources = saharaclient.data_source_list(request)
+ except Exception:
+ data_sources = []
+ exceptions.handle(request,
+ _("Unable to fetch data sources."))
+
+ choices = [(data_source.id, data_source.name)
+ for data_source in data_sources]
+ choices.insert(0, (None, 'None'))
+
+ return choices
+
+ def populate_job_choices(self, request):
+ try:
+ jobs = saharaclient.job_list(request)
+ except Exception:
+ jobs = []
+ exceptions.handle(request,
+ _("Unable to fetch jobs."))
+
+ choices = [(job.id, job.name)
+ for job in jobs]
+
+ return choices
+
+ class Meta(object):
+ name = _("Job")
+ help_text_template = (
+ "project/data_processing.jobs/_launch_job_help.html")
+
+
+class JobExecutionExistingGeneralConfigAction(JobExecutionGeneralConfigAction):
+ cluster = forms.ChoiceField(
+ label=_("Cluster"),
+ initial=(None, "None"),
+ widget=forms.Select(attrs={"class": "cluster_choice"}))
+
+ def populate_cluster_choices(self, request, context):
+ try:
+ clusters = saharaclient.cluster_list(request)
+ except Exception:
+ clusters = []
+ exceptions.handle(request,
+ _("Unable to fetch clusters."))
+
+ choices = [(cluster.id, cluster.name)
+ for cluster in clusters]
+
+ return choices
+
+ class Meta(object):
+ name = _("Job")
+ help_text_template = (
+ "project/data_processing.jobs/_launch_job_help.html")
+
+
+def _merge_interface_with_configs(interface, job_configs):
+ interface_by_mapping = {(arg['mapping_type'], arg['location']): arg
+ for arg in interface}
+ mapped_types = ("configs", "params")
+ mapped_configs = {
+ (mapping_type, key): value for mapping_type in mapped_types
+ for key, value in job_configs.get(mapping_type, {}).items()
+ }
+ for index, arg in enumerate(job_configs.get('args', [])):
+ mapped_configs['args', str(index)] = arg
+ free_arguments, interface_arguments = {}, {}
+ for mapping, value in mapped_configs.items():
+ if mapping in interface_by_mapping:
+ arg = interface_by_mapping[mapping]
+ interface_arguments[arg['id']] = value
+ else:
+ free_arguments[mapping] = value
+ configs = {"configs": {}, "params": {}, "args": {}}
+ for mapping, value in free_arguments.items():
+ mapping_type, location = mapping
+ configs[mapping_type][location] = value
+ configs["args"] = [
+ value for key, value in sorted(configs["args"].items(),
+ key=lambda x: int(x[0]))]
+ return configs, interface_arguments
+
+
+class JobConfigAction(workflows.Action):
+ MAIN_CLASS = "edp.java.main_class"
+ JAVA_OPTS = "edp.java.java_opts"
+ EDP_MAPPER = "edp.streaming.mapper"
+ EDP_REDUCER = "edp.streaming.reducer"
+ EDP_PREFIX = "edp."
+ EDP_HBASE_COMMON_LIB = "edp.hbase_common_lib"
+ EDP_ADAPT_FOR_OOZIE = "edp.java.adapt_for_oozie"
+ EDP_ADAPT_SPARK_SWIFT = "edp.spark.adapt_for_swift"
+
+ property_name = forms.ChoiceField(
+ required=False,
+ )
+
+ job_configs = forms.CharField(
+ required=False,
+ widget=forms.HiddenInput())
+
+ job_params = forms.CharField(
+ required=False,
+ widget=forms.HiddenInput())
+
+ job_args_array = forms.CharField(
+ required=False,
+ widget=forms.HiddenInput())
+
+ job_type = forms.CharField(
+ required=False,
+ widget=forms.HiddenInput())
+
+ main_class = forms.CharField(label=_("Main Class"),
+ required=False)
+
+ java_opts = forms.CharField(label=_("Java Opts"),
+ required=False)
+
+ streaming_mapper = forms.CharField(label=_("Mapper"))
+
+ streaming_reducer = forms.CharField(label=_("Reducer"))
+
+ hbase_common_lib = forms.BooleanField(
+ label=_("Use HBase Common library"),
+ help_text=_("Run HBase EDP Jobs with common HBase library on HDFS"),
+ required=False, initial=True)
+
+ adapt_oozie = forms.BooleanField(
+ label=_("Adapt For Oozie"),
+ help_text=_("Automatically modify the Hadoop configuration"
+ " so that job config values are set and so that"
+ " Oozie will handle exit codes correctly."),
+ required=False, initial=True)
+
+ adapt_spark_swift = forms.BooleanField(
+ label=_("Enable Swift Paths"),
+ help_text=_("Modify the configuration so that swift URLs can "
+ "be dereferenced through HDFS at runtime."),
+ required=False, initial=True)
+
+ def __init__(self, request, *args, **kwargs):
+ super(JobConfigAction, self).__init__(request, *args, **kwargs)
+ job_ex_id = request.REQUEST.get("job_execution_id")
+ if job_ex_id is not None:
+ job_ex = saharaclient.job_execution_get(request, job_ex_id)
+ job = saharaclient.job_get(request, job_ex.job_id)
+ job_configs, interface_args = _merge_interface_with_configs(
+ job.interface, job_ex.job_configs)
+ edp_configs = {}
+
+ if 'configs' in job_configs:
+ configs, edp_configs = (
+ self.clean_edp_configs(job_configs['configs']))
+ self.fields['job_configs'].initial = (
+ json.dumps(configs))
+
+ if 'params' in job_configs:
+ self.fields['job_params'].initial = (
+ json.dumps(job_configs['params']))
+
+ if 'args' in job_configs:
+ self.fields['job_args_array'].initial = (
+ json.dumps(job_configs['args']))
+
+ if self.MAIN_CLASS in edp_configs:
+ self.fields['main_class'].initial = (
+ edp_configs[self.MAIN_CLASS])
+ if self.JAVA_OPTS in edp_configs:
+ self.fields['java_opts'].initial = (
+ edp_configs[self.JAVA_OPTS])
+
+ if self.EDP_MAPPER in edp_configs:
+ self.fields['streaming_mapper'].initial = (
+ edp_configs[self.EDP_MAPPER])
+ if self.EDP_REDUCER in edp_configs:
+ self.fields['streaming_reducer'].initial = (
+ edp_configs[self.EDP_REDUCER])
+ if self.EDP_HBASE_COMMON_LIB in edp_configs:
+ self.fields['hbase_common_lib'].initial = (
+ edp_configs[self.EDP_HBASE_COMMON_LIB])
+ if self.EDP_ADAPT_FOR_OOZIE in edp_configs:
+ self.fields['adapt_oozie'].initial = (
+ edp_configs[self.EDP_ADAPT_FOR_OOZIE])
+ if self.EDP_ADAPT_SPARK_SWIFT in edp_configs:
+ self.fields['adapt_spark_swift'].initial = (
+ edp_configs[self.EDP_ADAPT_SPARK_SWIFT])
+
+ def clean(self):
+ cleaned_data = super(workflows.Action, self).clean()
+ job_type = cleaned_data.get("job_type", None)
+
+ if job_type != "MapReduce.Streaming":
+ if "streaming_mapper" in self._errors:
+ del self._errors["streaming_mapper"]
+ if "streaming_reducer" in self._errors:
+ del self._errors["streaming_reducer"]
+
+ return cleaned_data
+
+ def populate_property_name_choices(self, request, context):
+ job_id = request.REQUEST.get("job_id") or request.REQUEST.get("job")
+ job_type = saharaclient.job_get(request, job_id).type
+ job_configs = (
+ saharaclient.job_get_configs(request, job_type).job_config)
+ choices = [(param['value'], param['name'])
+ for param in job_configs['configs']]
+ return choices
+
+ def clean_edp_configs(self, configs):
+ edp_configs = {}
+ for key, value in six.iteritems(configs):
+ if key.startswith(self.EDP_PREFIX):
+ edp_configs[key] = value
+ for rmkey in edp_configs.keys():
+ # remove all configs handled via other controls
+ # so they do not show up in the free entry inputs
+ if rmkey in [self.EDP_HBASE_COMMON_LIB,
+ self.EDP_MAPPER,
+ self.EDP_REDUCER,
+ self.MAIN_CLASS,
+ self.JAVA_OPTS,
+ self.EDP_ADAPT_FOR_OOZIE,
+ self.EDP_ADAPT_SPARK_SWIFT]:
+ del configs[rmkey]
+ return (configs, edp_configs)
+
+ class Meta(object):
+ name = _("Configure")
+ help_text_template = (
+ "project/data_processing.jobs/_launch_job_configure_help.html")
+
+
+class JobExecutionGeneralConfig(workflows.Step):
+ action_class = JobExecutionGeneralConfigAction
+
+ def contribute(self, data, context):
+ for k, v in data.items():
+ if k in ["job_input", "job_output"]:
+ context["job_general_" + k] = None if (v in [None, ""]) else v
+ else:
+ context["job_general_" + k] = v
+
+ return context
+
+
+class JobExecutionExistingGeneralConfig(workflows.Step):
+ action_class = JobExecutionExistingGeneralConfigAction
+
+ def contribute(self, data, context):
+ for k, v in data.items():
+ if k in ["job_input", "job_output"]:
+ context["job_general_" + k] = None if (v in [None, ""]) else v
+ else:
+ context["job_general_" + k] = v
+
+ return context
+
+
+class JobConfig(workflows.Step):
+ action_class = JobConfigAction
+ template_name = 'project/data_processing.jobs/config_template.html'
+
+ def contribute(self, data, context):
+ job_config = self.clean_configs(
+ json.loads(data.get("job_configs", '{}')))
+ job_params = self.clean_configs(
+ json.loads(data.get("job_params", '{}')))
+ job_args_array = self.clean_configs(
+ json.loads(data.get("job_args_array", '[]')))
+ job_type = data.get("job_type", '')
+
+ context["job_type"] = job_type
+ context["job_config"] = {"configs": job_config}
+ context["job_config"]["args"] = job_args_array
+
+ if job_type in ["Java", "Spark", "Storm"]:
+ context["job_config"]["configs"][JobConfigAction.MAIN_CLASS] = (
+ data.get("main_class", ""))
+ context["job_config"]["configs"][JobConfigAction.JAVA_OPTS] = (
+ data.get("java_opts", ""))
+ context["job_config"]["configs"][
+ JobConfigAction.EDP_HBASE_COMMON_LIB] = (
+ data.get("hbase_common_lib", True))
+ if job_type == "Java":
+ context["job_config"]["configs"][
+ JobConfigAction.EDP_ADAPT_FOR_OOZIE] = (
+ data.get("adapt_oozie", True))
+ if job_type == "Spark":
+ context["job_config"]["configs"][
+ JobConfigAction.EDP_ADAPT_SPARK_SWIFT] = (
+ data.get("adapt_spark_swift", True))
+ elif job_type == "MapReduce.Streaming":
+ context["job_config"]["configs"][JobConfigAction.EDP_MAPPER] = (
+ data.get("streaming_mapper", ""))
+ context["job_config"]["configs"][JobConfigAction.EDP_REDUCER] = (
+ data.get("streaming_reducer", ""))
+ else:
+ context["job_config"]["params"] = job_params
+
+ return context
+
+ @staticmethod
+ def clean_configs(configs):
+ cleaned_conf = None
+ if isinstance(configs, dict):
+ cleaned_conf = dict([(k.strip(), v.strip())
+ for k, v in configs.items()
+ if len(v.strip()) > 0 and len(k.strip()) > 0])
+ elif isinstance(configs, list):
+ cleaned_conf = list([v.strip() for v in configs
+ if len(v.strip()) > 0])
+ return cleaned_conf
+
+
+class NewClusterConfigAction(c_flow.GeneralConfigAction):
+ persist_cluster = forms.BooleanField(
+ label=_("Persist cluster after job exit"),
+ required=False)
+
+ class Meta(object):
+ name = _("Configure Cluster")
+ help_text_template = (
+ "project/data_processing.clusters/_configure_general_help.html")
+
+
+class ClusterGeneralConfig(workflows.Step):
+ action_class = NewClusterConfigAction
+ contributes = ("hidden_configure_field", )
+
+ def contribute(self, data, context):
+ for k, v in data.items():
+ context["cluster_general_" + k] = v
+
+ return context
+
+
+class JobExecutionInterfaceConfigAction(workflows.Action):
+
+ def __init__(self, request, *args, **kwargs):
+ super(JobExecutionInterfaceConfigAction, self).__init__(
+ request, *args, **kwargs)
+ job_id = (request.GET.get("job_id")
+ or request.POST.get("job"))
+ job = saharaclient.job_get(request, job_id)
+ interface = job.interface or []
+ interface_args = {}
+
+ job_ex_id = request.REQUEST.get("job_execution_id")
+ if job_ex_id is not None:
+ job_ex = saharaclient.job_execution_get(request, job_ex_id)
+ job = saharaclient.job_get(request, job_ex.job_id)
+ job_configs, interface_args = _merge_interface_with_configs(
+ job.interface, job_ex.job_configs)
+
+ for argument in interface:
+ field = forms.CharField(
+ required=argument.get('required'),
+ label=argument['name'],
+ initial=(interface_args.get(argument['id']) or
+ argument.get('default')),
+ help_text=argument.get('description'),
+ widget=forms.TextInput()
+ )
+ self.fields['argument_%s' % argument['id']] = field
+ self.fields['argument_ids'] = forms.CharField(
+ initial=json.dumps({argument['id']: argument['name']
+ for argument in interface}),
+ widget=forms.HiddenInput()
+ )
+
+ def clean(self):
+ cleaned_data = super(JobExecutionInterfaceConfigAction, self).clean()
+ return cleaned_data
+
+ class Meta(object):
+ name = _("Interface Arguments")
+
+
+class JobExecutionInterfaceConfig(workflows.Step):
+ action_class = JobExecutionInterfaceConfigAction
+
+ def contribute(self, data, context):
+ for k, v in data.items():
+ context[k] = v
+ return context
+
+
+class LaunchJob(workflows.Workflow):
+ slug = "launch_job"
+ name = _("Launch Job")
+ finalize_button_name = _("Launch")
+ success_message = _("Job launched")
+ failure_message = _("Could not launch job")
+ success_url = "horizon:project:data_processing.job_executions:index"
+ default_steps = (JobExecutionExistingGeneralConfig, JobConfig,
+ JobExecutionInterfaceConfig)
+
+ def handle(self, request, context):
+ argument_ids = json.loads(context['argument_ids'])
+ interface = {name: context["argument_" + str(arg_id)]
+ for arg_id, name in argument_ids.items()}
+
+ saharaclient.job_execution_create(
+ request,
+ context["job_general_job"],
+ context["job_general_cluster"],
+ context["job_general_job_input"],
+ context["job_general_job_output"],
+ context["job_config"],
+ interface)
+ return True
+
+
+class SelectHadoopPluginAction(t_flows.SelectPluginAction):
+ def __init__(self, request, *args, **kwargs):
+ super(SelectHadoopPluginAction, self).__init__(request,
+ *args,
+ **kwargs)
+ self.fields["job_id"] = forms.ChoiceField(
+ label=_("Plugin name"),
+ initial=request.GET.get("job_id") or request.POST.get("job_id"),
+ widget=forms.HiddenInput(attrs={"class": "hidden_create_field"}))
+
+ self.fields["job_configs"] = forms.ChoiceField(
+ label=_("Job configs"),
+ widget=forms.HiddenInput(attrs={"class": "hidden_create_field"}))
+
+ self.fields["job_args"] = forms.ChoiceField(
+ label=_("Job args"),
+ widget=forms.HiddenInput(attrs={"class": "hidden_create_field"}))
+
+ self.fields["job_params"] = forms.ChoiceField(
+ label=_("Job params"),
+ widget=forms.HiddenInput(attrs={"class": "hidden_create_field"}))
+
+ job_ex_id = request.REQUEST.get("job_execution_id")
+ if job_ex_id is not None:
+ self.fields["job_execution_id"] = forms.ChoiceField(
+ label=_("Job Execution ID"),
+ initial=job_ex_id,
+ widget=forms.HiddenInput(
+ attrs={"class": "hidden_create_field"}))
+
+ job_configs = (
+ saharaclient.job_execution_get(request,
+ job_ex_id).job_configs)
+
+ if "configs" in job_configs:
+ self.fields["job_configs"].initial = (
+ json.dumps(job_configs["configs"]))
+ if "params" in job_configs:
+ self.fields["job_params"].initial = (
+ json.dumps(job_configs["params"]))
+ if "args" in job_configs:
+ self.fields["job_args"].initial = (
+ json.dumps(job_configs["args"]))
+
+ class Meta(object):
+ name = _("Select plugin and hadoop version for cluster")
+ help_text_template = ("project/data_processing.clusters/"
+ "_create_general_help.html")
+
+
+class SelectHadoopPlugin(workflows.Step):
+ action_class = SelectHadoopPluginAction
+
+
+class ChosePluginVersion(workflows.Workflow):
+    slug = "launch_job"
+    name = _("Launch Job")
+    finalize_button_name = _("Create")
+    success_message = _("Created")
+    failure_message = _("Could not create")
+    success_url = "horizon:project:data_processing.cluster_templates:index"
+    default_steps = (SelectHadoopPlugin,)
+
+
+class LaunchJobNewCluster(workflows.Workflow):
+ slug = "launch_job"
+ name = _("Launch Job")
+ finalize_button_name = _("Launch")
+ success_message = _("Job launched")
+ failure_message = _("Could not launch job")
+ success_url = "horizon:project:data_processing.jobs:index"
+ default_steps = (ClusterGeneralConfig,
+ JobExecutionGeneralConfig,
+ JobConfig,
+ JobExecutionInterfaceConfig)
+
+ def handle(self, request, context):
+ node_groups = None
+
+ plugin, hadoop_version = (
+ whelpers.get_plugin_and_hadoop_version(request))
+
+ ct_id = context["cluster_general_cluster_template"] or None
+ user_keypair = context["cluster_general_keypair"] or None
+
+ argument_ids = json.loads(context['argument_ids'])
+ interface = {name: context["argument_" + str(arg_id)]
+ for arg_id, name in argument_ids.items()}
+
+ try:
+ cluster = saharaclient.cluster_create(
+ request,
+ context["cluster_general_cluster_name"],
+ plugin, hadoop_version,
+ cluster_template_id=ct_id,
+ default_image_id=context["cluster_general_image"],
+ description=context["cluster_general_description"],
+ node_groups=node_groups,
+ user_keypair_id=user_keypair,
+ is_transient=not(context["cluster_general_persist_cluster"]),
+ net_id=context.get(
+ "cluster_general_neutron_management_network",
+ None))
+ except Exception:
+ exceptions.handle(request,
+ _("Unable to create new cluster for job."))
+ return False
+
+ try:
+ saharaclient.job_execution_create(
+ request,
+ context["job_general_job"],
+ cluster.id,
+ context["job_general_job_input"],
+ context["job_general_job_output"],
+ context["job_config"],
+ interface)
+ except Exception:
+ exceptions.handle(request,
+ _("Unable to launch job."))
+ return False
+ return True
diff --git a/sahara_dashboard/content/data_processing/nodegroup_templates/__init__.py b/sahara_dashboard/content/data_processing/nodegroup_templates/__init__.py
new file mode 100644
index 00000000..e69de29b
diff --git a/sahara_dashboard/content/data_processing/nodegroup_templates/panel.py b/sahara_dashboard/content/data_processing/nodegroup_templates/panel.py
new file mode 100644
index 00000000..76d2cf8d
--- /dev/null
+++ b/sahara_dashboard/content/data_processing/nodegroup_templates/panel.py
@@ -0,0 +1,28 @@
+# Licensed under the Apache License, Version 2.0 (the "License");
+# you may not use this file except in compliance with the License.
+# You may obtain a copy of the License at
+#
+# http://www.apache.org/licenses/LICENSE-2.0
+#
+# Unless required by applicable law or agreed to in writing, software
+# distributed under the License is distributed on an "AS IS" BASIS,
+# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or
+# implied.
+# See the License for the specific language governing permissions and
+# limitations under the License.
+
+from django.utils.translation import ugettext_lazy as _
+
+import horizon
+
+from openstack_dashboard.dashboards.project import dashboard
+
+
+class NodegroupTemplatesPanel(horizon.Panel):
+ name = _("Node Group Templates")
+ slug = 'data_processing.nodegroup_templates'
+ permissions = (('openstack.services.data-processing',
+ 'openstack.services.data_processing'),)
+
+
+dashboard.Project.register(NodegroupTemplatesPanel)
diff --git a/sahara_dashboard/content/data_processing/nodegroup_templates/tables.py b/sahara_dashboard/content/data_processing/nodegroup_templates/tables.py
new file mode 100644
index 00000000..766bbf62
--- /dev/null
+++ b/sahara_dashboard/content/data_processing/nodegroup_templates/tables.py
@@ -0,0 +1,110 @@
+# Licensed under the Apache License, Version 2.0 (the "License");
+# you may not use this file except in compliance with the License.
+# You may obtain a copy of the License at
+#
+# http://www.apache.org/licenses/LICENSE-2.0
+#
+# Unless required by applicable law or agreed to in writing, software
+# distributed under the License is distributed on an "AS IS" BASIS,
+# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or
+# implied.
+# See the License for the specific language governing permissions and
+# limitations under the License.
+
+import logging
+
+from django.template import defaultfilters as filters
+from django.utils.translation import ugettext_lazy as _
+from django.utils.translation import ungettext_lazy
+
+from horizon import tables
+from sahara_dashboard.api import sahara as saharaclient
+
+LOG = logging.getLogger(__name__)
+
+
+class NodeGroupTemplatesFilterAction(tables.FilterAction):
+ filter_type = "server"
+ filter_choices = (('name', _("Name"), True),
+ ('plugin_name', _("Plugin"), True),
+ ('hadoop_version', _("Version"), True))
+
+
+class CreateNodegroupTemplate(tables.LinkAction):
+ name = "create"
+ verbose_name = _("Create Template")
+ url = ("horizon:project:data_processing.nodegroup_templates:"
+ "create-nodegroup-template")
+ classes = ("ajax-modal", "create-nodegrouptemplate-btn")
+ icon = "plus"
+
+
+class ConfigureNodegroupTemplate(tables.LinkAction):
+ name = "configure"
+ verbose_name = _("Configure Template")
+ url = ("horizon:project:data_processing.nodegroup_templates:"
+ "configure-nodegroup-template")
+ classes = ("ajax-modal", "configure-nodegrouptemplate-btn")
+ icon = "plus"
+ attrs = {"style": "display: none"}
+
+
+class CopyTemplate(tables.LinkAction):
+ name = "copy"
+ verbose_name = _("Copy Template")
+ url = "horizon:project:data_processing.nodegroup_templates:copy"
+ classes = ("ajax-modal", )
+
+
+class EditTemplate(tables.LinkAction):
+ name = "edit"
+ verbose_name = _("Edit Template")
+ url = "horizon:project:data_processing.nodegroup_templates:edit"
+ classes = ("ajax-modal", )
+
+
+class DeleteTemplate(tables.DeleteAction):
+ @staticmethod
+ def action_present(count):
+ return ungettext_lazy(
+ u"Delete Template",
+ u"Delete Templates",
+ count
+ )
+
+ @staticmethod
+ def action_past(count):
+ return ungettext_lazy(
+ u"Deleted Template",
+ u"Deleted Templates",
+ count
+ )
+
+ def delete(self, request, template_id):
+ saharaclient.nodegroup_template_delete(request, template_id)
+
+
+class NodegroupTemplatesTable(tables.DataTable):
+ name = tables.Column(
+ "name",
+ verbose_name=_("Name"),
+ link="horizon:project:data_processing.nodegroup_templates:details")
+ plugin_name = tables.Column("plugin_name",
+ verbose_name=_("Plugin"))
+ hadoop_version = tables.Column("hadoop_version",
+ verbose_name=_("Version"))
+ node_processes = tables.Column("node_processes",
+ verbose_name=_("Node Processes"),
+ wrap_list=True,
+ filters=(filters.unordered_list,))
+
+ class Meta(object):
+ name = "nodegroup_templates"
+ verbose_name = _("Node Group Templates")
+ table_actions = (CreateNodegroupTemplate,
+ ConfigureNodegroupTemplate,
+ DeleteTemplate,
+ NodeGroupTemplatesFilterAction,)
+ row_actions = (EditTemplate,
+ CopyTemplate,
+ DeleteTemplate)
diff --git a/sahara_dashboard/content/data_processing/nodegroup_templates/tabs.py b/sahara_dashboard/content/data_processing/nodegroup_templates/tabs.py
new file mode 100644
index 00000000..68d85697
--- /dev/null
+++ b/sahara_dashboard/content/data_processing/nodegroup_templates/tabs.py
@@ -0,0 +1,100 @@
+# Licensed under the Apache License, Version 2.0 (the "License");
+# you may not use this file except in compliance with the License.
+# You may obtain a copy of the License at
+#
+# http://www.apache.org/licenses/LICENSE-2.0
+#
+# Unless required by applicable law or agreed to in writing, software
+# distributed under the License is distributed on an "AS IS" BASIS,
+# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or
+# implied.
+# See the License for the specific language governing permissions and
+# limitations under the License.
+
+import logging
+
+from django.utils.translation import ugettext_lazy as _
+
+from horizon import exceptions
+from horizon import tabs
+
+from openstack_dashboard.api import network
+from openstack_dashboard.api import nova
+from sahara_dashboard.api import sahara as saharaclient
+
+
+from sahara_dashboard.content. \
+ data_processing.utils import workflow_helpers as helpers
+
+LOG = logging.getLogger(__name__)
+
+
class GeneralTab(tabs.Tab):
    """Detail-page tab showing general node group template information."""

    name = _("General Info")
    slug = "nodegroup_template_details_tab"
    template_name = (
        "project/data_processing.nodegroup_templates/_details.html")

    def get_context_data(self, request):
        tmpl_id = self.tab_group.kwargs['template_id']
        try:
            tmpl = saharaclient.nodegroup_template_get(request, tmpl_id)
        except Exception as e:
            # Without the template nothing else can be resolved; return an
            # empty placeholder so the template still renders.
            LOG.error(
                "Unable to fetch node group template details: %s" % str(e))
            return {"template": {}}

        try:
            flavor = nova.flavor_get(request, tmpl.flavor_id)
        except Exception:
            flavor = {}
            exceptions.handle(request,
                              _("Unable to fetch flavor for template."))

        pool_name = None
        if tmpl.floating_ip_pool:
            try:
                pool_name = self._get_floating_ip_pool_name(
                    request, tmpl.floating_ip_pool)
            except Exception:
                exceptions.handle(request,
                                  _("Unable to fetch floating ip pools."))

        groups = helpers.get_security_groups(request, tmpl.security_groups)

        return {"template": tmpl,
                "flavor": flavor,
                "floating_ip_pool_name": pool_name,
                "security_groups": groups}

    def _get_floating_ip_pool_name(self, request, pool_id):
        # Resolve the pool id to a display name, falling back to the raw
        # id when no pool matches.
        names = (pool.name
                 for pool in network.floating_ip_pools_list(request)
                 if pool.id == pool_id)
        return next(names, pool_id)
+
+
class ConfigsTab(tabs.Tab):
    """Detail-page tab listing the template's service configurations."""

    name = _("Service Configurations")
    slug = "nodegroup_template_service_configs_tab"
    template_name = (
        "project/data_processing.nodegroup_templates/_service_confs.html")

    def get_context_data(self, request):
        tmpl_id = self.tab_group.kwargs['template_id']
        try:
            tmpl = saharaclient.nodegroup_template_get(request, tmpl_id)
        except Exception as e:
            # Fall back to an empty mapping so the page still renders.
            tmpl = {}
            LOG.error(
                "Unable to fetch node group template details: %s" % str(e))
        return {"template": tmpl}
+
+
class NodegroupTemplateDetailsTabs(tabs.TabGroup):
    """Tab group for the node group template details page."""
    slug = "nodegroup_template_details"
    tabs = (GeneralTab, ConfigsTab, )
    # Remember the last selected tab across page loads.
    sticky = True
diff --git a/sahara_dashboard/content/data_processing/nodegroup_templates/templates/data_processing.nodegroup_templates/_configure_general_help.html b/sahara_dashboard/content/data_processing/nodegroup_templates/templates/data_processing.nodegroup_templates/_configure_general_help.html
new file mode 100644
index 00000000..97e1cb8e
--- /dev/null
+++ b/sahara_dashboard/content/data_processing/nodegroup_templates/templates/data_processing.nodegroup_templates/_configure_general_help.html
@@ -0,0 +1,23 @@
+{% load i18n horizon %}
+
+
+ {% blocktrans %}This Node Group Template will be created for:{% endblocktrans %}
+
+ {% blocktrans %}Plugin{% endblocktrans %}: {{ plugin_name }}
+
+ {% blocktrans %}Version{% endblocktrans %}: {{ hadoop_version }}
+
+
+
+ {% blocktrans %}The Node Group Template object specifies the processes
+ that will be launched on each instance. Check one or more processes.
+ When processes are selected, you may set node scoped
+ configurations on corresponding tabs.{% endblocktrans %}
+
+
+ {% blocktrans %}You must choose a flavor to determine the size (VCPUs, memory and storage) of all launched VMs.{% endblocktrans %}
+
+
+ {% blocktrans %}Data Processing provides different storage location options. You may choose Ephemeral Drive or a Cinder Volume to be attached to instances.{% endblocktrans %}
+
+ {% blocktrans %}Select which type of job that you want to run.
+ This choice will dictate which steps are required to successfully
+ execute your job.
+ {% endblocktrans %}
+
+ {% blocktrans %}Select which plugin and version that you
+ want to use to create your cluster.{% endblocktrans %}
+
diff --git a/sahara_dashboard/content/data_processing/wizard/templates/data_processing.wizard/cluster_guide.html b/sahara_dashboard/content/data_processing/wizard/templates/data_processing.wizard/cluster_guide.html
new file mode 100644
index 00000000..a7409c8d
--- /dev/null
+++ b/sahara_dashboard/content/data_processing/wizard/templates/data_processing.wizard/cluster_guide.html
@@ -0,0 +1,163 @@
+{% extends 'base.html' %}
+{% load i18n %}
+{% block title %}{% trans "Data Processing" %}{% endblock %}
+
+{% block main %}
+
+
+
+
+
+
{% blocktrans %}The first step is to determine which type of
+ cluster you want to run. You may have several choices
+ available depending on the configuration of your system.
+ Click on "choose plugin" to bring up the list of data
+ processing plugins. There you will be able to choose the
+ data processing plugin along with the version number.
+ Choosing this up front will allow the rest of the cluster
+ creation steps to focus only on options that are pertinent
+ to your desired cluster type.{% endblocktrans %}
+
{% trans "Current choice:" %}
+ {% if request.session.plugin_name and request.session.plugin_version %}
+
+ {% trans "Plugin:" %}
+ {{ request.session.plugin_name }}
+ {% trans "Version:" %}
+ {{ request.session.plugin_version }}
+
+ {% else %}
+
+ {% trans "No plugin chosen" %}
+
+ {% endif %}
+
+
+
+
+
+
+
{% blocktrans %}Next, you need to define the different
+ types of machines in your cluster. This is done by
+ defining a Node Group Template for each type of
+ machine. A very common case is where you
+ need to have one or more machines running a "master"
+ set of processes while another set of machines need
+ to be running the "worker" processes. Here,
+ you will define the Node Group Template for your
+ "master" node(s).
+ {% endblocktrans %}
+
{% trans "Current choice:" %}
+ {% if request.session.master_name %}
+
+ {% trans "Master Node Group Template:" %}
+ {{ request.session.master_name }}
+
+ {% else %}
+
+ {% trans "No Master Node Group Template Created" %}
+
+ {% endif %}
+
+
+
+
+
+
+
{% blocktrans %}Repeat the Node Group Template
+ creation process, but this time you are creating
+ your "worker" Node Group Template.{% endblocktrans %}
+
{% trans "Current choice:" %}
+ {% if request.session.worker_name %}
+
+ {% trans "Worker Node Group Template:" %}
+ {{ request.session.worker_name }}
+
+ {% else %}
+
+ {% trans "No Worker Node Group Template Created" %}
+
+ {% endif %}
+
+
+
+
+
+
+
{% blocktrans %}Now you need to set the layout of your
+ cluster. By
+ creating a Cluster Template, you will be choosing the
+ number of instances of each Node Group Template that
+ will appear in your cluster. Additionally,
+ you will have a chance to set any cluster-specific
+ configuration items in the additional tabs on the
+ create Cluster Template form.{% endblocktrans %}
+
{% trans "Current choice:" %}
+ {% if request.session.guide_cluster_template_name %}
+
+          {% trans "Cluster Template:" %}
+ {{ request.session.guide_cluster_template_name }}
+
+ {% else %}
+
+ {% trans "No Cluster Template Created" %}
+
+ {% endif %}
+
+
+
+
+
+
+
{% blocktrans %}You are now ready to
+ launch your cluster. When you click on the link
+ below, you will need to give your cluster a name,
+ choose the Cluster Template to use and choose which
+ image to use to build your instances. After you
+ click on "Create", your instances will begin to
+ spawn. Your cluster should be operational in a few
+ minutes.{% endblocktrans %}
+
+
+{% endblock %}
diff --git a/sahara_dashboard/content/data_processing/wizard/templates/data_processing.wizard/job_type_select.html b/sahara_dashboard/content/data_processing/wizard/templates/data_processing.wizard/job_type_select.html
new file mode 100644
index 00000000..7897a6c9
--- /dev/null
+++ b/sahara_dashboard/content/data_processing/wizard/templates/data_processing.wizard/job_type_select.html
@@ -0,0 +1,7 @@
+{% extends 'base.html' %}
+{% load i18n %}
+{% block title %}{% trans "Choose job type" %}{% endblock %}
+
+{% block main %}
+ {% include 'project/data_processing.wizard/_job_type_select.html' %}
+{% endblock %}
diff --git a/sahara_dashboard/content/data_processing/wizard/templates/data_processing.wizard/jobex_guide.html b/sahara_dashboard/content/data_processing/wizard/templates/data_processing.wizard/jobex_guide.html
new file mode 100644
index 00000000..70af3475
--- /dev/null
+++ b/sahara_dashboard/content/data_processing/wizard/templates/data_processing.wizard/jobex_guide.html
@@ -0,0 +1,114 @@
+{% extends 'base.html' %}
+{% load i18n %}
+{% block title %}{% trans "Data Processing" %}{% endblock %}
+
+{% block main %}
+
+
+
+
+
+
+ {% blocktrans %}First, select which type of job that
+ you want to run. This choice will determine which
+          other steps are required.
+ {% endblocktrans %}
+
{% trans "Current type:" %}
+ {% if request.session.guide_job_type %}
+
+ {{ request.session.guide_job_type}}
+
+ {% else %}
+
+ {% trans "No type chosen" %}
+
+ {% endif %}
+
+
+
+
+ {% if request.session.guide_job_type %}
+ {% if view.show_data_sources %}
+
+
+
{% blocktrans %}Data Sources are what your
+ job uses for input and output. Depending on the type
+ of job you will be running, you may need to define one
+ or more data sources. You can create multiple data
+ sources by repeating this step.
+ {% endblocktrans %}
+
{% blocktrans %}Define your Job Template.
+ This is where you choose the type of job that you
+ want to run (Pig, Java Action, Spark, etc) and choose
+ or upload the files necessary to run it. The inputs
+ and outputs will be defined later.
+ {% endblocktrans %}
+
{% trans "Job template:" %}
+ {% if request.session.guide_job_name %}
+
+ {{ request.session.guide_job_name }}
+
+ {% else %}
+
+ {% trans "No job template created" %}
+
+ {% endif %}
+
+
+
+
+
+
+
{% blocktrans %}Launch your job. When
+ launching, you may need to choose your input and
+ output data sources. This is where you would also
+ add any special configuration values, parameters,
+ or arguments that you need to pass along
+ to your job.
+ {% endblocktrans %}
+
+
+{% endblock %}
diff --git a/sahara_dashboard/content/data_processing/wizard/templates/data_processing.wizard/plugin_select.html b/sahara_dashboard/content/data_processing/wizard/templates/data_processing.wizard/plugin_select.html
new file mode 100644
index 00000000..4af7eebc
--- /dev/null
+++ b/sahara_dashboard/content/data_processing/wizard/templates/data_processing.wizard/plugin_select.html
@@ -0,0 +1,7 @@
+{% extends 'base.html' %}
+{% load i18n %}
+{% block title %}{% trans "Choose plugin and version" %}{% endblock %}
+
+{% block main %}
+ {% include 'project/data_processing.wizard/_plugin_select.html' %}
+{% endblock %}
diff --git a/sahara_dashboard/content/data_processing/wizard/templates/data_processing.wizard/wizard.html b/sahara_dashboard/content/data_processing/wizard/templates/data_processing.wizard/wizard.html
new file mode 100644
index 00000000..555aa238
--- /dev/null
+++ b/sahara_dashboard/content/data_processing/wizard/templates/data_processing.wizard/wizard.html
@@ -0,0 +1,56 @@
+{% extends 'base.html' %}
+{% load i18n %}
+{% block title %}{% trans "Data Processing" %}{% endblock %}
+
+{% block main %}
+
+
+
+
+
+
{% blocktrans %}
+ Each of the Data Processing frameworks require a cluster of machines
+ in order to do the work they are assigned. A cluster is
+ formed by creating a set of Node Group Templates, combining
+ those into a Cluster Template and then launching a Cluster.
+ You can do each of those steps manually, or you can follow
+ this guide to help take you through the steps of
+ Cluster creation.
+ {% endblocktrans %}
+
{% blocktrans %}
+ In order to run a Data Processing job, you need to make
+ the files for your program available to the
+ Data Processing system, define where the input and output
+ need to go and create a Job Template that describes
+ how to run your job. Each of those steps can be done
+ manually or you can follow this guide to help take you
+ through the steps to run a job on an existing cluster.
+ {% endblocktrans %}
+
+
+{% endblock %}
diff --git a/sahara_dashboard/content/data_processing/wizard/tests.py b/sahara_dashboard/content/data_processing/wizard/tests.py
new file mode 100644
index 00000000..eba596e4
--- /dev/null
+++ b/sahara_dashboard/content/data_processing/wizard/tests.py
@@ -0,0 +1,59 @@
+# Licensed under the Apache License, Version 2.0 (the "License"); you may
+# not use this file except in compliance with the License. You may obtain
+# a copy of the License at
+#
+# http://www.apache.org/licenses/LICENSE-2.0
+#
+# Unless required by applicable law or agreed to in writing, software
+# distributed under the License is distributed on an "AS IS" BASIS, WITHOUT
+# WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. See the
+# License for the specific language governing permissions and limitations
+# under the License.
+
+from django.core.urlresolvers import reverse
+
+from openstack_dashboard.test import helpers as test
+
+
+INDEX_URL = reverse(
+ 'horizon:project:data_processing.wizard:index')
+CLUSTER_GUIDE_URL = reverse(
+ 'horizon:project:data_processing.wizard:cluster_guide')
+CLUSTER_GUIDE_RESET_URL = reverse(
+ 'horizon:project:data_processing.wizard:reset_cluster_guide',
+ kwargs={"reset_cluster_guide": "true"})
+JOB_GUIDE_URL = reverse(
+ 'horizon:project:data_processing.wizard:jobex_guide')
+JOB_GUIDE_RESET_URL = reverse(
+ 'horizon:project:data_processing.wizard:reset_jobex_guide',
+ kwargs={"reset_jobex_guide": "true"})
+
+
class DataProcessingClusterGuideTests(test.TestCase):
    """Smoke tests for the data processing wizard and guide views."""

    def test_index(self):
        """The wizard landing page renders with both guide entry points."""
        response = self.client.get(INDEX_URL)
        self.assertTemplateUsed(
            response, 'project/data_processing.wizard/wizard.html')
        self.assertContains(response, 'Data Processing Guides')
        self.assertContains(response, 'Cluster Creation Guide')

    def test_cluster_guide(self):
        """The cluster guide page renders its step headings."""
        response = self.client.get(CLUSTER_GUIDE_URL)
        self.assertTemplateUsed(
            response, 'project/data_processing.wizard/cluster_guide.html')
        self.assertContains(response, 'Guided Cluster Creation')
        self.assertContains(response, 'Current choice')

    def test_cluster_guide_reset(self):
        """Resetting the cluster guide redirects back to the guide."""
        response = self.client.get(CLUSTER_GUIDE_RESET_URL)
        self.assertRedirectsNoFollow(response, CLUSTER_GUIDE_URL)

    def test_jobex_guide(self):
        """The job execution guide page renders."""
        response = self.client.get(JOB_GUIDE_URL)
        self.assertTemplateUsed(
            response, 'project/data_processing.wizard/jobex_guide.html')
        self.assertContains(response, 'Guided Job Execution')

    def test_jobex_guide_reset(self):
        """Resetting the job guide redirects back to the guide."""
        response = self.client.get(JOB_GUIDE_RESET_URL)
        self.assertRedirectsNoFollow(response, JOB_GUIDE_URL)
diff --git a/sahara_dashboard/content/data_processing/wizard/urls.py b/sahara_dashboard/content/data_processing/wizard/urls.py
new file mode 100644
index 00000000..cec57a8b
--- /dev/null
+++ b/sahara_dashboard/content/data_processing/wizard/urls.py
@@ -0,0 +1,41 @@
+# Licensed under the Apache License, Version 2.0 (the "License");
+# you may not use this file except in compliance with the License.
+# You may obtain a copy of the License at
+#
+# http://www.apache.org/licenses/LICENSE-2.0
+#
+# Unless required by applicable law or agreed to in writing, software
+# distributed under the License is distributed on an "AS IS" BASIS,
+# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or
+# implied.
+# See the License for the specific language governing permissions and
+# limitations under the License.
+
+from django.conf.urls import patterns
+from django.conf.urls import url
+
+from sahara_dashboard.content. \
+ data_processing.wizard import views
+
+
# URL routes for the data processing wizard panel.
# NOTE(review): django.conf.urls.patterns() is deprecated since Django 1.8
# and removed in 1.10 -- a plain list of url() entries is the modern form;
# confirm the project's minimum Django version before migrating.
urlpatterns = patterns('',
                       url(r'^$', views.WizardView.as_view(), name='index'),
                       url(r'^cluster_guide$',
                           views.ClusterGuideView.as_view(),
                           name='cluster_guide'),
                       # Captured kwarg triggers clearing the guide state.
                       url(r'^cluster_guide/(?P<reset_cluster_guide>[^/]+)/$',
                           views.ResetClusterGuideView.as_view(),
                           name='reset_cluster_guide'),
                       url(r'^jobex_guide$',
                           views.JobExecutionGuideView.as_view(),
                           name='jobex_guide'),
                       url(r'^jobex_guide/(?P<reset_jobex_guide>[^/]+)/$',
                           views.ResetJobExGuideView.as_view(),
                           name='reset_jobex_guide'),
                       url(r'^plugin_select$',
                           views.PluginSelectView.as_view(),
                           name='plugin_select'),
                       url(r'^job_type_select$',
                           views.JobTypeSelectView.as_view(),
                           name='job_type_select'),
                       )
diff --git a/sahara_dashboard/content/data_processing/wizard/views.py b/sahara_dashboard/content/data_processing/wizard/views.py
new file mode 100644
index 00000000..3c753ca5
--- /dev/null
+++ b/sahara_dashboard/content/data_processing/wizard/views.py
@@ -0,0 +1,102 @@
+# Licensed under the Apache License, Version 2.0 (the "License");
+# you may not use this file except in compliance with the License.
+# You may obtain a copy of the License at
+#
+# http://www.apache.org/licenses/LICENSE-2.0
+#
+# Unless required by applicable law or agreed to in writing, software
+# distributed under the License is distributed on an "AS IS" BASIS,
+# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or
+# implied.
+# See the License for the specific language governing permissions and
+# limitations under the License.
+import logging
+
+from django.core.urlresolvers import reverse_lazy
+from django import http
+from django.utils.translation import ugettext_lazy as _
+from django.views import generic
+
+from horizon import exceptions
+from horizon import forms
+from horizon import views as horizon_views
+
+from sahara_dashboard.content.data_processing.utils \
+ import helpers
+import sahara_dashboard.content.data_processing.wizard \
+ .forms as wizforms
+
+
+LOG = logging.getLogger(__name__)
+
+
class WizardView(horizon_views.APIView):
    """Landing page offering the guided cluster / job creation flows."""
    template_name = 'project/data_processing.wizard/wizard.html'
    page_title = _("Data Processing Guides")

    def get_data(self, request, context, *args, **kwargs):
        try:
            # NOTE(review): looks like leftover placeholder data; nothing
            # visible here consumes it -- confirm before removing.
            context["test"] = "test data"
        except Exception:
            msg = _('Unable to show guides')
            exceptions.handle(self.request, msg)
        return context
+
+
class ClusterGuideView(horizon_views.APIView):
    """Step-by-step guide page for creating a cluster."""
    template_name = 'project/data_processing.wizard/cluster_guide.html'
    page_title = _("Guided Cluster Creation")
+
+
class ResetClusterGuideView(generic.RedirectView):
    """Clears the cluster guide session state, then redirects back."""
    pattern_name = 'horizon:project:data_processing.wizard:cluster_guide'
    # NOTE(review): permanent=True implies a 301, which browsers may cache
    # and thereafter skip this view (and its reset) -- confirm intent.
    permanent = True

    def get(self, request, *args, **kwargs):
        # Only string truthiness is checked, so any non-empty value in the
        # URL (including "false") triggers the reset.
        if kwargs["reset_cluster_guide"]:
            hlps = helpers.Helpers(request)
            hlps.reset_guide()
        return http.HttpResponseRedirect(reverse_lazy(self.pattern_name))
+
+
class JobExecutionGuideView(horizon_views.APIView):
    """Guided job execution page; adapts its steps to the chosen job type."""

    template_name = 'project/data_processing.wizard/jobex_guide.html'
    page_title = _("Guided Job Execution")

    def show_data_sources(self):
        """Return whether the "create data source" step applies.

        Spark, Storm and Java job types take their inputs as arguments
        rather than data sources, so the step is hidden for them.  When
        no job type has been chosen yet, default to showing the step.
        """
        # dict.get replaces the previous blanket ``except Exception``,
        # which existed only to cover a missing session key.
        return self.request.session.get("guide_job_type") not in (
            "Spark", "Storm", "Java")
+
+
class ResetJobExGuideView(generic.RedirectView):
    """Clears the job execution guide session state, then redirects back."""
    pattern_name = 'horizon:project:data_processing.wizard:jobex_guide'
    # NOTE(review): permanent=True implies a cacheable 301; browsers may
    # later skip this view (and its reset) -- confirm intent.
    permanent = True

    def get(self, request, *args, **kwargs):
        # Only string truthiness is checked, so any non-empty URL value
        # (including "false") triggers the reset.
        if kwargs["reset_jobex_guide"]:
            hlps = helpers.Helpers(request)
            hlps.reset_job_guide()
        return http.HttpResponseRedirect(reverse_lazy(self.pattern_name))
+
+
class PluginSelectView(forms.ModalFormView):
    """Modal form for choosing the plugin and version for the cluster guide."""

    form_class = wizforms.ChoosePluginForm
    success_url = reverse_lazy(
        'horizon:project:data_processing.wizard:cluster_guide')
    # Bug fix: ("ajax-modal") is just a parenthesized string; the trailing
    # comma is required to make this a tuple of CSS classes.
    classes = ("ajax-modal",)
    template_name = "project/data_processing.wizard/plugin_select.html"
    page_title = _("Choose plugin and version")
+
+
class JobTypeSelectView(forms.ModalFormView):
    """Modal form for choosing the job type for the guided job execution."""

    form_class = wizforms.ChooseJobTypeForm
    success_url = reverse_lazy(
        'horizon:project:data_processing.wizard:jobex_guide')
    # Bug fix: ("ajax-modal") is just a parenthesized string; the trailing
    # comma is required to make this a tuple of CSS classes.
    classes = ("ajax-modal",)
    template_name = "project/data_processing.wizard/job_type_select.html"
    page_title = _("Choose job type")
diff --git a/sahara_dashboard/enabled/_1810_data_processing_panel_group.py b/sahara_dashboard/enabled/_1810_data_processing_panel_group.py
new file mode 100644
index 00000000..1cb03bc3
--- /dev/null
+++ b/sahara_dashboard/enabled/_1810_data_processing_panel_group.py
@@ -0,0 +1,8 @@
+from django.utils.translation import ugettext_lazy as _
+
+# The slug of the panel group to be added to HORIZON_CONFIG. Required.
+PANEL_GROUP = 'data_processing'
+# The display name of the PANEL_GROUP. Required.
+PANEL_GROUP_NAME = _('Data Processing')
+# The slug of the dashboard the PANEL_GROUP associated with. Required.
+PANEL_GROUP_DASHBOARD = 'project'
diff --git a/sahara_dashboard/enabled/_1815_data_processing_wizard_panel.py b/sahara_dashboard/enabled/_1815_data_processing_wizard_panel.py
new file mode 100644
index 00000000..642586e4
--- /dev/null
+++ b/sahara_dashboard/enabled/_1815_data_processing_wizard_panel.py
@@ -0,0 +1,24 @@
+# Licensed under the Apache License, Version 2.0 (the "License");
+# you may not use this file except in compliance with the License.
+# You may obtain a copy of the License at
+#
+# http://www.apache.org/licenses/LICENSE-2.0
+#
+# Unless required by applicable law or agreed to in writing, software
+# distributed under the License is distributed on an "AS IS" BASIS,
+# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
+# See the License for the specific language governing permissions and
+# limitations under the License.
+
+
+# The slug of the panel to be added to HORIZON_CONFIG. Required.
+PANEL = 'data_processing.wizard'
+# The slug of the dashboard the PANEL associated with. Required.
+PANEL_DASHBOARD = 'project'
+# The slug of the panel group the PANEL is associated with.
+PANEL_GROUP = 'data_processing'
+
+# Python panel class of the PANEL to be added.
+ADD_PANEL = \
+ ('sahara_dashboard.'
+ 'content.data_processing.wizard.panel.WizardPanel')
diff --git a/sahara_dashboard/enabled/_1820_data_processing_clusters_panel.py b/sahara_dashboard/enabled/_1820_data_processing_clusters_panel.py
new file mode 100644
index 00000000..c0e4b56a
--- /dev/null
+++ b/sahara_dashboard/enabled/_1820_data_processing_clusters_panel.py
@@ -0,0 +1,31 @@
+# Licensed under the Apache License, Version 2.0 (the "License");
+# you may not use this file except in compliance with the License.
+# You may obtain a copy of the License at
+#
+# http://www.apache.org/licenses/LICENSE-2.0
+#
+# Unless required by applicable law or agreed to in writing, software
+# distributed under the License is distributed on an "AS IS" BASIS,
+# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
+# See the License for the specific language governing permissions and
+# limitations under the License.
+
+
+# The slug of the panel to be added to HORIZON_CONFIG. Required.
+PANEL = 'data_processing.clusters'
+# The slug of the dashboard the PANEL associated with. Required.
+PANEL_DASHBOARD = 'project'
+# The slug of the panel group the PANEL is associated with.
+PANEL_GROUP = 'data_processing'
+
+# Python panel class of the PANEL to be added.
+ADD_PANEL = \
+ ('sahara_dashboard.'
+ 'content.data_processing.clusters.panel.ClustersPanel')
+
+ADD_INSTALLED_APPS = \
+ ["sahara_dashboard.content.data_processing", ]
+
+ADD_JS_FILES = [
+ 'dashboard/project/data_processing/data_processing.event_log.js'
+]
diff --git a/sahara_dashboard/enabled/_1825_data_processing_job_executions_panel.py b/sahara_dashboard/enabled/_1825_data_processing_job_executions_panel.py
new file mode 100644
index 00000000..04cb5de3
--- /dev/null
+++ b/sahara_dashboard/enabled/_1825_data_processing_job_executions_panel.py
@@ -0,0 +1,24 @@
+# Licensed under the Apache License, Version 2.0 (the "License");
+# you may not use this file except in compliance with the License.
+# You may obtain a copy of the License at
+#
+# http://www.apache.org/licenses/LICENSE-2.0
+#
+# Unless required by applicable law or agreed to in writing, software
+# distributed under the License is distributed on an "AS IS" BASIS,
+# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
+# See the License for the specific language governing permissions and
+# limitations under the License.
+
+
+# The slug of the panel to be added to HORIZON_CONFIG. Required.
+PANEL = 'data_processing.job_executions'
+# The slug of the dashboard the PANEL associated with. Required.
+PANEL_DASHBOARD = 'project'
+# The slug of the panel group the PANEL is associated with.
+PANEL_GROUP = 'data_processing'
+
+# Python panel class of the PANEL to be added.
+ADD_PANEL = \
+ ('sahara_dashboard.'
+ 'content.data_processing.job_executions.panel.JobExecutionsPanel')
diff --git a/sahara_dashboard/enabled/_1830_data_processing_cluster_templates_panel.py b/sahara_dashboard/enabled/_1830_data_processing_cluster_templates_panel.py
new file mode 100644
index 00000000..a8877d0b
--- /dev/null
+++ b/sahara_dashboard/enabled/_1830_data_processing_cluster_templates_panel.py
@@ -0,0 +1,24 @@
+# Licensed under the Apache License, Version 2.0 (the "License");
+# you may not use this file except in compliance with the License.
+# You may obtain a copy of the License at
+#
+# http://www.apache.org/licenses/LICENSE-2.0
+#
+# Unless required by applicable law or agreed to in writing, software
+# distributed under the License is distributed on an "AS IS" BASIS,
+# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
+# See the License for the specific language governing permissions and
+# limitations under the License.
+
+
+# The slug of the panel to be added to HORIZON_CONFIG. Required.
+PANEL = 'data_processing.cluster_templates'
+# The slug of the dashboard the PANEL associated with. Required.
+PANEL_DASHBOARD = 'project'
+# The slug of the panel group the PANEL is associated with.
+PANEL_GROUP = 'data_processing'
+
+# Python panel class of the PANEL to be added.
+ADD_PANEL = \
+ ('sahara_dashboard.'
+ 'content.data_processing.cluster_templates.panel.ClusterTemplatesPanel')
diff --git a/sahara_dashboard/enabled/_1835_data_processing_nodegroup_templates_panel.py b/sahara_dashboard/enabled/_1835_data_processing_nodegroup_templates_panel.py
new file mode 100644
index 00000000..ae14fbf4
--- /dev/null
+++ b/sahara_dashboard/enabled/_1835_data_processing_nodegroup_templates_panel.py
@@ -0,0 +1,25 @@
+# Licensed under the Apache License, Version 2.0 (the "License");
+# you may not use this file except in compliance with the License.
+# You may obtain a copy of the License at
+#
+# http://www.apache.org/licenses/LICENSE-2.0
+#
+# Unless required by applicable law or agreed to in writing, software
+# distributed under the License is distributed on an "AS IS" BASIS,
+# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
+# See the License for the specific language governing permissions and
+# limitations under the License.
+
+
+# The slug of the panel to be added to HORIZON_CONFIG. Required.
+PANEL = 'data_processing.nodegroup_templates'
+# The slug of the dashboard the PANEL associated with. Required.
+PANEL_DASHBOARD = 'project'
+# The slug of the panel group the PANEL is associated with.
+PANEL_GROUP = 'data_processing'
+
+# Python panel class of the PANEL to be added.
+ADD_PANEL = \
+ ('sahara_dashboard.'
+ 'content.data_processing.nodegroup_templates.panel.'
+ 'NodegroupTemplatesPanel')
diff --git a/sahara_dashboard/enabled/_1840_data_processing_jobs_panel.py b/sahara_dashboard/enabled/_1840_data_processing_jobs_panel.py
new file mode 100644
index 00000000..7ec47bfb
--- /dev/null
+++ b/sahara_dashboard/enabled/_1840_data_processing_jobs_panel.py
@@ -0,0 +1,27 @@
+# Licensed under the Apache License, Version 2.0 (the "License");
+# you may not use this file except in compliance with the License.
+# You may obtain a copy of the License at
+#
+# http://www.apache.org/licenses/LICENSE-2.0
+#
+# Unless required by applicable law or agreed to in writing, software
+# distributed under the License is distributed on an "AS IS" BASIS,
+# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
+# See the License for the specific language governing permissions and
+# limitations under the License.
+
+
+# The slug of the panel to be added to HORIZON_CONFIG. Required.
+PANEL = 'data_processing.jobs'
+# The slug of the dashboard the PANEL associated with. Required.
+PANEL_DASHBOARD = 'project'
+# The slug of the panel group the PANEL is associated with.
+PANEL_GROUP = 'data_processing'
+
+# Python panel class of the PANEL to be added.
+ADD_PANEL = ('sahara_dashboard.content.data_processing.jobs.panel.JobsPanel')
+
+ADD_JS_FILES = [
+ 'dashboard/project/data_processing/'
+ 'data_processing.job_interface_arguments.js'
+]
diff --git a/sahara_dashboard/enabled/_1845_data_processing_job_binaries_panel.py b/sahara_dashboard/enabled/_1845_data_processing_job_binaries_panel.py
new file mode 100644
index 00000000..d330c62b
--- /dev/null
+++ b/sahara_dashboard/enabled/_1845_data_processing_job_binaries_panel.py
@@ -0,0 +1,24 @@
+# Licensed under the Apache License, Version 2.0 (the "License");
+# you may not use this file except in compliance with the License.
+# You may obtain a copy of the License at
+#
+# http://www.apache.org/licenses/LICENSE-2.0
+#
+# Unless required by applicable law or agreed to in writing, software
+# distributed under the License is distributed on an "AS IS" BASIS,
+# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
+# See the License for the specific language governing permissions and
+# limitations under the License.
+
+
+# The slug of the panel to be added to HORIZON_CONFIG. Required.
+PANEL = 'data_processing.job_binaries'
+# The slug of the dashboard the PANEL associated with. Required.
+PANEL_DASHBOARD = 'project'
+# The slug of the panel group the PANEL is associated with.
+PANEL_GROUP = 'data_processing'
+
+# Python panel class of the PANEL to be added.
+ADD_PANEL = \
+ ('sahara_dashboard.'
+ 'content.data_processing.job_binaries.panel.JobBinariesPanel')
diff --git a/sahara_dashboard/enabled/_1850_data_processing_data_sources_panel.py b/sahara_dashboard/enabled/_1850_data_processing_data_sources_panel.py
new file mode 100644
index 00000000..66476c26
--- /dev/null
+++ b/sahara_dashboard/enabled/_1850_data_processing_data_sources_panel.py
@@ -0,0 +1,24 @@
+# Licensed under the Apache License, Version 2.0 (the "License");
+# you may not use this file except in compliance with the License.
+# You may obtain a copy of the License at
+#
+# http://www.apache.org/licenses/LICENSE-2.0
+#
+# Unless required by applicable law or agreed to in writing, software
+# distributed under the License is distributed on an "AS IS" BASIS,
+# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
+# See the License for the specific language governing permissions and
+# limitations under the License.
+
+
+# The slug of the panel to be added to HORIZON_CONFIG. Required.
+PANEL = 'data_processing.data_sources'
+# The slug of the dashboard the PANEL associated with. Required.
+PANEL_DASHBOARD = 'project'
+# The slug of the panel group the PANEL is associated with.
+PANEL_GROUP = 'data_processing'
+
+# Python panel class of the PANEL to be added.
+ADD_PANEL = \
+ ('sahara_dashboard.'
+ 'content.data_processing.data_sources.panel.DataSourcesPanel')
diff --git a/sahara_dashboard/enabled/_1855_data_processing_data_image_registry_panel.py b/sahara_dashboard/enabled/_1855_data_processing_data_image_registry_panel.py
new file mode 100644
index 00000000..d375523d
--- /dev/null
+++ b/sahara_dashboard/enabled/_1855_data_processing_data_image_registry_panel.py
@@ -0,0 +1,24 @@
+# Licensed under the Apache License, Version 2.0 (the "License");
+# you may not use this file except in compliance with the License.
+# You may obtain a copy of the License at
+#
+# http://www.apache.org/licenses/LICENSE-2.0
+#
+# Unless required by applicable law or agreed to in writing, software
+# distributed under the License is distributed on an "AS IS" BASIS,
+# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
+# See the License for the specific language governing permissions and
+# limitations under the License.
+
+
+# The slug of the panel to be added to HORIZON_CONFIG. Required.
+PANEL = 'data_processing.data_image_registry'
+# The slug of the dashboard the PANEL associated with. Required.
+PANEL_DASHBOARD = 'project'
+# The slug of the panel group the PANEL is associated with.
+PANEL_GROUP = 'data_processing'
+
+# Python panel class of the PANEL to be added.
+ADD_PANEL = \
+ ('sahara_dashboard.'
+ 'content.data_processing.data_image_registry.panel.ImageRegistryPanel')
diff --git a/sahara_dashboard/enabled/_1860_data_processing_data_plugins_panel.py b/sahara_dashboard/enabled/_1860_data_processing_data_plugins_panel.py
new file mode 100644
index 00000000..45773c3a
--- /dev/null
+++ b/sahara_dashboard/enabled/_1860_data_processing_data_plugins_panel.py
@@ -0,0 +1,24 @@
+# Licensed under the Apache License, Version 2.0 (the "License");
+# you may not use this file except in compliance with the License.
+# You may obtain a copy of the License at
+#
+# http://www.apache.org/licenses/LICENSE-2.0
+#
+# Unless required by applicable law or agreed to in writing, software
+# distributed under the License is distributed on an "AS IS" BASIS,
+# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
+# See the License for the specific language governing permissions and
+# limitations under the License.
+
+
+# The slug of the panel to be added to HORIZON_CONFIG. Required.
+PANEL = 'data_processing.data_plugins'
+# The slug of the dashboard the PANEL associated with. Required.
+PANEL_DASHBOARD = 'project'
+# The slug of the panel group the PANEL is associated with.
+PANEL_GROUP = 'data_processing'
+
+# Python panel class of the PANEL to be added.
+ADD_PANEL = \
+ ('sahara_dashboard.'
+ 'content.data_processing.data_plugins.panel.PluginsPanel')
diff --git a/sahara_dashboard/enabled/__init__.py b/sahara_dashboard/enabled/__init__.py
new file mode 100644
index 00000000..e69de29b
diff --git a/sahara_dashboard/test/__init__.py b/sahara_dashboard/test/__init__.py
new file mode 100644
index 00000000..e69de29b
diff --git a/sahara_dashboard/test/api_tests/__init__.py b/sahara_dashboard/test/api_tests/__init__.py
new file mode 100644
index 00000000..e69de29b
diff --git a/sahara_dashboard/test/api_tests/sahara_tests.py b/sahara_dashboard/test/api_tests/sahara_tests.py
new file mode 100644
index 00000000..9a1c75bc
--- /dev/null
+++ b/sahara_dashboard/test/api_tests/sahara_tests.py
@@ -0,0 +1,48 @@
+# Copyright 2015, Telles Nobrega
+#
+# Licensed under the Apache License, Version 2.0 (the "License");
+# you may not use this file except in compliance with the License.
+# You may obtain a copy of the License at
+#
+# http://www.apache.org/licenses/LICENSE-2.0
+#
+# Unless required by applicable law or agreed to in writing, software
+# distributed under the License is distributed on an "AS IS" BASIS,
+# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
+# See the License for the specific language governing permissions and
+# limitations under the License.
+
+from sahara_dashboard import api
+from sahara_dashboard.test import helpers as test
+
+
+class SaharaApiTest(test.SaharaAPITestCase):
+ #
+ # Cluster
+ #
+ def test_cluster_create_count(self):
+ saharaclient = self.stub_saharaclient()
+ saharaclient.clusters = self.mox.CreateMockAnything()
+ saharaclient.clusters.create(anti_affinity=None,
+ cluster_configs=None,
+ cluster_template_id=None,
+ count=2,
+ use_autoconfig=None,
+ default_image_id=None,
+ description=None,
+ hadoop_version='1.0.0',
+ is_transient=None,
+ name='name',
+ net_id=None,
+ node_groups=None,
+ plugin_name='fake_plugin',
+ user_keypair_id=None) \
+ .AndReturn({"Clusters": ['cluster1', 'cluster2']})
+ self.mox.ReplayAll()
+ ret_val = api.sahara.cluster_create(self.request,
+ 'name',
+ 'fake_plugin',
+ '1.0.0',
+ count=2)
+
+ self.assertEqual(2, len(ret_val['Clusters']))
diff --git a/sahara_dashboard/test/helpers.py b/sahara_dashboard/test/helpers.py
new file mode 100644
index 00000000..abc017bf
--- /dev/null
+++ b/sahara_dashboard/test/helpers.py
@@ -0,0 +1,57 @@
+#
+# Licensed under the Apache License, Version 2.0 (the "License"); you may
+# not use this file except in compliance with the License. You may obtain
+# a copy of the License at
+#
+# http://www.apache.org/licenses/LICENSE-2.0
+#
+# Unless required by applicable law or agreed to in writing, software
+# distributed under the License is distributed on an "AS IS" BASIS, WITHOUT
+# WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. See the
+# License for the specific language governing permissions and limitations
+# under the License.
+
+from saharaclient import client as sahara_client
+
+from openstack_dashboard.test import helpers
+
+from sahara_dashboard import api
+from sahara_dashboard.test.test_data import utils
+
+
+def create_stubs(stubs_to_create=None):
+    return helpers.create_stubs(stubs_to_create or {})
+
+
+class SaharaTestsMixin(object):
+ def _setup_test_data(self):
+ super(SaharaTestsMixin, self)._setup_test_data()
+ utils.load_test_data(self)
+
+
+class TestCase(SaharaTestsMixin, helpers.TestCase):
+ pass
+
+
+class BaseAdminViewTests(SaharaTestsMixin, helpers.TestCase):
+ pass
+
+
+class SaharaAPITestCase(helpers.APITestCase):
+
+ def setUp(self):
+ super(SaharaAPITestCase, self).setUp()
+
+ self._original_saharaclient = api.sahara.client
+ api.sahara.client = lambda request: self.stub_saharaclient()
+
+ def tearDown(self):
+ super(SaharaAPITestCase, self).tearDown()
+
+ api.sahara.client = self._original_saharaclient
+
+ def stub_saharaclient(self):
+ if not hasattr(self, "saharaclient"):
+ self.mox.StubOutWithMock(sahara_client, 'Client')
+ self.saharaclient = self.mox.CreateMock(sahara_client.Client)
+ return self.saharaclient
diff --git a/sahara-dashboard/test/settings.py b/sahara_dashboard/test/settings.py
similarity index 97%
rename from sahara-dashboard/test/settings.py
rename to sahara_dashboard/test/settings.py
index 28dc23da..0a08eee2 100644
--- a/sahara-dashboard/test/settings.py
+++ b/sahara_dashboard/test/settings.py
@@ -33,7 +33,7 @@ STATIC_URL = '/static/'
SECRET_KEY = secret_key.generate_or_read_from_file(
os.path.join(TEST_DIR, '.secret_key_store'))
-ROOT_URLCONF = 'sahara-dashboard.test.urls'
+ROOT_URLCONF = 'sahara_dashboard.test.urls'
TEMPLATE_DIRS = (
os.path.join(TEST_DIR, 'templates'),
)
@@ -80,7 +80,7 @@ from openstack_dashboard.utils import settings
dashboard_module_names = [
'openstack_dashboard.enabled',
'openstack_dashboard.local.enabled',
- 'sahara-dashboard.enabled',
+ 'sahara_dashboard.enabled',
]
dashboard_modules = []
# All dashboards must be enabled for the namespace to get registered, which is
@@ -149,7 +149,7 @@ LOGGING['loggers']['selenium'] = {
'propagate': False,
}
-LOGGING['loggers']['sahara-dashboard'] = {
+LOGGING['loggers']['sahara_dashboard'] = {
'handlers': ['test'],
'propagate': False,
}
diff --git a/sahara_dashboard/test/test_data/__init__.py b/sahara_dashboard/test/test_data/__init__.py
new file mode 100644
index 00000000..e69de29b
diff --git a/sahara_dashboard/test/test_data/keystone_data.py b/sahara_dashboard/test/test_data/keystone_data.py
new file mode 100644
index 00000000..0ad5761e
--- /dev/null
+++ b/sahara_dashboard/test/test_data/keystone_data.py
@@ -0,0 +1,26 @@
+# Licensed under the Apache License, Version 2.0 (the "License"); you may
+# not use this file except in compliance with the License. You may obtain
+# a copy of the License at
+#
+# http://www.apache.org/licenses/LICENSE-2.0
+#
+# Unless required by applicable law or agreed to in writing, software
+# distributed under the License is distributed on an "AS IS" BASIS, WITHOUT
+# WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. See the
+# License for the specific language governing permissions and limitations
+# under the License.
+
+
+def data(TEST):
+
+ # Add sahara to the keystone data
+ TEST.service_catalog.append(
+ {"type": "data-processing",
+ "name": "Sahara",
+ "endpoints_links": [],
+ "endpoints": [
+ {"region": "RegionOne",
+ "adminURL": "http://admin.sahara.example.com:8386/v1.1",
+ "publicURL": "http://public.sahara.example.com:8386/v1.1",
+ "internalURL": "http://int.sahara.example.com:8386/v1.1"}]}
+ )
diff --git a/sahara_dashboard/test/test_data/sahara_data.py b/sahara_dashboard/test/test_data/sahara_data.py
new file mode 100644
index 00000000..2a870e57
--- /dev/null
+++ b/sahara_dashboard/test/test_data/sahara_data.py
@@ -0,0 +1,598 @@
+# Licensed under the Apache License, Version 2.0 (the "License"); you may
+# not use this file except in compliance with the License. You may obtain
+# a copy of the License at
+#
+# http://www.apache.org/licenses/LICENSE-2.0
+#
+# Unless required by applicable law or agreed to in writing, software
+# distributed under the License is distributed on an "AS IS" BASIS, WITHOUT
+# WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. See the
+# License for the specific language governing permissions and limitations
+# under the License.
+
+import copy
+
+from openstack_dashboard.test.test_data import utils
+
+from saharaclient.api import cluster_templates
+from saharaclient.api import clusters
+from saharaclient.api import data_sources
+from saharaclient.api import job_binaries
+from saharaclient.api import job_executions
+from saharaclient.api import job_types
+from saharaclient.api import jobs
+from saharaclient.api import node_group_templates
+from saharaclient.api import plugins
+
+
+def data(TEST):
+ TEST.plugins = utils.TestDataContainer()
+ TEST.plugins_configs = utils.TestDataContainer()
+ TEST.nodegroup_templates = utils.TestDataContainer()
+ TEST.cluster_templates = utils.TestDataContainer()
+ TEST.clusters = utils.TestDataContainer()
+ TEST.data_sources = utils.TestDataContainer()
+ TEST.job_binaries = utils.TestDataContainer()
+ TEST.jobs = utils.TestDataContainer()
+ TEST.job_executions = utils.TestDataContainer()
+ TEST.registered_images = copy.copy(TEST.images)
+ TEST.job_types = utils.TestDataContainer()
+
+ plugin1_dict = {
+ "description": "vanilla plugin",
+ "name": "vanilla",
+ "title": "Vanilla Apache Hadoop",
+ "versions": ["2.3.0", "1.2.1"]
+ }
+
+ plugin1 = plugins.Plugin(plugins.PluginManager(None), plugin1_dict)
+
+ TEST.plugins.add(plugin1)
+
+ plugin_config1_dict = {
+ "node_processes": {
+ "HDFS": [
+ "namenode",
+ "datanode",
+ "secondarynamenode"
+ ],
+ "MapReduce": [
+ "tasktracker",
+ "jobtracker"
+ ]
+ },
+ "description": "This plugin provides an ability to launch vanilla "
+ "Apache Hadoop cluster without any management "
+ "consoles.",
+ "versions": [
+ "1.2.1"
+ ],
+ "required_image_tags": [
+ "vanilla",
+ "1.2.1"
+ ],
+ "configs": [
+ {
+ "default_value": "/tmp/hadoop-${user.name}",
+ "name": "hadoop.tmp.dir",
+ "priority": 2,
+ "config_type": "string",
+ "applicable_target": "HDFS",
+ "is_optional": True,
+ "scope": "node",
+ "description": "A base for other temporary directories."
+ },
+ {
+ "default_value": True,
+ "name": "hadoop.native.lib",
+ "priority": 2,
+ "config_type": "bool",
+ "applicable_target": "HDFS",
+ "is_optional": True,
+ "scope": "node",
+ "description": "Should native hadoop libraries, if present, "
+ "be used."
+ },
+ ],
+ "title": "Vanilla Apache Hadoop",
+ "name": "vanilla"
+ }
+
+ TEST.plugins_configs.add(plugins.Plugin(plugins.PluginManager(None),
+ plugin_config1_dict))
+
+ # Nodegroup_Templates.
+ ngt1_dict = {
+ "created_at": "2014-06-04 14:01:03.701243",
+ "description": None,
+ "flavor_id": "aaaaaaaa-aaaa-aaaa-aaaa-aaaaaaaaaaaa",
+ "availability_zone": None,
+ "floating_ip_pool": None,
+ "auto_security_group": True,
+ "hadoop_version": "1.2.1",
+ "id": "c166dfcc-9cc7-4b48-adc9-f0946169bb36",
+ "image_id": None,
+ "name": "sample-template",
+ "node_configs": {},
+ "node_processes": [
+ "namenode",
+ "jobtracker",
+ "secondarynamenode",
+ "hiveserver",
+ "oozie"
+ ],
+ "plugin_name": "vanilla",
+ "tenant_id": "429ad8447c2d47bc8e0382d244e1d1df",
+ "updated_at": None,
+ "volume_mount_prefix": "/volumes/disk",
+ "volumes_per_node": 0,
+ "volumes_size": 0,
+ "volume_type": None,
+ "volume_local_to_instance": False,
+ "security_groups": [],
+ "volumes_availability_zone": None,
+ "is_proxy_gateway": False,
+ "use_autoconfig": True,
+ }
+
+ ngt1 = node_group_templates.NodeGroupTemplate(
+ node_group_templates.NodeGroupTemplateManager(None), ngt1_dict)
+
+ TEST.nodegroup_templates.add(ngt1)
+
+ # Cluster_templates.
+ ct1_dict = {
+ "anti_affinity": [],
+ "cluster_configs": {},
+ "created_at": "2014-06-04 14:01:06.460711",
+ "default_image_id": None,
+ "description": "Sample description",
+ "hadoop_version": "1.2.1",
+ "id": "a2c3743f-31a2-4919-8d02-792138a87a98",
+ "name": "sample-cluster-template",
+ "neutron_management_network": None,
+ "use_autoconfig": True,
+ "node_groups": [
+ {
+ "count": 1,
+ "created_at": "2014-06-04 14:01:06.462512",
+ "flavor_id": "aaaaaaaa-aaaa-aaaa-aaaa-aaaaaaaaaaaa",
+ "floating_ip_pool": None,
+ "image_id": None,
+ "name": "master",
+ "node_configs": {},
+ "node_group_template_id": "c166dfcc-9cc7-4b48-adc9",
+ "node_processes": [
+ "namenode",
+ "jobtracker",
+ "secondarynamenode",
+ "hiveserver",
+ "oozie"
+ ],
+ "updated_at": None,
+ "volume_mount_prefix": "/volumes/disk",
+ "volumes_per_node": 0,
+ "volumes_size": 0,
+ "volume_type": None,
+ "volume_local_to_instance": False,
+ "volumes_availability_zone": None,
+ "use_autoconfig": True,
+ "is_proxy_gateway": False,
+ },
+ {
+ "count": 2,
+ "created_at": "2014-06-04 14:01:06.463214",
+ "flavor_id": "aaaaaaaa-aaaa-aaaa-aaaa-aaaaaaaaaaaa",
+ "floating_ip_pool": None,
+ "image_id": None,
+ "name": "workers",
+ "node_configs": {},
+ "node_group_template_id": "4eb5504c-94c9-4049-a440",
+ "node_processes": [
+ "datanode",
+ "tasktracker"
+ ],
+ "updated_at": None,
+ "volume_mount_prefix": "/volumes/disk",
+ "volumes_per_node": 0,
+ "volumes_size": 0,
+ "volume_type": None,
+ "volume_local_to_instance": False,
+ "volumes_availability_zone": None,
+ "use_autoconfig": True,
+ "is_proxy_gateway": False
+ }
+ ],
+ "plugin_name": "vanilla",
+ "tenant_id": "429ad8447c2d47bc8e0382d244e1d1df",
+ "updated_at": None
+ }
+
+ ct1 = cluster_templates.ClusterTemplate(
+ cluster_templates.ClusterTemplateManager(None), ct1_dict)
+ TEST.cluster_templates.add(ct1)
+
+ # Clusters.
+ cluster1_dict = {
+ "anti_affinity": [],
+ "cluster_configs": {},
+ "cluster_template_id": "a2c3743f-31a2-4919-8d02-792138a87a98",
+ "created_at": "2014-06-04 20:02:14.051328",
+ "default_image_id": "9eb4643c-dca8-4ea7-92d2-b773f88a8dc6",
+ "description": "",
+ "hadoop_version": "1.2.1",
+ "id": "ec9a0d28-5cfb-4028-a0b5-40afe23f1533",
+ "info": {},
+ "is_transient": False,
+ "management_public_key": "fakekey",
+ "name": "cercluster",
+ "neutron_management_network": None,
+ "use_autoconfig": True,
+ "node_groups": [
+ {
+ "count": 1,
+ "created_at": "2014-06-04 20:02:14.053153",
+ "flavor_id": "0",
+ "floating_ip_pool": None,
+ "image_id": None,
+ "instances": [
+ {
+ "created_at": "2014-06-04 20:02:14.834529",
+ "id": "c3b8004b-7063-4b99-a082-820cdc6e961c",
+ "instance_id": "a45f5495-4a10-4f17-8fae",
+ "instance_name": "cercluster-master-001",
+ "internal_ip": None,
+ "management_ip": None,
+ "updated_at": None,
+ "volumes": []
+ }
+ ],
+ "name": "master",
+ "node_configs": {},
+ "node_group_template_id": "c166dfcc-9cc7-4b48-adc9",
+ "node_processes": [
+ "namenode",
+ "jobtracker",
+ "secondarynamenode",
+ "hiveserver",
+ "oozie"
+ ],
+ "updated_at": "2014-06-04 20:02:14.841760",
+ "volume_mount_prefix": "/volumes/disk",
+ "volumes_per_node": 0,
+ "volumes_size": 0,
+ "volume_type": None,
+ "volume_local_to_instance": False,
+ "security_groups": [],
+ "volumes_availability_zone": None,
+ "id": "ng1",
+ "use_autoconfig": True,
+ "is_proxy_gateway": False
+ },
+ {
+ "count": 2,
+ "created_at": "2014-06-04 20:02:14.053849",
+ "flavor_id": "0",
+ "floating_ip_pool": None,
+ "image_id": None,
+ "instances": [
+ {
+ "created_at": "2014-06-04 20:02:15.097655",
+ "id": "6a8ae0b1-bb28-4de2-bfbb-bdd3fd2d72b2",
+ "instance_id": "38bf8168-fb30-483f-8d52",
+ "instance_name": "cercluster-workers-001",
+ "internal_ip": None,
+ "management_ip": None,
+ "updated_at": None,
+ "volumes": []
+ },
+ {
+ "created_at": "2014-06-04 20:02:15.344515",
+ "id": "17b98ed3-a776-467a-90cf-9f46a841790b",
+ "instance_id": "85606938-8e53-46a5-a50b",
+ "instance_name": "cercluster-workers-002",
+ "internal_ip": None,
+ "management_ip": None,
+ "updated_at": None,
+ "volumes": []
+ }
+ ],
+ "name": "workers",
+ "node_configs": {},
+ "node_group_template_id": "4eb5504c-94c9-4049-a440",
+ "node_processes": [
+ "datanode",
+ "tasktracker"
+ ],
+ "updated_at": "2014-06-04 20:02:15.355745",
+ "volume_mount_prefix": "/volumes/disk",
+ "volumes_per_node": 0,
+ "volumes_size": 0,
+ "volume_type": None,
+ "volume_local_to_instance": False,
+ "security_groups": ["b7857890-09bf-4ee0-a0d5-322d7a6978bf"],
+ "volumes_availability_zone": None,
+ "id": "ng2",
+ "use_autoconfig": True,
+ "is_proxy_gateway": False
+ }
+ ],
+ "plugin_name": "vanilla",
+ "status": "Active",
+ "status_description": "",
+ "tenant_id": "429ad8447c2d47bc8e0382d244e1d1df",
+ "trust_id": None,
+ "updated_at": "2014-06-04 20:02:15.446087",
+ "user_keypair_id": "stackboxkp"
+ }
+
+ cluster1 = clusters.Cluster(
+ clusters.ClusterManager(None), cluster1_dict)
+ TEST.clusters.add(cluster1)
+
+ cluster2_dict = copy.deepcopy(cluster1_dict)
+ cluster2_dict.update({
+ "id": "cl2",
+ "name": "cl2_name",
+ "provision_progress": [
+ {
+ "created_at": "2015-03-27T15:51:54",
+ "updated_at": "2015-03-27T15:59:34",
+ "step_name": "first_step",
+ "step_type": "some_type",
+ "successful": True,
+ "events": [],
+ "total": 3
+ },
+ {
+ "created_at": "2015-03-27T16:01:54",
+ "updated_at": "2015-03-27T16:10:22",
+ "step_name": "second_step",
+ "step_type": "some_other_type",
+ "successful": None,
+ "events": [
+ {
+ "id": "evt1",
+ "created_at": "2015-03-27T16:01:22",
+ "node_group_id": "ng1",
+ "instance_name": "cercluster-master-001",
+ "successful": True,
+ "event_info": None
+ },
+ {
+ "id": "evt2",
+ "created_at": "2015-03-27T16:04:51",
+ "node_group_id": "ng2",
+ "instance_name": "cercluster-workers-001",
+ "successful": True,
+ "event_info": None
+ }
+ ],
+ "total": 3
+ }
+ ]
+ })
+
+ cluster2 = clusters.Cluster(
+ clusters.ClusterManager(None), cluster2_dict)
+ TEST.clusters.add(cluster2)
+
+ # Data Sources.
+ data_source1_dict = {
+ "created_at": "2014-06-04 14:01:10.371562",
+ "description": "sample output",
+ "id": "426fb01c-5c7e-472d-bba2-b1f0fe7e0ede",
+ "name": "sampleOutput",
+ "tenant_id": "429ad8447c2d47bc8e0382d244e1d1df",
+ "type": "swift",
+ "updated_at": None,
+ "url": "swift://example.sahara/output"
+ }
+
+ data_source2_dict = {
+ "created_at": "2014-06-05 15:01:12.331361",
+ "description": "second sample output",
+ "id": "ab3413-adfb-bba2-123456785675",
+ "name": "sampleOutput2",
+ "tenant_id": "429ad8447c2d47bc8e0382d244e1d1df",
+ "type": "hdfs",
+ "updated_at": None,
+ "url": "hdfs://example.sahara/output"
+ }
+
+ data_source1 = data_sources.DataSources(
+ data_sources.DataSourceManager(None), data_source1_dict)
+ data_source2 = data_sources.DataSources(
+ data_sources.DataSourceManager(None), data_source2_dict)
+ TEST.data_sources.add(data_source1)
+ TEST.data_sources.add(data_source2)
+
+ # Job Binaries.
+ job_binary1_dict = {
+ "created_at": "2014-06-05 18:15:15.581285",
+ "description": "",
+ "id": "3f3a07ac-7d6f-49e8-8669-40b25ee891b7",
+ "name": "example.pig",
+ "tenant_id": "429ad8447c2d47bc8e0382d244e1d1df",
+ "updated_at": None,
+ "url": "internal-db://80121dea-f8bd-4ad3-bcc7-096f4bfc722d"
+ }
+
+ job_binary2_dict = {
+ "created_at": "2014-10-10 13:12:15.583631",
+ "description": "Test for spaces in name",
+ "id": "abcdef56-1234-abcd-abcd-defabcdaedcb",
+ "name": "example with spaces.pig",
+ "tenant_id": "429ad8447c2d47bc8e0382d244e1d1df",
+ "updated_at": None,
+ "url": "internal-db://abcdef56-1234-abcd-abcd-defabcdaedcb"
+ }
+
+ job_binary1 = job_binaries.JobBinaries(
+ job_binaries.JobBinariesManager(None), job_binary1_dict)
+ job_binary2 = job_binaries.JobBinaries(
+ job_binaries.JobBinariesManager(None), job_binary2_dict)
+
+ TEST.job_binaries.add(job_binary1)
+ TEST.job_binaries.add(job_binary2)
+
+ # Jobs.
+ job1_dict = {
+ "created_at": "2014-06-05 19:23:59.637165",
+ "description": "",
+ "id": "a077b851-46be-4ad7-93c3-2d83894546ef",
+ "libs": [
+ {
+ "created_at": "2014-06-05 19:23:42.742057",
+ "description": "",
+ "id": "ab140807-59f8-4235-b4f2-e03daf946256",
+ "name": "udf.jar",
+ "tenant_id": "429ad8447c2d47bc8e0382d244e1d1df",
+ "updated_at": None,
+ "url": "internal-db://d186e2bb-df93-47eb-8c0e-ce21e7ecb78b"
+ }
+ ],
+ "mains": [
+ {
+ "created_at": "2014-06-05 18:15:15.581285",
+ "description": "",
+ "id": "3f3a07ac-7d6f-49e8-8669-40b25ee891b7",
+ "name": "example.pig",
+ "tenant_id": "429ad8447c2d47bc8e0382d244e1d1df",
+ "updated_at": None,
+ "url": "internal-db://80121dea-f8bd-4ad3-bcc7-096f4bfc722d"
+ }
+ ],
+ "interface": [],
+ "name": "pigjob",
+ "tenant_id": "429ad8447c2d47bc8e0382d244e1d1df",
+ "type": "Pig",
+ "updated_at": None,
+ "job_config": {"configs": {}}
+ }
+
+ job1 = jobs.Job(jobs.JobsManager(None), job1_dict)
+ TEST.jobs.add(job1)
+
+ # Job Executions.
+ jobex1_dict = {
+ "cluster_id": "ec9a0d28-5cfb-4028-a0b5-40afe23f1533",
+ "created_at": "2014-06-05 20:03:06.195937",
+ "end_time": None,
+ "id": "4b6c1cbf-c713-49d3-8025-808a87c514a6",
+ "info": {
+ "acl": None,
+ "actions": [
+ {
+ "consoleUrl": "-",
+ "cred": "None",
+ "data": None,
+ "endTime": "Thu,05 Jun 2014 20:03:32 GMT",
+ "errorCode": None,
+ "errorMessage": None,
+ "externalChildIDs": None,
+ "externalId": "-",
+ "externalStatus": "OK",
+ "id": "0000000-140604200538581-oozie-hado-W@:start:",
+ "name": ":start:",
+ "retries": 0,
+ "startTime": "Thu,05 Jun 2014 20:03:32 GMT",
+ "stats": None,
+ "status": "OK",
+ "toString": "Action name[:start:] status[OK]",
+ "trackerUri": "-",
+ "transition": "job-node",
+ "type": ":START:"
+ },
+ {
+ "consoleUrl": "fake://console.url",
+ "cred": "None",
+ "data": None,
+ "endTime": None,
+ "errorCode": None,
+ "errorMessage": None,
+ "externalChildIDs": None,
+ "externalId": "job_201406042004_0001",
+ "externalStatus": "RUNNING",
+ "id": "0000000-140604200538581-oozie-hado-W@job-node",
+ "name": "job-node",
+ "retries": 0,
+ "startTime": "Thu,05 Jun 2014 20:03:33 GMT",
+ "stats": None,
+ "status": "RUNNING",
+ "toString": "Action name[job-node] status[RUNNING]",
+ "trackerUri": "cercluster-master-001:8021",
+ "transition": None,
+ "type": "pig"
+ }
+ ],
+ "appName": "job-wf",
+ "appPath": "hdfs://fakepath/workflow.xml",
+ "conf": "fakeconfig",
+ "consoleUrl": "fake://consoleURL",
+ "createdTime": "Thu,05 Jun 2014 20:03:32 GMT",
+ "endTime": None,
+ "externalId": None,
+ "group": None,
+ "id": "0000000-140604200538581-oozie-hado-W",
+ "lastModTime": "Thu,05 Jun 2014 20:03:35 GMT",
+ "parentId": None,
+ "run": 0,
+ "startTime": "Thu,05 Jun 2014 20:03:32 GMT",
+ "status": "RUNNING",
+ "toString": "Workflow ...status[RUNNING]",
+ "user": "hadoop"
+ },
+ "input_id": "85884883-3083-49eb-b442-71dd3734d02c",
+ "job_configs": {
+ "args": [],
+ "configs": {},
+ "params": {}
+ },
+ "interface": {},
+ "job_id": "a077b851-46be-4ad7-93c3-2d83894546ef",
+ "oozie_job_id": "0000000-140604200538581-oozie-hado-W",
+ "output_id": "426fb01c-5c7e-472d-bba2-b1f0fe7e0ede",
+ "progress": None,
+ "return_code": None,
+ "start_time": "2014-06-05T16:03:32",
+ "tenant_id": "429ad8447c2d47bc8e0382d244e1d1df",
+ "updated_at": "2014-06-05 20:03:46.438248",
+ "cluster_name_set": True,
+ "job_name_set": True,
+ "cluster_name": "cluster-1",
+ "job_name": "job-1",
+ "data_source_urls": {
+ "85884883-3083-49eb-b442-71dd3734d02c": "swift://a.sahara/input",
+ "426fb01c-5c7e-472d-bba2-b1f0fe7e0ede": "hdfs://a.sahara/output"
+ }
+ }
+
+ jobex1 = job_executions.JobExecution(
+ job_executions.JobExecutionsManager(None), jobex1_dict)
+ TEST.job_executions.add(jobex1)
+
+ augmented_image = TEST.registered_images.first()
+ augmented_image.tags = {}
+ augmented_image.username = 'myusername'
+ augmented_image.description = 'mydescription'
+
+ job_type1_dict = {
+ "name": "Pig",
+ "plugins": [
+ {
+ "description": "Fake description",
+ "versions": {
+ "2.6.0": {
+ },
+ "1.2.1": {
+ }
+ },
+ "name": "vanilla",
+ "title": "Vanilla Apache Hadoop"
+ },
+ ]
+ }
+ job_types1 = job_types.JobType(
+ job_types.JobTypesManager(None), job_type1_dict)
+ TEST.job_types.add(job_types1)
diff --git a/sahara_dashboard/test/test_data/utils.py b/sahara_dashboard/test/test_data/utils.py
new file mode 100644
index 00000000..e92e61af
--- /dev/null
+++ b/sahara_dashboard/test/test_data/utils.py
@@ -0,0 +1,53 @@
+#
+# Licensed under the Apache License, Version 2.0 (the "License"); you may
+# not use this file except in compliance with the License. You may obtain
+# a copy of the License at
+#
+# http://www.apache.org/licenses/LICENSE-2.0
+#
+# Unless required by applicable law or agreed to in writing, software
+# distributed under the License is distributed on an "AS IS" BASIS, WITHOUT
+# WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. See the
+# License for the specific language governing permissions and limitations
+# under the License.
+
+from openstack_dashboard.test.test_data import utils
+
+
+def load_test_data(load_onto=None):
+ from openstack_dashboard.test.test_data import ceilometer_data
+ from openstack_dashboard.test.test_data import cinder_data
+ from openstack_dashboard.test.test_data import exceptions
+ from openstack_dashboard.test.test_data import glance_data
+ from openstack_dashboard.test.test_data import heat_data
+ from openstack_dashboard.test.test_data import keystone_data
+ from openstack_dashboard.test.test_data import neutron_data
+ from openstack_dashboard.test.test_data import nova_data
+ from openstack_dashboard.test.test_data import swift_data
+ from openstack_dashboard.test.test_data import trove_data
+
+ from sahara_dashboard.test.test_data import keystone_data \
+ as sahara_keystone_data
+ from sahara_dashboard.test.test_data import sahara_data
+
+ # The order of these loaders matters, some depend on others.
+ loaders = (
+ exceptions.data,
+ keystone_data.data,
+ glance_data.data,
+ nova_data.data,
+ cinder_data.data,
+ neutron_data.data,
+ swift_data.data,
+ heat_data.data,
+ ceilometer_data.data,
+ trove_data.data,
+ sahara_data.data,
+ sahara_keystone_data.data,
+ )
+ if load_onto:
+ for data_func in loaders:
+ data_func(load_onto)
+ return load_onto
+ else:
+ return utils.TestData(*loaders)
diff --git a/sahara_dashboard/test/urls.py b/sahara_dashboard/test/urls.py
new file mode 100644
index 00000000..9bef20ff
--- /dev/null
+++ b/sahara_dashboard/test/urls.py
@@ -0,0 +1,20 @@
+#
+# Licensed under the Apache License, Version 2.0 (the "License"); you may
+# not use this file except in compliance with the License. You may obtain
+# a copy of the License at
+#
+# http://www.apache.org/licenses/LICENSE-2.0
+#
+# Unless required by applicable law or agreed to in writing, software
+# distributed under the License is distributed on an "AS IS" BASIS, WITHOUT
+# WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. See the
+# License for the specific language governing permissions and limitations
+# under the License.
+
+from django.conf import urls
+import openstack_dashboard.urls
+
+urlpatterns = urls.patterns(
+ '',
+ urls.url(r'', urls.include(openstack_dashboard.urls))
+)
diff --git a/setup.cfg b/setup.cfg
index e62b72ce..d4714403 100644
--- a/setup.cfg
+++ b/setup.cfg
@@ -22,7 +22,7 @@ classifier =
[files]
packages =
- sahara-dashboard
+ sahara_dashboard
[build_sphinx]
source-dir = doc/source
@@ -33,15 +33,15 @@ all_files = 1
upload-dir = doc/build/html
[compile_catalog]
-directory = sahara-dashboard/locale
+directory = sahara_dashboard/locale
domain = sahara-dashboard
[update_catalog]
-domain = manila-ui
-output_dir = manila_ui/locale
-input_file = manila_ui/locale/manila-ui.pot
+domain = sahara-dashboard
+output_dir = sahara_dashboard/locale
+input_file = sahara_dashboard/locale/sahara-dashboard.pot
[extract_messages]
keywords = _ gettext ngettext l_ lazy_gettext
mapping_file = babel.cfg
-output_file = manila_ui/locale/manila-ui.pot
+output_file = sahara_dashboard/locale/sahara-dashboard.pot
diff --git a/tools/clean_enabled_files.py b/tools/clean_enabled_files.py
new file mode 100644
index 00000000..25d384c9
--- /dev/null
+++ b/tools/clean_enabled_files.py
@@ -0,0 +1,45 @@
+# Licensed under the Apache License, Version 2.0 (the "License"); you may
+# not use this file except in compliance with the License. You may obtain
+# a copy of the License at
+#
+# http://www.apache.org/licenses/LICENSE-2.0
+#
+# Unless required by applicable law or agreed to in writing, software
+# distributed under the License is distributed on an "AS IS" BASIS, WITHOUT
+# WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. See the
+# License for the specific language governing permissions and limitations
+# under the License.
+
+# This file is temporarily needed to allow the conversion from integrated
+# Sahara content in Horizon to plugin based content. Horizon currently defines
+# the same module name data_processing and imports it by default. This utility
+# removes the configuration files that are responsible for importing the old
+# version of the module. Only Sahara content configuration files are affected
+# in Horizon.
+
+import os
+
+from openstack_dashboard import enabled as local_enabled
+
+from sahara_dashboard import enabled
+
+ROOT = os.path.dirname(os.path.dirname(os.path.realpath(__file__)))
+WITH_VENV = os.path.join(ROOT, 'tools', 'with_venv.sh')
+
+def main():
+ src_path = os.path.dirname(enabled.__file__)
+ dest_path = os.path.dirname(local_enabled.__file__)
+
+ src_files = os.listdir(src_path)
+    for fname in src_files:
+        # skip the __init__.py or bad things happen
+        if fname == "__init__.py":
+            continue
+
+        file_path = os.path.join(dest_path, fname)
+ if os.path.isfile(file_path):
+            print("removing %s" % file_path)
+ os.remove(file_path)
+
+if __name__ == '__main__':
+ main()
diff --git a/tools/install_venv.py b/tools/install_venv.py
new file mode 100644
index 00000000..8550e2c6
--- /dev/null
+++ b/tools/install_venv.py
@@ -0,0 +1,154 @@
+# Copyright 2012 United States Government as represented by the
+# Administrator of the National Aeronautics and Space Administration.
+# All Rights Reserved.
+#
+# Copyright 2012 OpenStack, LLC
+#
+# Copyright 2012 Nebula, Inc.
+#
+# Licensed under the Apache License, Version 2.0 (the "License"); you may
+# not use this file except in compliance with the License. You may obtain
+# a copy of the License at
+#
+# http://www.apache.org/licenses/LICENSE-2.0
+#
+# Unless required by applicable law or agreed to in writing, software
+# distributed under the License is distributed on an "AS IS" BASIS, WITHOUT
+# WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. See the
+# License for the specific language governing permissions and limitations
+# under the License.
+
+"""
+Installation script for the OpenStack Dashboard development virtualenv.
+"""
+
+import os
+import subprocess
+import sys
+
+
+ROOT = os.path.dirname(os.path.dirname(os.path.realpath(__file__)))
+VENV = os.path.join(ROOT, '.venv')
+WITH_VENV = os.path.join(ROOT, 'tools', 'with_venv.sh')
+PIP_REQUIRES = os.path.join(ROOT, 'requirements.txt')
+TEST_REQUIRES = os.path.join(ROOT, 'test-requirements.txt')
+
+
+def die(message, *args):
+ print >> sys.stderr, message % args
+ sys.exit(1)
+
+
+def run_command(cmd, redirect_output=True, check_exit_code=True, cwd=ROOT,
+ die_message=None):
+ """
+ Runs a command in an out-of-process shell, returning the
+ output of that command. Working directory is ROOT.
+ """
+ if redirect_output:
+ stdout = subprocess.PIPE
+ else:
+ stdout = None
+
+ proc = subprocess.Popen(cmd, cwd=cwd, stdout=stdout)
+ output = proc.communicate()[0]
+ if check_exit_code and proc.returncode != 0:
+ if die_message is None:
+ die('Command "%s" failed.\n%s', ' '.join(cmd), output)
+ else:
+ die(die_message)
+ return output
+
+
+HAS_EASY_INSTALL = bool(run_command(['which', 'easy_install'],
+ check_exit_code=False).strip())
+HAS_VIRTUALENV = bool(run_command(['which', 'virtualenv'],
+ check_exit_code=False).strip())
+
+
+def check_dependencies():
+ """Make sure virtualenv is in the path."""
+
+ print 'Checking dependencies...'
+ if not HAS_VIRTUALENV:
+ print 'Virtual environment not found.'
+ # Try installing it via easy_install...
+ if HAS_EASY_INSTALL:
+ print 'Installing virtualenv via easy_install...',
+ run_command(['easy_install', 'virtualenv'],
+ die_message='easy_install failed to install virtualenv'
+ '\ndevelopment requires virtualenv, please'
+ ' install it using your favorite tool')
+ if not run_command(['which', 'virtualenv']):
+ die('ERROR: virtualenv not found in path.\n\ndevelopment '
+ ' requires virtualenv, please install it using your'
+ ' favorite package management tool and ensure'
+ ' virtualenv is in your path')
+ print 'virtualenv installation done.'
+ else:
+ die('easy_install not found.\n\nInstall easy_install'
+ ' (python-setuptools in ubuntu) or virtualenv by hand,'
+ ' then rerun.')
+ print 'dependency check done.'
+
+
+def create_virtualenv(venv=VENV):
+ """Creates the virtual environment and installs PIP only into the
+ virtual environment
+ """
+ print 'Creating venv...',
+ run_command(['virtualenv', '-q', '--no-site-packages', VENV])
+ print 'done.'
+ print 'Installing pip in virtualenv...',
+ if not run_command([WITH_VENV, 'easy_install', 'pip']).strip():
+ die("Failed to install pip.")
+ print 'done.'
+ print 'Installing distribute in virtualenv...'
+ pip_install('distribute>=0.6.24')
+ print 'done.'
+
+
+def pip_install(*args):
+ args = [WITH_VENV, 'pip', 'install', '--upgrade'] + list(args)
+ run_command(args, redirect_output=False)
+
+
+def install_dependencies(venv=VENV):
+ print "Installing dependencies..."
+ print "(This may take several minutes, don't panic)"
+ pip_install('-r', TEST_REQUIRES)
+ pip_install('-r', PIP_REQUIRES)
+
+ # Tell the virtual env how to "import dashboard"
+ py = 'python%d.%d' % (sys.version_info[0], sys.version_info[1])
+ pthfile = os.path.join(venv, "lib", py, "site-packages", "dashboard.pth")
+ f = open(pthfile, 'w')
+ f.write("%s\n" % ROOT)
+
+
+def install_horizon():
+ print 'Installing horizon module in development mode...'
+ run_command([WITH_VENV, 'python', 'setup.py', 'develop'], cwd=ROOT)
+
+
+def print_summary():
+ summary = """
+Horizon development environment setup is complete.
+
+To activate the virtualenv for the extent of your current shell session you
+can run:
+
+$ source .venv/bin/activate
+"""
+ print summary
+
+
+def main():
+ check_dependencies()
+ create_virtualenv()
+ install_dependencies()
+ install_horizon()
+ print_summary()
+
+if __name__ == '__main__':
+ main()
diff --git a/tools/with_venv.sh b/tools/with_venv.sh
new file mode 100755
index 00000000..7303990b
--- /dev/null
+++ b/tools/with_venv.sh
@@ -0,0 +1,7 @@
+#!/bin/bash
+TOOLS_PATH=${TOOLS_PATH:-$(dirname $0)}
+VENV_PATH=${VENV_PATH:-${TOOLS_PATH}}
+VENV_DIR=${VENV_NAME:-/../.venv}
+TOOLS=${TOOLS_PATH}
+VENV=${VENV:-${VENV_PATH}/${VENV_DIR}}
+source ${VENV}/bin/activate && "$@"
diff --git a/tox.ini b/tox.ini
index 61f61517..1234f8f8 100644
--- a/tox.ini
+++ b/tox.ini
@@ -13,7 +13,7 @@ deps = -r{toxinidir}/requirements.txt
commands = /bin/bash run_tests.sh -N --no-pep8 {posargs}
[testenv:py27]
-setenv = DJANGO_SETTINGS_MODULE=manila_ui.test.settings
+setenv = DJANGO_SETTINGS_MODULE=sahara_dashboard.test.settings
[testenv:pep8]
commands = flake8