Make surrogate deployment history tasks respect filtering
Previously, when deployment task history was filtered, the entire difference between the tasks graph snapshot and the filter result was returned as surrogate tasks with no node and no run name. With this change, surrogate tasks are generated only for tasks that match the requested filters. The deployment task history is now saved for dry runs as well. Change-Id: I39a3341230a00aa53fa3a4cba31ee0aacb0ec2ae Closes-Bug: #1590872
This commit is contained in:
parent
106f253f73
commit
09a475ad8f
|
@ -13,9 +13,8 @@
|
|||
# WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. See the
|
||||
# License for the specific language governing permissions and limitations
|
||||
# under the License.
|
||||
import copy
|
||||
from datetime import datetime
|
||||
|
||||
from datetime import datetime
|
||||
import six
|
||||
|
||||
from nailgun.consts import HISTORY_TASK_STATUSES
|
||||
|
@ -152,65 +151,78 @@ class DeploymentHistoryCollection(NailgunCollection):
|
|||
:returns: tasks history
|
||||
:rtype: list[dict]
|
||||
"""
|
||||
query = cls.filter_by(None, task_id=transaction.id)
|
||||
if nodes_ids:
|
||||
query = query.filter(cls.single.model.node_id.in_(nodes_ids))
|
||||
if statuses:
|
||||
query = query.filter(cls.single.model.status.in_(statuses))
|
||||
if tasks_names:
|
||||
query = query.filter(
|
||||
cls.single.model.deployment_graph_task_name.in_(tasks_names))
|
||||
|
||||
history = copy.deepcopy(cls.to_list(query))
|
||||
|
||||
# rename task id to conventional field
|
||||
for record in history:
|
||||
record['task_name'] = record.pop(
|
||||
'deployment_graph_task_name', None)
|
||||
nodes_ids = nodes_ids and frozenset(nodes_ids)
|
||||
statuses = statuses and frozenset(statuses)
|
||||
tasks_names = tasks_names and frozenset(tasks_names)
|
||||
|
||||
task_parameters_by_name = {}
|
||||
visited_tasks = set()
|
||||
tasks_snapshot = Transaction.get_tasks_snapshot(transaction)
|
||||
history = []
|
||||
|
||||
if tasks_snapshot:
|
||||
task_parameters_by_name = {}
|
||||
task_present_in_history_by_name = {}
|
||||
|
||||
for task in tasks_snapshot:
|
||||
# make a copy for each task to avoid modification
|
||||
for task in six.moves.map(dict, tasks_snapshot):
|
||||
# remove ambiguous id field
|
||||
task.pop('id', None)
|
||||
task_parameters_by_name[task['task_name']] = task
|
||||
task_present_in_history_by_name[task['task_name']] = False
|
||||
|
||||
for history_record in history:
|
||||
task_name = history_record['task_name']
|
||||
try:
|
||||
task_parameters = task_parameters_by_name[task_name]
|
||||
task_present_in_history_by_name[task_name] = True
|
||||
history_record.update(task_parameters)
|
||||
except KeyError:
|
||||
logger.warning(
|
||||
'Definition of "{0}" task is not found'.format(
|
||||
task_name))
|
||||
|
||||
# Make surrogate history records for tasks that were not launched
|
||||
# to provide their parameters
|
||||
absent_tasks = [
|
||||
k for k, v
|
||||
in six.iteritems(task_present_in_history_by_name)
|
||||
if not v
|
||||
]
|
||||
for task_name in absent_tasks:
|
||||
history_record = {
|
||||
'task_name': task_name,
|
||||
'node_id': None,
|
||||
'status': 'skipped',
|
||||
'time_start': None,
|
||||
'time_end': None
|
||||
}
|
||||
history_record.update(task_parameters_by_name[task_name])
|
||||
history.append(history_record)
|
||||
|
||||
else:
|
||||
logger.warning('No tasks snapshot is defined in given '
|
||||
'transaction, probably it is a legacy '
|
||||
'(Fuel<10.0) or malformed.')
|
||||
history_records = cls.filter_by(None, task_id=transaction.id)
|
||||
if tasks_names:
|
||||
history_records = cls.filter_by_list(
|
||||
history_records, 'deployment_graph_task_name', tasks_names
|
||||
)
|
||||
if nodes_ids:
|
||||
history_records = cls.filter_by_list(
|
||||
history_records, 'node_id', nodes_ids
|
||||
)
|
||||
if statuses and HISTORY_TASK_STATUSES.skipped not in statuses:
|
||||
history_records = cls.filter_by_list(
|
||||
history_records, 'status', statuses
|
||||
)
|
||||
|
||||
for history_record in history_records:
|
||||
task_name = history_record.deployment_graph_task_name
|
||||
visited_tasks.add(task_name)
|
||||
|
||||
# the visited tasks should be calculated, it is
|
||||
# reason why the query filter cannot be used here
|
||||
if statuses and history_record.status not in statuses:
|
||||
continue
|
||||
|
||||
record = cls.single.to_dict(history_record)
|
||||
history.append(record)
|
||||
# remove ambiguous field
|
||||
record['task_name'] = record.pop('deployment_graph_task_name')
|
||||
|
||||
if task_parameters_by_name:
|
||||
try:
|
||||
record.update(task_parameters_by_name[task_name])
|
||||
except KeyError:
|
||||
logger.warning(
|
||||
'Definition of "{0}" task is not found'
|
||||
.format(task_name)
|
||||
)
|
||||
|
||||
# calculates absent tasks respecting filter
|
||||
if (not nodes_ids and (
|
||||
not statuses or HISTORY_TASK_STATUSES.skipped in statuses)):
|
||||
|
||||
for task_name in task_parameters_by_name:
|
||||
if tasks_names and task_name not in tasks_names:
|
||||
continue
|
||||
if task_name in visited_tasks:
|
||||
continue
|
||||
|
||||
history.append(dict(
|
||||
task_parameters_by_name[task_name],
|
||||
task_name=task_name,
|
||||
node_id='-',
|
||||
status=HISTORY_TASK_STATUSES.skipped,
|
||||
time_start=None,
|
||||
time_end=None,
|
||||
))
|
||||
return history
|
||||
|
|
|
@ -141,7 +141,7 @@ class BaseDeploymentTask(object):
|
|||
try:
|
||||
args = getattr(cls, method)(transaction, **kwargs)
|
||||
# save tasks history
|
||||
if 'tasks_graph' in args and not args.get('dry_run', False):
|
||||
if 'tasks_graph' in args:
|
||||
logger.info("tasks history saving is started.")
|
||||
objects.DeploymentHistoryCollection.create(
|
||||
transaction, args['tasks_graph']
|
||||
|
|
|
@ -134,24 +134,13 @@ class TestDeploymentHistoryHandlers(BaseIntegrationTest):
|
|||
'time_start': None,
|
||||
'time_end': None,
|
||||
'custom': {}
|
||||
} for node in cluster.nodes] + [{
|
||||
'task_name': 'test2',
|
||||
'roles': '*',
|
||||
'parameters': {'param1': 'value1'},
|
||||
'requires': ['pre_deployment_end'],
|
||||
'version': '2.1.0',
|
||||
'type': 'puppet',
|
||||
'status': 'skipped',
|
||||
'time_start': None,
|
||||
'time_end': None,
|
||||
'node_id': None
|
||||
}],
|
||||
} for node in cluster.nodes],
|
||||
response.json_body
|
||||
)
|
||||
|
||||
@mock_rpc()
|
||||
@mock.patch('objects.Cluster.get_deployment_tasks')
|
||||
def test_history_task_not_found_returns_surrogate_tasks(self, tasks_mock):
|
||||
def test_unexisting_task_filter_returning_nothing(self, tasks_mock):
|
||||
tasks_mock.return_value = self.test_tasks
|
||||
|
||||
cluster = self.env.create(**self.cluster_parameters)
|
||||
|
@ -173,19 +162,8 @@ class TestDeploymentHistoryHandlers(BaseIntegrationTest):
|
|||
headers=self.default_headers
|
||||
)
|
||||
self.assertEqual(200, response.status_code)
|
||||
self.assertItemsEqual(
|
||||
[{
|
||||
'task_name': task_name,
|
||||
'roles': '*',
|
||||
'parameters': {'param1': 'value1'},
|
||||
'requires': ['pre_deployment_end'],
|
||||
'version': '2.1.0',
|
||||
'type': 'puppet',
|
||||
'status': 'skipped',
|
||||
'time_start': None,
|
||||
'time_end': None,
|
||||
'node_id': None
|
||||
} for task_name in ['test1', 'test2']],
|
||||
self.assertEqual(
|
||||
[],
|
||||
response.json_body
|
||||
)
|
||||
|
||||
|
|
|
@ -0,0 +1,158 @@
|
|||
# -*- coding: utf-8 -*-
|
||||
|
||||
# Copyright 2016 Mirantis, Inc.
|
||||
#
|
||||
# Licensed under the Apache License, Version 2.0 (the "License"); you may
|
||||
# not use this file except in compliance with the License. You may obtain
|
||||
# a copy of the License at
|
||||
#
|
||||
# http://www.apache.org/licenses/LICENSE-2.0
|
||||
#
|
||||
# Unless required by applicable law or agreed to in writing, software
|
||||
# distributed under the License is distributed on an "AS IS" BASIS, WITHOUT
|
||||
# WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. See the
|
||||
# License for the specific language governing permissions and limitations
|
||||
# under the License.
|
||||
|
||||
from nailgun import consts
|
||||
from nailgun import objects
|
||||
|
||||
from nailgun.test import base
|
||||
|
||||
|
||||
class TestDeploymentTasksHistory(base.BaseTestCase):
|
||||
tasks = [
|
||||
{'id': 'task11', 'task_name': 'task11',
|
||||
'parameters': {'name': 'task11'}},
|
||||
{'id': 'task12', 'task_name': 'task12',
|
||||
'parameters': {'name': 'task12'}},
|
||||
{'id': 'task21', 'task_name': 'task21',
|
||||
'parameters': {'name': 'task21'}},
|
||||
{'id': 'task22', 'task_name': 'task22',
|
||||
'parameters': {'name': 'task22'}},
|
||||
{'id': 'task31', 'task_name': 'task31',
|
||||
'parameters': {'name': 'task31'}},
|
||||
]
|
||||
|
||||
def setUp(self):
|
||||
super(TestDeploymentTasksHistory, self).setUp()
|
||||
self.maxDiff = None
|
||||
|
||||
self.transaction = objects.Transaction.create(
|
||||
{'status': consts.TASK_STATUSES.ready,
|
||||
'tasks_snapshot': self.tasks}
|
||||
)
|
||||
objects.DeploymentHistoryCollection.create(
|
||||
self.transaction, {
|
||||
'0': self.tasks[:2],
|
||||
'2': self.tasks[2:4],
|
||||
}
|
||||
)
|
||||
|
||||
def test_get_all_tasks(self):
|
||||
history = objects.DeploymentHistoryCollection.get_history(
|
||||
self.transaction
|
||||
)
|
||||
expected = []
|
||||
for n in (0, 2):
|
||||
for task in self.tasks[n: n + 2]:
|
||||
expected.append({
|
||||
'task_name': task['task_name'],
|
||||
'parameters': task['parameters'],
|
||||
'node_id': str(n),
|
||||
'status': consts.HISTORY_TASK_STATUSES.pending,
|
||||
'time_start': None,
|
||||
'time_end': None,
|
||||
'custom': {}
|
||||
})
|
||||
|
||||
expected.append({
|
||||
'task_name': self.tasks[-1]['task_name'],
|
||||
'parameters': self.tasks[-1]['parameters'],
|
||||
'node_id': '-',
|
||||
'status': consts.HISTORY_TASK_STATUSES.skipped,
|
||||
'time_start': None,
|
||||
'time_end': None,
|
||||
})
|
||||
|
||||
self.assertItemsEqual(expected, history)
|
||||
|
||||
def test_get_tasks_by_node(self):
|
||||
history = objects.DeploymentHistoryCollection.get_history(
|
||||
self.transaction, nodes_ids=['0']
|
||||
)
|
||||
expected = [
|
||||
{
|
||||
'task_name': task['task_name'],
|
||||
'parameters': task['parameters'],
|
||||
'node_id': '0',
|
||||
'status': consts.HISTORY_TASK_STATUSES.pending,
|
||||
'time_start': None,
|
||||
'time_end': None,
|
||||
'custom': {}
|
||||
}
|
||||
for task in self.tasks[0: 2]
|
||||
]
|
||||
|
||||
self.assertItemsEqual(expected, history)
|
||||
|
||||
def test_get_tasks_by_status_pending(self):
|
||||
history = objects.DeploymentHistoryCollection.get_history(
|
||||
self.transaction, statuses=[consts.HISTORY_TASK_STATUSES.pending]
|
||||
)
|
||||
expected = [
|
||||
{
|
||||
'task_name': task['task_name'],
|
||||
'parameters': task['parameters'],
|
||||
'node_id': str(n),
|
||||
'status': consts.HISTORY_TASK_STATUSES.pending,
|
||||
'time_start': None,
|
||||
'time_end': None,
|
||||
'custom': {}
|
||||
}
|
||||
for n in [0, 2]
|
||||
for task in self.tasks[n: n + 2]
|
||||
]
|
||||
|
||||
self.assertItemsEqual(expected, history)
|
||||
|
||||
def test_get_tasks_by_status_skipped(self):
|
||||
history = objects.DeploymentHistoryCollection.get_history(
|
||||
self.transaction, statuses=[consts.HISTORY_TASK_STATUSES.skipped]
|
||||
)
|
||||
expected = [{
|
||||
'task_name': self.tasks[-1]['task_name'],
|
||||
'parameters': self.tasks[-1]['parameters'],
|
||||
'node_id': '-',
|
||||
'status': consts.HISTORY_TASK_STATUSES.skipped,
|
||||
'time_start': None,
|
||||
'time_end': None,
|
||||
}]
|
||||
|
||||
self.assertItemsEqual(expected, history)
|
||||
|
||||
def test_get_tasks_by_name(self):
|
||||
history = objects.DeploymentHistoryCollection.get_history(
|
||||
self.transaction, tasks_names=['task31', 'task11']
|
||||
)
|
||||
expected = [
|
||||
{
|
||||
'task_name': 'task11',
|
||||
'parameters': {'name': 'task11'},
|
||||
'node_id': '0',
|
||||
'status': consts.HISTORY_TASK_STATUSES.pending,
|
||||
'time_start': None,
|
||||
'time_end': None,
|
||||
'custom': {}
|
||||
},
|
||||
{
|
||||
'task_name': 'task31',
|
||||
'parameters': {'name': 'task31'},
|
||||
'node_id': '-',
|
||||
'status': consts.HISTORY_TASK_STATUSES.skipped,
|
||||
'time_start': None,
|
||||
'time_end': None,
|
||||
}
|
||||
]
|
||||
|
||||
self.assertItemsEqual(expected, history)
|
Loading…
Reference in New Issue