Optimize algorithm that retrieves CI results from Gerrit

The new algorithm iterates over drivers and branches and polls the
corresponding reviews from Gerrit. Reviews are scanned until a vote or a
comment matching the driver's CI pattern is found.
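In outline (a condensed sketch; names follow the updated processor code in
this commit, with memcached wiring elided):

    branches = ['stable/' + r['id'].lower() for r in releases] + ['master']
    for driver in drivers.values():
        if 'ci' not in driver:          # only drivers that declare a CI
            continue
        for branch in branches:
            reviews = rcs_inst.log(project=driver['project_id'],
                                   branch=branch,
                                   reviewer=driver['ci']['id'])
            result = find_ci_result(reviews, driver['ci'])
            if result:
                release = _get_release_by_branch(releases, branch)
                driver['releases'][release] = result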

Also:
 * CI results are merged into drivers during retrieval; drivers are stored
   in memcached as part of default-data
 * Enforced capitalized release ids (e.g. "Icehouse")
 * Removed unused code from the dashboard

Closes bug 1319293
Closes bug 1318051

Change-Id: Id8893deb1fcb7d206830678c2aefe6f5e5751c71
Ilya Shakhat 2014-05-14 18:45:29 +04:00
parent 7fe83f00a5
commit d61ed36d50
12 changed files with 292 additions and 602 deletions

View File

@@ -1,154 +0,0 @@
# Copyright (c) 2014 Mirantis Inc.
#
# Licensed under the Apache License, Version 2.0 (the "License");
# you may not use this file except in compliance with the License.
# You may obtain a copy of the License at
#
# http://www.apache.org/licenses/LICENSE-2.0
#
# Unless required by applicable law or agreed to in writing, software
# distributed under the License is distributed on an "AS IS" BASIS,
# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or
# implied.
# See the License for the specific language governing permissions and
# limitations under the License.
import six
MEMORY_STORAGE_CACHED = 0
class MemoryStorage(object):
def __init__(self):
pass
class CachedMemoryStorage(MemoryStorage):
def __init__(self):
super(CachedMemoryStorage, self).__init__()
# common indexes
self.records = {}
self.primary_key_index = {}
self.record_types_index = {}
self.module_index = {}
self.user_id_index = {}
self.company_index = {}
self.release_index = {}
self.blueprint_id_index = {}
self.company_name_mapping = {}
self.indexes = {
'primary_key': self.primary_key_index,
'record_type': self.record_types_index,
'company_name': self.company_index,
'module': self.module_index,
'user_id': self.user_id_index,
'release': self.release_index,
}
def _save_record(self, record):
if record.get('company_name') == '*robots':
return
self.records[record['record_id']] = record
for key, index in six.iteritems(self.indexes):
self._add_to_index(index, record, key)
for bp_id in (record.get('blueprint_id') or []):
if bp_id in self.blueprint_id_index:
self.blueprint_id_index[bp_id].add(record['record_id'])
else:
self.blueprint_id_index[bp_id] = set([record['record_id']])
def update(self, records):
have_updates = False
for record in records:
have_updates = True
record_id = record['record_id']
if record_id in self.records:
# remove existing record from indexes
self._remove_record_from_index(self.records[record_id])
self._save_record(record)
if have_updates:
self.company_name_mapping = dict(
(c.lower(), c) for c in self.company_index.keys())
return have_updates
def _remove_record_from_index(self, record):
for key, index in six.iteritems(self.indexes):
index[record[key]].remove(record['record_id'])
def _add_to_index(self, record_index, record, key):
record_key = record[key]
if record_key in record_index:
record_index[record_key].add(record['record_id'])
else:
record_index[record_key] = set([record['record_id']])
def _get_record_ids_from_index(self, items, index):
record_ids = set()
for item in items:
if item in index:
record_ids |= index[item]
return record_ids
def get_record_ids_by_modules(self, modules):
return self._get_record_ids_from_index(modules, self.module_index)
def get_record_ids_by_companies(self, companies):
return self._get_record_ids_from_index(
map(self.get_original_company_name, companies),
self.company_index)
def get_record_ids_by_user_ids(self, launchpad_ids):
return self._get_record_ids_from_index(launchpad_ids,
self.user_id_index)
def get_record_ids_by_releases(self, releases):
return self._get_record_ids_from_index(releases, self.release_index)
def get_record_ids_by_blueprint_ids(self, blueprint_ids):
return self._get_record_ids_from_index(blueprint_ids,
self.blueprint_id_index)
def get_record_ids(self):
return self.records.keys()
def get_record_ids_by_type(self, record_type):
return self.record_types_index.get(record_type, set())
def get_records(self, record_ids):
for i in record_ids:
yield self.records[i]
def get_record_by_primary_key(self, primary_key):
if primary_key in self.primary_key_index:
record_id = list(self.primary_key_index[primary_key])
if record_id:
return self.records[record_id[0]]
return None
def get_original_company_name(self, company_name):
normalized = company_name.lower()
if normalized not in self.company_name_mapping:
return normalized
return self.company_name_mapping[normalized]
def get_companies(self):
return self.company_index.keys()
def get_modules(self):
return self.module_index.keys()
def get_user_ids(self):
return self.user_id_index.keys()
def get_memory_storage(memory_storage_type):
if memory_storage_type == MEMORY_STORAGE_CACHED:
return CachedMemoryStorage()
else:
raise Exception('Unknown memory storage type %s' % memory_storage_type)

View File

@@ -197,18 +197,24 @@ function show_summary(base_url) {
tableData[i].driver_info += "<div>" + tableData[i].description + "</div>";
}
tableData[i].in_trunk = "";
var releases_list = [];
for (var j = 0; j < tableData[i].releases_info.length; j++) {
tableData[i].in_trunk += "<a href=\"" + tableData[i].releases_info[j].wiki + "\" target=\"_blank\">" +
tableData[i].releases_info[j].name + "</a> ";
releases_list.push("<a href=\"" + tableData[i].releases_info[j].wiki + "\" target=\"_blank\">" +
tableData[i].releases_info[j].name + "</a>");
}
tableData[i].in_trunk = releases_list.join(" ");
tableData[i].ci_tested = "";
if (tableData[i].os_versions_map["master"]) {
var master = tableData[i].os_versions_map["master"];
if (master.review_url) {
tableData[i].ci_tested = "<a href=\"" + master.review_url +
"\" target=\"_blank\" title=\"Click for details\"><span style=\"color: #008000\">&#x2714;</span></a>";
if (tableData[i].ci) {
if (tableData[i].releases_info.length > 0) {
var last_release = tableData[i].releases_info[tableData[i].releases_info.length - 1].release_id;
var master = tableData[i].releases[last_release];
if (master.review_url) {
tableData[i].ci_tested = "<a href=\"" + master.review_url +
"\" target=\"_blank\" title=\"Click for details\"><span style=\"color: #008000\">&#x2714;</span></a>";
} else {
tableData[i].ci_tested = "<span style=\"color: #808080\">&#x2714;</span>";
}
} else {
tableData[i].ci_tested = "<span style=\"color: #808080\">&#x2714;</span>";
}
@@ -216,21 +222,22 @@ function show_summary(base_url) {
tableData[i].ci_tested = "<span style=\"color: darkred\">&#x2716;</span>";
}
tableData[i].maintainers_info = "";
var maintainers_list = [];
if (tableData[i].maintainers) {
for (j = 0; j < tableData[i].maintainers.length; j++) {
var maintainer = tableData[i].maintainers[j];
var mn = maintainer.name;
if (maintainer.launchpad_id) {
tableData[i].maintainers_info = "<a href=\"http://stackalytics.com/?user_id=" +
maintainer.launchpad_id + "\" target=\"_blank\">" + mn + "</a>";
maintainers_list.push("<a href=\"http://stackalytics.com/?user_id=" +
maintainer.launchpad_id + "\" target=\"_blank\">" + mn + "</a>");
}
else if (maintainer.irc) {
tableData[i].maintainers_info = "<a href=\"irc:" + maintainer.irc + "\">" + mn + "</a>";
maintainers_list.push("<a href=\"irc:" + maintainer.irc + "\">" + mn + "</a>");
} else {
tableData[i].maintainers_info = mn;
maintainers_list.push(mn);
}
}
tableData[i].maintainers_info = maintainers_list.join(", ");
} else {
tableData[i].maintainers_info = "";
}

View File

@@ -18,7 +18,6 @@ import re
import flask
import memcache
from driverlog.dashboard import memory_storage
from driverlog.openstack.common import log as logging
@@ -35,19 +34,14 @@ def _build_projects_map(default_data):
def _build_releases_map(default_data):
releases_map = {}
for release in default_data['releases']:
releases_map[release['id']] = release
releases_map[release['id'].lower()] = release
return releases_map
def _extend_drivers_info():
for driver in get_vault()['drivers_map'].values():
releases_info = []
for release in driver['os_versions_map'].keys():
release = release.lower()
if release.find('/') > 0:
release = release.split('/')[1]
if release == 'master':
release = get_vault()['default_data']['releases'][-1]['id']
for release in driver['releases'].keys():
if release in get_vault()['releases_map']:
releases_info.append(
{
@@ -61,35 +55,20 @@ def _extend_drivers_info():
if 'email' in driver['maintainer']:
del driver['maintainer']['email']
def _build_drivers_map(default_data, projects_map):
driver_map = {}
for driver in default_data['drivers']:
driver['project_name'] = projects_map[driver['project_id']]['name']
key = (driver['project_id'].lower(),
driver['vendor'].lower(),
driver['name'].lower())
driver_map[key] = driver
return driver_map
driver['project_name'] = (get_vault()['projects_map']
[driver['project_id']]['name'])
def get_vault():
vault = getattr(flask.current_app, 'driverlog_vault', None)
if not vault:
try:
vault = {}
vault['memory_storage'] = memory_storage.get_memory_storage(
memory_storage.MEMORY_STORAGE_CACHED)
if 'CONF' not in flask.current_app.config:
LOG.critical('Configure environment variable DRIVERLOG_CONF '
'with path to config file')
flask.abort(500)
vault = {}
conf = flask.current_app.config['CONF']
MEMCACHED_URI_PREFIX = r'^memcached:\/\/'
@@ -110,37 +89,21 @@ def get_vault():
flask.request.driverlog_updated = True
memcached = vault['memcached']
hashes = memcached.get_multi(['default_data_hash', 'update_hash'],
key_prefix='driverlog:')
update_time = memcached.get('driverlog:update_time')
if vault.get('default_data_hash') != hashes.get('default_data_hash'):
vault['default_data_hash'] = hashes['default_data_hash']
vault['default_data'] = memcached.get('driverlog:default_data')
if vault.get('update_time') != update_time:
vault['update_time'] = update_time
projects_map = _build_projects_map(vault['default_data'])
default_data = memcached.get('driverlog:default_data')
vault['default_data'] = default_data
projects_map = _build_projects_map(default_data)
vault['projects_map'] = projects_map
releases_map = _build_releases_map(vault['default_data'])
releases_map = _build_releases_map(default_data)
vault['releases_map'] = releases_map
drivers_map = _build_drivers_map(
vault['default_data'], projects_map)
vault['drivers_map'] = drivers_map
_extend_drivers_info()
if vault.get('update_hash') != hashes.get('update_hash'):
vault['update_hash'] = hashes['update_hash']
update = memcached.get('driverlog:update')
for proj_vendor_driver, os_versions_map in update.iteritems():
ovm = os_versions_map['os_versions_map']
if proj_vendor_driver not in vault['drivers_map']:
LOG.info('Unknown driver %s, ignoring', proj_vendor_driver)
else:
vault['drivers_map'][proj_vendor_driver][
'os_versions_map'].update(ovm)
vault['drivers_map'] = default_data['drivers']
_extend_drivers_info()
@@ -149,7 +112,3 @@ def get_vault():
'Please run the processor')
return vault
def get_memory_storage():
return get_vault()['memory_storage']

View File

@@ -14,7 +14,6 @@
# limitations under the License.
import os
import urllib
import flask
from flask.ext import gravatar as gravatar_ext
@@ -23,7 +22,6 @@ import six
from driverlog.dashboard import api
from driverlog.dashboard import decorators
from driverlog.dashboard import vault
from driverlog.openstack.common import log as logging
from driverlog.processor import config
@@ -61,35 +59,6 @@ def summary():
pass
@app.route('/details')
@decorators.templated()
def details():
project_id = flask.request.args.get('project_id') or ''
vendor = flask.request.args.get('vendor') or ''
driver_name = flask.request.args.get('driver_name') or ''
drivers_map = vault.get_vault()['drivers_map']
key = (urllib.unquote_plus(project_id).lower(),
urllib.unquote_plus(vendor).lower(),
urllib.unquote_plus(driver_name).lower())
if key not in drivers_map:
flask.abort(404)
driver = drivers_map[key]
os_versions_list = []
for os_version, os_version_info in driver['os_versions_map'].iteritems():
os_version_info['os_version'] = os_version
os_versions_list.append(os_version_info)
sorted(os_versions_list, key=lambda x: x['os_version'])
driver['os_versions'] = os_versions_list
return {
'driver': driver,
}
@app.errorhandler(404)
@decorators.templated('404.html', 404)
def page_not_found(e):

View File

@@ -13,16 +13,11 @@
# See the License for the specific language governing permissions and
# limitations under the License.
import hashlib
import json
import collections
import re
import time
import memcache
from oslo.config import cfg
from six.moves.urllib import parse
import time
from driverlog.openstack.common import log as logging
from driverlog.processor import config
@@ -33,19 +28,10 @@ from driverlog.processor import utils
LOG = logging.getLogger(__name__)
def find_comment(review, ci):
patch_number = review['currentPatchSet']['number']
for comment in reversed(review.get('comments') or []):
prefix = 'Patch Set %s:' % patch_number
if ((comment['reviewer'].get('username') == ci) and
(comment['message'].find(prefix) == 0)):
return comment['message'][len(prefix):].strip()
return None
def find_vote(review, ci_id):
def _find_vote(review, ci_id):
"""
Finds vote corresponding to ci_id
"""
for approval in (review['currentPatchSet'].get('approvals') or []):
if approval['type'] not in ['Verified', 'VRIF']:
continue
@@ -56,164 +42,139 @@ def find_vote(review, ci_id):
return None
def process_reviews(review_iterator, ci_ids_map, project_id):
branch_ci_set = set()
def find_ci_result(review_iterator, ci):
"""
For a given stream of reviews, finds the result left by the specified CI
"""
for review in review_iterator:
review_url = review['url']
branch = review['branch']
for comment in reversed(review.get('comments') or []):
ci_id = comment['reviewer'].get('username')
if ci_id not in ci_ids_map:
if comment['reviewer'].get('username') != ci['id']:
continue
branch_ci = (branch, ci_id)
if branch_ci in branch_ci_set:
continue # already seen, ignore
branch_ci_set.add(branch_ci)
message = comment['message']
prefix = 'Patch Set %s:' % review['currentPatchSet']['number']
if comment['message'].find(prefix) != 0:
break # all comments from the latest patch set passed
message = message[len(prefix):].strip()
for one_ci in ci_ids_map[ci_id]:
result = None
result = None
# try to get result by parsing comment message
success_pattern = one_ci.get('success_pattern')
failure_pattern = one_ci.get('failure_pattern')
result = None
# try to get result by parsing comment message
success_pattern = ci.get('success_pattern')
failure_pattern = ci.get('failure_pattern')
if success_pattern and re.search(success_pattern, message):
result = True
elif failure_pattern and re.search(failure_pattern, message):
result = False
if success_pattern and re.search(success_pattern, message):
result = True
elif failure_pattern and re.search(failure_pattern, message):
result = False
# try to get result from vote
if result is None:
result = find_vote(review, ci_id)
# try to get result from vote
if result is None:
result = _find_vote(review, ci['id'])
if result is not None:
yield {
(project_id,
one_ci['vendor'].lower(),
one_ci['driver_name'].lower()): {
'os_versions_map': {
branch: {
'comment': message,
'timestamp': comment['timestamp'],
'review_url': review_url
}
}
}
}
def update_generator(memcached_inst, default_data, ci_ids_map,
force_update=False):
for project in default_data['projects']:
project_id = project['id'].lower()
rcs_inst = rcs.get_rcs(project_id, cfg.CONF.review_uri)
rcs_inst.setup(key_filename=cfg.CONF.ssh_key_filename,
username=cfg.CONF.ssh_username)
LOG.debug('Processing reviews for project: %s', project_id)
rcs_key = 'driverlog:rcs:' + parse.quote_plus(project_id)
last_id = None
if not force_update:
last_id = memcached_inst.get(rcs_key)
review_iterator = rcs_inst.log(last_id)
for item in process_reviews(review_iterator, ci_ids_map, project_id):
yield item
last_id = rcs_inst.get_last_id()
LOG.debug('RCS last id is: %s', last_id)
memcached_inst.set(rcs_key, last_id)
def _get_hash(data):
h = hashlib.new('sha1')
h.update(json.dumps(data))
return h.hexdigest()
def build_ci_map(drivers):
ci_map = collections.defaultdict(list)
for driver in drivers:
if 'ci' in driver:
value = {
'vendor': driver['vendor'],
'driver_name': driver['name'],
}
ci = driver['ci']
if 'success_pattern' in ci:
value['success_pattern'] = ci['success_pattern']
if 'failure_pattern' in ci:
value['failure_pattern'] = ci['failure_pattern']
ci_map[ci['id']].append(value)
return ci_map
def transform_default_data(default_data):
for driver in default_data['drivers']:
driver['os_versions_map'] = {}
if 'releases' in driver:
for release in driver['releases']:
driver['os_versions_map'][release] = {
'success': True,
'comment': 'self-tested verification'
if result is not None:
return {
'ci_result': result,
'comment': message,
'timestamp': comment['timestamp'],
'review_url': review_url,
}
def store_default_data(default_data, memcached_inst):
transform_default_data(default_data)
memcached_inst.set('driverlog:default_data', default_data)
def _get_release_by_branch(releases, branch):
"""
Translates branch name into release_id
"""
release = branch.lower()
if release.find('/') > 0:
return release.split('/')[1]
elif release == 'master':
return releases[-1]['id'].lower()
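For illustration, with the sample releases list ending in Juno:

    _get_release_by_branch(releases, 'stable/icehouse')  # -> 'icehouse'
    _get_release_by_branch(releases, 'master')           # -> 'juno'
    # any other branch name falls through and returns None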
def update_drivers(drivers, releases):
"""
Iterates over all drivers and searches for results produced by their CIs.
Returns True if info was updated
"""
branches = [('stable/' + r['id'].lower()) for r in releases] + ['master']
rcs_inst = rcs.get_rcs(cfg.CONF.review_uri)
rcs_inst.setup(key_filename=cfg.CONF.ssh_key_filename,
username=cfg.CONF.ssh_username)
has_updates = False
for driver in drivers.values():
if 'ci' not in driver:
continue
project_id = driver['project_id']
ci_id = driver['ci']['id']
for branch in branches:
LOG.debug('Searching reviews for project: %(project_id)s, branch: '
'%(branch)s, ci_id: %(ci_id)s',
{'project_id': project_id, 'branch': branch,
'ci_id': ci_id})
review_iterator = rcs_inst.log(project=project_id, branch=branch,
reviewer=ci_id)
ci_result = find_ci_result(review_iterator, driver['ci'])
if ci_result:
LOG.debug('Found CI result: %s', ci_result)
has_updates = True
key = (project_id, driver['vendor'], driver['name'])
os_version = _get_release_by_branch(releases, branch)
ci_result['ci_tested'] = True
drivers[key]['releases'][os_version] = ci_result
return has_updates
def transform_default_data(default_data):
transformed_drivers = {}
for driver in default_data['drivers']:
transformed_releases = {}
if 'releases' in driver:
for release in driver['releases']:
transformed_releases[release.lower()] = {
'ci_tested': False,
}
driver['releases'] = transformed_releases
key = (driver['project_id'], driver['vendor'], driver['name'])
transformed_drivers[key] = driver
default_data['drivers'] = transformed_drivers
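For example, the driver used in test_transform_default_data below:

    {'project_id': 'openstack/neutron', 'vendor': 'Cisco',
     'name': 'Cisco Nexus Plugin', 'releases': ['Grizzly', 'Havana', 'Icehouse']}

ends up keyed as ('openstack/neutron', 'Cisco', 'Cisco Nexus Plugin') with
releases normalized to {'grizzly': {'ci_tested': False},
'havana': {'ci_tested': False}, 'icehouse': {'ci_tested': False}}.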
def process(memcached_inst, default_data, force_update):
old_dd_hash = memcached_inst.get('driverlog:default_data_hash')
new_dd_hash = _get_hash(default_data)
memcached_inst.set('driverlog:default_data_hash', new_dd_hash)
new_dd_hash = utils.calc_hash(default_data)
return new_dd_hash != old_dd_hash
need_update = False
def calculate_update(memcached_inst, default_data, force_update):
update = {}
if not force_update:
update = memcached_inst.get('driverlog:update') or {}
ci_ids_map = build_ci_map(default_data['drivers'])
need_update = force_update
for record in update_generator(memcached_inst, default_data, ci_ids_map,
force_update=force_update):
LOG.info('Got new record from Gerrit: %s', record)
if (new_dd_hash != old_dd_hash) or force_update:
transform_default_data(default_data)
need_update = True
else:
default_data = memcached_inst.get('driverlog:default_data')
key = record.keys()[0]
if key not in update:
update.update(record)
else:
os_version = record[key]['os_versions_map'].keys()[0]
info = record[key]['os_versions_map'].values()[0]
if os_version in update[key]['os_versions_map']:
update[key]['os_versions_map'][os_version].update(info)
else:
update[key]['os_versions_map'][os_version] = info
# write update into memcache
memcached_inst.set('driverlog:update', update)
need_update |= update_drivers(default_data['drivers'],
default_data['releases'])
if need_update:
memcached_inst.set('driverlog:update_hash', time.time())
# write default data into memcache
memcached_inst.set('driverlog:default_data', default_data)
memcached_inst.set('driverlog:default_data_hash', new_dd_hash)
memcached_inst.set('driverlog:update_time', time.time())
def main():
@@ -239,10 +200,7 @@ def main():
LOG.critical('Unable to load default data')
return not 0
dd_update = store_default_data(default_data, memcached_inst)
calculate_update(memcached_inst, default_data,
cfg.CONF.force_update or dd_update)
process(memcached_inst, default_data, cfg.CONF.force_update)
if __name__ == '__main__':

View File

@@ -17,6 +17,7 @@ import json
import re
import paramiko
import six
from driverlog.openstack.common import log as logging
@@ -25,12 +26,12 @@ LOG = logging.getLogger(__name__)
DEFAULT_PORT = 29418
GERRIT_URI_PREFIX = r'^gerrit:\/\/'
PAGE_LIMIT = 100
PAGE_LIMIT = 5
class Rcs(object):
def __init__(self, project_id, uri):
self.project_id = project_id
def __init__(self, uri):
pass
def setup(self, **kwargs):
pass
@@ -43,8 +44,8 @@ class Rcs(object):
class Gerrit(Rcs):
def __init__(self, project_id, uri):
super(Gerrit, self).__init__(project_id, uri)
def __init__(self, uri):
super(Gerrit, self).__init__(uri)
stripped = re.sub(GERRIT_URI_PREFIX, '', uri)
if stripped:
@@ -83,11 +84,14 @@ class Gerrit(Rcs):
LOG.exception(e)
return False
def _get_cmd(self, project_id, sort_key=None, limit=PAGE_LIMIT):
def _get_cmd(self, sort_key=None, limit=PAGE_LIMIT, **kwargs):
params = ' '.join([(k + ':\'' + v + '\'')
for k, v in six.iteritems(kwargs)])
cmd = ('gerrit query --format JSON '
'project:\'%(project_id)s\' limit:%(limit)s '
'%(params)s limit:%(limit)s '
'--current-patch-set --comments ' %
{'project_id': project_id, 'limit': limit})
{'params': params, 'limit': limit})
cmd += ' is:merged'
if sort_key:
cmd += ' resume_sortkey:%016x' % sort_key
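For illustration, kwargs of project='openstack/neutron', branch='master'
and reviewer='arista-test' (sample values) produce, up to keyword order:

    gerrit query --format JSON project:'openstack/neutron' branch:'master'
    reviewer:'arista-test' limit:5 --current-patch-set --comments is:merged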
@@ -102,11 +106,11 @@ class Gerrit(Rcs):
LOG.exception(e)
return False
def _poll_reviews(self, project_id, start_id=None, last_id=None):
def _poll_reviews(self, start_id=None, last_id=None, **kwargs):
sort_key = start_id
while True:
cmd = self._get_cmd(project_id, sort_key)
cmd = self._get_cmd(sort_key, **kwargs)
LOG.debug('Executing command: %s', cmd)
exec_result = self._exec_command(cmd)
if not exec_result:
@@ -124,55 +128,27 @@ class Gerrit(Rcs):
break
proceed = True
review['project_id'] = project_id
yield review
if not proceed:
break
def log(self, last_id):
def log(self, **kwargs):
if not self._connect():
return
# poll new merged reviews from the top down to last_id
LOG.debug('Poll new reviews for project: %s', self.project_id)
for review in self._poll_reviews(self.project_id, last_id=last_id):
for review in self._poll_reviews(**kwargs):
yield review
self.client.close()
def get_last_id(self):
if not self._connect():
return None
LOG.debug('Get last id for project: %s', self.project_id)
cmd = self._get_cmd(self.project_id, limit=1)
LOG.debug('Executing command: %s', cmd)
exec_result = self._exec_command(cmd)
if not exec_result:
return None
stdin, stdout, stderr = exec_result
last_id = None
for line in stdout:
review = json.loads(line)
if 'sortKey' in review:
last_id = int(review['sortKey'], 16)
break
self.client.close()
LOG.debug('Project %(project_id)s last id is %(id)s',
{'project_id': self.project_id, 'id': last_id})
return last_id
def get_rcs(project_id, uri):
def get_rcs(uri):
LOG.debug('Review control system is requested for uri %s' % uri)
match = re.search(GERRIT_URI_PREFIX, uri)
if match:
return Gerrit(project_id, uri)
return Gerrit(uri)
else:
LOG.warning('Unsupported review control system, fallback to dummy')
return Rcs(project_id, uri)
return Rcs(uri)

View File

@@ -15,6 +15,7 @@
import cgi
import datetime
import hashlib
import json
import re
import time
@@ -181,3 +182,9 @@ def copy_dict(source, include=None, exclude=None):
exclude = exclude or []
return dict([(k, v) for k, v in six.iteritems(source)
if k in include and k not in exclude])
def calc_hash(data):
h = hashlib.new('sha1')
h.update(json.dumps(data))
return h.hexdigest()
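A sketch of how process() in the processor uses this hash to detect
changes in default-data:

    old_dd_hash = memcached_inst.get('driverlog:default_data_hash')
    if utils.calc_hash(default_data) != old_dd_hash or force_update:
        transform_default_data(default_data)   # data from file changed
    else:
        default_data = memcached_inst.get('driverlog:default_data')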

View File

@@ -23,43 +23,43 @@
],
"releases": [
{
"id": "austin",
"id": "Austin",
"wiki": "https://wiki.openstack.org/wiki/ReleaseNotes/Austin"
},
{
"id": "bexar",
"id": "Bexar",
"wiki": "https://wiki.openstack.org/wiki/ReleaseNotes/Bexar"
},
{
"id": "cactus",
"id": "Cactus",
"wiki": "https://wiki.openstack.org/wiki/ReleaseNotes/Cactus"
},
{
"id": "diablo",
"id": "Diablo",
"wiki": "https://wiki.openstack.org/wiki/ReleaseNotes/Diablo"
},
{
"id": "essex",
"id": "Essex",
"wiki": "https://wiki.openstack.org/wiki/ReleaseNotes/Essex"
},
{
"id": "folsom",
"id": "Folsom",
"wiki": "https://wiki.openstack.org/wiki/ReleaseNotes/Folsom"
},
{
"id": "grizzly",
"id": "Grizzly",
"wiki": "https://wiki.openstack.org/wiki/ReleaseNotes/Grizzly"
},
{
"id": "havana",
"id": "Havana",
"wiki": "https://wiki.openstack.org/wiki/ReleaseNotes/Havana"
},
{
"id": "icehouse",
"id": "Icehouse",
"wiki": "https://wiki.openstack.org/wiki/ReleaseNotes/Icehouse"
},
{
"id": "juno",
"id": "Juno",
"wiki": "https://wiki.openstack.org/wiki/Releases"
}
],
@@ -857,8 +857,8 @@
"wiki": "http://openstack.redhat.com/OpenDaylight_integration",
"ci": {
"id": "odl-jenkins",
"success_pattern": "success",
"failure_pattern": "fail"
"success_pattern": "SUCCESS",
"failure_pattern": "FAILURE"
}
},
{
@@ -1014,8 +1014,8 @@
"wiki": "http://wiki.cloudbase.it/hyperv-tempest-exclusions",
"ci": {
"id": "hyper-v-ci",
"success_pattern": "Successful",
"failure_pattern": "Failed"
"success_pattern": "Build succeeded",
"failure_pattern": "Build failed"
},
"releases": ["Folsom", "Grizzly", "Havana", "Icehouse"]
},
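Note: these patterns are applied with re.search to the CI comment body
after the 'Patch Set N:' prefix is stripped (see find_ci_result above).
Using the sample comment from the tests:

    re.search('Build succeeded', 'Build succeeded.\n\n- neutron_zuul ...')  # match
    re.search('Successful', 'Build succeeded.')  # no match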

View File

@@ -26,8 +26,7 @@
"type": "object",
"properties": {
"id": {
"type": "string",
"pattern": "^[a-z]+$"
"$ref": "#/definitions/release_id"
},
"wiki": {
"type": "string"
@@ -65,8 +64,7 @@
"releases": {
"type": "array",
"items": {
"type": "string",
"pattern": "^[\\w]+$"
"$ref": "#/definitions/release_id"
}
},
"ci": {
@@ -118,6 +116,10 @@
},
"required": ["id"],
"additionalProperties": false
},
"release_id": {
"type": "string",
"pattern": "^[A-Z][a-z]+$"
}
}
}
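Under the new release_id pattern, 'Icehouse' validates while 'icehouse'
and 'ICEHOUSE' are rejected:

    import re
    re.match('^[A-Z][a-z]+$', 'Icehouse')   # match
    re.match('^[A-Z][a-z]+$', 'icehouse')   # None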

View File

@@ -37,13 +37,14 @@ class TestCIConfigValidity(testtools.TestCase):
def test_ci_config_matches_sample_review(self):
def verify_single_driver(driver_name):
ci_ids_map = main.build_ci_map(self.default_data['drivers'])
records = list(main.process_reviews(
[self.review], ci_ids_map, 'openstack/neutron'))
records = [r for r in records
if r.keys()[0][2] == driver_name.lower()]
self.assertEqual(1, len(records), '1 record is expected for '
'driver %s' % driver_name)
for driver in self.default_data['drivers']:
if driver['name'] == driver_name:
result = main.find_ci_result([self.review], driver['ci'])
self.assertIsNotNone(result, 'CI result should be found '
'for driver %s' % driver_name)
return
self.fail('No result parsed for driver %s' % driver_name)
verify_single_driver('Cisco Nexus Plugin')
verify_single_driver('Neutron ML2 Driver For Cisco Nexus Devices')

View File

@@ -15,15 +15,15 @@
],
"releases": [
{
"id": "havana",
"id": "Havana",
"wiki": "https://wiki.openstack.org/wiki/ReleaseNotes/Havana"
},
{
"id": "icehouse",
"id": "Icehouse",
"wiki": "https://wiki.openstack.org/wiki/ReleaseNotes/Icehouse"
},
{
"id": "juno",
"id": "Juno",
"wiki": "https://wiki.openstack.org/wiki/Releases"
}
],

View File

@@ -18,89 +18,78 @@ import memcache
import mock
from driverlog.processor import main
from driverlog.processor import utils
import testtools
def _read_sample_review():
with open('tests/unit/test_data/sample_review.json') as fd:
return json.load(fd)
def _read_sample_default_data():
with open('tests/unit/test_data/sample_default_data.json') as fd:
return json.load(fd)
class TestMain(testtools.TestCase):
def setUp(self):
super(TestMain, self).setUp()
with open('tests/unit/test_data/sample_review.json') as fd:
self.review = json.load(fd)
with open('tests/unit/test_data/sample_default_data.json') as fd:
self.default_data = json.load(fd)
def test_build_ci_map(self):
ci_map = main.build_ci_map(self.default_data['drivers'])
self.assertTrue('arista-test' in ci_map)
self.assertEqual([{
'vendor': 'Arista',
'driver_name': 'Arista Neutron ML2 Driver'
}], ci_map['arista-test'])
def test_process_reviews_ci_vote_and_comment(self):
# check that vote and matching comment are found
ci_ids_map = main.build_ci_map(self.default_data['drivers'])
records = list(main.process_reviews(
[self.review], ci_ids_map, 'openstack/neutron'))
records = [r for r in records if r.keys()[0][1] == 'arista']
result = main.find_ci_result([_read_sample_review()],
{'id': 'arista-test'})
self.assertEqual(1, len(records), 'One record is expected')
self.assertIsNotNone(result, 'CI result should be found')
expected_record = {
('openstack/neutron', 'arista', 'arista neutron ml2 driver'): {
'os_versions_map': {
'master': {
'comment': 'Verified+1\n\n'
'Arista third party testing PASSED '
'[ https://arista.box.com/s/x8z0 ]',
'timestamp': 1399478047,
'review_url': 'https://review.openstack.org/92468',
}
}
}
'ci_result': True,
'comment': 'Verified+1\n\nArista third party testing PASSED '
'[ https://arista.box.com/s/x8z0 ]',
'timestamp': 1399478047,
'review_url': 'https://review.openstack.org/92468',
}
self.assertEqual(expected_record, records[0])
self.assertEqual(expected_record, result)
def test_process_reviews_ci_only_comments(self):
# check that comment is found and parsed correctly
ci_ids_map = main.build_ci_map(self.default_data['drivers'])
records = list(main.process_reviews(
[self.review], ci_ids_map, 'openstack/neutron'))
records = [r for r in records if r.keys()[0][1] == 'cisco']
result = main.find_ci_result([_read_sample_review()], {
'id': 'cisco_neutron_ci',
'success_pattern': 'neutron_zuul \\S+ : SUCCESS',
'failure_pattern': 'neutron_zuul \\S+ : FAILURE',
})
self.assertEqual(2, len(records), '2 records are expected '
'(since there are 2 cisco entries)')
self.assertIsNotNone(result, 'CI result should be found')
expected_record = {
(
'openstack/neutron', 'cisco',
'neutron ml2 driver for cisco nexus devices'
): {
'os_versions_map': {
'master': {
'comment': 'Build succeeded.\n\n'
'- neutron_zuul http://128.107.233.28:8080/'
'job/neutron_zuul/263 : SUCCESS in 18m 52s',
'timestamp': 1399481091,
'review_url': 'https://review.openstack.org/92468',
}
}
}
'ci_result': True,
'comment': 'Build succeeded.\n\n- neutron_zuul '
'http://128.107.233.28:8080/job/neutron_zuul/263 : '
'SUCCESS in 18m 52s',
'timestamp': 1399481091,
'review_url': 'https://review.openstack.org/92468',
}
self.assertEqual(expected_record, records[0])
self.assertEqual(expected_record, result)
def test_tranform_default_data(self):
def test_transform_default_data(self):
driver = {
"project_id": "openstack/neutron",
"releases": ["Grizzly", "Havana", "Icehouse"], }
'project_id': 'openstack/neutron',
'vendor': 'Cisco',
'name': 'Cisco Nexus Plugin',
'releases': ['Grizzly', 'Havana', 'Icehouse'], }
dd = {'drivers': [driver]}
main.transform_default_data(dd)
self.assertTrue('Grizzly' in driver['os_versions_map'],
self.assertIn(('openstack/neutron', 'Cisco', 'Cisco Nexus Plugin'),
dd['drivers'].keys())
driver = dd['drivers'][
('openstack/neutron', 'Cisco', 'Cisco Nexus Plugin')]
self.assertTrue('grizzly' in driver['releases'],
'Grizzly should be copied from releases into '
'os_version_map')
@@ -119,12 +108,15 @@ class TestMain(testtools.TestCase):
return memcached_inst
def _patch_rcs(self, rcs_getter):
def _get_rcs(project_id, review_uri):
def _patch_log(**kwargs):
if (kwargs['project'] == 'openstack/neutron' and
kwargs['branch'] == 'master'):
return [_read_sample_review()]
return []
def _get_rcs(review_uri):
rcs_inst = mock.Mock()
if project_id == 'openstack/neutron':
rcs_inst.log.return_value = [self.review]
else:
rcs_inst.log.return_value = []
rcs_inst.log.side_effect = _patch_log
return rcs_inst
rcs_getter.side_effect = _get_rcs
@@ -136,16 +128,16 @@ class TestMain(testtools.TestCase):
self._patch_rcs(rcs_getter)
# run!
main.calculate_update(memcached_inst, self.default_data, False)
main.process(memcached_inst, _read_sample_default_data(), False)
# verify
update = memcached_inst.get('driverlog:update')
update = memcached_inst.get('driverlog:default_data')['drivers']
driver_key = ('openstack/neutron', 'cisco', 'cisco nexus plugin')
self.assertIn(driver_key, update)
self.assertIn('master', update[driver_key]['os_versions_map'])
driver_key = ('openstack/neutron', 'Cisco', 'Cisco Nexus Plugin')
self.assertIn(driver_key, update.keys())
self.assertIn('havana', update[driver_key]['releases'].keys())
self.assertEqual('https://review.openstack.org/92468',
(update[driver_key]['os_versions_map']['master']
(update[driver_key]['releases']['juno']
['review_url']))
@mock.patch('oslo.config.cfg.CONF')
@@ -154,72 +146,45 @@ class TestMain(testtools.TestCase):
# checks that existing data will be overwritten with update
# preserving data for other versions
# put default data with some updates into memory storage
dd = _read_sample_default_data()
main.transform_default_data(dd)
key = ('openstack/neutron', 'Cisco', 'Cisco Nexus Plugin')
dd['drivers'][key]['releases'].update({
'juno': {
'comment': 'Build succeeded.',
'timestamp': 1234567890,
'review_url': 'https://review.openstack.org/11111'
},
'havana': {
'comment': 'Build succeeded.',
'timestamp': 1234567890,
'review_url': 'https://review.openstack.org/22222'
}})
# put hash from default data to emulate that file is not changed
default_data_from_file = _read_sample_default_data()
memcached_inst = self._make_test_memcached({
'driverlog:update': {
('openstack/neutron', 'cisco', 'cisco nexus plugin'): {
'os_versions_map': {
'master': {
'comment': 'Build succeeded.',
'timestamp': 1234567890,
'review_url': 'https://review.openstack.org/11111'
},
'stable/havana': {
'comment': 'Build succeeded.',
'timestamp': 1234567890,
'review_url': 'https://review.openstack.org/22222'
}
}}}})
'driverlog:default_data': dd,
'driverlog:default_data_hash': utils.calc_hash(
default_data_from_file)})
self._patch_rcs(rcs_getter)
# run!
main.calculate_update(memcached_inst, self.default_data, False)
main.process(memcached_inst, default_data_from_file, False)
# verify
update = memcached_inst.get('driverlog:update')
update = memcached_inst.get('driverlog:default_data')['drivers']
driver_key = ('openstack/neutron', 'cisco', 'cisco nexus plugin')
self.assertIn(driver_key, update)
self.assertIn('master', update[driver_key]['os_versions_map'])
driver_key = ('openstack/neutron', 'Cisco', 'Cisco Nexus Plugin')
self.assertIn(driver_key, update.keys())
self.assertIn('juno', update[driver_key]['releases'])
self.assertEqual('https://review.openstack.org/92468',
(update[driver_key]['os_versions_map']['master']
(update[driver_key]['releases']['juno']
['review_url']))
self.assertIn('stable/havana', update[driver_key]['os_versions_map'])
self.assertIn('havana', update[driver_key]['releases'])
self.assertEqual('https://review.openstack.org/22222',
(update[driver_key]['os_versions_map']
['stable/havana']['review_url']))
@mock.patch('oslo.config.cfg.CONF')
@mock.patch('driverlog.processor.rcs.get_rcs')
def test_calculate_update_insert_version_data(self, rcs_getter, conf):
# checks that existing data will be overwritten with update
memcached_inst = self._make_test_memcached({
'driverlog:update': {
('openstack/neutron', 'cisco', 'cisco nexus plugin'): {
'os_versions_map': {
'stable/havana': {
'comment': 'Build succeeded.',
'timestamp': 1234567890,
'review_url': 'https://review.openstack.org/22222'
}
}}}})
self._patch_rcs(rcs_getter)
# run!
main.calculate_update(memcached_inst, self.default_data, False)
# verify
update = memcached_inst.get('driverlog:update')
driver_key = ('openstack/neutron', 'cisco', 'cisco nexus plugin')
self.assertIn(driver_key, update)
self.assertIn('master', update[driver_key]['os_versions_map'])
self.assertEqual('https://review.openstack.org/92468',
(update[driver_key]['os_versions_map']['master']
['review_url']))
self.assertIn('stable/havana', update[driver_key]['os_versions_map'])
self.assertEqual('https://review.openstack.org/22222',
(update[driver_key]['os_versions_map']
['stable/havana']['review_url']))
(update[driver_key]['releases']
['havana']['review_url']))