Made elastic-recheck py3 compatible

- Added py36/py37 jobs.
- Fixed invalid syntax errors (see the sketch below).
- Bumped dependencies to versions that are py3 compatible.
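For context on the "invalid syntax" bullet, here is a minimal, self-contained sketch (not part of the change itself) of why py2-only constructs such as the print statement have to be rewritten rather than guarded at runtime: py3 rejects them at compile time, so any module containing them fails to import at all.

    # Runnable under py3; the py2 print statement is a SyntaxError, not a
    # runtime error, so it cannot be caught inside the module that uses it.
    try:
        compile('print "hello"', '<py2-snippet>', 'exec')
    except SyntaxError as exc:
        print("rejected by the py3 compiler: %s" % exc)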

Change-Id: I0cebc35993b259cc86470c5ceb1016462a1d649b
Related-Bug: #1803402
Sorin Sbarnea 2018-11-08 15:06:58 +00:00
parent 36067c49d8
commit 6c4f466282
17 changed files with 78 additions and 68 deletions

View File

@@ -1,6 +1,8 @@
---
- project:
templates:
- openstack-python36-jobs
- openstack-python37-jobs
- publish-tox-docs-infra
check:
jobs:

View File

@@ -250,13 +250,13 @@ class ChannelConfig(object):
data['#' + key] = data.pop(key)
self.channels = data.keys()
self.events = {}
for channel, val in self.data.iteritems():
for channel, val in self.data.items():
for event in val['events']:
event_set = self.events.get(event, set())
event_set.add(channel)
self.events[event] = event_set
self.projects = {}
for channel, val in self.data.iteritems():
for channel, val in self.data.items():
for project in val['projects']:
project_set = self.projects.get(project, set())
project_set.add(channel)
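A minimal sketch (the channel data below is illustrative) of why this swap is safe on both interpreters: dict.iteritems() was removed in py3, while .items() exists everywhere, returning a list of pairs on py2 and a lazy view on py3, either of which drives the same for loop.

    data = {'#openstack-qa': {'events': ['positive', 'negative']}}
    for channel, val in data.items():  # py2: list of pairs, py3: dict view
        for event in val['events']:
            print("%s watches %s events" % (channel, event))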

View File

@@ -98,12 +98,12 @@ def classifying_rate(fails, data):
print("Classification percentage: %2.2f%%" %
((float(count) / float(total)) * 100.0))
sort = sorted(
bad_jobs.iteritems(),
bad_jobs.items(),
key=operator.itemgetter(1),
reverse=True)
print("Job fails with most unclassified errors")
for s in sort:
print " %3s : %s" % (s[1], s[0])
print(" %3s : %s" % (s[1], s[0]))
def _status_count(results):
@@ -183,10 +183,10 @@ def collect_metrics(classifier, fails):
def print_metrics(data, with_lp=False):
print "Elastic recheck known issues"
print
print("Elastic recheck known issues")
print()
sorted_data = sorted(data.iteritems(),
sorted_data = sorted(data.items(),
key=lambda x: -x[1]['fails'])
for d in sorted_data:
bug = d[0]
@@ -195,13 +195,13 @@ def print_metrics(data, with_lp=False):
% (bug, data['query'].rstrip()))
if with_lp:
get_launchpad_bug(d[0])
print "Hits"
print("Hits")
for s in data['hits']:
print " %s: %s" % (s, data['hits'][s])
print "Percentage of Gate Queue Job failures triggered by this bug"
print(" %s: %s" % (s, data['hits'][s]))
print("Percentage of Gate Queue Job failures triggered by this bug")
for s in data['percentages']:
print " %s: %2.2f%%" % (s, data['percentages'][s])
print
print(" %s: %2.2f%%" % (s, data['percentages'][s]))
print()
def get_launchpad_bug(bug):
@@ -209,11 +209,11 @@ def get_launchpad_bug(bug):
'production',
LPCACHEDIR)
lp_bug = lp.bugs[bug]
print "Title: %s" % lp_bug.title
print("Title: %s" % lp_bug.title)
targets = map(lambda x: (x.bug_target_name, x.status), lp_bug.bug_tasks)
print "Project: Status"
print("Project: Status")
for target, status in targets:
print " %s: %s" % (target, status)
print(" %s: %s" % (target, status))
def main():
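A small sketch of the print conversion used throughout this file (the job counts are made up). print("one string") happens to print the same text on py2, because the parentheses are read as a grouped expression, but a bare print() does not: py2 would print an empty tuple. Dual-support code therefore normally adds the __future__ import shown below; the change itself simply switches to the call syntax.

    from __future__ import print_function  # no-op on py3, needed for py2

    bad_jobs = {'tempest-full': 7, 'grenade': 3}  # illustrative counts
    for name, count in sorted(bad_jobs.items(), key=lambda kv: -kv[1]):
        print(" %3s : %s" % (count, name))
    print()  # blank line; without the future import, py2 would print "()"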

View File

@@ -42,7 +42,7 @@ IGNORED_ATTRIBUTES = [
def analyze_attributes(attributes):
analysis = {}
for attribute, values in attributes.iteritems():
for attribute, values in attributes.items():
if attribute[0] == '@' or attribute == 'message':
# skip meta attributes and raw messages
continue
@@ -50,7 +50,7 @@ def analyze_attributes(attributes):
analysis[attribute] = []
total_hits = sum(values.values())
for value_hash, hits in values.iteritems():
for value_hash, hits in values.items():
value = json.loads(value_hash)
analysis[attribute].append((100.0 * hits / total_hits, value))
@@ -78,13 +78,13 @@ def query(query_file_name, config=None, days=DEFAULT_NUMBER_OF_DAYS,
attributes = {}
for hit in r.hits['hits']:
for key, value in hit['_source'].iteritems():
for key, value in hit['_source'].items():
value_hash = json.dumps(value)
attributes.setdefault(key, {}).setdefault(value_hash, 0)
attributes[key][value_hash] += 1
analysis = analyze_attributes(attributes)
for attribute, results in sorted(analysis.iteritems()):
for attribute, results in sorted(analysis.items()):
if not verbose and attribute in IGNORED_ATTRIBUTES:
# skip less-than-useful attributes to reduce noise in the report
continue
@@ -92,7 +92,7 @@ def query(query_file_name, config=None, days=DEFAULT_NUMBER_OF_DAYS,
print(attribute)
for percentage, value in itertools.islice(results, quantity):
if isinstance(value, list):
value = ' '.join(unicode(x) for x in value)
value = ' '.join(str(x) for x in value)
print(' %d%% %s' % (percentage, value))
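The unicode builtin no longer exists on py3, where str is already the text type, so str(x) is the natural py3 spelling used here. A minimal sketch (the values are illustrative); six.text_type is the usual dual-interpreter alternative when non-ASCII data must stay unicode on py2 as well, and six is already imported elsewhere in this change.

    import six

    values = [100.0, u'caf\u00e9', None]
    # py3: str() returns text; py2: six.text_type is unicode, so non-ASCII
    # values do not trip the UnicodeEncodeError that py2's str() can raise.
    line = u' '.join(six.text_type(x) for x in values)
    print(line)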

View File

@@ -176,7 +176,7 @@ def classifying_rate(fails, data, engine, classifier, ls_url):
classification rate. For every failure in the gate queue did
we find a match for it.
"""
found_fails = {k: False for (k, v) in fails.iteritems()}
found_fails = {k: False for (k, v) in fails.items()}
for bugnum in data:
bug = data[bugnum]
@@ -237,7 +237,7 @@ def classifying_rate(fails, data, engine, classifier, ls_url):
(float(bad_jobs[job]) / float(total_job_failures[job]))
* 100.0)
sort = sorted(
bad_jobs.iteritems(),
bad_jobs.items(),
key=operator.itemgetter(1),
reverse=True)

View File

@@ -12,7 +12,7 @@
# License for the specific language governing permissions and limitations
# under the License.
import ConfigParser
from six.moves import configparser
import os
import re
@@ -26,6 +26,7 @@ JOBS_RE = 'dsvm'
CI_USERNAME = 'jenkins'
GERRIT_QUERY_FILE = 'queries'
GERRIT_HOST = 'review.openstack.org'
PID_FN = '/var/run/elastic-recheck/elastic-recheck.pid'
@@ -101,7 +102,7 @@ class Config(object):
if config_obj:
config = config_obj
else:
config = ConfigParser.ConfigParser(
config = configparser.ConfigParser(
{'es_url': ES_URL,
'ls_url': LS_URL,
'db_uri': DB_URI,
@@ -110,7 +111,7 @@ class Config(object):
'jobs_re': JOBS_RE,
'pidfile': PID_FN,
'index_format': DEFAULT_INDEX_FORMAT,
'query_file': GERRIT_QUERY_FILE,
'query_file': GERRIT_QUERY_FILE
}
)
config.read(config_file)
@@ -129,8 +130,15 @@ class Config(object):
if config.has_section('gerrit'):
self.gerrit_user = config.get('gerrit', 'user')
self.gerrit_query_file = config.get('gerrit', 'query_file')
self.gerrit_host = config.get('gerrit', 'host',
'review.openstack.org')
# workaround for python api change https://docs.python.org/3/library/configparser.html#fallback-values
try:
self.gerrit_host = config.get('gerrit',
'host',
fallback=GERRIT_HOST)
except TypeError:
self.gerrit_host = config.get('gerrit',
'host',
GERRIT_HOST)
self.gerrit_host_key = config.get('gerrit', 'key')
if config.has_section('ircbot'):
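The fallback= keyword only exists on the py3 configparser; py2's ConfigParser.get() does not accept it and raises TypeError, which is exactly what the try/except above relies on. A standalone sketch of the py3 behaviour (the section name and default host are illustrative, echoing GERRIT_HOST above):

    from six.moves import configparser  # py2: ConfigParser, py3: configparser

    config = configparser.ConfigParser()
    config.add_section('gerrit')
    try:
        # py3: returned when the 'host' option is missing instead of raising
        # NoOptionError; py2: TypeError because fallback= is not a parameter.
        host = config.get('gerrit', 'host', fallback='review.openstack.org')
    except TypeError:
        host = 'review.openstack.org'  # simplified py2 path, illustration only
    print(host)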

View File

@@ -90,7 +90,7 @@ class FailJob(object):
self.url = url
# The last set of characters of the URL are the first 7 digits
# of the build_uuid.
self.build_short_uuid = filter(None, url.split('/'))[-1]
self.build_short_uuid = list(filter(None, url.split('/')))[-1]
def __repr__(self):
return self.name
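filter() returns a list on py2 but a lazy iterator on py3, so indexing its result with [-1] breaks without the list() wrapper. A tiny sketch (the URL is made up); a list comprehension would avoid the wrapper on both interpreters:

    url = 'https://logs.example.org/80/580480/3/gate/tempest-full/1234abc/'
    build_short_uuid = list(filter(None, url.split('/')))[-1]
    assert build_short_uuid == '1234abc'
    # equivalent spelling with no py2/py3 difference
    assert [p for p in url.split('/') if p][-1] == '1234abc'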

View File

@@ -12,8 +12,8 @@
# License for the specific language governing permissions and limitations
# under the License.
import ConfigParser
import gerritlib
from six.moves import configparser
import testtools
from elastic_recheck import elasticRecheck
@@ -23,7 +23,7 @@ class TestGerritComment(testtools.TestCase):
def setUp(self):
super(TestGerritComment, self).setUp()
config = ConfigParser.ConfigParser({'server_password': None})
config = configparser.ConfigParser({'server_password': None})
config.read('elasticRecheck.conf')
self.user = config.get('gerrit', 'user')
key = config.get('gerrit', 'key')

View File

@@ -58,4 +58,4 @@ class TestUncategorizedFails(testtools.TestCase):
self.assertThat(all_fails['integrated_gate'],
testtools.matchers.HasLength(1))
self.assertIn('gate-tempest-dsvm-full',
all_fails['integrated_gate'].keys()[0])
list(all_fails['integrated_gate'].keys())[0])
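On py3, dict.keys() returns a view that supports iteration and membership tests but not indexing, hence the list() wrapper before [0]. A minimal sketch with made-up data:

    all_fails = {'integrated_gate': {'gate-tempest-dsvm-full: 1234abc':
                                     'http://logs.example.org'}}
    first = list(all_fails['integrated_gate'].keys())[0]
    assert 'gate-tempest-dsvm-full' in first
    # all_fails['integrated_gate'].keys()[0] raises TypeError on py3:
    # 'dict_keys' object is not subscriptable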

View File

@@ -12,7 +12,7 @@
# License for the specific language governing permissions and limitations
# under the License.
import ConfigParser
from six.moves import configparser
import unittest
import yaml
@@ -49,7 +49,9 @@ def _set_fake_config(fake_config):
class TestBot(unittest.TestCase):
def setUp(self):
super(TestBot, self).setUp()
self.fake_config = ConfigParser.ConfigParser({'server_password': None})
self.fake_config = configparser.ConfigParser(
{'server_password': None},
allow_no_value=True)
_set_fake_config(self.fake_config)
config = er_conf.Config(config_obj=self.fake_config)
self.channel_config = bot.ChannelConfig(yaml.load(
@@ -93,7 +95,9 @@ class TestBotWithTestTools(tests.TestCase):
self.useFixture(fixtures.MonkeyPatch(
'gerritlib.gerrit.Gerrit',
fg.Gerrit))
self.fake_config = ConfigParser.ConfigParser({'server_password': None})
self.fake_config = configparser.ConfigParser(
{'server_password': None},
allow_no_value=True)
_set_fake_config(self.fake_config)
config = er_conf.Config(config_obj=self.fake_config)
self.channel_config = bot.ChannelConfig(yaml.load(
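The extra allow_no_value=True is needed because the py3 configparser validates option values and raises TypeError for the None default that py2's ConfigParser quietly accepted. A standalone sketch, assuming six is available as it is elsewhere in this change:

    from six.moves import configparser

    fake_config = configparser.ConfigParser({'server_password': None},
                                            allow_no_value=True)
    fake_config.add_section('gerrit')
    # Without allow_no_value=True, py3 raises
    # TypeError: option values must be strings
    print(fake_config.get('gerrit', 'server_password'))  # -> None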

View File

@@ -11,7 +11,7 @@
# under the License.
import json
import StringIO
from six import StringIO
import sys
import mock
@@ -25,7 +25,7 @@ class TestQueryCmd(unit.UnitTestCase):
def setUp(self):
super(TestQueryCmd, self).setUp()
self._stdout = sys.stdout
sys.stdout = StringIO.StringIO()
sys.stdout = StringIO()
def tearDown(self):
super(TestQueryCmd, self).tearDown()
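py2's StringIO module became io.StringIO on py3; six.StringIO picks the right one, so the stdout-capture pattern in this test keeps the same shape. A self-contained sketch:

    import sys

    from six import StringIO  # py2: StringIO.StringIO, py3: io.StringIO

    captured = StringIO()
    original_stdout, sys.stdout = sys.stdout, captured
    try:
        print("query output")          # goes into the buffer, not the console
    finally:
        sys.stdout = original_stdout   # always restore, as tearDown() does here
    print("captured: %r" % captured.getvalue())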

View File

@@ -59,7 +59,7 @@ class TestBasicParsing(tests.TestCase):
facets = results.FacetSet()
facets.detect_facets(result_set, ["build_status"])
self.assertEqual(facets.keys(), ['FAILURE'])
self.assertEqual(list(facets.keys()), ['FAILURE'])
data = load_sample(1226337)
result_set = results.ResultSet(data)
@@ -87,18 +87,15 @@ class TestBasicParsing(tests.TestCase):
facets.detect_facets(result_set,
["timestamp", "build_status", "build_uuid"])
self.assertEqual(len(facets.keys()), 14)
print facets[1382104800000].keys()
self.assertEqual(facets[1382104800000].keys(), ["FAILURE"])
print(facets[1382104800000].keys())
self.assertEqual(list(facets[1382104800000].keys()), ["FAILURE"])
self.assertEqual(len(facets[1382104800000]["FAILURE"]), 2)
self.assertEqual(facets[1382101200000].keys(), ["FAILURE"])
self.assertEqual(list(facets[1382101200000].keys()), ["FAILURE"])
# NOTE(mriedem): We can't mock built-ins so we have to override utcnow().
class MockDatetimeToday(datetime.datetime):
def __init__(self, *args):
super(MockDatetimeToday, self).__init__(*args)
@classmethod
def utcnow(cls):
# One hour and one second into today.
@@ -108,9 +105,6 @@ class MockDatetimeToday(datetime.datetime):
class MockDatetimeYesterday(datetime.datetime):
def __init__(self, *args):
super(MockDatetimeYesterday, self).__init__(*args)
@classmethod
def utcnow(cls):
# 59 minutes and 59 seconds into today.
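On py3, dict.keys() is a set-like view, so it never compares equal to a list and assertEqual needs an explicit list() (or a set) on one side. The hunk also drops the __init__ overrides from the datetime subclasses, apparently because datetime builds its instances in __new__, so forwarding constructor arguments to object.__init__ was redundant and can raise TypeError on py3. A minimal sketch of the view comparison (facet data is illustrative):

    facets = {'FAILURE': ['hit-1', 'hit-2']}
    assert facets.keys() != ['FAILURE']        # py3: a view is never == a list
    assert list(facets.keys()) == ['FAILURE']  # convert before comparing
    assert facets.keys() == {'FAILURE'}        # views do compare equal to sets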

View File

@@ -9,8 +9,9 @@ irc>=15.0.1
pyyaml
lockfile
Babel>=0.9.6
httplib2<0.12.0 # required by launchpadlib, see https://bugs.launchpad.net/lazr.restfulclient/+bug/1803402
launchpadlib
lazr.restfulclient>=0.14.2 # LGPL
httplib2>=0.12.0 # MIT License
launchpadlib>=1.10.6 # LGPL
Jinja2
requests
subunit2sql>=0.9.0

View File

@@ -16,7 +16,8 @@ classifier =
Programming Language :: Python :: 2
Programming Language :: Python :: 2.7
Programming Language :: Python :: 3
Programming Language :: Python :: 3.3
Programming Language :: Python :: 3.6
Programming Language :: Python :: 3.7
[files]
packages =

View File

@@ -101,12 +101,12 @@ def cross_ref_with_er(changes, dirname):
def summarize_changes(changes):
no_er = {}
print "Summary"
print "%4.4s - Total Rechecks" % (len(changes))
print "%4.4s - Total w/Bug" % (
len([c for c in changes if c['bug'] != 'no bug']))
print "%4.4s - Total w/Bug and new recheck" % (
len([c for c in changes if (c['bug'] != 'no bug' and not c['er'])]))
print("Summary")
print("%4.4s - Total Rechecks" % (len(changes)))
print("%4.4s - Total w/Bug" % (
len([c for c in changes if c['bug'] != 'no bug'])))
print("%4.4s - Total w/Bug and new recheck" % (
len([c for c in changes if (c['bug'] != 'no bug' and not c['er'])])))
for c in changes:
bug = c['bug']
@@ -115,12 +115,12 @@ def summarize_changes(changes):
no_er[bug] = {'count': 0, 'reviews': []}
no_er[bug]['count'] += 1
no_er[bug]['reviews'].append(c['review'])
print
print "New bugs"
for k, v in no_er.iteritems():
print "Bug %s found %d times" % (k, v['count'])
print()
print("New bugs")
for k, v in no_er.items():
print("Bug %s found %d times" % (k, v['count']))
for rev in v['reviews']:
print " - %s" % rev
print(" - %s" % rev)
def main():

View File

@@ -1,6 +1,6 @@
[tox]
minversion = 1.6
envlist = py27,pep8,queries,docs
envlist = pep8,py37,py36,py27,queries,docs
skipsdist = True
[testenv]

View File

@@ -40,7 +40,7 @@ class ERHandler(BaseHTTPServer.BaseHTTPRequestHandler):
# if the file exists locally, we'll serve it up directly
fname = "web/share" + self.path
if os.path.isfile(fname):
print "found local file %s" % (fname)
print("found local file %s" % (fname))
self.send_response(200, "Success")
self.end_headers()
with open(fname) as f:
@@ -73,7 +73,7 @@ class ERHandler(BaseHTTPServer.BaseHTTPRequestHandler):
return
# Fall through for paths we don't understand
print "Unknown path requested: %s" % self.path
print("Unknown path requested: %s" % self.path)
def parse_opts():
@@ -90,9 +90,9 @@ def main():
server_address = ('', opts.port)
httpd = BaseHTTPServer.HTTPServer(server_address, ERHandler)
print "Test Server is running at http://localhost:%s" % opts.port
print "Ctrl-C to exit"
print
print("Test Server is running at http://localhost:%s" % opts.port)
print("Ctrl-C to exit")
print()
while True:
httpd.handle_request()
@@ -101,5 +101,5 @@ if __name__ == '__main__':
try:
main()
except KeyboardInterrupt:
print "\n"
print "Thanks for testing! Please come again."
print("\n")
print("Thanks for testing! Please come again.")