author     Zuul <zuul@review.openstack.org>            2018-10-12 15:19:04 +0000
committer  Gerrit Code Review <review@openstack.org>   2018-10-12 15:19:04 +0000
commit     4315e28baf910be1208ceeefed827e60975202a2 (patch)
tree       c4a59422cccd47947c98cd3512ef1c83bf7c9e79
parent     07fc2f7d3f34e2e7d9779806c01ae955f2536222 (diff)
parent     75cd334d147c123648199c16d7faedc6f930ffb0 (diff)
Merge "Remove dead tooling"
-rwxr-xr-x  analyze-oslo.sh                          90
-rwxr-xr-x  apply_cookiecutter.sh                    46
-rwxr-xr-x  build_dashboards.sh                      16
-rwxr-xr-x  clean_oslo_namespaces.sh                 36
-rwxr-xr-x  clone_openstack.sh                      196
-rwxr-xr-x  colorizer.py                            333
-rw-r--r--  dashboards/libs_part1.dash               31
-rw-r--r--  dashboards/libs_part2.dash               31
-rw-r--r--  dashboards/libs_part3.dash               31
-rw-r--r--  dashboards/libs_part4.dash               28
-rw-r--r--  dashboards/main.dash                     39
-rwxr-xr-x  find_used_deleted_modules.py            116
-rw-r--r--  install_venv.py                          70
-rw-r--r--  install_venv_common.py                  165
-rw-r--r--  lint.py                                  30
-rwxr-xr-x  new_core.py                              96
-rw-r--r--  new_core_analyzer.py                    177
-rwxr-xr-x  new_core_recruit.py                      70
-rw-r--r--  remove-namespace-packages-project.txt    18
-rwxr-xr-x  run_tests_common.sh                     248
-rw-r--r--  virtual_sprint.py                        80
-rwxr-xr-x  with_venv.sh                              6
22 files changed, 0 insertions, 1953 deletions
diff --git a/analyze-oslo.sh b/analyze-oslo.sh
deleted file mode 100755
index 782b292..0000000
--- a/analyze-oslo.sh
+++ /dev/null
@@ -1,90 +0,0 @@
1#!/bin/bash
2
3# This requires gitinspector to be installed
4# it can be gotten from:
5#
6# - https://pypi.org/project/gitinspector/0.3.2
7# - https://github.com/ejwa/gitinspector
8
9# Check out a new copy of a repository and set it up to be a useful
10# local copy.
11function clone_new {
12 typeset repo="$1"
13 typeset url="$2"
14 echo
15 echo "Cloning $repo"
16 git clone $url $repo
17 return 0
18}
19
20# Determine the current branch of a local repository.
21function current_branch {
22 (cd $1 && git rev-parse --abbrev-ref HEAD)
23}
24
25# Update an existing copy of a repository, including all remotes and
26# pulling into the local master branch if we're on that branch
27# already.
28function update_existing {
29 typeset repo="$1"
30 echo
31 echo "Updating $repo"
32 (cd $repo && git remote update)
33 RC=$?
34 if [ $RC -ne 0 ]
35 then
36 return $RC
37 fi
38 # Only run git pull for repos where I'm not working in a branch.
39 typeset b=$(current_branch $repo)
40 if [ $b == "master" ]
41 then
42 if (cd $repo && git diff --exit-code >/dev/null)
43 then
44 (cd $repo && git pull)
45 else
46 echo "Skipping pull for master branch with local changes"
47 (cd $repo && git status)
48 fi
49 else
50 echo "Skipping pull for branch $b"
51 branched="$branched $repo"
52 fi
53}
54
55# Process a single repository found in gerrit, determining whether it
56# exists locally already or not.
57function get_one_repo {
58 typeset repo="$1"
59 typeset url="$2"
60 typeset pardir=$(dirname $repo)
61 if [ ! -z "$pardir" ]
62 then
63 mkdir -p $pardir
64 fi
65 if [ ! -d $repo ] ; then
66 clone_new $repo $url
67 else
68 update_existing $repo
69 fi
70 RC=$?
71 return $RC
72}
73
74current_dir=`pwd`
75base="git://git.openstack.org"
76projects=$(ssh review.openstack.org -p 29418 gerrit ls-projects | grep -v 'attic' | grep "oslo")
77projects="$projects openstack/taskflow openstack/tooz openstack/cliff openstack/debtcollector"
78projects="$projects openstack/futurist openstack/stevedore openstack-dev/cookiecutter"
79projects="$projects openstack/automaton"
80
81for repo in $projects; do
82 get_one_repo "$repo" "$base/$repo"
83 RC=$?
84 if [ $RC -ne 0 ] ; then
85 echo "Unable to obtain $repo"
86 exit 1
87 fi
88done
89
90python new_core_analyzer.py $projects > "${current_dir}/oslo_reports.txt"
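
A typical run of the removed analyze-oslo.sh, assuming gitinspector is installed (the script's own header points at its PyPI/GitHub pages) and SSH access to review.openstack.org is configured; the report filename comes from the script's last line:

  $ pip install gitinspector
  $ ./analyze-oslo.sh
  $ less oslo_reports.txt
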
diff --git a/apply_cookiecutter.sh b/apply_cookiecutter.sh
deleted file mode 100755
index c1869ac..0000000
--- a/apply_cookiecutter.sh
+++ /dev/null
@@ -1,46 +0,0 @@
1#!/bin/bash
2#
3# Apply the Oslo cookiecutter template to an existing directory,
4# usually as part of the graduation process.
5
6COOKIECUTTER_TEMPLATE_REPO=${COOKIECUTTER_TEMPLATE_REPO:-https://git.openstack.org/openstack-dev/oslo-cookiecutter}
7
8function usage {
9 echo "Usage: apply_cookiecutter.sh newlib" 1>&2
10}
11
12if [ $# -lt 1 ]
13then
14 usage
15 exit 1
16fi
17
18new_lib="$1"
19
20if [[ $new_lib =~ oslo.* ]]
21then
22 echo "You probably don't want 'oslo' in the lib name." 1>&2
23 exit 2
24fi
25
26# Set up a virtualenv with cookiecutter
27tmpdir=$(mktemp -d -t oslo-cookiecutter.XXXX)
28echo "Installing cookiecutter..."
29venv=$tmpdir/venv
30virtualenv $venv
31$venv/bin/python -m pip install cookiecutter
32cookiecutter=$venv/bin/cookiecutter
33
34# Apply the cookiecutter template by building out a fresh copy using
35# the name chosen for this library and then copying any parts of the
36# results into the local tree, without overwriting files that already
37# exist.
38git clone $COOKIECUTTER_TEMPLATE_REPO $tmpdir/oslo-cookiecutter
39
40# FIXME(dhellmann): We need a better non-interactive mode for cookiecutter
41(cd $tmpdir && $cookiecutter $tmpdir/oslo-cookiecutter) <<EOF
42$new_lib
43openstack
44oslo.${new_lib} library
45EOF
46rsync -a --verbose --ignore-existing $tmpdir/oslo.${new_lib}/ .
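
An illustrative run of apply_cookiecutter.sh, following its usage message; the library name and paths are placeholders, and the script is run from the directory that should receive the generated files (the closing rsync copies into the current tree without overwriting existing files):

  $ cd ~/src/openstack/widget
  $ ~/src/oslo.tools/apply_cookiecutter.sh widget
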
diff --git a/build_dashboards.sh b/build_dashboards.sh
deleted file mode 100755
index d638f19..0000000
--- a/build_dashboards.sh
+++ /dev/null
@@ -1,16 +0,0 @@
1#!/bin/bash
2#
3# Process the dashboard files and emit the URLs
4
5creator_dir=$1
6dashboard_dir=$2
7
8cd $creator_dir
9
10for f in $dashboard_dir/*.dash
11do
12 echo '----------------------------------------'
13 echo $(basename $f .dash)
14 echo '----------------------------------------'
15 ./gerrit-dash-creator $f
16done
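
A sketch of how build_dashboards.sh was called; both arguments are positional, the first pointing at a gerrit-dash-creator checkout and the second at the dashboards directory above (absolute, since the script changes into the creator directory first). The paths are examples only:

  $ ./build_dashboards.sh ~/src/gerrit-dash-creator $(pwd)/dashboards
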
diff --git a/clean_oslo_namespaces.sh b/clean_oslo_namespaces.sh
deleted file mode 100755
index 9a6e0eb..0000000
--- a/clean_oslo_namespaces.sh
+++ /dev/null
@@ -1,36 +0,0 @@
1#!/bin/bash
2#
3# Script to replace imports from the 'oslo' namespace package with the
4# appropriate alternative in the dist-specific packages.
5#
6# Licensed under the Apache License, Version 2.0 (the "License"); you may
7# not use this file except in compliance with the License. You may obtain
8# a copy of the License at
9#
10# http://www.apache.org/licenses/LICENSE-2.0
11#
12# Unless required by applicable law or agreed to in writing, software
13# distributed under the License is distributed on an "AS IS" BASIS, WITHOUT
14# WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. See the
15# License for the specific language governing permissions and limitations
16# under the License.
17
18name=$(python setup.py --name)
19dir=${1:-$name}
20
21echo "Updating $dir"
22sed -i \
23 -e 's/from oslo\./from oslo_/g' \
24 -e 's/import oslo\./import oslo_/g' \
25 -e 's/from oslo import i18n/import oslo_i18n as i18n/g' \
26 -e 's/from oslo import messaging/import oslo_messaging as messaging/g' \
27 -e 's/from oslo import config/import oslo_config as config/g' \
28 -e 's/from oslo import serialization/import oslo_serialization as serialization/g' \
29 -e 's/from oslo import utils/import oslo_utils as utils/g' \
30 -e 's/oslo\.i18n\.TranslatorFactory/oslo_i18n.TranslatorFactory/g' \
31 $(find $dir -name '*.py' | grep -v "$name/tests/unit/test_hacking.py")
32
33set -x
34
35git grep 'from oslo import'
36git grep 'oslo\.'
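
A minimal sketch of what the sed rules above do, run from a project root so that `python setup.py --name` resolves; the target directory is a placeholder:

  $ ./clean_oslo_namespaces.sh mylib
  # e.g. "from oslo import config"          becomes "import oslo_config as config"
  #      "from oslo.utils import timeutils" becomes "from oslo_utils import timeutils"
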
diff --git a/clone_openstack.sh b/clone_openstack.sh
deleted file mode 100755
index 7792d16..0000000
--- a/clone_openstack.sh
+++ /dev/null
@@ -1,196 +0,0 @@
1#!/bin/bash
2#
3# Licensed under the Apache License, Version 2.0 (the "License"); you may
4# not use this file except in compliance with the License. You may obtain
5# a copy of the License at
6#
7# http://www.apache.org/licenses/LICENSE-2.0
8#
9# Unless required by applicable law or agreed to in writing, software
10# distributed under the License is distributed on an "AS IS" BASIS, WITHOUT
11# WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. See the
12# License for the specific language governing permissions and limitations
13# under the License.
14#
15# Check out every active repository from git.openstack.org. For new
16# copies, set up git-review. For any existing copies, update their
17# remotes and pull changes up to the local master.
18#
19# This script is based on prior art from mordred on the openstack-dev
20# mailing list.
21# http://lists.openstack.org/pipermail/openstack-dev/2013-October/017532.html
22#
23# Usage:
24#
25# Check out everything under the current directory:
26# $ clone_openstack.sh
27#
28# Check out a specific project (you can list multiple names):
29# $ clone_openstack.sh openstack/oslo-incubator
30#
31
32trouble_with=""
33branched=""
34
35# Figure out if git-hooks is installed and should be used.
36# https://github.com/icefox/git-hooks
37which git-hooks 2>&1 > /dev/null
38USE_GIT_HOOKS=$?
39
40# Users can set INCLUDE_STACKFORGE=1 if they want to always check out
41# new copies of stackforge projects.
42INCLUDE_STACKFORGE=${INCLUDE_STACKFORGE:-0}
43
44# If we have any trouble at all working with a repository, report that
45# and then record the name for the summary at the end.
46function track_trouble {
47 if [ $1 -ne 0 ]
48 then
49 echo "Remembering trouble with $2"
50 trouble_with="$trouble_with $2"
51 fi
52}
53
54# Determine the current branch of a local repository.
55function current_branch {
56 (cd $1 && git rev-parse --abbrev-ref HEAD)
57}
58
59# Print a summary report for any repositories that had trouble
60# updating.
61function report_trouble {
62 if [ ! -z "$trouble_with" ]
63 then
64 echo
65 echo "Had trouble updating:"
66 for r in $trouble_with
67 do
68 echo " $r - $(current_branch $r)"
69 done
70 fi
71}
72
73# Print a summary report for any repositories that were not on the
74# master branch when we updated them.
75function report_branched {
76 if [ ! -z "$branched" ]
77 then
78 echo
79 echo "Branched repos:"
80 for r in $branched
81 do
82 echo " $r - $(current_branch $r)"
83 done
84 fi
85}
86
87# Check out a new copy of a repository and set it up to be a useful
88# local copy.
89function clone_new {
90 typeset repo="$1"
91 typeset url="$2"
92 # Ignore stackforge projects unless told otherwise.
93 if [[ $repo =~ ^stackforge/.* ]]
94 then
95 if [ $INCLUDE_STACKFORGE -ne 1 ]
96 then
97 return 0
98 fi
99 fi
100 echo
101 echo "Cloning $repo"
102 git clone $url $repo
103 (cd $repo && git review -s)
104 if [ $USE_GIT_HOOKS -eq 0 ]
105 then
106 echo "Configuring git hooks"
107 (cd $repo && git hooks --install)
108 fi
109 return 0
110}
111
112# Update an existing copy of a repository, including all remotes and
113# pulling into the local master branch if we're on that branch
114# already.
115function update_existing {
116 typeset repo="$1"
117 echo
118 echo "Updating $repo"
119 (cd $repo && git remote update)
120 RC=$?
121 if [ $RC -ne 0 ]
122 then
123 return $RC
124 fi
125 # Only run git pull for repos where I'm not working in a branch.
126 typeset b=$(current_branch $repo)
127 if [ $b == "master" ]
128 then
129 if (cd $repo && git diff --exit-code >/dev/null)
130 then
131 (cd $repo && git pull)
132 else
133 echo "Skipping pull for master branch with local changes"
134 (cd $repo && git status)
135 fi
136 else
137 echo "Skipping pull for branch $b"
138 branched="$branched $repo"
139 fi
140}
141
142# Process a single repository found in gerrit, determining whether it
143# exists locally already or not.
144function get_one_repo {
145 typeset repo="$1"
146 typeset url="$2"
147 typeset pardir=$(dirname $repo)
148 if [ ! -z "$pardir" ]
149 then
150 mkdir -p $pardir
151 fi
152 if [ ! -d $repo ] ; then
153 clone_new $repo $url
154 else
155 update_existing $repo
156 fi
157 RC=$?
158 return $RC
159}
160
161# If we are given a list of projects on the command line, we will only
162# work on those. Otherwise, ask gerrit for the full list of openstack
163# projects, ignoring the ones in the attic. Stackforge projects are
164# ignored if they do not exist locally, so we include them in the
165# output list and check for them when we decide what to do with each
166# repository.
167projects="$*"
168if [ -z "$projects" ]
169then
170 projects=$(ssh review.openstack.org -p 29418 gerrit ls-projects | grep '^openstack' | grep -v 'attic')
171 RC=$?
172 if [ $RC -ne 0 ]
173 then
174        echo "Unable to obtain a list of projects from gerrit. Check your ssh credentials for review.openstack.org"
175 userid=`id -un`
176 gerrit_userid=`git config --get gitreview.username`
177 if [ $userid != $gerrit_userid ]
178 then
179 echo "Identified a possible userid difference between $userid and $gerrit_userid"
180 fi
181 exit $RC
182 fi
183else
184 # Go ahead and set things up so we will work with stackforge
185 # repositories, in case the caller has specified one on the
186 # command line.
187 INCLUDE_STACKFORGE=1
188fi
189
190for repo in $projects; do
191 get_one_repo $repo git://git.openstack.org/$repo
192 track_trouble $? $repo
193done
194
195report_branched
196report_trouble
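
Typical invocations, restating the usage notes in the script header; INCLUDE_STACKFORGE is the environment switch defined above:

  $ clone_openstack.sh                           # everything gerrit lists under openstack*
  $ clone_openstack.sh openstack/oslo-incubator  # one or more named projects
  $ INCLUDE_STACKFORGE=1 clone_openstack.sh      # also clone new stackforge copies
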
diff --git a/colorizer.py b/colorizer.py
deleted file mode 100755
index a16c620..0000000
--- a/colorizer.py
+++ /dev/null
@@ -1,333 +0,0 @@
1#!/usr/bin/env python
2
3# Copyright (c) 2013, Nebula, Inc.
4# Copyright 2010 United States Government as represented by the
5# Administrator of the National Aeronautics and Space Administration.
6# All Rights Reserved.
7#
8# Licensed under the Apache License, Version 2.0 (the "License"); you may
9# not use this file except in compliance with the License. You may obtain
10# a copy of the License at
11#
12# http://www.apache.org/licenses/LICENSE-2.0
13#
14# Unless required by applicable law or agreed to in writing, software
15# distributed under the License is distributed on an "AS IS" BASIS, WITHOUT
16# WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. See the
17# License for the specific language governing permissions and limitations
18# under the License.
19#
20# Colorizer Code is borrowed from Twisted:
21# Copyright (c) 2001-2010 Twisted Matrix Laboratories.
22#
23# Permission is hereby granted, free of charge, to any person obtaining
24# a copy of this software and associated documentation files (the
25# "Software"), to deal in the Software without restriction, including
26# without limitation the rights to use, copy, modify, merge, publish,
27# distribute, sublicense, and/or sell copies of the Software, and to
28# permit persons to whom the Software is furnished to do so, subject to
29# the following conditions:
30#
31# The above copyright notice and this permission notice shall be
32# included in all copies or substantial portions of the Software.
33#
34# THE SOFTWARE IS PROVIDED "AS IS", WITHOUT WARRANTY OF ANY KIND,
35# EXPRESS OR IMPLIED, INCLUDING BUT NOT LIMITED TO THE WARRANTIES OF
36# MERCHANTABILITY, FITNESS FOR A PARTICULAR PURPOSE AND
37# NONINFRINGEMENT. IN NO EVENT SHALL THE AUTHORS OR COPYRIGHT HOLDERS BE
38# LIABLE FOR ANY CLAIM, DAMAGES OR OTHER LIABILITY, WHETHER IN AN ACTION
39# OF CONTRACT, TORT OR OTHERWISE, ARISING FROM, OUT OF OR IN CONNECTION
40# WITH THE SOFTWARE OR THE USE OR OTHER DEALINGS IN THE SOFTWARE.
41
42"""Display a subunit stream through a colorized unittest test runner."""
43
44import heapq
45import sys
46import unittest
47
48import six
49import subunit
50import testtools
51
52
53class _AnsiColorizer(object):
54 """Colorizer allows callers to write text in a particular color.
55
56 A colorizer is an object that loosely wraps around a stream, allowing
57 callers to write text to the stream in a particular color.
58
59 Colorizer classes must implement C{supported()} and C{write(text, color)}.
60 """
61 _colors = dict(black=30, red=31, green=32, yellow=33,
62 blue=34, magenta=35, cyan=36, white=37)
63
64 def __init__(self, stream):
65 self.stream = stream
66
67 def supported(cls, stream=sys.stdout):
68        """Check whether the current platform supports coloring terminal output.
69
70 A class method that returns True if the current platform supports
71 coloring terminal output using this method. Returns False otherwise.
72 """
73 if not stream.isatty():
74 return False # auto color only on TTYs
75 try:
76 import curses
77 except ImportError:
78 return False
79 else:
80 try:
81 try:
82 return curses.tigetnum("colors") > 2
83 except curses.error:
84 curses.setupterm()
85 return curses.tigetnum("colors") > 2
86 except Exception:
87 # guess false in case of error
88 return False
89 supported = classmethod(supported)
90
91 def write(self, text, color):
92 """Write the given text to the stream in the given color.
93
94 @param text: Text to be written to the stream.
95
96 @param color: A string label for a color. e.g. 'red', 'white'.
97 """
98 color = self._colors[color]
99 self.stream.write('\x1b[%s;1m%s\x1b[0m' % (color, text))
100
101
102class _Win32Colorizer(object):
103 """See _AnsiColorizer docstring."""
104 def __init__(self, stream):
105 import win32console
106 red, green, blue, bold = (win32console.FOREGROUND_RED,
107 win32console.FOREGROUND_GREEN,
108 win32console.FOREGROUND_BLUE,
109 win32console.FOREGROUND_INTENSITY)
110 self.stream = stream
111 self.screenBuffer = win32console.GetStdHandle(
112 win32console.STD_OUT_HANDLE)
113 self._colors = {
114 'normal': red | green | blue,
115 'red': red | bold,
116 'green': green | bold,
117 'blue': blue | bold,
118 'yellow': red | green | bold,
119 'magenta': red | blue | bold,
120 'cyan': green | blue | bold,
121 'white': red | green | blue | bold,
122 }
123
124 def supported(cls, stream=sys.stdout):
125 try:
126 import win32console
127 screenBuffer = win32console.GetStdHandle(
128 win32console.STD_OUT_HANDLE)
129 except ImportError:
130 return False
131 import pywintypes
132 try:
133 screenBuffer.SetConsoleTextAttribute(
134 win32console.FOREGROUND_RED |
135 win32console.FOREGROUND_GREEN |
136 win32console.FOREGROUND_BLUE)
137 except pywintypes.error:
138 return False
139 else:
140 return True
141 supported = classmethod(supported)
142
143 def write(self, text, color):
144 color = self._colors[color]
145 self.screenBuffer.SetConsoleTextAttribute(color)
146 self.stream.write(text)
147 self.screenBuffer.SetConsoleTextAttribute(self._colors['normal'])
148
149
150class _NullColorizer(object):
151 """See _AnsiColorizer docstring."""
152 def __init__(self, stream):
153 self.stream = stream
154
155 def supported(cls, stream=sys.stdout):
156 return True
157 supported = classmethod(supported)
158
159 def write(self, text, color):
160 self.stream.write(text)
161
162
163def get_elapsed_time_color(elapsed_time):
164 if elapsed_time > 1.0:
165 return 'red'
166 elif elapsed_time > 0.25:
167 return 'yellow'
168 else:
169 return 'green'
170
171
172class OpenStackTestResult(testtools.TestResult):
173 def __init__(self, stream, descriptions, verbosity):
174 super(OpenStackTestResult, self).__init__()
175 self.stream = stream
176 self.showAll = verbosity > 1
177 self.num_slow_tests = 10
178 self.slow_tests = [] # this is a fixed-sized heap
179 self.colorizer = None
180 # NOTE(vish): reset stdout for the terminal check
181 stdout = sys.stdout
182 sys.stdout = sys.__stdout__
183 for colorizer in [_Win32Colorizer, _AnsiColorizer, _NullColorizer]:
184 if colorizer.supported():
185 self.colorizer = colorizer(self.stream)
186 break
187 sys.stdout = stdout
188 self.start_time = None
189 self.last_time = {}
190 self.results = {}
191 self.last_written = None
192
193 def _writeElapsedTime(self, elapsed):
194 color = get_elapsed_time_color(elapsed)
195 self.colorizer.write(" %.2f" % elapsed, color)
196
197 def _addResult(self, test, *args):
198 try:
199 name = test.id()
200 except AttributeError:
201 name = 'Unknown.unknown'
202 test_class, test_name = name.rsplit('.', 1)
203
204 elapsed = (self._now() - self.start_time).total_seconds()
205 item = (elapsed, test_class, test_name)
206 if len(self.slow_tests) >= self.num_slow_tests:
207 heapq.heappushpop(self.slow_tests, item)
208 else:
209 heapq.heappush(self.slow_tests, item)
210
211 self.results.setdefault(test_class, [])
212 self.results[test_class].append((test_name, elapsed) + args)
213 self.last_time[test_class] = self._now()
214 self.writeTests()
215
216 def _writeResult(self, test_name, elapsed, long_result, color,
217 short_result, success):
218 if self.showAll:
219 self.stream.write(' %s' % str(test_name).ljust(66))
220 self.colorizer.write(long_result, color)
221 if success:
222 self._writeElapsedTime(elapsed)
223 self.stream.writeln()
224 else:
225 self.colorizer.write(short_result, color)
226
227 def addSuccess(self, test):
228 super(OpenStackTestResult, self).addSuccess(test)
229 self._addResult(test, 'OK', 'green', '.', True)
230
231 def addFailure(self, test, err):
232 if test.id() == 'process-returncode':
233 return
234 super(OpenStackTestResult, self).addFailure(test, err)
235 self._addResult(test, 'FAIL', 'red', 'F', False)
236
237 def addError(self, test, err):
238        super(OpenStackTestResult, self).addError(test, err)
239 self._addResult(test, 'ERROR', 'red', 'E', False)
240
241 def addSkip(self, test, reason=None, details=None):
242 super(OpenStackTestResult, self).addSkip(test, reason, details)
243 self._addResult(test, 'SKIP', 'blue', 'S', True)
244
245 def startTest(self, test):
246 self.start_time = self._now()
247 super(OpenStackTestResult, self).startTest(test)
248
249 def writeTestCase(self, cls):
250 if not self.results.get(cls):
251 return
252 if cls != self.last_written:
253 self.colorizer.write(cls, 'white')
254 self.stream.writeln()
255 for result in self.results[cls]:
256 self._writeResult(*result)
257 del self.results[cls]
258 self.stream.flush()
259 self.last_written = cls
260
261 def writeTests(self):
262 time = self.last_time.get(self.last_written, self._now())
263 if not self.last_written or (self._now() - time).total_seconds() > 2.0:
264 diff = 3.0
265 while diff > 2.0:
266 classes = self.results.keys()
267 oldest = min(classes, key=lambda x: self.last_time[x])
268 diff = (self._now() - self.last_time[oldest]).total_seconds()
269 self.writeTestCase(oldest)
270 else:
271 self.writeTestCase(self.last_written)
272
273 def done(self):
274 self.stopTestRun()
275
276 def stopTestRun(self):
277 for cls in list(six.iterkeys(self.results)):
278 self.writeTestCase(cls)
279 self.stream.writeln()
280 self.writeSlowTests()
281
282 def writeSlowTests(self):
283 # Pare out 'fast' tests
284 slow_tests = [item for item in self.slow_tests
285 if get_elapsed_time_color(item[0]) != 'green']
286 if slow_tests:
287 slow_total_time = sum(item[0] for item in slow_tests)
288 slow = ("Slowest %i tests took %.2f secs:"
289 % (len(slow_tests), slow_total_time))
290 self.colorizer.write(slow, 'yellow')
291 self.stream.writeln()
292 last_cls = None
293 # sort by name
294 for elapsed, cls, name in sorted(slow_tests,
295 key=lambda x: x[1] + x[2]):
296 if cls != last_cls:
297 self.colorizer.write(cls, 'white')
298 self.stream.writeln()
299 last_cls = cls
300 self.stream.write(' %s' % str(name).ljust(68))
301 self._writeElapsedTime(elapsed)
302 self.stream.writeln()
303
304 def printErrors(self):
305 if self.showAll:
306 self.stream.writeln()
307 self.printErrorList('ERROR', self.errors)
308 self.printErrorList('FAIL', self.failures)
309
310 def printErrorList(self, flavor, errors):
311 for test, err in errors:
312 self.colorizer.write("=" * 70, 'red')
313 self.stream.writeln()
314 self.colorizer.write(flavor, 'red')
315 self.stream.writeln(": %s" % test.id())
316 self.colorizer.write("-" * 70, 'red')
317 self.stream.writeln()
318 self.stream.writeln("%s" % err)
319
320
321test = subunit.ProtocolTestCase(sys.stdin, passthrough=None)
322
323if sys.version_info[0:2] <= (2, 6):
324 runner = unittest.TextTestRunner(verbosity=2)
325else:
326 runner = unittest.TextTestRunner(verbosity=2,
327 resultclass=OpenStackTestResult)
328
329if runner.run(test).wasSuccessful():
330 exit_code = 0
331else:
332 exit_code = 1
333sys.exit(exit_code)
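
colorizer.py reads a version-1 subunit stream on stdin, which is how run_tests_common.sh below drives it; a hand-run pipeline under a testr-based toolchain would look roughly like:

  $ testr run --subunit | subunit-2to1 | python colorizer.py
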
diff --git a/dashboards/libs_part1.dash b/dashboards/libs_part1.dash
deleted file mode 100644
index d31f1c1..0000000
--- a/dashboards/libs_part1.dash
+++ /dev/null
@@ -1,31 +0,0 @@
1[dashboard]
2title = Oslo Review Inbox (Part One)
3description = Review Inbox (Part One)
4foreach = status:open NOT owner:self
5
6[section "Oslo Specs"]
7query = project:openstack/oslo-specs
8
9[section "automaton"]
10query = project:openstack/automaton
11
12[section "castellan"]
13query = project:openstack/castellan
14
15[section "cookiecutter"]
16query = project:openstack-dev/cookiecutter
17
18[section "debtcollector"]
19query = project:openstack/debtcollector
20
21[section "futurist"]
22query = project:openstack/futurist
23
24[section "mox3"]
25query = project:openstack/mox3
26
27[section "oslo-cookiecutter"]
28query = project:openstack-dev/oslo-cookiecutter
29
30[section "oslo.cache"]
31query = project:openstack/oslo.cache
diff --git a/dashboards/libs_part2.dash b/dashboards/libs_part2.dash
deleted file mode 100644
index 9de0c9a..0000000
--- a/dashboards/libs_part2.dash
+++ /dev/null
@@ -1,31 +0,0 @@
1[dashboard]
2title = Oslo Review Inbox (Part Two)
3description = Review Inbox (Part Two)
4foreach = status:open NOT owner:self
5
6[section "oslo.privsep"]
7query = project:openstack/oslo.privsep
8
9[section "oslo.reports"]
10query = project:openstack/oslo.reports
11
12[section "oslo.rootwrap"]
13query = project:openstack/oslo.rootwrap
14
15[section "oslo.serialization"]
16query = project:openstack/oslo.serialization
17
18[section "oslo.service"]
19query = project:openstack/oslo.service
20
21[section "oslo.tools"]
22query = project:openstack/oslo.tools
23
24[section "oslo.utils"]
25query = project:openstack/oslo.utils
26
27[section "oslo.versionedobjects"]
28query = project:openstack/oslo.versionedobjects
29
30[section "oslo.vmware"]
31query = project:openstack/oslo.vmware
diff --git a/dashboards/libs_part3.dash b/dashboards/libs_part3.dash
deleted file mode 100644
index 9f185e9..0000000
--- a/dashboards/libs_part3.dash
+++ /dev/null
@@ -1,31 +0,0 @@
1[dashboard]
2title = Oslo Review Inbox (Part Three)
3description = Review Inbox (Part Three)
4foreach = status:open NOT owner:self
5
6[section "oslo.concurrency"]
7query = project:openstack/oslo.concurrency
8
9[section "oslo.config"]
10query = project:openstack/oslo.config
11
12[section "oslo.context"]
13query = project:openstack/oslo.context
14
15[section "oslo.db"]
16query = project:openstack/oslo.db
17
18[section "oslo.i18n"]
19query = project:openstack/oslo.i18n
20
21[section "oslo.log"]
22query = project:openstack/oslo.log
23
24[section "oslo.messaging"]
25query = project:openstack/oslo.messaging
26
27[section "oslo.middleware"]
28query = project:openstack/oslo.middleware
29
30[section "oslo.policy"]
31query = project:openstack/oslo.policy
diff --git a/dashboards/libs_part4.dash b/dashboards/libs_part4.dash
deleted file mode 100644
index ef3addc..0000000
--- a/dashboards/libs_part4.dash
+++ /dev/null
@@ -1,28 +0,0 @@
1[dashboard]
2title = Oslo Review Inbox (Part Four)
3description = Review Inbox (Part Four)
4foreach = status:open NOT owner:self
5
6[section "oslosphinx"]
7query = project:openstack/oslosphinx
8
9[section "oslotest"]
10query = project:openstack/oslotest
11
12[section "osprofiler"]
13query = project:openstack/osprofiler
14
15[section "pbr"]
16query = project:openstack-dev/pbr
17
18[section "pylockfile"]
19query = project:openstack/pylockfile
20
21[section "stevedore"]
22query = project:openstack/stevedore
23
24[section "taskflow"]
25query = project:openstack/taskflow
26
27[section "tooz"]
28query = project:openstack/tooz
diff --git a/dashboards/main.dash b/dashboards/main.dash
deleted file mode 100644
index 4e92f81..0000000
--- a/dashboards/main.dash
+++ /dev/null
@@ -1,39 +0,0 @@
1[dashboard]
2title = Oslo Review Inbox
3description = Review Inbox
4foreach = (project:^openstack/oslo.* OR project:openstack/debtcollector OR
5 project:openstack/pylockfile OR project:openstack/castellan OR
6 project:openstack/futurist OR project:openstack/automaton OR
7 project:openstack/stevedore OR project:openstack/taskflow OR
8 project:openstack/tooz OR project:openstack-dev/cookiecutter OR
9 project:openstack-dev/pbr OR project:openstack/debtcollector OR
10 project:openstack-dev/oslo-cookiecutter OR project:openstack/mox3)
11 status:open NOT owner:self NOT label:Workflow<=-1 label:Verified>=1
12 NOT reviewedby:self
13
14[section "Oslo Specs"]
15query = project:openstack/oslo-specs
16
17[section "Bug Fixes"]
18query = topic:^bug/.*
19
20[section "Blueprints"]
21query = message:"Blueprint"
22
23[section "Needs Feedback (Changes older than 5 days that have not been reviewed by anyone)"]
24query = NOT label:Code-Review<=2 age:5d
25
26[section "You are a reviewer, but haven't voted in the current revision"]
27query = reviewer:self
28
29[section "Needs final +2"]
30query = label:Code-Review>=2 limit:50
31
32[section "New Contributors"]
33query = reviewer:10068
34
35[section "Passed Jenkins, No Negative Feedback"]
36query = NOT label:Code-Review>=2 NOT label:Code-Review<=-1 limit:50
37
38[section "Wayward Changes (Changes with no code review in the last 2 days)"]
39query = NOT label:Code-Review<=2 age:2d
diff --git a/find_used_deleted_modules.py b/find_used_deleted_modules.py
deleted file mode 100755
index 724d2bd..0000000
--- a/find_used_deleted_modules.py
+++ /dev/null
@@ -1,116 +0,0 @@
1#!/usr/bin/env python
2#
3# Licensed under the Apache License, Version 2.0 (the "License"); you may
4# not use this file except in compliance with the License. You may obtain
5# a copy of the License at
6#
7# http://www.apache.org/licenses/LICENSE-2.0
8#
9# Unless required by applicable law or agreed to in writing, software
10# distributed under the License is distributed on an "AS IS" BASIS, WITHOUT
11# WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. See the
12# License for the specific language governing permissions and limitations
13# under the License.
14"""
15Look through the openstack-common.conf files for projects to find
16any that are using modules that have been deleted from the
17incubator.
18"""
19
20from __future__ import print_function
21
22import glob
23import os
24import sys
25
26from oslo_config import cfg
27
28# Extend sys.path to find update.py
29my_dir = os.path.dirname(__file__)
30incubator_root = os.path.abspath(os.path.dirname(my_dir))
31sys.path.append(incubator_root)
32import update
33
34
35def main(argv=sys.argv[1:]):
36 repodir = os.path.abspath(
37 os.path.join(my_dir, os.pardir, os.pardir, os.pardir)
38 )
39
40 main_cfg = cfg.ConfigOpts()
41 main_cfg.register_cli_opt(
42 cfg.MultiStrOpt(
43 # NOTE(dhellmann): We can't call this "project" because
44 # that conflicts with another property of the ConfigOpts
45 # class.
46 'proj',
47 default=[],
48 positional=True,
49 help='list of repo subdirs to scan, e.g. "openstack/nova"',
50 )
51 )
52 main_cfg(argv)
53
54 # If the user gave us project names, turn them into full paths to
55 # the project directory. If not, build a full list of all the
56 # projects we find.
57 projects = main_cfg.proj
58 if projects:
59 projects = [os.path.join(repodir, p) for p in projects]
60 else:
61 projects = glob.glob(
62 os.path.join(repodir, '*', '*')
63 )
64
65 base_dir = os.path.join(
66 incubator_root,
67 'openstack',
68 'common',
69 )
70 tools_dir = os.path.join(incubator_root, 'tools')
71
72 previous_project = None
73 for project_path in projects:
74 conf_file = os.path.join(project_path, 'openstack-common.conf')
75 if not os.path.exists(conf_file):
76 # This is not a directory using oslo-incubator.
77 continue
78
79 project_name = '/'.join(project_path.split('/')[-2:])
80
81 # Use a separate parser for each configuration file.
82 pcfg = cfg.ConfigOpts()
83 pcfg.register_opts(update.opts)
84 pcfg(['--config-file', conf_file])
85
86 # The list of modules can come in a couple of different
87 # options, so combine the results.
88 modules = pcfg.module + pcfg.modules
89 for mod in modules:
90 # Build a few filenames and patterns for looking for
91 # versions of the module being used by the project before
92 # testing them all.
93 mod_path = os.path.join(
94 base_dir,
95 mod.replace('.', os.sep),
96 )
97 mod_file = '%s.py' % mod_path
98 tool_pattern = os.path.join(tools_dir, mod + '*')
99 tool_subdir_pattern = os.path.join(tools_dir, mod, '*.sh')
100 if (os.path.isfile(mod_file)
101 or
102 os.path.isdir(mod_path)
103 or
104 glob.glob(tool_pattern)
105 or
106 glob.glob(tool_subdir_pattern)):
107 # Found something we would have copied in update.py.
108 continue
109 else:
110 if project_name != previous_project:
111 previous_project = project_name
112 print()
113 print('%s: %s' % (project_name, mod))
114
115if __name__ == '__main__':
116 main()
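
An illustrative invocation; the script expects to live in the oslo-incubator tools directory, looks for sibling project checkouts three directories up, and takes "openstack/nova"-style subdirectories as positional arguments (per its own help text). The second project name is just an example:

  $ cd oslo-incubator/tools
  $ python find_used_deleted_modules.py openstack/nova openstack/cinder
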
diff --git a/install_venv.py b/install_venv.py
deleted file mode 100644
index 15d02ee..0000000
--- a/install_venv.py
+++ /dev/null
@@ -1,70 +0,0 @@
1# Copyright 2010 United States Government as represented by the
2# Administrator of the National Aeronautics and Space Administration.
3# All Rights Reserved.
4#
5# Copyright 2010 OpenStack Foundation
6# Copyright 2013 IBM Corp.
7#
8# Licensed under the Apache License, Version 2.0 (the "License"); you may
9# not use this file except in compliance with the License. You may obtain
10# a copy of the License at
11#
12# http://www.apache.org/licenses/LICENSE-2.0
13#
14# Unless required by applicable law or agreed to in writing, software
15# distributed under the License is distributed on an "AS IS" BASIS, WITHOUT
16# WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. See the
17# License for the specific language governing permissions and limitations
18# under the License.
19
20import os
21import sys
22
23import install_venv_common as install_venv # noqa
24
25
26def print_help(venv, root):
27 help = """
28 OpenStack development environment setup is complete.
29
30 OpenStack development uses virtualenv to track and manage Python
31 dependencies while in development and testing.
32
33 To activate the OpenStack virtualenv for the extent of your current shell
34 session you can run:
35
36 $ source %s/bin/activate
37
38 Or, if you prefer, you can run commands in the virtualenv on a case by case
39 basis by running:
40
41 $ %s/tools/with_venv.sh <your command>
42
43 Also, make test will automatically use the virtualenv.
44 """
45 print(help % (venv, root))
46
47
48def main(argv):
49 root = os.path.dirname(os.path.dirname(os.path.realpath(__file__)))
50
51 if os.environ.get('TOOLS_PATH'):
52 root = os.environ['TOOLS_PATH']
53 venv = os.path.join(root, '.venv')
54 if os.environ.get('VENV'):
55 venv = os.environ['VENV']
56
57 pip_requires = os.path.join(root, 'requirements.txt')
58 test_requires = os.path.join(root, 'test-requirements.txt')
59 py_version = "python%s.%s" % (sys.version_info[0], sys.version_info[1])
60 project = 'OpenStack'
61 install = install_venv.InstallVenv(root, venv, pip_requires, test_requires,
62 py_version, project)
63 options = install.parse_args(argv)
64 install.check_dependencies()
65 install.create_virtualenv(no_site_packages=options.no_site_packages)
66 install.install_dependencies()
67 print_help(venv, root)
68
69if __name__ == '__main__':
70 main(sys.argv)
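
The workflow described by the help text above, using the script's default locations (root is the parent of the tools directory, the venv is .venv):

  $ python tools/install_venv.py
  $ source .venv/bin/activate            # or: tools/with_venv.sh <your command>
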
diff --git a/install_venv_common.py b/install_venv_common.py
deleted file mode 100644
index d132478..0000000
--- a/install_venv_common.py
+++ /dev/null
@@ -1,165 +0,0 @@
1# Copyright 2013 OpenStack Foundation
2# Copyright 2013 IBM Corp.
3#
4# Licensed under the Apache License, Version 2.0 (the "License"); you may
5# not use this file except in compliance with the License. You may obtain
6# a copy of the License at
7#
8# http://www.apache.org/licenses/LICENSE-2.0
9#
10# Unless required by applicable law or agreed to in writing, software
11# distributed under the License is distributed on an "AS IS" BASIS, WITHOUT
12# WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. See the
13# License for the specific language governing permissions and limitations
14# under the License.
15
16"""Provides methods needed by installation script for OpenStack development
17virtual environments.
18
19Synced in from openstack-common
20"""
21
22from __future__ import print_function
23
24import optparse
25import os
26import subprocess
27import sys
28
29
30class InstallVenv(object):
31
32 def __init__(self, root, venv, requirements,
33 test_requirements, py_version,
34 project):
35 self.root = root
36 self.venv = venv
37 self.requirements = requirements
38 self.test_requirements = test_requirements
39 self.py_version = py_version
40 self.project = project
41
42 def die(self, message, *args):
43 print(message % args, file=sys.stderr)
44 sys.exit(1)
45
46 def run_command_with_code(self, cmd, redirect_output=True,
47 check_exit_code=True):
48 """Runs a command in an out-of-process shell.
49
50 Returns the output of that command. Working directory is self.root.
51 """
52 if redirect_output:
53 stdout = subprocess.PIPE
54 else:
55 stdout = None
56
57 proc = subprocess.Popen(cmd, cwd=self.root, stdout=stdout)
58 output = proc.communicate()[0]
59 if check_exit_code and proc.returncode != 0:
60 self.die('Command "%s" failed.\n%s', ' '.join(cmd), output)
61 return (output, proc.returncode)
62
63 def run_command(self, cmd, redirect_output=True, check_exit_code=True):
64 return self.run_command_with_code(cmd, redirect_output,
65 check_exit_code)[0]
66
67 def get_distro(self):
68 if (os.path.exists('/etc/fedora-release') or
69 os.path.exists('/etc/redhat-release')):
70 return Fedora(
71 self.root, self.venv, self.requirements,
72 self.test_requirements, self.py_version, self.project)
73 else:
74 return Distro(
75 self.root, self.venv, self.requirements,
76 self.test_requirements, self.py_version, self.project)
77
78 def check_dependencies(self):
79 self.get_distro().install_virtualenv()
80
81 def create_virtualenv(self, no_site_packages=True):
82 """Creates the virtual environment and installs PIP.
83
84 Creates the virtual environment and installs PIP only into the
85 virtual environment.
86 """
87 if not os.path.isdir(self.venv):
88 print('Creating venv...', end=' ')
89 if no_site_packages:
90 self.run_command(['virtualenv', '-q', '--no-site-packages',
91 self.venv])
92 else:
93 self.run_command(['virtualenv', '-q', self.venv])
94 print('done.')
95 else:
96 print("venv already exists...")
97 pass
98
99 def pip_install(self, *args):
100 self.run_command(['tools/with_venv.sh',
101 'pip', 'install', '--upgrade'] + list(args),
102 redirect_output=False)
103
104 def install_dependencies(self):
105 print('Installing dependencies with pip (this can take a while)...')
106
107 # First things first, make sure our venv has the latest pip and
108 # setuptools and pbr
109 self.pip_install('pip>=1.4')
110 self.pip_install('setuptools')
111 self.pip_install('pbr')
112
113 self.pip_install('-r', self.requirements, '-r', self.test_requirements)
114
115 def parse_args(self, argv):
116 """Parses command-line arguments."""
117 parser = optparse.OptionParser()
118 parser.add_option('-n', '--no-site-packages',
119 action='store_true',
120 help="Do not inherit packages from global Python "
121 "install.")
122 return parser.parse_args(argv[1:])[0]
123
124
125class Distro(InstallVenv):
126
127 def check_cmd(self, cmd):
128 return bool(self.run_command(['which', cmd],
129 check_exit_code=False).strip())
130
131 def install_virtualenv(self):
132 if self.check_cmd('virtualenv'):
133 return
134
135 if self.check_cmd('easy_install'):
136 print('Installing virtualenv via easy_install...', end=' ')
137 if self.run_command(['easy_install', 'virtualenv']):
138 print('Succeeded')
139 return
140 else:
141 print('Failed')
142
143 self.die('ERROR: virtualenv not found.\n\n%s development'
144 ' requires virtualenv, please install it using your'
145 ' favorite package management tool' % self.project)
146
147
148class Fedora(Distro):
149 """This covers all Fedora-based distributions.
150
151 Includes: Fedora, RHEL, CentOS, Scientific Linux
152 """
153
154 def check_pkg(self, pkg):
155 return self.run_command_with_code(['rpm', '-q', pkg],
156 check_exit_code=False)[1] == 0
157
158 def install_virtualenv(self):
159 if self.check_cmd('virtualenv'):
160 return
161
162 if not self.check_pkg('python-virtualenv'):
163 self.die("Please install 'python-virtualenv'.")
164
165 super(Fedora, self).install_virtualenv()
diff --git a/lint.py b/lint.py
deleted file mode 100644
index cd4c5e7..0000000
--- a/lint.py
+++ /dev/null
@@ -1,30 +0,0 @@
1# Copyright (c) 2013 Intel Corporation.
2# All Rights Reserved.
3#
4# Licensed under the Apache License, Version 2.0 (the "License"); you may
5# not use this file except in compliance with the License. You may obtain
6# a copy of the License at
7#
8# http://www.apache.org/licenses/LICENSE-2.0
9#
10# Unless required by applicable law or agreed to in writing, software
11# distributed under the License is distributed on an "AS IS" BASIS, WITHOUT
12# WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. See the
13# License for the specific language governing permissions and limitations
14# under the License.
15#
16
17import sys
18
19from pylint import lint
20
21
22ENABLED_PYLINT_MSGS = ['W0611']
23
24
25def main(dirpath):
26 enable_opt = '--enable=%s' % ','.join(ENABLED_PYLINT_MSGS)
27 lint.Run(['--reports=n', '--disable=all', enable_opt, dirpath])
28
29if __name__ == '__main__':
30 main(sys.argv[1])
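
A sketch of the intended call; only pylint message W0611 (unused imports) is enabled, and the target directory is a placeholder:

  $ python lint.py mylib/
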
diff --git a/new_core.py b/new_core.py
deleted file mode 100755
index 1d19968..0000000
--- a/new_core.py
+++ /dev/null
@@ -1,96 +0,0 @@
1#!/usr/bin/env python
2#
3# Licensed under the Apache License, Version 2.0 (the "License"); you may
4# not use this file except in compliance with the License. You may obtain
5# a copy of the License at
6#
7# http://www.apache.org/licenses/LICENSE-2.0
8#
9# Unless required by applicable law or agreed to in writing, software
10# distributed under the License is distributed on an "AS IS" BASIS, WITHOUT
11# WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. See the
12# License for the specific language governing permissions and limitations
13# under the License.
14
15"""
16New core email content generator.
17"""
18
19import argparse
20
21import jinja2
22import parawrap
23
24
25CORE_TPL = """
26Greetings all stackers,
27
28I propose that we add {{FULL_NAME}}[1] to the {{TEAM_CORE}}[2] team.
29
30{{FIRST_NAME}} has been actively contributing to {{TEAM}} for a while now, both
31in helping make {{TEAM}} better via code contribution(s) and by helping with
32the review load when {{HE_SHE_LOWER}} can. {{HE_SHE}} has provided quality
33reviews and is doing an awesome job with the various {{TEAM}} concepts and
34helping make {{TEAM}} the best it can be!
35
36Overall I think {{HE_SHE_LOWER}} would make a great addition to the core
37review team.
38
39Please respond with +1/-1.
40
41Thanks much!
42
43- {{ME}}
44"""
45CORE_TPL = CORE_TPL.strip()
46
47
48def expand_template(contents, params):
49 if not params:
50 params = {}
51 tpl = jinja2.Template(source=contents, undefined=jinja2.StrictUndefined)
52 return tpl.render(**params)
53
54
55def generate_email(args):
56 params = {
57 'FULL_NAME': args.who,
58 'HE_SHE': args.gender.title(),
59 'TEAM_CORE': '%s-core' % args.team,
60 'ME': args.sender,
61 }
62 params['TEAM'] = args.team.strip().lower()
63 params['HE_SHE_LOWER'] = params['HE_SHE'].lower()
64 params['FIRST_NAME'] = params['FULL_NAME'].split()[0]
65 contents = expand_template(CORE_TPL, params)
66 contents = parawrap.fill(contents.strip(), width=75)
67 # Put the links on after so they are not affected by the wrapping...
68 links = [
69 'https://launchpad.net/~%s' % args.who_launchpad_id,
70 'https://launchpad.net/%s' % params['TEAM'],
71 ]
72 contents += "\n\n"
73 for i, link in enumerate(links, 1):
74 contents += "[%s] %s\n" % (i, link)
75 return contents.rstrip()
76
77
78def main():
79 parser = argparse.ArgumentParser(description=__doc__)
80 parser.add_argument('--adding-who', action="store", dest="who",
81 required=True, metavar="<full-name>")
82 parser.add_argument('--adding-who-launchpad-id', action="store",
83 dest="who_launchpad_id",
84 required=True, metavar="<launchpad-id>")
85 parser.add_argument('--from-who', action="store", dest="sender",
86 metavar="<full-name>", required=True)
87 parser.add_argument('--team', action="store", dest="team",
88 metavar="<team>", required=True)
89 parser.add_argument('--gender', action="store", dest="gender",
90 metavar="<he/she>", required=True)
91 args = parser.parse_args()
92 print(generate_email(args))
93
94
95if __name__ == '__main__':
96 main()
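
An example invocation built from the argparse options above; all values are made up:

  $ python new_core.py --adding-who "Jane Doe" --adding-who-launchpad-id jdoe \
        --from-who "Some Sender" --team oslo --gender she
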
diff --git a/new_core_analyzer.py b/new_core_analyzer.py
deleted file mode 100644
index 0d6adb6..0000000
--- a/new_core_analyzer.py
+++ /dev/null
@@ -1,177 +0,0 @@
1# Licensed under the Apache License, Version 2.0 (the "License"); you may
2# not use this file except in compliance with the License. You may obtain
3# a copy of the License at
4#
5# http://www.apache.org/licenses/LICENSE-2.0
6#
7# Unless required by applicable law or agreed to in writing, software
8# distributed under the License is distributed on an "AS IS" BASIS, WITHOUT
9# WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. See the
10# License for the specific language governing permissions and limitations
11# under the License.
12
13import collections
14import contextlib
15import datetime
16import os
17import sys
18
19import tabulate
20
21from gitinspector.changes import Changes
22from gitinspector.metrics import MetricsLogic
23
24Repository = collections.namedtuple('Repository', 'name,location')
25
26CORE_SKIPS = frozenset([
27 u'Julien Danjou',
28 u'Davanum Srinivas',
29 u'Ben Nemec',
30 u'Joshua Harlow',
31 u'Brant Knudson',
32 u'Doug Hellmann',
33 u'Victor Stinner',
34 u'Michael Still',
35 u'Flavio Percoco',
36 u'Mehdi Abaakouk',
37 u'Robert Collins',
38])
39EMAIL_SKIPS = frozenset([
40 'openstack-infra@lists.openstack.org',
41 'flaper87@gmail.com',
42 'fpercoco@redhat.com',
43])
44OLDEST_COMMIT_YEAR = 2014
45
46
47@contextlib.contextmanager
48def auto_cwd(target_dir):
49 old_dir = os.getcwd()
50 if old_dir == target_dir:
51 yield
52 else:
53 os.chdir(target_dir)
54 try:
55 yield
56 finally:
57 os.chdir(old_dir)
58
59
60def new_core_compare(c1, c2):
61 # Sort by insertions, deletions...
62 c1_info = (c1[3], c1[4], c1[5])
63 c2_info = (c2[3], c2[4], c2[5])
64 if c1_info == c2_info:
65 return 0
66 if c1_info < c2_info:
67 return -1
68 else:
69 return 1
70
71
72def should_discard(change_date, author_name, author_email, author_info):
73 if author_name in CORE_SKIPS:
74 return True
75 if author_email in EMAIL_SKIPS:
76 return True
77 if change_date is not None:
78 if change_date.year < OLDEST_COMMIT_YEAR:
79 return True
80 return False
81
82
83def dump_changes(repo):
84 with auto_cwd(repo.location):
85 print("Analyzing repo %s (%s):" % (repo.name, repo.location))
86 print("Please wait...")
87 Changes.authors.clear()
88 Changes.authors_dateinfo.clear()
89 Changes.authors_by_email.clear()
90 Changes.emails_by_author.clear()
91
92 changes = Changes(repo)
93 # This is needed to flush out changes progress message...
94 sys.stdout.write("\n")
95 # Force population of this info...
96 changes_per_author = changes.get_authordateinfo_list()
97 just_authors = changes.get_authorinfo_list()
98 better_changes_per_author = {}
99 maybe_new_cores = {}
100 for c in changes.get_commits():
101 change_date = c.timestamp
102 author_name = c.author
103 author_email = c.email
104 change_date = datetime.datetime.fromtimestamp(int(change_date))
105 try:
106 author_info = changes.authors[author_name]
107 better_changes_per_author[(change_date, author_name)] = author_info
108 except KeyError:
109 pass
110 for (change_date, author_name) in better_changes_per_author.keys():
111 author_email = changes.get_latest_email_by_author(author_name)
112 author_info = better_changes_per_author[(change_date, author_name)]
113 author_info.email = author_email
114 if not should_discard(change_date, author_name, author_email, author_info):
115 if author_name in maybe_new_cores:
116 existing_info = maybe_new_cores[author_name]
117 if existing_info[2] < change_date:
118 existing_info[2] = change_date
119 else:
120 maybe_core = [
121 author_name.encode("ascii", errors='replace'),
122 author_email,
123 change_date,
124 author_info.insertions,
125 author_info.deletions,
126 author_info.commits,
127 ]
128 maybe_new_cores[author_name] = maybe_core
129 if maybe_new_cores:
130 print("%s potential new cores found!!" % len(maybe_new_cores))
131 tmp_maybe_new_cores = sorted(list(maybe_new_cores.values()),
132 cmp=new_core_compare, reverse=True)
133 headers = ['Name', 'Email', 'Last change made', 'Insertions', 'Deletions', 'Commits']
134 print(tabulate.tabulate(tmp_maybe_new_cores, headers=headers,
135 tablefmt="grid"))
136 else:
137 print("No new cores found!!")
138 return changes.authors.copy()
139
140
141def main(repos):
142 raw_repos = [os.path.abspath(p) for p in repos]
143 parsed_repos = []
144 for repo in raw_repos:
145 parsed_repos.append(Repository(os.path.basename(repo), repo))
146 all_authors = []
147 for repo in parsed_repos:
148 all_authors.append(dump_changes(repo))
149 if all_authors:
150 print("Combined changes of %s repos:" % len(parsed_repos))
151 maybe_new_cores = {}
152 for repo_authors in all_authors:
153 for author_name, author_info in repo_authors.items():
154 change_date = datetime.datetime.now()
155 if not should_discard(None, author_name, author_info.email, author_info):
156 if author_name in maybe_new_cores:
157 prior_author_info = maybe_new_cores[author_name]
158 prior_author_info[3] = prior_author_info[3] + author_info.insertions
159 prior_author_info[4] = prior_author_info[4] + author_info.deletions
160 prior_author_info[5] = prior_author_info[5] + author_info.commits
161 else:
162 maybe_new_cores[author_name] = [
163 author_name.encode("ascii", errors='replace'),
164 author_info.email,
165 u"N/A",
166 author_info.insertions,
167 author_info.deletions,
168 author_info.commits,
169 ]
170 tmp_maybe_new_cores = sorted(list(maybe_new_cores.values()),
171 cmp=new_core_compare, reverse=True)
172 headers = ['Name', 'Email', 'Last change made', 'Insertions', 'Deletions', 'Commits']
173 print(tabulate.tabulate(tmp_maybe_new_cores, headers=headers,
174 tablefmt="grid"))
175
176if __name__ == '__main__':
177 main(sys.argv[1:])
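
analyze-oslo.sh above feeds this module a list of locally cloned repositories; run by hand it looks roughly like the following, with the paths being whatever checkouts exist locally:

  $ python new_core_analyzer.py openstack/oslo.config openstack/oslo.log > oslo_reports.txt
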
diff --git a/new_core_recruit.py b/new_core_recruit.py
deleted file mode 100755
index 742d07e..0000000
--- a/new_core_recruit.py
+++ /dev/null
@@ -1,70 +0,0 @@
1#!/usr/bin/env python
2#
3# Licensed under the Apache License, Version 2.0 (the "License"); you may
4# not use this file except in compliance with the License. You may obtain
5# a copy of the License at
6#
7# http://www.apache.org/licenses/LICENSE-2.0
8#
9# Unless required by applicable law or agreed to in writing, software
10# distributed under the License is distributed on an "AS IS" BASIS, WITHOUT
11# WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. See the
12# License for the specific language governing permissions and limitations
13# under the License.
14
15import random
16import sys
17
18import jinja2
19import parawrap
20
21
22def expand_template(contents, params):
23 if not params:
24 params = {}
25 tpl = jinja2.Template(source=contents, undefined=jinja2.StrictUndefined)
26 return tpl.render(**params)
27
28
29chosen_how = [
30 'selected',
31 'picked',
32 'targeted',
33]
34new_oslo_core_tpl = """
35Hi {{firstname}} {{lastname}},
36
37You have been {{chosen_how}} to be a new {{project}} core (if you are
38willing to accept this mission). We have been watching your commits and
39reviews and have noticed that you may be interested in a core position
40that would be granted to you (if you are willing to accept the
41responsibility of being a new core member[1] in project {{project}}).
42
43What do you think, are you able (and willing) to accept?
44
45If you have any questions, please feel free to respond or jump on
46freenode and chat with the team on channel #openstack-oslo (one of the
47other cores in oslo is usually around).
48
49This message will self-destruct in 5 seconds.
50
51Sincerely,
52
53The Oslo Team
54
55[1] http://docs.openstack.org/infra/manual/core.html
56"""
57firstname = sys.argv[1]
58lastname = sys.argv[2]
59tpl_args = {
60 'firstname': firstname,
61 'project': sys.argv[3],
62 'lastname': lastname,
63 'firstname_title': firstname.title(),
64 'lastname_title': lastname.title(),
65 'chosen_how': random.choice(chosen_how),
66}
67
68tpl_value = expand_template(new_oslo_core_tpl.lstrip(), tpl_args)
69tpl_value = parawrap.fill(tpl_value)
70print(tpl_value)
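
The script reads three positional arguments (first name, last name, project), so a run looks like this; the names are placeholders:

  $ python new_core_recruit.py jane doe oslo.config
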
diff --git a/remove-namespace-packages-project.txt b/remove-namespace-packages-project.txt
deleted file mode 100644
index 45399af..0000000
--- a/remove-namespace-packages-project.txt
+++ /dev/null
@@ -1,18 +0,0 @@
1Drop use of 'oslo' namespace package
2
3The Oslo libraries have moved all of their code out of the 'oslo'
4namespace package into per-library packages. The namespace package was
5retained during kilo for backwards compatibility, but will be removed by
6the liberty-2 milestone. This change removes the use of the namespace
7package, replacing it with the new package names.
8
9The patches in the libraries will be put on hold until application
10patches have landed, or L2, whichever comes first. At that point, new
11versions of the libraries without namespace packages will be released as
12a major version update.
13
14Please merge this patch, or an equivalent, before L2 to avoid problems
15with those library releases.
16
17Blueprint: remove-namespace-packages
18https://blueprints.launchpad.net/oslo-incubator/+spec/remove-namespace-packages
diff --git a/run_tests_common.sh b/run_tests_common.sh
deleted file mode 100755
index 6ccb7bd..0000000
--- a/run_tests_common.sh
+++ /dev/null
@@ -1,248 +0,0 @@
1#!/bin/bash
2
3set -eu
4
5function usage {
6 echo "Usage: $0 [OPTION]..."
7 echo "Run project's test suite(s)"
8 echo ""
9 echo " -V, --virtual-env Always use virtualenv. Install automatically if not present."
10 echo " -N, --no-virtual-env Don't use virtualenv. Run tests in local environment."
11 echo " -s, --no-site-packages Isolate the virtualenv from the global Python environment."
12 echo " -r, --recreate-db Recreate the test database (deprecated, as this is now the default)."
13 echo " -n, --no-recreate-db Don't recreate the test database."
14 echo " -f, --force Force a clean re-build of the virtual environment."
15 echo " Useful when dependencies have been added."
16 echo " -u, --update Update the virtual environment with any newer package versions."
17 echo " -p, --pep8 Just run PEP8 and HACKING compliance check."
18 echo " -P, --no-pep8 Don't run static code checks."
19 echo " -c, --coverage Generate coverage report."
20 echo " -d, --debug Run tests with testtools instead of testr."
21 echo " This allows you to use the debugger."
22 echo " -h, --help Print this usage message."
23 echo " --hide-elapsed Don't print the elapsed time for each test along with slow test list."
24 echo " --virtual-env-path <path> Location of the virtualenv directory."
25 echo " Default: \$(pwd)"
26 echo " --virtual-env-name <name> Name of the virtualenv directory."
27 echo " Default: .venv"
28 echo " --tools-path <dir> Location of the tools directory."
29 echo " Default: \$(pwd)"
30 echo ""
31 echo "Note: with no options specified, the script will try to run the tests in a virtual environment,"
32 echo " If no virtualenv is found, the script will ask if you would like to create one. If you "
33 echo " prefer to run tests NOT in a virtual environment, simply pass the -N option."
34 exit
35}
36
37function process_options {
38 i=1
39 while [ $i -le $# ]; do
40 case "${!i}" in
41 -h|--help) usage;;
42 -V|--virtual-env) ALWAYS_VENV=1; NEVER_VENV=0;;
43 -N|--no-virtual-env) ALWAYS_VENV=0; NEVER_VENV=1;;
44 -s|--no-site-packages) NO_SITE_PACKAGES=1;;
45 -r|--recreate-db) RECREATE_DB=1;;
46 -n|--no-recreate-db) RECREATE_DB=0;;
47 -f|--force) FORCE=1;;
48 -u|--update) UPDATE=1;;
49 -p|--pep8) JUST_PEP8=1;;
50 -P|--no-pep8) NO_PEP8=1;;
51 -c|--coverage) COVERAGE=1;;
52 -d|--debug) DEBUG=1;;
53 --virtual-env-path)
54 (( i++ ))
55 VENV_PATH=${!i}
56 ;;
57 --virtual-env-name)
58 (( i++ ))
59 VENV_DIR=${!i}
60 ;;
61 --tools-path)
62 (( i++ ))
63 TOOLS_PATH=${!i}
64 ;;
65 -*) TESTOPTS="$TESTOPTS ${!i}";;
66 *) TESTRARGS="$TESTRARGS ${!i}"
67 esac
68 (( i++ ))
69 done
70}
71
72
73TOOLS_PATH=${TOOLS_PATH:-${PWD}}
74VENV_PATH=${VENV_PATH:-${PWD}}
75VENV_DIR=${VENV_DIR:-.venv}
76WITH_VENV=${TOOLS_PATH}/tools/with_venv.sh
77
78ALWAYS_VENV=0
79NEVER_VENV=0
80FORCE=0
81NO_SITE_PACKAGES=1
82INSTALLVENVOPTS=
83TESTRARGS=
84TESTOPTS=
85WRAPPER=""
86JUST_PEP8=0
87NO_PEP8=0
88COVERAGE=0
89DEBUG=0
90RECREATE_DB=1
91UPDATE=0
92
93LANG=en_US.UTF-8
94LANGUAGE=en_US:en
95LC_ALL=C
96
97process_options $@
98# Make our paths available to other scripts we call
99export VENV_PATH
100export TOOLS_PATH
101export VENV_DIR
102export WITH_VENV
103export VENV=${VENV_PATH}/${VENV_DIR}
104
105
106function run_tests {
107 # Clean up *.pyc files
108 ${WRAPPER} find . -type f -name "*.pyc" -delete
109
110 if [ ${DEBUG} -eq 1 ]; then
111 if [ "${TESTOPTS}" = "" ] && [ "${TESTRARGS}" = "" ]; then
112 # Default to running all tests if a specific test is not
113 # provided.
114 TESTRARGS="discover ./${TESTS_DIR}"
115 fi
116 ${WRAPPER} python -m testtools.run ${TESTOPTS} ${TESTRARGS}
117
118 # Short circuit because all of the testr and coverage stuff
119 # below does not make sense when running testtools.run for
120 # debugging purposes.
121 return $?
122 fi
123
124 if [ ${COVERAGE} -eq 1 ]; then
125 TESTRTESTS="${TESTRTESTS} --coverage"
126 else
127 TESTRTESTS="${TESTRTESTS}"
128 fi
129
130 # Just run the test suites in the current environment
131 set +e
132 TESTRARGS=`echo "${TESTRARGS}" | sed -e's/^\s*\(.*\)\s*$/\1/'`
133
134 if [ ${WORKERS_COUNT} -ne 0 ]; then
135 TESTRTESTS="${TESTRTESTS} --testr-args='--concurrency=${WORKERS_COUNT} --subunit ${TESTOPTS} ${TESTRARGS}'"
136 else
137 TESTRTESTS="${TESTRTESTS} --testr-args='--subunit ${TESTOPTS} ${TESTRARGS}'"
138 fi
139
140 if [ setup.cfg -nt ${EGG_INFO_FILE} ]; then
141 ${WRAPPER} python setup.py egg_info
142 fi
143
144 echo "Running \`${WRAPPER} ${TESTRTESTS}\`"
145 if ${WRAPPER} which subunit-2to1 > /dev/null 2>&1; then
146 # subunit-2to1 is present, testr subunit stream should be in version 2
147 # format. Convert to version one before colorizing.
148 bash -c "${WRAPPER} ${TESTRTESTS} | ${WRAPPER} subunit-2to1 | ${WRAPPER} ${TOOLS_PATH}/tools/colorizer.py"
149 else
150 bash -c "${WRAPPER} ${TESTRTESTS} | ${WRAPPER} ${TOOLS_PATH}/tools/colorizer.py"
151 fi
152 RESULT=$?
153 set -e
154
155 copy_subunit_log
156
157 if [ $COVERAGE -eq 1 ]; then
158 echo "Generating coverage report in covhtml/"
159 ${WRAPPER} coverage combine
160 # Don't compute coverage for the common code (it is tested elsewhere)
161 # unless we are running in the `oslo-incubator` project itself.
162 if [ ${OMIT_OSLO_FROM_COVERAGE} -eq 0 ]; then
163 OMIT_OSLO=""
164 else
165 OMIT_OSLO="--omit=${PROJECT_NAME}/openstack/common/*"
166 fi
167 ${WRAPPER} coverage html --include="${PROJECT_NAME}/*" ${OMIT_OSLO} -d covhtml -i
168 fi
169
170 return ${RESULT}
171}
172
173function copy_subunit_log {
174 LOGNAME=`cat .testrepository/next-stream`
175 LOGNAME=$((${LOGNAME} - 1))
176 LOGNAME=".testrepository/${LOGNAME}"
177 cp ${LOGNAME} subunit.log
178}
179
180function run_pep8 {
181 echo "Running flake8 ..."
182 bash -c "${WRAPPER} flake8"
183}
184
185
186TESTRTESTS="lockutils-wrapper stestr"
187
188if [ ${NO_SITE_PACKAGES} -eq 1 ]; then
189 INSTALLVENVOPTS="--no-site-packages"
190fi
191
192if [ ${NEVER_VENV} -eq 0 ]; then
193 # Remove the virtual environment if -f or --force used
194 if [ ${FORCE} -eq 1 ]; then
195 echo "Cleaning virtualenv..."
196 rm -rf ${VENV}
197 fi
198
199 # Update the virtual environment if -u or --update used
200 if [ ${UPDATE} -eq 1 ]; then
201 echo "Updating virtualenv..."
202 python ${TOOLS_PATH}/tools/install_venv.py ${INSTALLVENVOPTS}
203 fi
204
205 if [ -e ${VENV} ]; then
206 WRAPPER="${WITH_VENV}"
207 else
208 if [ ${ALWAYS_VENV} -eq 1 ]; then
209 # Automatically install the virtualenv
210 python ${TOOLS_PATH}/tools/install_venv.py ${INSTALLVENVOPTS}
211 WRAPPER="${WITH_VENV}"
212 else
213 echo -e "No virtual environment found...create one? (Y/n) \c"
214 read USE_VENV
215 if [ "x${USE_VENV}" = "xY" -o "x${USE_VENV}" = "x" -o "x${USE_VENV}" = "xy" ]; then
216 # Install the virtualenv and run the test suite in it
217 python ${TOOLS_PATH}/tools/install_venv.py ${INSTALLVENVOPTS}
218 WRAPPER=${WITH_VENV}
219 fi
220 fi
221 fi
222fi
223
224# Delete old coverage data from previous runs
225if [ ${COVERAGE} -eq 1 ]; then
226 ${WRAPPER} coverage erase
227fi
228
229if [ ${JUST_PEP8} -eq 1 ]; then
230 run_pep8
231 exit
232fi
233
234if [ ${RECREATE_DB} -eq 1 ]; then
235 rm -f tests.sqlite
236fi
237
238run_tests
239
240# NOTE(sirp): we only want to run pep8 when we're running the full-test suite,
241# not when we're running tests individually. To handle this, we need to
242# distinguish between options (testropts), which begin with a '-', and
243# arguments (testrargs).
244if [ -z "${TESTRARGS}" ]; then
245 if [ ${NO_PEP8} -eq 0 ]; then
246 run_pep8
247 fi
248fi
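run_tests_common.sh is not self-contained: it runs under `set -eu` and reads variables such as PROJECT_NAME, TESTS_DIR, WORKERS_COUNT, EGG_INFO_FILE and OMIT_OSLO_FROM_COVERAGE that the calling project is expected to define. A minimal sketch of a per-project run_tests.sh wrapper, with purely illustrative names, values and paths, could look like this:

    #!/bin/bash
    # Hypothetical per-project wrapper; every name and value below is
    # illustrative, not taken from any real project.
    export PROJECT_NAME=oslo.example                     # coverage --include filter
    export TESTS_DIR=tests                               # -d/--debug discover fallback
    export WORKERS_COUNT=0                               # 0 = let testr pick the concurrency
    export EGG_INFO_FILE=oslo.example.egg-info/PKG-INFO  # staleness check against setup.cfg
    export OMIT_OSLO_FROM_COVERAGE=1                     # exclude openstack/common/* from coverage
    # Hand option parsing, venv handling and the test/coverage runs to the
    # shared script, e.g. `./run_tests.sh -V -c`.
    exec "$(dirname "$0")/tools/run_tests_common.sh" "$@"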
diff --git a/virtual_sprint.py b/virtual_sprint.py
deleted file mode 100644
index c6c4371..0000000
--- a/virtual_sprint.py
+++ /dev/null
@@ -1,80 +0,0 @@
1#!/usr/bin/env python
2#
3# Licensed under the Apache License, Version 2.0 (the "License"); you may
4# not use this file except in compliance with the License. You may obtain
5# a copy of the License at
6#
7# http://www.apache.org/licenses/LICENSE-2.0
8#
9# Unless required by applicable law or agreed to in writing, software
10# distributed under the License is distributed on an "AS IS" BASIS, WITHOUT
11# WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. See the
12# License for the specific language governing permissions and limitations
13# under the License.
14
15import sys
16
17import delorean
18import jinja2
19import parawrap
20
21
22def expand_template(contents, params):
23 if not params:
24 params = {}
25 tpl = jinja2.Template(source=contents, undefined=jinja2.StrictUndefined)
26 return tpl.render(**params)
27
28
29TPL = """
30Hi everyone,
31
32The OpenStack {{ team }} team will be hosting a virtual sprint in
33the Freenode IRC channel #{{ channel }} for the {{ for }}
34on {{ when }} starting at {{ starts_at }} and going for ~{{ duration }} hours.
35
36The goal of this sprint is to work on any open reviews, documentation, or
37other integration questions, development, and so on, so that we can help
38move the {{ for }} forward at a good pace.
39
40Live version of the current documentation is available here:
41
42{{ docs }}
43
44The code itself lives in the openstack/{{ project }} repository.
45
46{{ git_tree }}
47
48Please feel free to join if interested, curious, or able.
49
50Much appreciated,
51
52{{ author }}
53"""
54
55# Example:
56#
57# python tools/virtual_sprint.py "taskflow" "next tuesday" "Joshua Harlow"
58if len(sys.argv) != 4:
59 print("Usage: %s <project> <when> <author>" % sys.argv[0])
60 sys.exit(1)
61
62# Something like 'next tuesday' is expected...
63d = delorean.Delorean()
64when = getattr(d, sys.argv[2].replace(" ", "_"))
65project = sys.argv[1]
66author = sys.argv[3]
67params = {
68 'team': 'oslo',
69 'project': project,
70 'channel': 'openstack-oslo',
71 'docs': 'http://docs.openstack.org/developer/%s/' % project,
72 'when': when().datetime.strftime('%A %m-%d-%Y'),
73 'starts_at': '16:00 UTC',
74 'duration': 8,
75 'author': author,
76 'git_tree': 'http://git.openstack.org/cgit/openstack/%s/tree' % project,
77}
78params['for'] = params['project'] + ' ' + 'subproject'
79for line in parawrap.wrap(expand_template(TPL.strip(), params)):
80 print(line)
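virtual_sprint.py turns its second argument into a delorean attribute lookup ("next tuesday" becomes d.next_tuesday), renders the announcement template with the resulting date, and prints it to stdout. A usage sketch, reusing the example from the script's own comment (the output redirection is illustrative):

    # Announce a sprint for the taskflow subproject next Tuesday and save
    # the rendered e-mail body for later sending.
    python tools/virtual_sprint.py "taskflow" "next tuesday" "Joshua Harlow" > announcement.txt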
diff --git a/with_venv.sh b/with_venv.sh
deleted file mode 100755
index 165c883..0000000
--- a/with_venv.sh
+++ /dev/null
@@ -1,6 +0,0 @@
1#!/bin/bash
2TOOLS_PATH=${TOOLS_PATH:-$(dirname $0)/../}
3VENV_PATH=${VENV_PATH:-${TOOLS_PATH}}
4VENV_DIR=${VENV_DIR:-.venv}
5VENV=${VENV:-${VENV_PATH}/${VENV_DIR}}
6source ${VENV}/bin/activate && "$@"
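with_venv.sh simply activates the configured virtualenv and then executes whatever command follows it, which is how run_tests_common.sh wraps its test and coverage invocations. A usage sketch, assuming the script is installed as tools/with_venv.sh and the commands shown are only examples:

    # Run one-off commands inside the project's virtualenv without activating
    # it in the current shell; VENV_PATH/VENV_DIR fall back to the defaults above.
    tools/with_venv.sh pip freeze
    TOOLS_PATH=. VENV_DIR=.venv tools/with_venv.sh python -c 'import sys; print(sys.prefix)'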