Merge "Remove dead tooling"

This commit is contained in:
Zuul 2018-10-12 15:19:04 +00:00 committed by Gerrit Code Review
commit 4315e28baf
22 changed files with 0 additions and 1953 deletions

View File

@ -1,90 +0,0 @@
#!/bin/bash
# This requires gitinspector to be installed
# it can be gotten from:
#
# - https://pypi.org/project/gitinspector/0.3.2
# - https://github.com/ejwa/gitinspector
# Check out a new copy of a repository and set it up to be a useful
# local copy.
function clone_new {
    typeset repo="$1"
    typeset url="$2"
    echo
    echo "Cloning $repo"
    # Quote the expansions and propagate git's exit status so callers
    # can detect failed clones (previously this always returned 0,
    # which hid clone errors from the RC check in the main loop).
    git clone "$url" "$repo"
}
# Determine the current branch of a local repository.
# Determine the current branch of a local repository.
# Arguments: $1 - path to the repository working tree.
# Outputs: the abbreviated branch name (e.g. "master") on stdout.
function current_branch {
    # Quote the path so repositories with spaces in their names work.
    (cd "$1" && git rev-parse --abbrev-ref HEAD)
}
# Update an existing copy of a repository, including all remotes and
# pulling into the local master branch if we're on that branch
# already.
function update_existing {
    typeset repo="$1"
    echo
    echo "Updating $repo"
    (cd "$repo" && git remote update)
    RC=$?
    if [ $RC -ne 0 ]
    then
        return $RC
    fi
    # Only run git pull for repos where I'm not working in a branch.
    typeset b=$(current_branch "$repo")
    # Use '=' and quote both sides: the original unquoted '==' test
    # produced a syntax error when current_branch returned nothing.
    if [ "$b" = "master" ]
    then
        # Pull only when the working tree is clean.
        if (cd "$repo" && git diff --exit-code >/dev/null)
        then
            (cd "$repo" && git pull)
        else
            echo "Skipping pull for master branch with local changes"
            (cd "$repo" && git status)
        fi
    else
        echo "Skipping pull for branch $b"
        branched="$branched $repo"
    fi
}
# Process a single repository found in gerrit, determining whether it
# exists locally already or not.
function get_one_repo {
    typeset repo="$1"
    typeset url="$2"
    # Create the parent directory (e.g. "openstack/") before cloning
    # into it; dirname yields "." for bare names, which mkdir -p allows.
    typeset pardir=$(dirname "$repo")
    if [ ! -z "$pardir" ]
    then
        mkdir -p "$pardir"
    fi
    # Clone fresh copies, update existing ones.
    if [ ! -d "$repo" ] ; then
        clone_new "$repo" "$url"
    else
        update_existing "$repo"
    fi
    RC=$?
    return $RC
}
# Remember where we started so the report lands next to this script.
current_dir=$(pwd)
base="git://git.openstack.org"
# Ask gerrit for all oslo projects (skipping the attic), then append
# the related libraries that do not match the "oslo" pattern.
projects=$(ssh review.openstack.org -p 29418 gerrit ls-projects | grep -v 'attic' | grep "oslo")
projects="$projects openstack/taskflow openstack/tooz openstack/cliff openstack/debtcollector"
projects="$projects openstack/futurist openstack/stevedore openstack-dev/cookiecutter"
projects="$projects openstack/automaton"
# $projects is intentionally unquoted: it is a whitespace-separated list.
for repo in $projects; do
    get_one_repo "$repo" "$base/$repo"
    RC=$?
    if [ $RC -ne 0 ] ; then
        echo "Unable to obtain $repo" 1>&2
        exit 1
    fi
done
# Intentionally unquoted: pass each project as a separate argument.
python new_core_analyzer.py $projects > "${current_dir}/oslo_reports.txt"

View File

@ -1,46 +0,0 @@
#!/bin/bash
#
# Apply the Oslo cookiecutter template to an existing directory,
# usually as part of the graduation process.
# Template repository to apply; override via the environment for testing.
COOKIECUTTER_TEMPLATE_REPO=${COOKIECUTTER_TEMPLATE_REPO:-https://git.openstack.org/openstack-dev/oslo-cookiecutter}
# Emit the command synopsis on stderr.
function usage {
    printf 'Usage: apply_cookiecutter.sh newlib\n' 1>&2
}
# Require the new library name as the only positional argument.
if [ $# -lt 1 ]
then
    usage
    exit 1
fi
new_lib="$1"
# The template adds the "oslo." prefix itself, so reject names that
# already carry it.
if [[ $new_lib =~ oslo.* ]]
then
    echo "You probably don't want 'oslo' in the lib name." 1>&2
    exit 2
fi
# Set up a virtualenv with cookiecutter
tmpdir=$(mktemp -d -t oslo-cookiecutter.XXXX)
echo "Installing cookiecutter..."
venv=$tmpdir/venv
virtualenv $venv
$venv/bin/python -m pip install cookiecutter
cookiecutter=$venv/bin/cookiecutter
# Apply the cookiecutter template by building out a fresh copy using
# the name chosen for this library and then copying any parts of the
# results into the local tree, without overwriting files that already
# exist.
git clone $COOKIECUTTER_TEMPLATE_REPO $tmpdir/oslo-cookiecutter
# FIXME(dhellmann): We need a better non-interactive mode for cookiecutter
# The here-doc answers cookiecutter's interactive prompts in order:
# module name, organization, and a one-line description.
(cd $tmpdir && $cookiecutter $tmpdir/oslo-cookiecutter) <<EOF
$new_lib
openstack
oslo.${new_lib} library
EOF
rsync -a --verbose --ignore-existing $tmpdir/oslo.${new_lib}/ .

View File

@ -1,16 +0,0 @@
#!/bin/bash
#
# Process the dashboard files and emit the URLs
# $1 - checkout of the gerrit-dash-creator tool
# $2 - directory containing *.dash definition files
creator_dir=$1
dashboard_dir=$2
# Run from the creator checkout so ./gerrit-dash-creator resolves.
# NOTE(review): the cd result is not checked; a bad $creator_dir would
# run the loop in the wrong directory -- confirm callers pass a valid path.
cd $creator_dir
for f in $dashboard_dir/*.dash
do
    # Banner with the dashboard name, then the generated URL.
    echo '----------------------------------------'
    echo $(basename $f .dash)
    echo '----------------------------------------'
    ./gerrit-dash-creator $f
done

View File

@ -1,36 +0,0 @@
#!/bin/bash
#
# Script to replace imports from the 'oslo' namespace package with the
# appropriate alternative in the dist-specific packages.
#
# Licensed under the Apache License, Version 2.0 (the "License"); you may
# not use this file except in compliance with the License. You may obtain
# a copy of the License at
#
# http://www.apache.org/licenses/LICENSE-2.0
#
# Unless required by applicable law or agreed to in writing, software
# distributed under the License is distributed on an "AS IS" BASIS, WITHOUT
# WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. See the
# License for the specific language governing permissions and limitations
# under the License.
# Dist name from setup.py (e.g. "oslo.config"); also the default
# directory to rewrite.
name=$(python setup.py --name)
dir=${1:-$name}
echo "Updating $dir"
# Rewrite old 'oslo' namespace-package imports (oslo.foo) to the new
# per-dist underscore form (oslo_foo) across every Python file.  The
# hacking unit test file is excluded because it contains old-style
# imports on purpose as test fixtures.
sed -i \
    -e 's/from oslo\./from oslo_/g' \
    -e 's/import oslo\./import oslo_/g' \
    -e 's/from oslo import i18n/import oslo_i18n as i18n/g' \
    -e 's/from oslo import messaging/import oslo_messaging as messaging/g' \
    -e 's/from oslo import config/import oslo_config as config/g' \
    -e 's/from oslo import serialization/import oslo_serialization as serialization/g' \
    -e 's/from oslo import utils/import oslo_utils as utils/g' \
    -e 's/oslo\.i18n\.TranslatorFactory/oslo_i18n.TranslatorFactory/g' \
    $(find $dir -name '*.py' | grep -v "$name/tests/unit/test_hacking.py")
# Trace and print any remaining old-style references for manual cleanup.
set -x
git grep 'from oslo import'
git grep 'oslo\.'

View File

@ -1,196 +0,0 @@
#!/bin/bash
#
# Licensed under the Apache License, Version 2.0 (the "License"); you may
# not use this file except in compliance with the License. You may obtain
# a copy of the License at
#
# http://www.apache.org/licenses/LICENSE-2.0
#
# Unless required by applicable law or agreed to in writing, software
# distributed under the License is distributed on an "AS IS" BASIS, WITHOUT
# WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. See the
# License for the specific language governing permissions and limitations
# under the License.
#
# Check out every active repository from git.openstack.org. For new
# copies, set up git-review. For any existing copies, update their
# remotes and pull changes up to the local master.
#
# This script is based on prior art from mordred on the openstack-dev
# mailing list.
# http://lists.openstack.org/pipermail/openstack-dev/2013-October/017532.html
#
# Usage:
#
# Check out everything under the current directory:
# $ clone_openstack.sh
#
# Check out a specific project (you can list multiple names):
# $ clone_openstack.sh openstack/oslo-incubator
#
# Accumulators for the end-of-run summary reports.
trouble_with=""
branched=""
# Figure out if git-hooks is installed and should be used.
# https://github.com/icefox/git-hooks
# NOTE: the original 'which git-hooks 2>&1 > /dev/null' duplicated
# stderr to the terminal before discarding stdout, leaking an error
# message when git-hooks was missing; discard both streams instead.
command -v git-hooks > /dev/null 2>&1
USE_GIT_HOOKS=$?
# Users can set INCLUDE_STACKFORGE=1 if they want to always check out
# new copies of stackforge projects.
INCLUDE_STACKFORGE=${INCLUDE_STACKFORGE:-0}
# If we have any trouble at all working with a repository, report that
# and then record the name for the summary at the end.
# Record a repository in the trouble list when its update failed.
# Arguments: $1 - exit status of the last operation, $2 - repo name.
function track_trouble {
    if [ "$1" -ne 0 ]
    then
        echo "Remembering trouble with $2"
        trouble_with="$trouble_with $2"
    fi
}
# Determine the current branch of a local repository.
# Determine the current branch of a local repository.
# Arguments: $1 - path to the repository working tree.
function current_branch {
    # Quote the path so repositories with spaces in their names work.
    (cd "$1" && git rev-parse --abbrev-ref HEAD)
}
# Print a summary report for any repositories that had trouble
# updating.
# Summarize every repository that failed to update, with its branch.
function report_trouble {
    if [ -n "$trouble_with" ]
    then
        echo
        echo "Had trouble updating:"
        # Intentional word-splitting: $trouble_with is a space-separated list.
        for r in $trouble_with
        do
            echo " $r - $(current_branch $r)"
        done
    fi
}
# Print a summary report for any repositories that were not on the
# master branch when we updated them.
# Summarize every repository that was skipped because it was not on master.
function report_branched {
    if [ -n "$branched" ]
    then
        echo
        echo "Branched repos:"
        # Intentional word-splitting: $branched is a space-separated list.
        for r in $branched
        do
            echo " $r - $(current_branch $r)"
        done
    fi
}
# Check out a new copy of a repository and set it up to be a useful
# local copy.
function clone_new {
    typeset repo="$1"
    typeset url="$2"
    # Ignore stackforge projects unless told otherwise.
    if [[ $repo =~ ^stackforge/.* ]]
    then
        if [ "$INCLUDE_STACKFORGE" -ne 1 ]
        then
            return 0
        fi
    fi
    echo
    echo "Cloning $repo"
    git clone "$url" "$repo"
    # Configure gerrit's change-submission remote.
    (cd "$repo" && git review -s)
    if [ "$USE_GIT_HOOKS" -eq 0 ]
    then
        echo "Configuring git hooks"
        (cd "$repo" && git hooks --install)
    fi
    return 0
}
# Update an existing copy of a repository, including all remotes and
# pulling into the local master branch if we're on that branch
# already.
function update_existing {
    typeset repo="$1"
    echo
    echo "Updating $repo"
    (cd "$repo" && git remote update)
    RC=$?
    if [ $RC -ne 0 ]
    then
        return $RC
    fi
    # Only run git pull for repos where I'm not working in a branch.
    typeset b=$(current_branch "$repo")
    # Use '=' and quote both sides: the original unquoted '==' test
    # produced a syntax error when current_branch returned nothing.
    if [ "$b" = "master" ]
    then
        # Pull only when the working tree is clean.
        if (cd "$repo" && git diff --exit-code >/dev/null)
        then
            (cd "$repo" && git pull)
        else
            echo "Skipping pull for master branch with local changes"
            (cd "$repo" && git status)
        fi
    else
        echo "Skipping pull for branch $b"
        branched="$branched $repo"
    fi
}
# Process a single repository found in gerrit, determining whether it
# exists locally already or not.
function get_one_repo {
    typeset repo="$1"
    typeset url="$2"
    # Create the parent directory (e.g. "openstack/") before cloning
    # into it; dirname yields "." for bare names, which mkdir -p allows.
    typeset pardir=$(dirname "$repo")
    if [ ! -z "$pardir" ]
    then
        mkdir -p "$pardir"
    fi
    # Clone fresh copies, update existing ones.
    if [ ! -d "$repo" ] ; then
        clone_new "$repo" "$url"
    else
        update_existing "$repo"
    fi
    RC=$?
    return $RC
}
# If we are given a list of projects on the command line, we will only
# work on those. Otherwise, ask gerrit for the full list of openstack
# projects, ignoring the ones in the attic. Stackforge projects are
# ignored if they do not exist locally, so we include them in the
# output list and check for them when we decide what to do with each
# repository.
projects="$*"
if [ -z "$projects" ]
then
    projects=$(ssh review.openstack.org -p 29418 gerrit ls-projects | grep '^openstack' | grep -v 'attic')
    RC=$?
    if [ $RC -ne 0 ]
    then
        # Fixed typo: "credientials" -> "credentials".
        echo "Unable to obtain a list of projects from gerrit. Check your ssh credentials for review.openstack.org" 1>&2
        userid=$(id -un)
        gerrit_userid=$(git config --get gitreview.username)
        # Quote both sides: gitreview.username may be unset, which made
        # the original unquoted test a syntax error.
        if [ "$userid" != "$gerrit_userid" ]
        then
            echo "Identified a possible userid difference between $userid and $gerrit_userid"
        fi
        exit $RC
    fi
else
    # Go ahead and set things up so we will work with stackforge
    # repositories, in case the caller has specified one on the
    # command line.
    INCLUDE_STACKFORGE=1
fi
# $projects is intentionally unquoted: it is a whitespace-separated list.
for repo in $projects; do
    get_one_repo "$repo" "git://git.openstack.org/$repo"
    track_trouble $? "$repo"
done
report_branched
report_trouble

View File

@ -1,333 +0,0 @@
#!/usr/bin/env python
# Copyright (c) 2013, Nebula, Inc.
# Copyright 2010 United States Government as represented by the
# Administrator of the National Aeronautics and Space Administration.
# All Rights Reserved.
#
# Licensed under the Apache License, Version 2.0 (the "License"); you may
# not use this file except in compliance with the License. You may obtain
# a copy of the License at
#
# http://www.apache.org/licenses/LICENSE-2.0
#
# Unless required by applicable law or agreed to in writing, software
# distributed under the License is distributed on an "AS IS" BASIS, WITHOUT
# WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. See the
# License for the specific language governing permissions and limitations
# under the License.
#
# Colorizer Code is borrowed from Twisted:
# Copyright (c) 2001-2010 Twisted Matrix Laboratories.
#
# Permission is hereby granted, free of charge, to any person obtaining
# a copy of this software and associated documentation files (the
# "Software"), to deal in the Software without restriction, including
# without limitation the rights to use, copy, modify, merge, publish,
# distribute, sublicense, and/or sell copies of the Software, and to
# permit persons to whom the Software is furnished to do so, subject to
# the following conditions:
#
# The above copyright notice and this permission notice shall be
# included in all copies or substantial portions of the Software.
#
# THE SOFTWARE IS PROVIDED "AS IS", WITHOUT WARRANTY OF ANY KIND,
# EXPRESS OR IMPLIED, INCLUDING BUT NOT LIMITED TO THE WARRANTIES OF
# MERCHANTABILITY, FITNESS FOR A PARTICULAR PURPOSE AND
# NONINFRINGEMENT. IN NO EVENT SHALL THE AUTHORS OR COPYRIGHT HOLDERS BE
# LIABLE FOR ANY CLAIM, DAMAGES OR OTHER LIABILITY, WHETHER IN AN ACTION
# OF CONTRACT, TORT OR OTHERWISE, ARISING FROM, OUT OF OR IN CONNECTION
# WITH THE SOFTWARE OR THE USE OR OTHER DEALINGS IN THE SOFTWARE.
"""Display a subunit stream through a colorized unittest test runner."""
import heapq
import sys
import unittest
import six
import subunit
import testtools
class _AnsiColorizer(object):
"""Colorizer allows callers to write text in a particular color.
A colorizer is an object that loosely wraps around a stream, allowing
callers to write text to the stream in a particular color.
Colorizer classes must implement C{supported()} and C{write(text, color)}.
"""
_colors = dict(black=30, red=31, green=32, yellow=33,
blue=34, magenta=35, cyan=36, white=37)
def __init__(self, stream):
self.stream = stream
def supported(cls, stream=sys.stdout):
"""Check is the current platform supports coloring terminal output.
A class method that returns True if the current platform supports
coloring terminal output using this method. Returns False otherwise.
"""
if not stream.isatty():
return False # auto color only on TTYs
try:
import curses
except ImportError:
return False
else:
try:
try:
return curses.tigetnum("colors") > 2
except curses.error:
curses.setupterm()
return curses.tigetnum("colors") > 2
except Exception:
# guess false in case of error
return False
supported = classmethod(supported)
def write(self, text, color):
"""Write the given text to the stream in the given color.
@param text: Text to be written to the stream.
@param color: A string label for a color. e.g. 'red', 'white'.
"""
color = self._colors[color]
self.stream.write('\x1b[%s;1m%s\x1b[0m' % (color, text))
class _Win32Colorizer(object):
    """See _AnsiColorizer docstring."""

    def __init__(self, stream):
        # Only importable when pywin32 is installed (Windows).
        import win32console
        red, green, blue, bold = (win32console.FOREGROUND_RED,
                                  win32console.FOREGROUND_GREEN,
                                  win32console.FOREGROUND_BLUE,
                                  win32console.FOREGROUND_INTENSITY)
        self.stream = stream
        self.screenBuffer = win32console.GetStdHandle(
            win32console.STD_OUT_HANDLE)
        # Map color names to Win32 console attribute bitmasks.
        self._colors = {
            'normal': red | green | blue,
            'red': red | bold,
            'green': green | bold,
            'blue': blue | bold,
            'yellow': red | green | bold,
            'magenta': red | blue | bold,
            'cyan': green | blue | bold,
            'white': red | green | blue | bold,
        }

    def supported(cls, stream=sys.stdout):
        """Return True when the Win32 console accepts color attributes.

        NOTE(review): 'import pywintypes' sits outside the try block, so
        a missing pywintypes with win32console present would raise here
        instead of returning False -- confirm before relying on this path.
        """
        try:
            import win32console
            screenBuffer = win32console.GetStdHandle(
                win32console.STD_OUT_HANDLE)
        except ImportError:
            return False
        import pywintypes
        try:
            # Probe the console by setting a known attribute.
            screenBuffer.SetConsoleTextAttribute(
                win32console.FOREGROUND_RED |
                win32console.FOREGROUND_GREEN |
                win32console.FOREGROUND_BLUE)
        except pywintypes.error:
            return False
        else:
            return True
    supported = classmethod(supported)

    def write(self, text, color):
        # Switch the console attribute, emit the text, then restore.
        color = self._colors[color]
        self.screenBuffer.SetConsoleTextAttribute(color)
        self.stream.write(text)
        self.screenBuffer.SetConsoleTextAttribute(self._colors['normal'])
class _NullColorizer(object):
"""See _AnsiColorizer docstring."""
def __init__(self, stream):
self.stream = stream
def supported(cls, stream=sys.stdout):
return True
supported = classmethod(supported)
def write(self, text, color):
self.stream.write(text)
def get_elapsed_time_color(elapsed_time):
    """Map a duration in seconds to a severity color name."""
    if elapsed_time > 1.0:
        return 'red'
    if elapsed_time > 0.25:
        return 'yellow'
    return 'green'
class OpenStackTestResult(testtools.TestResult):
    """TestResult that renders colorized, per-class progress output.

    Results are buffered per test class and flushed in groups so output
    from different classes is not interleaved.  The ten slowest tests
    are tracked in a fixed-size heap and reported at the end of the run.
    """

    def __init__(self, stream, descriptions, verbosity):
        super(OpenStackTestResult, self).__init__()
        self.stream = stream
        self.showAll = verbosity > 1
        self.num_slow_tests = 10
        self.slow_tests = []  # this is a fixed-sized heap
        self.colorizer = None
        # NOTE(vish): reset stdout for the terminal check
        stdout = sys.stdout
        sys.stdout = sys.__stdout__
        # Pick the first colorizer that works; _NullColorizer always does.
        for colorizer in [_Win32Colorizer, _AnsiColorizer, _NullColorizer]:
            if colorizer.supported():
                self.colorizer = colorizer(self.stream)
                break
        sys.stdout = stdout
        self.start_time = None
        self.last_time = {}
        self.results = {}
        self.last_written = None

    def _writeElapsedTime(self, elapsed):
        color = get_elapsed_time_color(elapsed)
        self.colorizer.write(" %.2f" % elapsed, color)

    def _addResult(self, test, *args):
        try:
            name = test.id()
        except AttributeError:
            name = 'Unknown.unknown'
        test_class, test_name = name.rsplit('.', 1)
        elapsed = (self._now() - self.start_time).total_seconds()
        item = (elapsed, test_class, test_name)
        # Keep only the num_slow_tests slowest entries.
        if len(self.slow_tests) >= self.num_slow_tests:
            heapq.heappushpop(self.slow_tests, item)
        else:
            heapq.heappush(self.slow_tests, item)
        self.results.setdefault(test_class, [])
        self.results[test_class].append((test_name, elapsed) + args)
        self.last_time[test_class] = self._now()
        self.writeTests()

    def _writeResult(self, test_name, elapsed, long_result, color,
                     short_result, success):
        if self.showAll:
            self.stream.write(' %s' % str(test_name).ljust(66))
            self.colorizer.write(long_result, color)
            if success:
                self._writeElapsedTime(elapsed)
            self.stream.writeln()
        else:
            self.colorizer.write(short_result, color)

    def addSuccess(self, test):
        super(OpenStackTestResult, self).addSuccess(test)
        self._addResult(test, 'OK', 'green', '.', True)

    def addFailure(self, test, err):
        # The synthetic subunit "process-returncode" test is noise here.
        if test.id() == 'process-returncode':
            return
        super(OpenStackTestResult, self).addFailure(test, err)
        self._addResult(test, 'FAIL', 'red', 'F', False)

    def addError(self, test, err):
        # BUG FIX: previously delegated to super().addFailure(), which
        # recorded errors in self.failures instead of self.errors.
        super(OpenStackTestResult, self).addError(test, err)
        self._addResult(test, 'ERROR', 'red', 'E', False)

    def addSkip(self, test, reason=None, details=None):
        super(OpenStackTestResult, self).addSkip(test, reason, details)
        self._addResult(test, 'SKIP', 'blue', 'S', True)

    def startTest(self, test):
        self.start_time = self._now()
        super(OpenStackTestResult, self).startTest(test)

    def writeTestCase(self, cls):
        # Flush the buffered results for one test class.
        if not self.results.get(cls):
            return
        if cls != self.last_written:
            self.colorizer.write(cls, 'white')
            self.stream.writeln()
        for result in self.results[cls]:
            self._writeResult(*result)
        del self.results[cls]
        self.stream.flush()
        self.last_written = cls

    def writeTests(self):
        # Flush classes that have been quiet for more than two seconds;
        # otherwise keep appending to the class currently being written.
        time = self.last_time.get(self.last_written, self._now())
        if not self.last_written or (self._now() - time).total_seconds() > 2.0:
            diff = 3.0
            while diff > 2.0:
                # NOTE(review): assumes self.results is non-empty here,
                # which holds because writeTests is only called right
                # after _addResult inserts an entry.
                classes = self.results.keys()
                oldest = min(classes, key=lambda x: self.last_time[x])
                diff = (self._now() - self.last_time[oldest]).total_seconds()
                self.writeTestCase(oldest)
        else:
            self.writeTestCase(self.last_written)

    def done(self):
        self.stopTestRun()

    def stopTestRun(self):
        for cls in list(six.iterkeys(self.results)):
            self.writeTestCase(cls)
        self.stream.writeln()
        self.writeSlowTests()

    def writeSlowTests(self):
        # Pare out 'fast' tests
        slow_tests = [item for item in self.slow_tests
                      if get_elapsed_time_color(item[0]) != 'green']
        if slow_tests:
            slow_total_time = sum(item[0] for item in slow_tests)
            slow = ("Slowest %i tests took %.2f secs:"
                    % (len(slow_tests), slow_total_time))
            self.colorizer.write(slow, 'yellow')
            self.stream.writeln()
            last_cls = None
            # sort by name
            for elapsed, cls, name in sorted(slow_tests,
                                             key=lambda x: x[1] + x[2]):
                if cls != last_cls:
                    self.colorizer.write(cls, 'white')
                    self.stream.writeln()
                    last_cls = cls
                self.stream.write(' %s' % str(name).ljust(68))
                self._writeElapsedTime(elapsed)
                self.stream.writeln()

    def printErrors(self):
        if self.showAll:
            self.stream.writeln()
        self.printErrorList('ERROR', self.errors)
        self.printErrorList('FAIL', self.failures)

    def printErrorList(self, flavor, errors):
        for test, err in errors:
            self.colorizer.write("=" * 70, 'red')
            self.stream.writeln()
            self.colorizer.write(flavor, 'red')
            self.stream.writeln(": %s" % test.id())
            self.colorizer.write("-" * 70, 'red')
            self.stream.writeln()
            self.stream.writeln("%s" % err)
# Parse the subunit stream arriving on stdin; passthrough=None keeps
# non-subunit chatter out of the replayed result.
test = subunit.ProtocolTestCase(sys.stdin, passthrough=None)
# Python <= 2.6 TextTestRunner has no resultclass argument, so fall
# back to the plain runner there.
if sys.version_info[0:2] <= (2, 6):
    runner = unittest.TextTestRunner(verbosity=2)
else:
    runner = unittest.TextTestRunner(verbosity=2,
                                     resultclass=OpenStackTestResult)
# Exit 0 only when every replayed test passed.
if runner.run(test).wasSuccessful():
    exit_code = 0
else:
    exit_code = 1
sys.exit(exit_code)

View File

@ -1,31 +0,0 @@
[dashboard]
title = Oslo Review Inbox (Part One)
description = Review Inbox (Part One)
foreach = status:open NOT owner:self
[section "Oslo Specs"]
query = project:openstack/oslo-specs
[section "automaton"]
query = project:openstack/automaton
[section "castellan"]
query = project:openstack/castellan
[section "cookiecutter"]
query = project:openstack-dev/cookiecutter
[section "debtcollector"]
query = project:openstack/debtcollector
[section "futurist"]
query = project:openstack/futurist
[section "mox3"]
query = project:openstack/mox3
[section "oslo-cookiecutter"]
query = project:openstack-dev/oslo-cookiecutter
[section "oslo.cache"]
query = project:openstack/oslo.cache

View File

@ -1,31 +0,0 @@
[dashboard]
title = Oslo Review Inbox (Part Two)
description = Review Inbox (Part Two)
foreach = status:open NOT owner:self
[section "oslo.privsep"]
query = project:openstack/oslo.privsep
[section "oslo.reports"]
query = project:openstack/oslo.reports
[section "oslo.rootwrap"]
query = project:openstack/oslo.rootwrap
[section "oslo.serialization"]
query = project:openstack/oslo.serialization
[section "oslo.service"]
query = project:openstack/oslo.service
[section "oslo.tools"]
query = project:openstack/oslo.tools
[section "oslo.utils"]
query = project:openstack/oslo.utils
[section "oslo.versionedobjects"]
query = project:openstack/oslo.versionedobjects
[section "oslo.vmware"]
query = project:openstack/oslo.vmware

View File

@ -1,31 +0,0 @@
[dashboard]
title = Oslo Review Inbox (Part Three)
description = Review Inbox (Part Three)
foreach = status:open NOT owner:self
[section "oslo.concurrency"]
query = project:openstack/oslo.concurrency
[section "oslo.config"]
query = project:openstack/oslo.config
[section "oslo.context"]
query = project:openstack/oslo.context
[section "oslo.db"]
query = project:openstack/oslo.db
[section "oslo.i18n"]
query = project:openstack/oslo.i18n
[section "oslo.log"]
query = project:openstack/oslo.log
[section "oslo.messaging"]
query = project:openstack/oslo.messaging
[section "oslo.middleware"]
query = project:openstack/oslo.middleware
[section "oslo.policy"]
query = project:openstack/oslo.policy

View File

@ -1,28 +0,0 @@
[dashboard]
title = Oslo Review Inbox (Part Four)
description = Review Inbox (Part Four)
foreach = status:open NOT owner:self
[section "oslosphinx"]
query = project:openstack/oslosphinx
[section "oslotest"]
query = project:openstack/oslotest
[section "osprofiler"]
query = project:openstack/osprofiler
[section "pbr"]
query = project:openstack-dev/pbr
[section "pylockfile"]
query = project:openstack/pylockfile
[section "stevedore"]
query = project:openstack/stevedore
[section "taskflow"]
query = project:openstack/taskflow
[section "tooz"]
query = project:openstack/tooz

View File

@ -1,39 +0,0 @@
[dashboard]
title = Oslo Review Inbox
description = Review Inbox
foreach = (project:^openstack/oslo.* OR project:openstack/debtcollector OR
project:openstack/pylockfile OR project:openstack/castellan OR
project:openstack/futurist OR project:openstack/automaton OR
project:openstack/stevedore OR project:openstack/taskflow OR
project:openstack/tooz OR project:openstack-dev/cookiecutter OR
project:openstack-dev/pbr OR project:openstack/debtcollector OR
project:openstack-dev/oslo-cookiecutter OR project:openstack/mox3)
status:open NOT owner:self NOT label:Workflow<=-1 label:Verified>=1
NOT reviewedby:self
[section "Oslo Specs"]
query = project:openstack/oslo-specs
[section "Bug Fixes"]
query = topic:^bug/.*
[section "Blueprints"]
query = message:"Blueprint"
[section "Needs Feedback (Changes older than 5 days that have not been reviewed by anyone)"]
query = NOT label:Code-Review<=2 age:5d
[section "You are a reviewer, but haven't voted in the current revision"]
query = reviewer:self
[section "Needs final +2"]
query = label:Code-Review>=2 limit:50
[section "New Contributors"]
query = reviewer:10068
[section "Passed Jenkins, No Negative Feedback"]
query = NOT label:Code-Review>=2 NOT label:Code-Review<=-1 limit:50
[section "Wayward Changes (Changes with no code review in the last 2days)"]
query = NOT label:Code-Review<=2 age:2d

View File

@ -1,116 +0,0 @@
#!/usr/bin/env python
#
# Licensed under the Apache License, Version 2.0 (the "License"); you may
# not use this file except in compliance with the License. You may obtain
# a copy of the License at
#
# http://www.apache.org/licenses/LICENSE-2.0
#
# Unless required by applicable law or agreed to in writing, software
# distributed under the License is distributed on an "AS IS" BASIS, WITHOUT
# WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. See the
# License for the specific language governing permissions and limitations
# under the License.
"""
Look through the openstack-common.conf files for projects to find
any that are using modules that have been deleted from the
incubator.
"""
from __future__ import print_function
import glob
import os
import sys
from oslo_config import cfg
# Extend sys.path to find update.py
my_dir = os.path.dirname(__file__)
incubator_root = os.path.abspath(os.path.dirname(my_dir))
sys.path.append(incubator_root)
import update
def main(argv=sys.argv[1:]):
    """Report incubator modules that projects still reference but that
    no longer exist in the incubator tree."""
    # The repositories are expected three levels up from this script
    # (e.g. <repodir>/openstack/nova).
    repodir = os.path.abspath(
        os.path.join(my_dir, os.pardir, os.pardir, os.pardir)
    )
    main_cfg = cfg.ConfigOpts()
    main_cfg.register_cli_opt(
        cfg.MultiStrOpt(
            # NOTE(dhellmann): We can't call this "project" because
            # that conflicts with another property of the ConfigOpts
            # class.
            'proj',
            default=[],
            positional=True,
            help='list of repo subdirs to scan, e.g. "openstack/nova"',
        )
    )
    main_cfg(argv)
    # If the user gave us project names, turn them into full paths to
    # the project directory. If not, build a full list of all the
    # projects we find.
    projects = main_cfg.proj
    if projects:
        projects = [os.path.join(repodir, p) for p in projects]
    else:
        projects = glob.glob(
            os.path.join(repodir, '*', '*')
        )
    base_dir = os.path.join(
        incubator_root,
        'openstack',
        'common',
    )
    tools_dir = os.path.join(incubator_root, 'tools')
    previous_project = None
    for project_path in projects:
        conf_file = os.path.join(project_path, 'openstack-common.conf')
        if not os.path.exists(conf_file):
            # This is not a directory using oslo-incubator.
            continue
        # e.g. "openstack/nova" from the last two path components.
        project_name = '/'.join(project_path.split('/')[-2:])
        # Use a separate parser for each configuration file.
        pcfg = cfg.ConfigOpts()
        pcfg.register_opts(update.opts)
        pcfg(['--config-file', conf_file])
        # The list of modules can come in a couple of different
        # options, so combine the results.
        modules = pcfg.module + pcfg.modules
        for mod in modules:
            # Build a few filenames and patterns for looking for
            # versions of the module being used by the project before
            # testing them all.
            mod_path = os.path.join(
                base_dir,
                mod.replace('.', os.sep),
            )
            mod_file = '%s.py' % mod_path
            tool_pattern = os.path.join(tools_dir, mod + '*')
            tool_subdir_pattern = os.path.join(tools_dir, mod, '*.sh')
            if (os.path.isfile(mod_file)
                    or
                    os.path.isdir(mod_path)
                    or
                    glob.glob(tool_pattern)
                    or
                    glob.glob(tool_subdir_pattern)):
                # Found something we would have copied in update.py.
                continue
            else:
                # Module is gone from the incubator but still listed by
                # this project; print a blank-line-separated header per
                # project, then one line per missing module.
                if project_name != previous_project:
                    previous_project = project_name
                    print()
                print('%s: %s' % (project_name, mod))


if __name__ == '__main__':
    main()

View File

@ -1,70 +0,0 @@
# Copyright 2010 United States Government as represented by the
# Administrator of the National Aeronautics and Space Administration.
# All Rights Reserved.
#
# Copyright 2010 OpenStack Foundation
# Copyright 2013 IBM Corp.
#
# Licensed under the Apache License, Version 2.0 (the "License"); you may
# not use this file except in compliance with the License. You may obtain
# a copy of the License at
#
# http://www.apache.org/licenses/LICENSE-2.0
#
# Unless required by applicable law or agreed to in writing, software
# distributed under the License is distributed on an "AS IS" BASIS, WITHOUT
# WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. See the
# License for the specific language governing permissions and limitations
# under the License.
import os
import sys
import install_venv_common as install_venv # noqa
def print_help(venv, root):
    """Tell the developer how to use the freshly created virtualenv."""
    message = """
OpenStack development environment setup is complete.
OpenStack development uses virtualenv to track and manage Python
dependencies while in development and testing.
To activate the OpenStack virtualenv for the extent of your current shell
session you can run:
$ source %s/bin/activate
Or, if you prefer, you can run commands in the virtualenv on a case by case
basis by running:
$ %s/tools/with_venv.sh <your command>
Also, make test will automatically use the virtualenv.
"""
    print(message % (venv, root))
def main(argv):
    """Create and populate the project's development virtualenv."""
    # Allow the tools root and venv location to be overridden via the
    # TOOLS_PATH and VENV environment variables.
    root = os.path.dirname(os.path.dirname(os.path.realpath(__file__)))
    if os.environ.get('TOOLS_PATH'):
        root = os.environ['TOOLS_PATH']
    venv = os.path.join(root, '.venv')
    if os.environ.get('VENV'):
        venv = os.environ['VENV']
    pip_requires = os.path.join(root, 'requirements.txt')
    test_requires = os.path.join(root, 'test-requirements.txt')
    py_version = "python%s.%s" % (sys.version_info[0], sys.version_info[1])
    project = 'OpenStack'
    install = install_venv.InstallVenv(root, venv, pip_requires, test_requires,
                                       py_version, project)
    options = install.parse_args(argv)
    install.check_dependencies()
    install.create_virtualenv(no_site_packages=options.no_site_packages)
    install.install_dependencies()
    print_help(venv, root)


if __name__ == '__main__':
    main(sys.argv)

View File

@ -1,165 +0,0 @@
# Copyright 2013 OpenStack Foundation
# Copyright 2013 IBM Corp.
#
# Licensed under the Apache License, Version 2.0 (the "License"); you may
# not use this file except in compliance with the License. You may obtain
# a copy of the License at
#
# http://www.apache.org/licenses/LICENSE-2.0
#
# Unless required by applicable law or agreed to in writing, software
# distributed under the License is distributed on an "AS IS" BASIS, WITHOUT
# WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. See the
# License for the specific language governing permissions and limitations
# under the License.
"""Provides methods needed by installation script for OpenStack development
virtual environments.
Synced in from openstack-common
"""
from __future__ import print_function
import optparse
import os
import subprocess
import sys
class InstallVenv(object):
    """Driver for creating and populating a development virtualenv.

    NOTE(review): synced from openstack-common; keep local changes minimal.
    """

    def __init__(self, root, venv, requirements,
                 test_requirements, py_version,
                 project):
        # root: project checkout directory, used as cwd for commands.
        self.root = root
        # venv: path where the virtualenv will be created.
        self.venv = venv
        # requirements / test_requirements: pip requirement file paths.
        self.requirements = requirements
        self.test_requirements = test_requirements
        self.py_version = py_version
        # project: human-readable name used in error messages.
        self.project = project

    def die(self, message, *args):
        """Print a %-formatted message to stderr and exit non-zero."""
        print(message % args, file=sys.stderr)
        sys.exit(1)

    def run_command_with_code(self, cmd, redirect_output=True,
                              check_exit_code=True):
        """Runs a command in an out-of-process shell.

        Returns the output of that command. Working directory is self.root.
        """
        if redirect_output:
            stdout = subprocess.PIPE
        else:
            stdout = None
        proc = subprocess.Popen(cmd, cwd=self.root, stdout=stdout)
        output = proc.communicate()[0]
        if check_exit_code and proc.returncode != 0:
            self.die('Command "%s" failed.\n%s', ' '.join(cmd), output)
        # Returns (captured output or None, exit status).
        return (output, proc.returncode)

    def run_command(self, cmd, redirect_output=True, check_exit_code=True):
        # Convenience wrapper returning only the command's output.
        return self.run_command_with_code(cmd, redirect_output,
                                          check_exit_code)[0]

    def get_distro(self):
        # Fedora-family systems package virtualenv differently, so pick
        # the matching installer strategy based on release files.
        if (os.path.exists('/etc/fedora-release') or
                os.path.exists('/etc/redhat-release')):
            return Fedora(
                self.root, self.venv, self.requirements,
                self.test_requirements, self.py_version, self.project)
        else:
            return Distro(
                self.root, self.venv, self.requirements,
                self.test_requirements, self.py_version, self.project)

    def check_dependencies(self):
        # Make sure virtualenv itself is available before we start.
        self.get_distro().install_virtualenv()

    def create_virtualenv(self, no_site_packages=True):
        """Creates the virtual environment and installs PIP.

        Creates the virtual environment and installs PIP only into the
        virtual environment.
        """
        if not os.path.isdir(self.venv):
            print('Creating venv...', end=' ')
            if no_site_packages:
                self.run_command(['virtualenv', '-q', '--no-site-packages',
                                 self.venv])
            else:
                self.run_command(['virtualenv', '-q', self.venv])
            print('done.')
        else:
            print("venv already exists...")
            pass

    def pip_install(self, *args):
        # Run pip through with_venv.sh so it targets the virtualenv.
        self.run_command(['tools/with_venv.sh',
                         'pip', 'install', '--upgrade'] + list(args),
                         redirect_output=False)

    def install_dependencies(self):
        print('Installing dependencies with pip (this can take a while)...')
        # First things first, make sure our venv has the latest pip and
        # setuptools and pbr
        self.pip_install('pip>=1.4')
        self.pip_install('setuptools')
        self.pip_install('pbr')
        self.pip_install('-r', self.requirements, '-r', self.test_requirements)

    def parse_args(self, argv):
        """Parses command-line arguments."""
        parser = optparse.OptionParser()
        parser.add_option('-n', '--no-site-packages',
                          action='store_true',
                          help="Do not inherit packages from global Python "
                               "install.")
        return parser.parse_args(argv[1:])[0]
class Distro(InstallVenv):
    """Generic distribution: falls back to easy_install for virtualenv."""

    def check_cmd(self, cmd):
        """Return True when *cmd* resolves to an executable on the PATH."""
        located = self.run_command(['which', cmd], check_exit_code=False)
        return bool(located.strip())

    def install_virtualenv(self):
        """Ensure the virtualenv tool is available, or die with advice."""
        if self.check_cmd('virtualenv'):
            return

        if self.check_cmd('easy_install'):
            print('Installing virtualenv via easy_install...', end=' ')
            if self.run_command(['easy_install', 'virtualenv']):
                print('Succeeded')
                return
            print('Failed')

        self.die('ERROR: virtualenv not found.\n\n%s development'
                 ' requires virtualenv, please install it using your'
                 ' favorite package management tool' % self.project)
class Fedora(Distro):
    """This covers all Fedora-based distributions.

    Includes: Fedora, RHEL, CentOS, Scientific Linux
    """

    def check_pkg(self, pkg):
        """Return True when the RPM package *pkg* is installed."""
        status = self.run_command_with_code(['rpm', '-q', pkg],
                                            check_exit_code=False)[1]
        return status == 0

    def install_virtualenv(self):
        """Require the distro package before falling back to the base logic."""
        if self.check_cmd('virtualenv'):
            return
        if not self.check_pkg('python-virtualenv'):
            self.die("Please install 'python-virtualenv'.")
        super(Fedora, self).install_virtualenv()

30
lint.py
View File

@ -1,30 +0,0 @@
# Copyright (c) 2013 Intel Corporation.
# All Rights Reserved.
#
# Licensed under the Apache License, Version 2.0 (the "License"); you may
# not use this file except in compliance with the License. You may obtain
# a copy of the License at
#
# http://www.apache.org/licenses/LICENSE-2.0
#
# Unless required by applicable law or agreed to in writing, software
# distributed under the License is distributed on an "AS IS" BASIS, WITHOUT
# WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. See the
# License for the specific language governing permissions and limitations
# under the License.
#
import sys
from pylint import lint
# Pylint message ids to enable (W0611: unused import); all others are disabled.
ENABLED_PYLINT_MSGS = ['W0611']
def main(dirpath):
    """Run pylint over *dirpath* with only the enabled checks active."""
    enabled = ','.join(ENABLED_PYLINT_MSGS)
    options = ['--reports=n', '--disable=all', '--enable=%s' % enabled]
    lint.Run(options + [dirpath])


if __name__ == '__main__':
    main(sys.argv[1])

View File

@ -1,96 +0,0 @@
#!/usr/bin/env python
#
# Licensed under the Apache License, Version 2.0 (the "License"); you may
# not use this file except in compliance with the License. You may obtain
# a copy of the License at
#
# http://www.apache.org/licenses/LICENSE-2.0
#
# Unless required by applicable law or agreed to in writing, software
# distributed under the License is distributed on an "AS IS" BASIS, WITHOUT
# WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. See the
# License for the specific language governing permissions and limitations
# under the License.
"""
New core email content generator.
"""
import argparse
import jinja2
import parawrap
# Email-body template for proposing a new core reviewer; the {{...}}
# placeholders are filled in by generate_email().
CORE_TPL = """
Greetings all stackers,
I propose that we add {{FULL_NAME}}[1] to the {{TEAM_CORE}}[2] team.
{{FIRST_NAME}} has been actively contributing to {{TEAM}} for a while now, both
in helping make {{TEAM}} better via code contribution(s) and by helping with
the review load when {{HE_SHE_LOWER}} can. {{HE_SHE}} has provided quality
reviews and is doing an awesome job with the various {{TEAM}} concepts and
helping make {{TEAM}} the best it can be!
Overall I think {{HE_SHE_LOWER}} would make a great addition to the core
review team.
Please respond with +1/-1.
Thanks much!
- {{ME}}
"""

# Drop the surrounding newlines introduced by the triple-quote layout.
CORE_TPL = CORE_TPL.strip()
def expand_template(contents, params):
    """Render *contents* as a Jinja2 template using *params*.

    StrictUndefined makes any missing placeholder raise instead of
    silently rendering as an empty string.
    """
    values = params or {}
    template = jinja2.Template(source=contents,
                               undefined=jinja2.StrictUndefined)
    return template.render(**values)
def generate_email(args):
    """Build the new-core proposal email body from the parsed CLI args."""
    team = args.team.strip().lower()
    he_she = args.gender.title()
    params = {
        'FULL_NAME': args.who,
        'FIRST_NAME': args.who.split()[0],
        'HE_SHE': he_she,
        'HE_SHE_LOWER': he_she.lower(),
        'TEAM': team,
        'TEAM_CORE': '%s-core' % args.team,
        'ME': args.sender,
    }
    body = parawrap.fill(expand_template(CORE_TPL, params).strip(), width=75)
    # Put the links on after so they are not affected by the wrapping...
    footer_links = [
        'https://launchpad.net/~%s' % args.who_launchpad_id,
        'https://launchpad.net/%s' % team,
    ]
    body += "\n\n"
    for idx, url in enumerate(footer_links, 1):
        body += "[%s] %s\n" % (idx, url)
    return body.rstrip()
def main():
    """Parse the command line and print the generated proposal email."""
    parser = argparse.ArgumentParser(description=__doc__)
    # All options are mandatory; (flag, dest, metavar) triples.
    required_options = [
        ('--adding-who', 'who', '<full-name>'),
        ('--adding-who-launchpad-id', 'who_launchpad_id', '<launchpad-id>'),
        ('--from-who', 'sender', '<full-name>'),
        ('--team', 'team', '<team>'),
        ('--gender', 'gender', '<he/she>'),
    ]
    for flag, dest, metavar in required_options:
        parser.add_argument(flag, action="store", dest=dest,
                            required=True, metavar=metavar)
    args = parser.parse_args()
    print(generate_email(args))


if __name__ == '__main__':
    main()

View File

@ -1,177 +0,0 @@
# Licensed under the Apache License, Version 2.0 (the "License"); you may
# not use this file except in compliance with the License. You may obtain
# a copy of the License at
#
# http://www.apache.org/licenses/LICENSE-2.0
#
# Unless required by applicable law or agreed to in writing, software
# distributed under the License is distributed on an "AS IS" BASIS, WITHOUT
# WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. See the
# License for the specific language governing permissions and limitations
# under the License.
import collections
import contextlib
import datetime
import functools
import os
import sys

import tabulate

from gitinspector.changes import Changes
from gitinspector.metrics import MetricsLogic
# A git repository to analyze: short name + path on disk.
Repository = collections.namedtuple('Repository', 'name,location')

# Authors who are already cores (or otherwise known) and therefore
# should never be reported as potential new cores.
CORE_SKIPS = frozenset([
    u'Julien Danjou',
    u'Davanum Srinivas',
    u'Ben Nemec',
    u'Joshua Harlow',
    u'Brant Knudson',
    u'Doug Hellmann',
    u'Victor Stinner',
    u'Michael Still',
    u'Flavio Percoco',
    u'Mehdi Abaakouk',
    u'Robert Collins',
])

# Email addresses to ignore outright (bots and alternate addresses).
EMAIL_SKIPS = frozenset([
    'openstack-infra@lists.openstack.org',
    'flaper87@gmail.com',
    'fpercoco@redhat.com',
])

# Commits older than this year are considered too stale to count.
OLDEST_COMMIT_YEAR = 2014
@contextlib.contextmanager
def auto_cwd(target_dir):
    """Temporarily chdir into *target_dir*, restoring the old cwd on exit.

    When we are already in *target_dir* (exact string match) nothing is
    changed at all.
    """
    previous = os.getcwd()
    if previous == target_dir:
        yield
        return
    os.chdir(target_dir)
    try:
        yield
    finally:
        os.chdir(previous)
def new_core_compare(c1, c2):
    """cmp-style comparison on the (insertions, deletions, commits) fields.

    Returns a negative, zero or positive integer, matching the classic
    cmp() contract used with sorted().
    """
    key1 = c1[3:6]
    key2 = c2[3:6]
    return (key1 > key2) - (key1 < key2)
def should_discard(change_date, author_name, author_email, author_info):
    """Return True when this author/commit should be skipped entirely."""
    if author_name in CORE_SKIPS or author_email in EMAIL_SKIPS:
        return True
    # A None date means "no age filtering" (used by the combined pass).
    return (change_date is not None
            and change_date.year < OLDEST_COMMIT_YEAR)
def dump_changes(repo):
    """Analyze one repository and print its potential new cores.

    *repo* is a Repository namedtuple.  Returns a copy of the per-author
    info gathered by gitinspector, for later cross-repo aggregation.
    """
    with auto_cwd(repo.location):
        print("Analyzing repo %s (%s):" % (repo.name, repo.location))
        print("Please wait...")

        # gitinspector's Changes accumulates into class-level dicts, so
        # reset them between repositories.
        Changes.authors.clear()
        Changes.authors_dateinfo.clear()
        Changes.authors_by_email.clear()
        Changes.emails_by_author.clear()

        changes = Changes(repo)

        # This is needed to flush out changes progress message...
        sys.stdout.write("\n")

        # Force population of this info...
        changes_per_author = changes.get_authordateinfo_list()
        just_authors = changes.get_authorinfo_list()

        better_changes_per_author = {}
        maybe_new_cores = {}

        for c in changes.get_commits():
            change_date = datetime.datetime.fromtimestamp(int(c.timestamp))
            author_name = c.author
            try:
                author_info = changes.authors[author_name]
                better_changes_per_author[(change_date, author_name)] = \
                    author_info
            except KeyError:
                pass

        for (change_date, author_name) in better_changes_per_author.keys():
            author_email = changes.get_latest_email_by_author(author_name)
            author_info = better_changes_per_author[(change_date, author_name)]
            author_info.email = author_email
            if not should_discard(change_date, author_name,
                                  author_email, author_info):
                if author_name in maybe_new_cores:
                    # Keep only the newest change date for each author.
                    existing_info = maybe_new_cores[author_name]
                    if existing_info[2] < change_date:
                        existing_info[2] = change_date
                else:
                    maybe_new_cores[author_name] = [
                        author_name.encode("ascii", errors='replace'),
                        author_email,
                        change_date,
                        author_info.insertions,
                        author_info.deletions,
                        author_info.commits,
                    ]

        if maybe_new_cores:
            print("%s potential new cores found!!" % len(maybe_new_cores))
            # sorted(cmp=...) only exists on Python 2; cmp_to_key keeps
            # the same ordering and also works on Python 3.
            tmp_maybe_new_cores = sorted(
                maybe_new_cores.values(),
                key=functools.cmp_to_key(new_core_compare), reverse=True)
            headers = ['Name', 'Email', 'Last change made',
                       'Insertions', 'Deletions', 'Commits']
            print(tabulate.tabulate(tmp_maybe_new_cores, headers=headers,
                                    tablefmt="grid"))
        else:
            print("No new cores found!!")

        return changes.authors.copy()
def main(repos):
    """Analyze the given repo paths, then print combined new-core stats.

    After the per-repo reports, prints one table summing each author's
    insertions/deletions/commits across all repositories.
    """
    raw_repos = [os.path.abspath(p) for p in repos]
    parsed_repos = [Repository(os.path.basename(repo), repo)
                    for repo in raw_repos]
    all_authors = [dump_changes(repo) for repo in parsed_repos]
    if all_authors:
        print("Combined changes of %s repos:" % len(parsed_repos))
        maybe_new_cores = {}
        for repo_authors in all_authors:
            for author_name, author_info in repo_authors.items():
                # No date filtering in the combined pass, so pass None.
                if should_discard(None, author_name,
                                  author_info.email, author_info):
                    continue
                if author_name in maybe_new_cores:
                    # Sum the counters across repositories.
                    prior_author_info = maybe_new_cores[author_name]
                    prior_author_info[3] += author_info.insertions
                    prior_author_info[4] += author_info.deletions
                    prior_author_info[5] += author_info.commits
                else:
                    maybe_new_cores[author_name] = [
                        author_name.encode("ascii", errors='replace'),
                        author_info.email,
                        u"N/A",
                        author_info.insertions,
                        author_info.deletions,
                        author_info.commits,
                    ]
        # sorted(cmp=...) is Python 2 only; cmp_to_key preserves the
        # ordering on Python 3.
        tmp_maybe_new_cores = sorted(
            maybe_new_cores.values(),
            key=functools.cmp_to_key(new_core_compare), reverse=True)
        headers = ['Name', 'Email', 'Last change made',
                   'Insertions', 'Deletions', 'Commits']
        print(tabulate.tabulate(tmp_maybe_new_cores, headers=headers,
                                tablefmt="grid"))


if __name__ == '__main__':
    main(sys.argv[1:])

View File

@ -1,70 +0,0 @@
#!/usr/bin/env python
#
# Licensed under the Apache License, Version 2.0 (the "License"); you may
# not use this file except in compliance with the License. You may obtain
# a copy of the License at
#
# http://www.apache.org/licenses/LICENSE-2.0
#
# Unless required by applicable law or agreed to in writing, software
# distributed under the License is distributed on an "AS IS" BASIS, WITHOUT
# WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. See the
# License for the specific language governing permissions and limitations
# under the License.
import random
import sys
import jinja2
import parawrap
def expand_template(contents, params):
    """Render the Jinja2 template *contents* with *params*.

    StrictUndefined makes a missing placeholder raise rather than
    silently rendering empty.
    """
    context = params if params else {}
    template = jinja2.Template(source=contents,
                               undefined=jinja2.StrictUndefined)
    return template.render(**context)
# Random verb used in the invite text so repeated invites read less robotic.
chosen_how = [
    'selected',
    'picked',
    'targeted',
]

# Invitation email template; placeholders are filled from the CLI args.
new_oslo_core_tpl = """
Hi {{firstname}} {{lastname}},
You have been {{chosen_how}} to be a new {{project}} core (if you are
willing to accept this mission). We have been watching your commits and
reviews and have noticed that you may be interested in a core position
that would be granted to you (if you are willing to accept the
responsibility of being a new core member[1] in project {{project}}).
What do you think, are you able (and willing) to accept?
If you have any questions, please feel free to respond or jump on
freenode and chat with the team on channel #openstack-oslo (one of the
other cores in oslo usually around).
This message will self-destruct in 5 seconds.
Sincerely,
The Oslo Team
[1] http://docs.openstack.org/infra/manual/core.html
"""

# Usage: script.py <firstname> <lastname> <project>
firstname = sys.argv[1]
lastname = sys.argv[2]
tpl_args = {
    'firstname': firstname,
    'project': sys.argv[3],
    'lastname': lastname,
    # NOTE(review): the *_title keys are built but the template above
    # never references them -- confirm whether they are still needed.
    'firstname_title': firstname.title(),
    'lastname_title': lastname.title(),
    'chosen_how': random.choice(chosen_how),
}
tpl_value = expand_template(new_oslo_core_tpl.lstrip(), tpl_args)
# Paragraph-wrap the rendered text before printing.
tpl_value = parawrap.fill(tpl_value)
print(tpl_value)

View File

@ -1,18 +0,0 @@
Drop use of 'oslo' namespace package
The Oslo libraries have moved all of their code out of the 'oslo'
namespace package into per-library packages. The namespace package was
retained during kilo for backwards compatibility, but will be removed by
the liberty-2 milestone. This change removes the use of the namespace
package, replacing it with the new package names.
The patches in the libraries will be put on hold until application
patches have landed, or L2, whichever comes first. At that point, new
versions of the libraries without namespace packages will be released as
a major version update.
Please merge this patch, or an equivalent, before L2 to avoid problems
with those library releases.
Blueprint: remove-namespace-packages
https://blueprints.launchpad.net/oslo-incubator/+spec/remove-namespace-packages

View File

@ -1,248 +0,0 @@
#!/bin/bash
set -eu
# Print this script's help text to stdout and exit.
function usage {
  cat <<EOF
Usage: $0 [OPTION]...
Run project's test suite(s)

 -V, --virtual-env Always use virtualenv. Install automatically if not present.
 -N, --no-virtual-env Don't use virtualenv. Run tests in local environment.
 -s, --no-site-packages Isolate the virtualenv from the global Python environment.
 -r, --recreate-db Recreate the test database (deprecated, as this is now the default).
 -n, --no-recreate-db Don't recreate the test database.
 -f, --force Force a clean re-build of the virtual environment.
 Useful when dependencies have been added.
 -u, --update Update the virtual environment with any newer package versions.
 -p, --pep8 Just run PEP8 and HACKING compliance check.
 -P, --no-pep8 Don't run static code checks.
 -c, --coverage Generate coverage report.
 -d, --debug Run tests with testtools instead of testr.
 This allows you to use the debugger.
 -h, --help Print this usage message.
 --hide-elapsed Don't print the elapsed time for each test along with slow test list.
 --virtual-env-path <path> Location of the virtualenv directory.
 Default: \$(pwd)
 --virtual-env-name <name> Name of the virtualenv directory.
 Default: .venv
 --tools-path <dir> Location of the tools directory.
 Default: \$(pwd)

Note: with no options specified, the script will try to run the tests in a virtual environment,
 If no virtualenv is found, the script will ask if you would like to create one. If you 
 prefer to run tests NOT in a virtual environment, simply pass the -N option.
EOF
  exit
}
# Parse the command line, mutating the option globals declared below
# (ALWAYS_VENV, NEVER_VENV, FORCE, TESTOPTS, TESTRARGS, ...).  Walks
# the positional parameters by index with ${!i} indirect expansion so
# that value-taking options can consume the following argument.
function process_options {
  i=1
  while [ $i -le $# ]; do
    case "${!i}" in
      -h|--help) usage;;
      -V|--virtual-env) ALWAYS_VENV=1; NEVER_VENV=0;;
      -N|--no-virtual-env) ALWAYS_VENV=0; NEVER_VENV=1;;
      -s|--no-site-packages) NO_SITE_PACKAGES=1;;
      -r|--recreate-db) RECREATE_DB=1;;
      -n|--no-recreate-db) RECREATE_DB=0;;
      -f|--force) FORCE=1;;
      -u|--update) UPDATE=1;;
      -p|--pep8) JUST_PEP8=1;;
      -P|--no-pep8) NO_PEP8=1;;
      -c|--coverage) COVERAGE=1;;
      -d|--debug) DEBUG=1;;
      --virtual-env-path)
        # Value option: advance to and consume the next argument.
        (( i++ ))
        VENV_PATH=${!i}
        ;;
      --virtual-env-name)
        (( i++ ))
        VENV_DIR=${!i}
        ;;
      --tools-path)
        (( i++ ))
        TOOLS_PATH=${!i}
        ;;
      # Unknown flags pass through to the test runner; bare words are
      # collected as test selection arguments.
      -*) TESTOPTS="$TESTOPTS ${!i}";;
      *) TESTRARGS="$TESTRARGS ${!i}"
    esac
    (( i++ ))
  done
}
# Default locations; each can be pre-set from the environment.
TOOLS_PATH=${TOOLS_PATH:-${PWD}}
VENV_PATH=${VENV_PATH:-${PWD}}
VENV_DIR=${VENV_DIR:-.venv}
WITH_VENV=${TOOLS_PATH}/tools/with_venv.sh

# Option flags, mutated by process_options.
ALWAYS_VENV=0
NEVER_VENV=0
FORCE=0
NO_SITE_PACKAGES=1
INSTALLVENVOPTS=
TESTRARGS=
TESTOPTS=
WRAPPER=""
JUST_PEP8=0
NO_PEP8=0
COVERAGE=0
DEBUG=0
RECREATE_DB=1
UPDATE=0

# Pin the locale so tool output and sorting are reproducible.
LANG=en_US.UTF-8
LANGUAGE=en_US:en
LC_ALL=C

# Quote "$@" so arguments containing whitespace survive word-splitting
# (the previous unquoted $@ silently split such arguments).
process_options "$@"

# Make our paths available to other scripts we call
export VENV_PATH
export TOOLS_PATH
export VENV_DIR
export WITH_VENV
export VENV=${VENV_PATH}/${VENV_DIR}
# Run the test suite via testr/stestr, honoring the option globals set
# by process_options.  Returns the test run's exit status.
function run_tests {
  # Cleanup *pyc
  ${WRAPPER} find . -type f -name "*.pyc" -delete

  if [ ${DEBUG} -eq 1 ]; then
    if [ "${TESTOPTS}" = "" ] && [ "${TESTRARGS}" = "" ]; then
      # Default to running all tests if specific test is not
      # provided.
      # NOTE(review): TESTS_DIR is never assigned in this file; it is
      # presumably supplied by the environment -- confirm.
      TESTRARGS="discover ./${TESTS_DIR}"
    fi
    ${WRAPPER} python -m testtools.run ${TESTOPTS} ${TESTRARGS}

    # Short circuit because all of the testr and coverage stuff
    # below does not make sense when running testtools.run for
    # debugging purposes.
    return $?
  fi

  if [ ${COVERAGE} -eq 1 ]; then
    TESTRTESTS="${TESTRTESTS} --coverage"
  else
    TESTRTESTS="${TESTRTESTS}"
  fi

  # Just run the test suites in current environment
  set +e
  # Trim surrounding whitespace from the collected test arguments.
  TESTRARGS=`echo "${TESTRARGS}" | sed -e's/^\s*\(.*\)\s*$/\1/'`
  # NOTE(review): WORKERS_COUNT is never set in this file; under
  # 'set -u' this test would abort unless the caller exports it -- confirm.
  if [ ${WORKERS_COUNT} -ne 0 ]; then
    TESTRTESTS="${TESTRTESTS} --testr-args='--concurrency=${WORKERS_COUNT} --subunit ${TESTOPTS} ${TESTRARGS}'"
  else
    TESTRTESTS="${TESTRTESTS} --testr-args='--subunit ${TESTOPTS} ${TESTRARGS}'"
  fi

  # Rebuild egg-info when setup.cfg is newer than it.
  # NOTE(review): EGG_INFO_FILE is also expected from the environment.
  if [ setup.cfg -nt ${EGG_INFO_FILE} ]; then
    ${WRAPPER} python setup.py egg_info
  fi

  echo "Running \`${WRAPPER} ${TESTRTESTS}\`"
  if ${WRAPPER} which subunit-2to1 2>&1 > /dev/null; then
    # subunit-2to1 is present, testr subunit stream should be in version 2
    # format. Convert to version one before colorizing.
    bash -c "${WRAPPER} ${TESTRTESTS} | ${WRAPPER} subunit-2to1 | ${WRAPPER} ${TOOLS_PATH}/tools/colorizer.py"
  else
    bash -c "${WRAPPER} ${TESTRTESTS} | ${WRAPPER} ${TOOLS_PATH}/tools/colorizer.py"
  fi
  RESULT=$?
  set -e

  copy_subunit_log

  if [ $COVERAGE -eq 1 ]; then
    echo "Generating coverage report in covhtml/"
    ${WRAPPER} coverage combine
    # Don't compute coverage for common code, which is tested elsewhere
    # if we are not in `oslo-incubator` project
    # NOTE(review): OMIT_OSLO_FROM_COVERAGE and PROJECT_NAME appear to be
    # environment-supplied; also --include='${PROJECT_NAME}/*' is
    # single-quoted, so the variable reaches coverage literally
    # unexpanded -- confirm whether that is intended.
    if [ ${OMIT_OSLO_FROM_COVERAGE} -eq 0 ]; then
      OMIT_OSLO=""
    else
      OMIT_OSLO="--omit='${PROJECT_NAME}/openstack/common/*'"
    fi
    ${WRAPPER} coverage html --include='${PROJECT_NAME}/*' ${OMIT_OSLO} -d covhtml -i
  fi

  return ${RESULT}
}
# Copy the most recent testr subunit stream to ./subunit.log.
# Uses a local variable: the previous version clobbered the standard
# LOGNAME environment variable (the login name) as a side effect.
function copy_subunit_log {
  local last
  # next-stream holds the id the *next* run will use; latest is one less.
  last=$(($(cat .testrepository/next-stream) - 1))
  cp ".testrepository/${last}" subunit.log
}
# Run the flake8 static checks (through the venv wrapper when one is set).
function run_pep8 {
  printf '%s\n' "Running flake8 ..."
  bash -c "${WRAPPER} flake8"
}
# Base test-runner command; options are appended to it inside run_tests.
TESTRTESTS="lockutils-wrapper stestr"

if [ ${NO_SITE_PACKAGES} -eq 1 ]; then
  INSTALLVENVOPTS="--no-site-packages"
fi

if [ ${NEVER_VENV} -eq 0 ]; then
  # Remove the virtual environment if -f or --force used
  if [ ${FORCE} -eq 1 ]; then
    echo "Cleaning virtualenv..."
    rm -rf ${VENV}
  fi

  # Update the virtual environment if -u or --update used
  if [ ${UPDATE} -eq 1 ]; then
    echo "Updating virtualenv..."
    python ${TOOLS_PATH}/tools/install_venv.py ${INSTALLVENVOPTS}
  fi

  if [ -e ${VENV} ]; then
    WRAPPER="${WITH_VENV}"
  else
    if [ ${ALWAYS_VENV} -eq 1 ]; then
      # Automatically install the virtualenv
      python ${TOOLS_PATH}/tools/install_venv.py ${INSTALLVENVOPTS}
      WRAPPER="${WITH_VENV}"
    else
      # Interactive fallback: offer to create the venv; empty/Y/y accepts.
      echo -e "No virtual environment found...create one? (Y/n) \c"
      read USE_VENV
      if [ "x${USE_VENV}" = "xY" -o "x${USE_VENV}" = "x" -o "x${USE_VENV}" = "xy" ]; then
        # Install the virtualenv and run the test suite in it
        python ${TOOLS_PATH}/tools/install_venv.py ${INSTALLVENVOPTS}
        WRAPPER=${WITH_VENV}
      fi
    fi
  fi
fi

# Delete old coverage data from previous runs
if [ ${COVERAGE} -eq 1 ]; then
  ${WRAPPER} coverage erase
fi

if [ ${JUST_PEP8} -eq 1 ]; then
  run_pep8
  exit
fi

if [ ${RECREATE_DB} -eq 1 ]; then
  rm -f tests.sqlite
fi

run_tests

# NOTE(sirp): we only want to run pep8 when we're running the full-test suite,
# not when we're running tests individually. To handle this, we need to
# distinguish between options (testropts), which begin with a '-', and
# arguments (testrargs).
if [ -z "${TESTRARGS}" ]; then
  if [ ${NO_PEP8} -eq 0 ]; then
    run_pep8
  fi
fi

View File

@ -1,80 +0,0 @@
#!/usr/bin/env python
#
# Licensed under the Apache License, Version 2.0 (the "License"); you may
# not use this file except in compliance with the License. You may obtain
# a copy of the License at
#
# http://www.apache.org/licenses/LICENSE-2.0
#
# Unless required by applicable law or agreed to in writing, software
# distributed under the License is distributed on an "AS IS" BASIS, WITHOUT
# WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. See the
# License for the specific language governing permissions and limitations
# under the License.
import sys
import delorean
import jinja2
import parawrap
def expand_template(contents, params):
if not params:
params = {}
tpl = jinja2.Template(source=contents, undefined=jinja2.StrictUndefined)
return tpl.render(**params)
# Announcement-email template for a virtual sprint; the {{ ... }}
# placeholders are rendered by expand_template() and the result is
# paragraph-wrapped before printing.
TPL = """
Hi everyone,
The OpenStack {{ team }} team will be hosting a virtual sprint in
the Freenode IRC channel #{{ channel }} for the {{ for }}
on {{ when }} starting at {{ starts_at }} and going for ~{{ duration }} hours.
The goal of this sprint is to work on any open reviews, documentation or
any other integration questions, development and so-on, so that we can help
progress the {{ for }} forward at a good rate.
Live version of the current documentation is available here:
{{ docs }}
The code itself lives in the openstack/{{ project }} repository.
{{ git_tree }}
Please feel free to join if interested, curious, or able.
Much appreciated,
{{ author }}
"""
# Example:
#
# python tools/virtual_sprint.py "taskflow" "next tuesday" "Joshua Harlow"
if len(sys.argv) != 4:
    print("%s project when author" % sys.argv[0])
    sys.exit(1)

# Something like 'next tuesday' is expected...
# NOTE(review): this assumes delorean exposes natural-language offsets
# (e.g. 'next tuesday' -> next_tuesday) as attributes -- confirm.
d = delorean.Delorean()
when = getattr(d, sys.argv[2].replace(" ", "_"))
project = sys.argv[1]
author = sys.argv[3]
# Values substituted into TPL above.
params = {
    'team': 'oslo',
    'project': project,
    'channel': 'openstack-oslo',
    'docs': 'http://docs.openstack.org/developer/%s/' % project,
    'when': when().datetime.strftime('%A %m-%d-%Y'),
    'starts_at': '16:00 UTC',
    'duration': 8,
    'author': author,
    'git_tree': 'http://git.openstack.org/cgit/openstack/%s/tree' % project,
}
params['for'] = params['project'] + ' ' + 'subproject'
# Paragraph-wrap the rendered announcement before printing.
for line in parawrap.wrap(expand_template(TPL.strip(), params)):
    print(line)

View File

@ -1,6 +0,0 @@
#!/bin/bash
# Run the given command inside the project's virtualenv.  All locations
# can be overridden from the environment (run_tests.sh exports them
# before invoking this wrapper).
TOOLS_PATH=${TOOLS_PATH:-$(dirname "$0")/../}
VENV_PATH=${VENV_PATH:-${TOOLS_PATH}}
# NOTE(review): the '/.venv' default yields a harmless double slash in
# ${VENV}; run_tests.sh uses '.venv' -- confirm before unifying.
VENV_DIR=${VENV_DIR:-/.venv}
VENV=${VENV:-${VENV_PATH}/${VENV_DIR}}
# Quote the path so virtualenvs living under paths with spaces work.
source "${VENV}/bin/activate" && "$@"