Remove obsolete unused utils

- utils/simple_http_daemon.py is not necessary any more;
  there's a CentOS container running apache during
  the build
- utils/reduce-requirements-deb.sh is not necessary because
  there is no requirements-deb.txt any more
- utils/rename.sh has never been used
- utils/add_license.sh is not used
- utils/tftp_probe.py has never been used
- utils/pairwise_testcases.py is not used
- utils/jenkins/review-request.sh and utils/jenkins/review-accept.sh
  are deprecated in favor of using OpenStack gerrit.
- utils/jenkins/fuelweb_fakeui.sh is not necessary; we use puppet
  to deploy fake UI on demo.fuel-infra.org, the same for the following
    -- utils/jenkins/init.d/nailgun
    -- utils/jenkins/nginx/nailgun.conf
    -- utils/jenkins/common.sh
- utils/git-helper is not necessary any more (nobody remembers this)
- utils/jenkins/report-exporter is not necessary (ask @bookwar for details)
- utils/jenkins/nailgun-docs.sh is not used any more

Change-Id: I4ee2301be600b9b950b32aa1bb9df3ad9201aac2
This commit is contained in:
Vladimir Kozhukalov 2015-09-29 11:36:45 +03:00
parent 18cf3a51f7
commit cac1c728e3
25 changed files with 0 additions and 1484 deletions

View File

@ -84,7 +84,6 @@ $(BUILD_DIR)/docker/sources.done: \
$(find-files $(SOURCE_DIR)/docker)
mkdir -p $(BUILD_DIR)/docker/sources $(BUILD_DIR)/docker/utils
find $(SOURCE_DIR)/docker -mindepth 1 -type d -not -name '*fuel-centos-build*' | xargs cp -r --target-directory=$(BUILD_DIR)/docker/sources
cp -r $(SOURCE_DIR)/utils/simple_http_daemon.py $(BUILD_DIR)/docker/utils
$(ACTION.TOUCH)
$(foreach cnt,$(containers),$(eval $(call build_container,$(cnt))))

View File

@ -1,26 +0,0 @@
#!/bin/zsh
# Prepend the Apache 2.0 license header to every Python/Ruby source file
# under the current directory tree that does not already carry a
# copyright notice. Requires zsh for the recursive **/*.(py|rb) glob.
copyright='# Copyright 2013 Mirantis, Inc.
#
# Licensed under the Apache License, Version 2.0 (the "License"); you may
# not use this file except in compliance with the License. You may obtain
# a copy of the License at
#
# http://www.apache.org/licenses/LICENSE-2.0
#
# Unless required by applicable law or agreed to in writing, software
# distributed under the License is distributed on an "AS IS" BASIS, WITHOUT
# WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. See the
# License for the specific language governing permissions and limitations
# under the License.
'
# Recursively visit all *.py and *.rb files.
for i in **/*.(py|rb); do
    # Skip files that already mention "Copyright" anywhere in their text.
    if ! grep -q Copyright $i; then
        cp $i $i.bak
        echo "$copyright" > $i.new
        # Rebuild the file as: license header, then original content.
        cat $i.new $i.bak > $i
        rm -f $i.new $i.bak
    fi
done

View File

@ -1,33 +0,0 @@
git-helper
==========
git_api.py is a wrapper around git subcommands.
The code was inspired by https://github.com/alekseiko/autoMaster,
but has been significantly rewritten.
Usage example:
```python
>>> import git_api
>>> engine = git_api.GitEngine()
Executing command git status in cwd=test-repo
Executing command mkdir test-repo in cwd=.
Executing command git init in cwd=test-repo
>>> engine.fetch()
Executing command git fetch git@github.com:mihgen/test-repo.git +refs/heads/*:refs/remotes/origin/* in cwd=test-repo
>>> commits = engine.diff_commits("remotes/origin/master", "remotes/origin/newbr")
Executing command git log remotes/origin/master..remotes/origin/newbr --pretty=format:%H in cwd=test-repo
>>> commits
['ebe7d216a3ad2268693946d122eff14fb2986051']
>>> engine.checkout_from_remote_branch("remotes/origin/master")
Executing command git branch -D temp-for-engine in cwd=test-repo
ERRRO: Command: 'git branch -D temp-for-engine' Status: 1 err: 'error: branch 'temp-for-engine' not found.' out: ''
Executing command git checkout remotes/origin/master -b temp-for-engine in cwd=test-repo
>>>
>>> for sha in commits:
... engine.cherry_pick(sha)
...
Executing command git cherry-pick ebe7d216a3ad2268693946d122eff14fb2986051 in cwd=test-repo
>>> engine.push("master")
Executing command git push git@github.com:mihgen/test-repo.git temp-for-engine:master in cwd=test-repo
```

View File

@ -1,229 +0,0 @@
#!/usr/bin/env python
import subprocess
import sys
import os
import json
from restkit import Resource, BasicAuth, Connection, request
from socketpool import ConnectionPool
class GithubEngine(object):
    """Thin wrapper around the GitHub REST API via restkit.

    All requests target https://api.github.com and authenticate with an
    OAuth token sent in the Authorization header.
    """

    def __init__(self, user, token):
        # NOTE(review): `user` is accepted but never stored or used here.
        self.pool = ConnectionPool(factory=Connection)
        self.token = token
        self.headers = {'Content-Type': 'application/json',
                        'Authorization': 'token %s' % self.token}

    # We don't use this method, but it can be useful in some cases
    def create_token(self, user, password):
        """Create an API token via basic auth and store it on self.token."""
        serverurl = "https://api.github.com"  # NOTE(review): unused local
        auth = BasicAuth(user, password)
        authreqdata = {"scopes": ["repo"], "note": "admin script"}
        resource = Resource('https://api.github.com/authorizations',
                            pool=self.pool, filters=[auth])
        response = resource.post(headers={"Content-Type": "application/json"},
                                 payload=json.dumps(authreqdata))
        self.token = json.loads(response.body_string())['token']

    def list_repos(self):
        """Return the authenticated user's repositories (decoded JSON)."""
        resource = Resource('https://api.github.com/user/repos',
                            pool=self.pool)
        response = resource.get(headers=self.headers)
        return json.loads(response.body_string())

    def get_pull_request_by_label(self, user, repo, label):
        """Return pull requests whose head label matches `label`.

        `label` has the "<user>:<branch>" form GitHub uses for PR heads.
        """
        resource = Resource("https://api.github.com/repos/%s/%s/pulls" %
                            (user, repo))
        pulls = json.loads(resource.get(headers=self.headers).body_string())
        pulls_by_label = filter(lambda p: p['head']['label'] == label, pulls)
        return pulls_by_label  # I hope there is no more than one

    def update_pull_request(self, user, repo, number, data):
        """POST `data` (dict) onto an existing pull request; return reply."""
        resource = Resource("https://api.github.com/repos/%s/%s/pulls/%s" %
                            (user, repo, number))
        res = resource.post(headers=self.headers,
                            payload=json.dumps(data))
        return json.loads(res.body_string())

    def create_pull_request(self, user, repo, to_user, base_branch,
                            branch, title="", body=""):
        """Open a PR from user:branch into to_user/repo's base_branch."""
        if not title:
            title = "Robot pull request. Please review."
        resource = Resource("https://api.github.com/repos/%s/%s/pulls" %
                            (to_user, repo))
        pulldata = {"title": title, "body": body,
                    "head": "%s:%s" % (user, branch), "base": base_branch}
        response = resource.post(headers=self.headers,
                                 payload=json.dumps(pulldata))
        return json.loads(response.body_string())
class GitEngine(object):
    """Drive git through shell subcommands in a local working repository.

    NOTE(review): Python 2 only (print statements). Every command runs
    with shell=True and unescaped string interpolation, so inputs are
    trusted by assumption.
    """

    def __init__(self, local_repo, repo_url):
        self.local_repo = local_repo
        self.remote_path = repo_url
        # Scratch branch reused by checkout/rebase/push operations.
        self.local_branch = "temp-for-engine"
        self.refs_name = "origin"
        try:
            # Raises exception if can't change dir or can't get git info
            self.__exec("git status")
        except:
            # Let's create repo dir and initialize repo.
            self.__exec("mkdir %s" % local_repo, ".")
            self.__exec("git init")

    def __exec(self, command, cwd=None):
        """Run `command` in `cwd` (defaults to the local repo).

        Returns stripped stdout; raises GitEngineError on non-zero exit.
        """
        if not cwd:
            cwd = self.local_repo
        print "Executing command %s in cwd=%s" % (command, cwd)
        proc = subprocess.Popen(command, cwd=cwd,
                                stderr=subprocess.PIPE,
                                stdout=subprocess.PIPE,
                                shell=True)
        try:
            # NOTE(review): draining both pipes fully before wait() can
            # deadlock on very large output; acceptable for git commands.
            stdout_value = proc.stdout.read().rstrip()
            stderr_value = proc.stderr.read().rstrip()
            status = proc.wait()
        finally:
            proc.stdout.close()
            proc.stderr.close()
        if status != 0:
            # NOTE(review): "ERRROR" is a typo preserved from the original.
            print "ERRROR: Command: '%s' Status: %s err: '%s' out: '%s'" % \
                (command, status, stderr_value, stdout_value)
            raise GitEngineError(status, stderr_value)
        return stdout_value

    def push(self, remote_branch, remote_path=None, local_branch=None):
        """Push local_branch to remote_branch, refusing non-fast-forwards."""
        if not local_branch:
            local_branch = self.local_branch
        if not remote_path:
            remote_path = self.remote_path
        # Check if we can do fast-forward
        if not self.is_rebased(local_branch, "remotes/%s/%s" % (
                self.refs_name, remote_branch)):
            print "ERROR: Not able to push. " \
                "Branch %s was not rebased to %s" % \
                (local_branch, remote_branch)
            # NOTE(review): bare `raise` with no active exception is a
            # bug -- it raises TypeError instead of a meaningful error.
            raise
        command = "git push %s %s:%s" % (remote_path, local_branch,
                                         remote_branch)
        try:
            self.__exec(command)
        except GitEngineError as e:
            if e.status == 1:
                print "ERROR: Not able to push. " \
                    "Possible reason: Branch %s was not rebased to %s." % \
                    (local_branch, remote_branch)
            raise

    def remove_remote_branch(self, remote_branch, remote_path=None):
        """Delete a branch on the remote (push of an empty source ref)."""
        if not remote_path:
            remote_path = self.remote_path
        self.__exec("git push %s :%s" % (remote_path, remote_branch))

    def fetch(self, remote_path=None, refs_name="origin"):
        """Prune-fetch all heads from remote_path under refs/remotes/<name>."""
        if not remote_path:
            remote_path = self.remote_path
        self.refs_name = refs_name
        command = "git fetch -p " + remote_path
        # add refs definition
        command += " +refs/heads/*:refs/remotes/%s/*" % refs_name
        self.__exec(command)

    def submodule_init(self):
        """Register submodules from .gitmodules."""
        command = "git submodule init"
        self.__exec(command)

    def submodule_update(self):
        """Check out the recorded submodule commits."""
        command = "git submodule update"
        self.__exec(command)

    def wipe_all_submodules_helper(self):
        """Remove every submodule working directory (pre-rebase cleanup)."""
        command = "for path in `git submodule status|cut -d' ' -f3`;" \
            " do rm -rf $path; done"
        self.__exec(command)

    def cherry_pick(self, from_sha):
        """Cherry-pick a single commit onto the current branch."""
        command = "git cherry-pick %s" % from_sha
        self.__exec(command)

    def merge_fast_forward(self, branch_to_merge):
        """Merge branch_to_merge, failing unless a fast-forward is possible."""
        command = "git merge %s --ff-only" % branch_to_merge
        self.__exec(command)

    def diff_commits(self, master_branch, slave_branch):
        """ return ordered (from older to newer) list of sha's"""
        command = "git log %s..%s" % (master_branch, slave_branch)
        command += " --pretty=format:%H"
        out = self.__exec(command)
        # if commits aren't found
        if out == "":
            return []
        # split commit shas to list
        commits = [line for line in out.split("\n")]
        # git log emits newest first; reverse to oldest-first order.
        return commits[::-1]

    def checkout_from_remote_branch(self, remote_branch, local_branch=None):
        """Check out remote_branch into a (recreated) local branch."""
        command = "git checkout %s" % remote_branch
        if not local_branch:
            local_branch = self.local_branch
        else:
            # Store local_branch, we may need it later
            self.local_branch = local_branch
        command += " -b " + local_branch
        # Make sure we overwrite existing branch
        # Detaching HEAD to be able to remove branch we are currently on
        self.__exec("git checkout -f %s" % remote_branch)
        try:
            # Deleting branch
            self.__exec("git branch -D %s" % local_branch)
        except:
            # Exception is raised if there is no branch to delete
            pass
        self.__exec(command)

    def rebase(self, branch):
        """Rebase the current branch onto `branch`, clearing stale state."""
        # Remove leftovers of an interrupted rebase before starting.
        self.__exec("rm -fr .git/rebase-apply")
        self.__exec("git rebase %s" % branch)

    def is_rebased(self, source, destination):
        """Return True if `source` can fast-forward onto `destination`."""
        if not source:
            source = self.local_branch
        # Get commits that differ between branches
        commits = self.diff_commits(destination, source)
        if not commits:
            # It means the branch has been rebased and fast-forwarded already
            return True
        # Check if parent of the first commit is refers to top dest. branch
        command = "git rev-parse %s^1" % commits[0]
        parent = self.__exec(command)
        if parent == "":
            raise GitEngineError(0, "Could not determine parent commit")
        head_in_dest = self.__exec("git rev-parse %s" % destination)
        if parent == head_in_dest:
            return True
        else:
            return False
class GitEngineError(Exception):
    """Raised when a git command exits with a non-zero status.

    Attributes:
        status: integer exit code of the failed command.
        error: stderr text captured from the command.
    """

    def __init__(self, status, error):
        self.status = status
        self.error = error

    def __str__(self):
        # Bug fix: `status` is an int, so the original
        # `repr(self.status + " " + self.error)` raised TypeError
        # whenever the exception was printed. Format via %s instead.
        return repr("%s %s" % (self.status, self.error))

View File

@ -1,6 +0,0 @@
; Copy this configuration file to ~/review.conf and edit
; Create token manually on https://github.com/settings/applications
[github]
user: user
token: token

View File

@ -1,148 +0,0 @@
#!/usr/bin/env python
import sys
import os
import argparse
import restkit
import re
import ConfigParser
import git_api
class Review(object):
    """Jenkins-driven review flow: rebase a user branch, open/close PRs.

    NOTE(review): Python 2 only (print statements); depends on restkit
    and the local git_api module.
    """

    def __init__(self, params):
        # Working clone lives in ./local_repo.
        self.git = git_api.GitEngine("local_repo", params.repo_url)
        # GithubEngine is created lazily, only when PR operations run.
        self.github = None
        # Extract <user>/<repo> from an SSH-style GitHub URL.
        p = re.compile('git@github.com:(\S+)\/(\S+)\.git')
        self.user, self.repo = p.match(params.repo_url).groups()
        self.repo_url = params.repo_url
        self.remote_branch = params.remote_branch
        if params.origin_repo_url:
            self.origin_repo_url = params.origin_repo_url
        else:
            # Fall back to reviewing within the same repository.
            self.origin_repo_url = params.repo_url
        self.origin_branch = params.origin_branch
        self.origin_user, self.origin_repo = p.match(
            self.origin_repo_url).groups()
        # GitHub credentials come from ~/.review.conf ([github] section).
        config = ConfigParser.ConfigParser()
        config.read(os.path.expanduser("~/.review.conf"))
        self.github_user = config.get('github', 'user')
        self.github_token = config.get('github', 'token')

    def rebase(self):
        """Rebase the user branch onto the origin branch, with submodules.

        Raises Exception with a human-readable hint if the automatic
        rebase fails.
        """
        self.git.fetch(refs_name='devel')
        self.git.fetch(remote_path=self.origin_repo_url, refs_name='origin')
        self.git.checkout_from_remote_branch(
            "remotes/devel/%s" % self.remote_branch)
        self.git.submodule_init()
        # Wipe all submodule's dirs before rebasing.
        self.git.wipe_all_submodules_helper()
        try:
            self.git.rebase("remotes/origin/%s" % self.origin_branch)
        except:
            raise Exception(
                "ERROR: Auto-rebase of %s failed. Try to "
                "'git rebase origin/%s' from your local branch "
                "and push again" % (self.remote_branch, self.origin_branch))
        self.git.submodule_update()

    def push(self):
        """Push the rebased branch to origin and close the matching PR."""
        self.git.push(remote_branch=self.origin_branch,
                      remote_path=self.origin_repo_url)
        # Remove remote branch as we don't need it after merge
        self.git.remove_remote_branch(remote_branch=self.remote_branch,
                                      remote_path=self.repo_url)
        print "Closing pull request.."
        self._github_lazy_init()
        pull_requests = self.github.get_pull_request_by_label(
            self.origin_user, self.origin_repo, "%s:%s" % (
                self.user, self.remote_branch))
        if pull_requests:
            pull_number = pull_requests[0]['number']
            print "Found pull request #%s. Closing.." % pull_number
            newdata = {'state': 'closed'}
            self.github.update_pull_request(self.origin_user, self.origin_repo,
                                            pull_number, newdata)

    def add_pull_request(self, title="default title", body="default body"):
        """Create a PR for the user branch, or reuse an existing one."""
        self._github_lazy_init()
        try:
            res = self.github.create_pull_request(
                self.user, self.repo, self.origin_user, self.origin_branch,
                self.remote_branch, title, body)
            pull_number = res['number']
        except restkit.errors.RequestFailed as e:
            # GitHub rejects duplicate PRs; look the existing one up.
            print "Error occured while creating pull request." \
                " Possibly it already exists."
            pull_requests = self.github.get_pull_request_by_label(
                self.origin_user, self.origin_repo, "%s:%s" % (
                    self.user, self.remote_branch))
            pull_number = pull_requests[0]['number']
        url = "https://github.com/%s/%s/pull/%s" % (
            self.origin_user, self.origin_repo, pull_number)
        # HTML output is rendered on the Jenkins job page.
        print "<a href=\"%s\">Pull request #%s</a>" % (url, pull_number)

    def _github_lazy_init(self):
        # Instantiate the GitHub client on first use only.
        if not self.github:
            self.github = git_api.GithubEngine(self.github_user,
                                               self.github_token)
if __name__ == "__main__":
parser = argparse.ArgumentParser(description="Review system")
parser.add_argument('--repo', dest='repo_url', type=str, required=True,
help='URL to repository, format: '
'git@github.com:<user>/<repo>.git')
parser.add_argument('--branch', dest='remote_branch', type=str,
required=True, help='Remote branch')
parser.add_argument('--origin-repo', dest='origin_repo_url', type=str,
required=False, help='URL to repository, format: git'
'@github.com:<user>/<repo>.git')
parser.add_argument('--origin-branch', dest='origin_branch',
default='master', required=False, type=str,
help='Remote branch')
parser.add_argument('-t' '--pull_title', dest='pull_title', type=str,
help='Title for pull request')
parser.add_argument('-b' '--pull_body', dest='pull_body', type=str,
help='Body for pull request')
group = parser.add_mutually_exclusive_group(required=True)
group.add_argument(
'-c', '--check', action='store_true',
help='Check if branch can be rebased. Prepare it for tests.')
group.add_argument(
'-a', '--add', action='store_true',
help='Add pull request from user branch to master')
group.add_argument(
'-p', '--push', action='store_true',
help='Pushes rebased code from user branch to remote master')
params = parser.parse_args()
rvw = Review(params)
# Expected flow:
# 1. --check for attempts to rebase
# 2. ./run_tests against current code (out of this script)
# 3. --add to create pull request on github
# 4. Someone reviews code on github
# 5. Release manager runs --push to rebase user branch and push to master
if params.check:
rvw.rebase()
elif params.add:
rvw.add_pull_request(params.pull_title, params.pull_body)
elif params.push:
rvw.rebase()
rvw.push()

View File

@ -1,37 +0,0 @@
# Fail the build if any rb/py/js source file under nailgun or astute
# lacks a license notice (astute docs are exempt).
function license_check {
    # License information must be in every source file
    cd $WORKSPACE/local_repo
    tmpfile=`tempfile`
    # grep -Li: list files that never mention "License", case-insensitively.
    find nailgun astute -not -path "astute/docs/*" -regex ".*\.\(rb\|py\|js\)" -type f -print0 | xargs -0 grep -Li License > $tmpfile
    files_with_no_license=`wc -l $tmpfile | awk '{print $1}'`
    if [ $files_with_no_license -gt 0 ]; then
        echo "ERROR: Found files without license, see files below:"
        cat $tmpfile
        rm -f $tmpfile
        exit 1
    fi
    rm -f $tmpfile
}

# Install nailgun's Python dependencies: into the active virtualenv if
# one is set, otherwise system-wide via sudo.
function nailgun_deps {
    # Installing nailgun dependencies
    if [ -n "${VIRTUAL_ENV}" ]; then
        pip install -r $WORKSPACE/requirements-eggs.txt
    else
        echo "INFO: install dependencies system-wide"
        sudo pip install -r $WORKSPACE/requirements-eggs.txt
    fi
}

# Install dependencies and run the nailgun test suite.
function nailgun_checks {
    nailgun_deps
    cd $WORKSPACE/local_repo/nailgun
    # ***** Running Python unit tests, includes pep8 check of nailgun *****
    ./run_tests.sh --with-xunit # --no-ui-tests
}

# Run astute's Ruby test suite with WORKSPACE pointed at its subtree.
function ruby_checks {
    cd $WORKSPACE/local_repo/astute
    WORKSPACE=$WORKSPACE/local_repo/astute ./run_tests.sh
}

View File

@ -1 +0,0 @@
Jenkins jobs configuration moved to https://review.fuel-infra.org/fuel-infra/jenkins-jobs.git

View File

@ -1,49 +0,0 @@
#!/bin/bash
# Deploy the nailgun "fake UI" demo: build the UI from the nailgun repo,
# load sample fixtures, wire it up behind nginx, and (re)start the
# nailgun init service. Expects $WORKSPACE and common.sh helpers.
. $WORKSPACE/utils/jenkins/common.sh
topdir=$WORKSPACE/utils/jenkins

# Install our init script and stop any previously running instance.
sudo ln -sf $topdir/init.d/nailgun /etc/init.d/nailgun
sudo WORKSPACE=$WORKSPACE /etc/init.d/nailgun stop

# Installing nailgun dependencies
nailgun_deps

make clean
make $WORKSPACE/build/repos/nailgun.done
cd $WORKSPACE/build/repos/nailgun/nailgun
npm install

# Cleaning database
./manage.py dropdb
# Loading data
./manage.py syncdb
./manage.py loaddefault
./manage.py loaddata nailgun/fixtures/sample_environment.json

# Compressing javascript
./node_modules/.bin/gulp build --static-dir=static_compressed

# Replace static path with the one pointing to compressed static content folder
STATIC_DIR=$WORKSPACE/build/repos/nailgun/nailgun/static_compressed
# Substitute the placeholder paths in the nginx template in place.
sed 's|_replace_me_static_compressed_path_|'"$STATIC_DIR"'|' -i $topdir/nginx/nailgun.conf
sed 's|_replace_me_static_path_|'"$WORKSPACE"'/build/repos/nailgun/nailgun/static|' -i $topdir/nginx/nailgun.conf
sudo ln -sf $topdir/nginx/nailgun.conf /etc/nginx/conf.d/nailgun.conf

# Point nailgun at the compressed static content; disable dev mode.
sed 's|^TEMPLATE_DIR:.*$|TEMPLATE_DIR: '"$STATIC_DIR"'|' -i $WORKSPACE/build/repos/nailgun/nailgun/nailgun/settings.yaml
sed 's|^STATIC_DIR:.*$|STATIC_DIR: '"$STATIC_DIR"'|' -i $WORKSPACE/build/repos/nailgun/nailgun/nailgun/settings.yaml
sed 's|^DEVELOPMENT:.*$|DEVELOPMENT: false|' -i $WORKSPACE/build/repos/nailgun/nailgun/nailgun/settings.yaml

# Show date and commit hash in ui, enable mirantis logo
VERSION_TEXT="`git show -s --format=%ci HEAD` `git rev-parse --verify HEAD`"
sed 's| release:.*$| release: "'"$VERSION_TEXT"'"|' -i $WORKSPACE/build/repos/nailgun/nailgun/nailgun/settings.yaml
if [ "$MIRANTIS" = "yes" ]; then
    sed 's| mirantis:.*$| mirantis: "yes"|' -i $WORKSPACE/build/repos/nailgun/nailgun/nailgun/settings.yaml
fi

# Starting fake UI
sudo WORKSPACE=$WORKSPACE /etc/init.d/nailgun start
# Reload updated config file
sudo /etc/init.d/nginx reload

View File

@ -1,87 +0,0 @@
#!/bin/sh
#
# nailgun - this script starts and stops the nailgun daemon
#
# chkconfig: - 84 15
# description: Nailgun service
# processname: nailgun
# pidfile: /var/run/nailgun.pid

# Source function library.
. /etc/rc.d/init.d/functions

prog="nailgun"
sysconfig="/etc/sysconfig/$prog"
pidfile="/var/run/${prog}.pid"

[ -f $sysconfig ] && . $sysconfig
# WORKSPACE must point at the checkout that contains build/repos/nailgun.
[ -z $WORKSPACE ] && { echo "ERROR: WORKSPACE var is not set. Please set WORKSPACE var pointing to the root of repository"; exit 1; }

# Launch manage.py with fake tasks as the jenkins user; HTTP access noise
# is filtered out of /var/log/nailgun.log.
# NOTE(review): nothing writes $pidfile, so killproc -p in reload() has
# no pid to signal -- confirm before relying on reload.
start() {
    echo -n $"Starting $prog: "
    su -l jenkins --shell=/bin/bash -c "cd $WORKSPACE/build/repos/nailgun/nailgun && nohup python manage.py run -p 8001 --fake-tasks 2>&1 | grep --line-buffered -v -e HTTP -e '^$' >> /var/log/nailgun.log &"
    retval=$?
    echo
    return $retval
}

# Kill every 'manage.py run' process rather than tracking a pidfile.
stop() {
    echo -n $"Stopping $prog: "
    for i in `ps aux|grep 'manage.py run'|grep -v grep|awk '{print $2}'`; do
        kill -9 $i
    done
    retval=$?
    echo
    return $retval
}

restart() {
    stop
    start
}

reload() {
    echo -n $"Reloading $prog: "
    killproc -p $pidfile $prog -HUP
    echo
}

rh_status() {
    status $prog
}

rh_status_q() {
    rh_status >/dev/null 2>&1
}

case "$1" in
    start)
        $1
        ;;
    stop)
        $1
        ;;
    restart|configtest|reopen_logs)
        $1
        ;;
    force-reload|upgrade)
        rh_status_q || exit 7
        # NOTE(review): `upgrade` is not defined anywhere in this script;
        # this branch fails with "command not found" if ever taken.
        upgrade
        ;;
    reload)
        rh_status_q || exit 7
        $1
        ;;
    status|status_q)
        rh_$1
        ;;
    condrestart|try-restart)
        rh_status_q || exit 7
        restart
        ;;
    *)
        echo $"Usage: $0 {start|stop|reload|configtest|status|force-reload|upgrade|restart|reopen_logs}"
        exit 2
esac

View File

@ -1,14 +0,0 @@
#!/bin/bash
. $WORKSPACE/utils/jenkins/common.sh
nailgun_deps
make clean
make $WORKSPACE/build/repos/nailgun.done
cd $WORKSPACE/build/repos/nailgun/docs
make clean
make html
rsync -avz -e ssh --delete _build/html/ jenkins@mos-docs.vm.mirantis.net:/var/www/fuel-dev
cd $WORKSPACE

View File

@ -1,66 +0,0 @@
# nginx front-end for the fake-UI nailgun instance started by
# utils/jenkins/fuelweb_fakeui.sh. Both servers proxy to the same
# manage.py backend on 127.0.0.1:8001; the placeholder alias paths are
# substituted by sed before the file is installed.
upstream nailgun-application {
    server 127.0.0.1:8001;
}

# Port 8000: serves the gulp-compressed static bundle.
server {
    listen 8000;
    server_name localhost;
    access_log on;
    error_log /var/log/nginx/error.log debug;
    charset utf-8;
    client_max_body_size 10M;
    location = /favicon.ico {
        log_not_found off;
        access_log off;
    }
    location /static {
        autoindex on;
        alias _replace_me_static_compressed_path_;
    }
    location / {
        proxy_pass http://nailgun-application;
        proxy_read_timeout 2m;
        proxy_redirect off;
        proxy_set_header Host $host;
        proxy_set_header X-Real-IP $remote_addr;
        proxy_set_header X-Forwarded-For $proxy_add_x_forwarded_for;
    }
}

# Port 8002: same backend, but serves the uncompressed static tree.
server {
    listen 8002;
    server_name localhost;
    access_log on;
    error_log /var/log/nginx/error.log debug;
    charset utf-8;
    client_max_body_size 10M;
    location = /favicon.ico {
        log_not_found off;
        access_log off;
    }
    location /static {
        autoindex on;
        alias _replace_me_static_path_;
    }
    location / {
        proxy_pass http://nailgun-application;
        proxy_read_timeout 2m;
        proxy_redirect off;
        proxy_set_header Host $host;
        proxy_set_header X-Real-IP $remote_addr;
        proxy_set_header X-Forwarded-For $proxy_add_x_forwarded_for;
    }
}

View File

@ -1,116 +0,0 @@
# Copyright 2015 Mirantis, Inc.
#
# Licensed under the Apache License, Version 2.0 (the "License"); you may
# not use this file except in compliance with the License. You may obtain
# a copy of the License at
#
# http://www.apache.org/licenses/LICENSE-2.0
#
# Unless required by applicable law or agreed to in writing, software
# distributed under the License is distributed on an "AS IS" BASIS, WITHOUT
# WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. See the
# License for the specific language governing permissions and limitations
# under the License.
from conf import JENKINS
from jenkins import Jenkins
import json
import logging
import urllib2
logger = logging.getLogger(__package__)
J = Jenkins(JENKINS["url"])
def get_test_data(url):
    """Fetch the Jenkins testReport JSON for a build URL.

    Appends 'testReport/api/json' to `url` and returns the decoded body.
    Raises on HTTP errors or if the build has no test report.
    """
    test_url = "/".join([url.rstrip("/"), 'testReport', 'api/json'])
    logger.debug("Request test data from {}".format(test_url))
    req = urllib2.Request(test_url)
    opener = urllib2.build_opener(urllib2.HTTPHandler)
    s = opener.open(req).read()
    return json.loads(s)
def get_jobs_for_view(view):
    """Return list of jobs from specified view
    """
    view_url = "/".join([JENKINS["url"], 'view', view, 'api/json'])
    logger.debug("Request view data from {}".format(view_url))
    req = urllib2.Request(view_url)
    opener = urllib2.build_opener(urllib2.HTTPHandler)
    s = opener.open(req).read()
    view_data = json.loads(s)
    # Only the job names are needed, not the full job objects.
    jobs = [job["name"] for job in view_data["jobs"]]
    return jobs
class Build():
    """One Jenkins build plus its test results, flattened for reporting."""

    def __init__(self, name, number):
        """Get build info via Jenkins API, get test info via direct HTTP
        request.

        If number is 'latest', get latest completed build.
        """
        self.name = name
        if number == 'latest':
            job_info = J.get_job_info(self.name, depth=1)
            self.number = job_info["lastCompletedBuild"]["number"]
        else:
            self.number = int(number)
        self.build_data = J.get_build_info(self.name, self.number, depth=1)
        self.url = self.build_data["url"]

    def test_data(self):
        """Return the build's test report, or a one-case failure stub.

        If the report cannot be fetched at all, a synthetic 'jenkins'
        test case with status 'failed' is returned so the build still
        shows up as broken in the spreadsheet.
        """
        try:
            data = get_test_data(self.url)
        except Exception as e:
            logger.warning("No test data for {0}: {1}".format(
                self.url,
                e,
            ))
            # If we failed to get any tests for the build, return
            # meta test case 'jenkins' with status 'failed'.
            data = {
                "suites": [
                    {
                        "cases": [
                            {
                                "className": "jenkins",
                                "status": "failed"
                            }
                        ]
                    }
                ]
            }
        return data

    def __str__(self):
        string = "\n".join([
            "{0}: {1}".format(*item) for item in self.build_record()
        ])
        return string

    def build_record(self):
        """Return list of pairs.

        We cannot use dictionary, because columns are ordered.
        """
        data = [
            ('number', str(self.number)),
            ('id', self.build_data["id"]),
            ('description', self.build_data["description"]),
            ('url', self.build_data["url"]),
        ]
        test_data = self.test_data()
        for suite in test_data['suites']:
            for case in suite['cases']:
                # Column ids are normalized class names (lower, dashes).
                column_id = case['className'].lower().replace("_", "-")
                data.append((column_id, case['status'].lower()))
        return data

View File

@ -1,11 +0,0 @@
import os

# Jenkins master the exporter reads build/test results from.
JENKINS = {
    'url': os.environ.get('JENKINS_URL', 'http://localhost/'),
}

# Google account credentials and the target spreadsheet key; all values
# come from the environment and default to None when unset.
GOOGLE = {
    'user': os.environ.get('GOOGLE_USER'),
    'password': os.environ.get('GOOGLE_PASSWORD'),
    'key': os.environ.get('GOOGLE_KEY'),
}

View File

@ -1,119 +0,0 @@
#!/usr/bin/env python
#
# Copyright 2015 Mirantis, Inc.
#
# Licensed under the Apache License, Version 2.0 (the "License"); you may
# not use this file except in compliance with the License. You may obtain
# a copy of the License at
#
# http://www.apache.org/licenses/LICENSE-2.0
#
# Unless required by applicable law or agreed to in writing, software
# distributed under the License is distributed on an "AS IS" BASIS, WITHOUT
# WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. See the
# License for the specific language governing permissions and limitations
# under the License.
import logging
from optparse import OptionParser
from builds import Build
from builds import get_jobs_for_view
from spreadsheet import Document
logger = logging.getLogger(__package__)
logger.addHandler(logging.StreamHandler())
def page_for_build(build_name):
    """Return the spreadsheet page name for a Jenkins job.

    Drops everything before the "system_test." marker and lower-cases
    the rest, so the page name is independent of the release prefix.
    Raises ValueError when the marker is absent (str.index semantics).
    """
    marker = "system_test."
    start = build_name.index(marker)
    return build_name[start:].lower()
def report_build(document, name, number='latest'):
    """Publish a single Jenkins build onto its spreadsheet page.

    `number` may be an int-like build number or the string 'latest',
    which resolves to the last completed build. Does nothing when the
    build is already present on the page.
    """
    page = document.get_page(page_for_build(name))
    build = Build(name, number)
    if not page.build_exists(build.number):
        page.add_build(build.build_record())
        return
    # We have to use 'build.number' and not 'number' variable
    # here, because number can be specified as 'latest'.
    # build.number is properly resolved in Build.__init__()
    logger.debug("Build {0} exists".format(build.number))
    return None
def report_view(document, view):
    """Report the latest build of every system_test job in a Jenkins view.

    Each failed report is retried once. Returns a tuple
    (system_test_jobs, failures) where `failures` holds the job names
    that could not be reported even after the retry.
    """
    jobs = get_jobs_for_view(view)
    system_test_jobs = [name for name in jobs if '.system_test.' in name]
    failures = []
    for name in system_test_jobs:
        try:
            report_build(document, name, 'latest')
        except Exception as e:
            logger.debug(
                "Failed to report {0} with error: {1}".format(name, e)
            )
            failures.append(name)
    # Retry failed.
    # Bug fix: iterate over a snapshot -- the original called
    # failures.remove(name) while iterating `failures` itself, which
    # skips the element following each successful retry and can leave
    # recovered jobs listed as failures.
    for name in list(failures):
        logger.debug("Retry failed {0}".format(name))
        try:
            report_build(document, name, 'latest')
        except Exception as e:
            logger.debug(
                "Failed again to report {0}: {1}".format(name, e)
            )
        else:
            failures.remove(name)
    logger.debug("Failures: {0}".format(",".join(failures)))
    return system_test_jobs, failures
def main():
    """Command-line entry point.

    Publishes either one job (-j/--job-name, with optional -N build
    number) or all system_test jobs of a view (--view) to the Google
    spreadsheet. With --view, exits with the number of jobs that could
    not be reported.
    """
    parser = OptionParser(
        description="Publish results of Jenkins build to Google Spreadsheet."
        " See conf.py for configuration."
    )
    parser.add_option('-j', '--job-name', dest='job_name',
                      help='Jenkins job name')
    parser.add_option('-N', '--build-number', dest='build_number',
                      default='latest',
                      help='Jenkins build number')
    parser.add_option('--view', dest='view',
                      help='Jenkins view name')
    parser.add_option("-v", "--verbose",
                      action="store_true", dest="verbose", default=False,
                      help="Enable debug output")
    (options, args) = parser.parse_args()
    if options.verbose:
        logger.setLevel(logging.DEBUG)
    d = Document()
    # --job-name wins when both options are given.
    if options.job_name:
        report_build(d, options.job_name, options.build_number)
        return options.job_name
    if options.view:
        jobs, failures = report_view(d, options.view)
        exit(len(failures))
if __name__ == "__main__":
main()

View File

@ -1,2 +0,0 @@
python-jenkins
gdata

View File

@ -1,120 +0,0 @@
# Copyright 2015 Mirantis, Inc.
#
# Licensed under the Apache License, Version 2.0 (the "License"); you may
# not use this file except in compliance with the License. You may obtain
# a copy of the License at
#
# http://www.apache.org/licenses/LICENSE-2.0
#
# Unless required by applicable law or agreed to in writing, software
# distributed under the License is distributed on an "AS IS" BASIS, WITHOUT
# WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. See the
# License for the specific language governing permissions and limitations
# under the License.
from conf import GOOGLE
from gdata.spreadsheet import text_db
import logging
logger = logging.getLogger(__package__)
class Document():
    """Google Spreadsheet wrapper: connects via gdata and hands out pages."""

    def __init__(self):
        # Credentials and spreadsheet key come from conf.GOOGLE.
        self.gclient = text_db.DatabaseClient(
            GOOGLE["user"],
            GOOGLE["password"],
        )
        self.gspreadsheet = self.gclient.GetDatabases(
            spreadsheet_key=GOOGLE["key"]
        )[0]

    def get_page(self, name):
        """Return the Page (worksheet) named `name`, creating it if absent.

        NOTE(review): the duplicate-name branch uses a bare `raise` with
        no active exception, which surfaces as a TypeError/RuntimeError
        rather than a descriptive error.
        """
        tables = self.gspreadsheet.GetTables(name=name)
        # GetTables by name searches by substring in the table name.
        # GetTables(name="smth") can return ["smth","smth_else"]
        # Thus we run additional check for table.name
        tables = [table for table in tables if table.name == name]
        if len(tables) == 0:
            # Create new worksheet
            logger.debug("Create new worksheet {0}".format(name))
            wrksh = self.gspreadsheet.client._GetSpreadsheetsClient().AddWorksheet(
                title=name,
                row_count=1,
                col_count=50,
                key=self.gspreadsheet.spreadsheet_key,
            )
            table = text_db.Table(
                name=name,
                worksheet_entry=wrksh,
                database_client=self.gspreadsheet.client,
                spreadsheet_key=self.gspreadsheet.spreadsheet_key
            )
        elif len(tables) == 1:
            table = tables[0]
            logger.debug("Use worksheet {0}".format(table.name))
        else:
            logger.error(
                "There are {0} tables named {1}".format(
                    len(tables),
                    name,
                )
            )
            raise
        return Page(table)
class Page():
    """One worksheet of the report spreadsheet; rows are build records."""

    def __init__(self, table):
        self.table = table
        # Populate table.fields so later lookups/updates see the columns.
        self.table.LookupFields()

    def build_exists(self, number):
        """Return the (possibly empty) records matching a build number."""
        records = self.table.FindRecords(
            "number == {0}".format(number)
        )
        return records

    def add_build(self, build_record):
        """Adds build to the table

        If there is a row with same build id and build number,
        do nothing.
        """
        # build_record is an ordered list of (column, value) pairs;
        # the first pair is ('number', <build number>).
        build_number = build_record[0][1]
        if self.build_exists(build_number):
            logger.debug(
                "Build {0} is already there".format(build_number)
            )
            return None
        logger.debug("Create record "
                     "for build {0}".format(build_number))
        self.update_columns(build_record)
        self.table.AddRecord(dict(build_record))
        logger.info("Created record "
                    "for build {0}".format(build_number))

    def update_columns(self, build_record):
        """Update table columns

        If current build has more tests than the previous one
        we extend the table by appending more columns.
        """
        fields_changed = False
        fields = self.table.fields
        for key in [key for key, value in build_record if key not in fields]:
            fields_changed = True
            fields.append(key)
        if fields_changed:
            logger.debug("New columns: {}".format(fields))
            self.table.SetFields(fields)
            logger.debug("New columns added")
        return fields

View File

@ -1,26 +0,0 @@
[tox]
minversion = 1.6
skipsdist = True
envlist = pep8
[testenv]
usedevelop = True
install_command = pip install --allow-external -U {opts} {packages}
deps = -r{toxinidir}/requirements.txt
commands =
[tox:jenkins]
downloadcache = ~/cache/pip
[testenv:pep8]
deps = hacking==0.7
usedevelop = False
commands =
flake8 {posargs:.}
[flake8]
ignore = H234,H302,H802
exclude = .venv,.git,.tox,dist,doc,*lib/python*,*egg,build,tools,__init__.py,docs
show-pep8 = True
show-source = True
count = True

View File

@ -1,17 +0,0 @@
#!/bin/bash
# Accept a review: verify the branch rebases, run checks, then push it
# into master. Expects $WORKSPACE plus repo/branch variables from Jenkins.
. $WORKSPACE/utils/jenkins/common.sh

$WORKSPACE/utils/git-helper/review.py --master-repo $master_repo --master-branch $master_branch --repo $repo --branch $branch --check

license_check

# pep8 check for tests. If you need more than this, please create function in review-common.sh
[ -d $WORKSPACE/local_repo/fuelweb_test ] && pep8 fuelweb_test
[ -d $WORKSPACE/local_repo/nailgun ] && nailgun_checks
# Bug fix: directory name was misspelled "asute", so ruby_checks never ran.
[ -d $WORKSPACE/local_repo/astute ] && ruby_checks

# Push the branch into master
$WORKSPACE/utils/git-helper/review.py --repo $repo --branch $branch -p

View File

@ -1,21 +0,0 @@
#!/bin/bash
# Request a review: verify the branch rebases, run checks, then open a
# GitHub pull request. Expects $WORKSPACE plus repo/branch/pull_* vars.
. $WORKSPACE/utils/jenkins/common.sh

$WORKSPACE/utils/git-helper/review.py --master-repo $master_repo --master-branch $master_branch --repo $repo --branch $branch --check

# Build checks
[ -z "$pull_title" ] && { echo "ERROR: Specify title for pull request"; exit 1; }
[ -z "$pull_body" ] && { echo "ERROR: Specify body for pull request (how did you test your code??)"; exit 1; }

license_check

# pep8 check for tests. If you need more than this, please create function in review-common.sh
[ -d $WORKSPACE/local_repo/fuelweb_test ] && pep8 fuelweb_test
[ -d $WORKSPACE/local_repo/nailgun ] && nailgun_checks
# Bug fix: directory name was misspelled "asute", so ruby_checks never ran.
[ -d $WORKSPACE/local_repo/astute ] && ruby_checks

# Create pull request
$WORKSPACE/utils/git-helper/review.py --repo $repo --branch $branch -t "$pull_title" -b "$pull_body" --add

View File

@ -1,76 +0,0 @@
# Copyright 2014 Mirantis, Inc.
#
# Licensed under the Apache License, Version 2.0 (the "License"); you may
# not use this file except in compliance with the License. You may obtain
# a copy of the License at
#
# http://www.apache.org/licenses/LICENSE-2.0
#
# Unless required by applicable law or agreed to in writing, software
# distributed under the License is distributed on an "AS IS" BASIS, WITHOUT
# WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. See the
# License for the specific language governing permissions and limitations
# under the License.
import metacomm.combinatorics.all_pairs2
all_pairs = metacomm.combinatorics.all_pairs2.all_pairs2
parameters = [
#("os", ["CentOS", "RedHat", "Ubuntu"]),
("mode", ["simple", "HA"]),
("controller", [1, 3]),
("compute", [1, 2]),
("cinder", [1, 2, 0]),
("ceph", [2, 3, 0]),
("network", ["nova flat", "nova vlan", "neutron GRE", "neutron VLAN"]),
("tagging", ["yes", "no"]),
("storage volume", ["default", "ceph"]),
("storage images", ["default", "ceph"]),
("sahara", ["yes", "no"]),
("murano", ["yes", "no"]),
("ceilometer", ["yes", "no"])
]
def is_valid_combination(values, names):
    """Return True if a (possibly partial) parameter combination is allowed.

    The dictionary built from ``names``/``values`` can be incomplete: a
    rule that touches a missing key cannot reject the subset, so its
    KeyError is swallowed and the rule is skipped.
    """
    combo = dict(zip(names, values))
    # Each predicate names one *forbidden* configuration.
    forbidden = (
        lambda d: d["os"] == "RedHat" and d["storage volume"] == "ceph",
        lambda d: d["os"] == "RedHat" and d["storage images"] == "ceph",
        lambda d: d["os"] == "RedHat" and d["sahara"] == "yes",
        lambda d: d["os"] == "RedHat" and d["murano"] == "yes",
        lambda d: d["os"] == "RedHat" and d["network"] == "neutron GRE",
        lambda d: d["os"] == "RedHat" and d["network"] == "neutron VLAN",
        lambda d: d["cinder"] > 0 and d["storage volume"] == "default",
        lambda d: (d["ceph"] > 0 and d["storage volume"] == "default"
                   and d["storage images"] == "default"),
    )
    for check in forbidden:
        try:
            hit = check(combo)
        except KeyError:
            # Rule references a parameter absent from this subset: skip it.
            continue
        if hit:
            return False
    return True
# Generate the all-pairs (pairwise) covering set of test configurations,
# keeping only combinations that pass is_valid_combination.
pairwise = all_pairs(
    [x[1] for x in parameters],
    filter_func=lambda values: is_valid_combination(values,
                                                    [x[0] for x in parameters])
)
# Print every generated combination with its index (Python 2 print statement).
for i, v in enumerate(pairwise):
    print "%i:\t%s" % (i, v)

View File

@ -1,57 +0,0 @@
#!/bin/sh
# Reduce requirements-deb.txt: expand the dependency closure of the given
# seed packages with germinate, then drop every package that is already
# pulled in transitively. Result: requirements-deb-reduced.txt.
set -e
myname="reduce-requirements-deb"
initial_pkgs="$@"
if [ -z "$initial_pkgs" ]; then
    echo "$myname: no initial packages specified"
    exit 0
fi
# Work from the repository root (this script lives one level below it).
cd "$(dirname $0)/.."
# Pull mirror/release settings out of config.mk with sed.
cfg_UBUNTU_RELEASE=`sed -n -e '/^UBUNTU_RELEASE/ { s/^UBUNTU_RELEASE:=//p }' config.mk`
cfg_MIRROR_UBUNTU=`sed -n -e '/^MIRROR_UBUNTU.[=]http/ { s/^MIRROR_UBUNTU.[=]//p }' config.mk`
PRODUCT_VERSION=`sed -n -e '/^PRODUCT_VERSION/ { s/^PRODUCT_VERSION:=//p }' config.mk`
MIRROR_FUEL_UBUNTU="http://osci-obs.vm.mirantis.net:82/ubuntu-fuel-${PRODUCT_VERSION}-stable/reprepro"
# Environment variables override the values from config.mk.
if [ -z "$MIRROR_UBUNTU" ]; then
    MIRROR_UBUNTU="$cfg_MIRROR_UBUNTU"
fi
if [ -z "$UBUNTU_RELEASE" ]; then
    UBUNTU_RELEASE="$cfg_UBUNTU_RELEASE"
fi
# Recreate germinate's working directories and the seed files it expects.
rm -rf germinate seeds </dev/null >/dev/null
mkdir germinate seeds
touch seeds/blacklist
touch seeds/supported
cat > seeds/STRUCTURE << EOF
required:
supported:
EOF
# germinate expects each seed package as a " * <pkg>" bullet line.
for pkg in $initial_pkgs; do
    echo " * $pkg"
done > seeds/required
old_pwd="`pwd`"
cd germinate
# Expand the full dependency closure of the seed list against the mirrors.
germinate -v \
    -m "$MIRROR_UBUNTU" \
    -m "$MIRROR_FUEL_UBUNTU" \
    -d $UBUNTU_RELEASE \
    -a amd64 \
    -c main,universe,multiverse \
    -s seeds \
    -S "file://${old_pwd}"
cd ..
# Extract package names from germinate's table output (rows start with a
# lowercase package name; the table is fenced by '----' separator lines).
sed -n -e '/^--------/,/^---------/ { s/^\([a-z][^ \t]\+\).*$/\1/p }' germinate/required.depends > packages-expanded.tmp
sort < packages-expanded.tmp > packages-expanded.txt
sort < requirements-deb.txt > requirements-deb-sorted.txt
# comm -23: keep lines only in requirements-deb.txt (not covered by the
# expanded closure) -> the reduced requirements list.
comm -23 requirements-deb-sorted.txt packages-expanded.txt > requirements-deb-reduced.txt

View File

@ -1,28 +0,0 @@
#!/bin/bash
# Recursively replace <old_string> with <new_string> both in file contents
# and in file/directory names under <project_root_dir>.
# NOTE: old/new are used unescaped inside sed s/// expressions, so they
# must not contain '/' or sed metacharacters.

usage(){
    echo "Usage: `basename $0` <project_root_dir> <old_string> <new_string>"
    exit 1
}

if [ $# -ne 3 ]; then
    usage
fi

(
# Quote "$1" and abort if the directory cannot be entered, instead of
# silently renaming things in the current directory.
cd "$1" || exit 1
old=$2
new=$3
# Rewrite file contents. 'grep -rl' lists matching file names directly --
# parsing 'grep -r' output with awk -F: broke on paths containing ':'.
# 'sort -u' replaces 'sort | uniq'. Variables are quoted so paths with
# spaces survive word splitting.
grep -rl "$old" * 2>/dev/null | sort -u | while read file; do
    echo "sed -i -e \"s/${old}/${new}/g\" $file"
    sed -i -e "s/${old}/${new}/g" "$file"
done
# Rename files/directories deepest-first (reverse sort) so renaming a
# parent does not invalidate the recorded paths of its children.
find -name "*${old}*" | sort -r | while read oldfile; do
    d=`dirname "$oldfile"`
    f=`basename "$oldfile"`
    newfile=${d}/`echo "$f" | sed -e "s/${old}/${new}/g"`
    echo "mv $oldfile $newfile"
    mv "$oldfile" "$newfile"
done
)

View File

@ -1,76 +0,0 @@
#!/usr/bin/env python
#
# Copyright 2014 Mirantis, Inc.
#
# Licensed under the Apache License, Version 2.0 (the "License"); you may
# not use this file except in compliance with the License. You may obtain
# a copy of the License at
#
# http://www.apache.org/licenses/LICENSE-2.0
#
# Unless required by applicable law or agreed to in writing, software
# distributed under the License is distributed on an "AS IS" BASIS, WITHOUT
# WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. See the
# License for the specific language governing permissions and limitations
# under the License.
import sys
import os
import time
import daemon
try:
from daemon.pidlockfile import PIDLockFile
except ImportError:
from lockfile.pidlockfile import PIDLockFile
import BaseHTTPServer
from SimpleHTTPServer import SimpleHTTPRequestHandler
class SimpleHTTPDaemon:
    """Daemonized HTTP file server for the current working directory.

    Forks into the background via python-daemon (with a PID lockfile) and
    serves files with SimpleHTTPRequestHandler until the time-to-live
    expires.
    """

    def __init__(self, address='0.0.0.0', port=9001,
                 pid_file='/var/run/simplehttpd.pid', ttl=600):
        # FIX: the default port used to be the string '9001'; HTTPServer
        # requires an int in its (host, port) address tuple, so the default
        # was unusable. Callers passing an explicit int are unaffected.
        self.address = address
        self.port = port
        self.pid_file = pid_file
        self.ttl = ttl    # server lifetime, in seconds
        self.end = 0      # absolute deadline (epoch seconds), set by start()

    def run_http_server(self):
        """Serve requests one at a time until the deadline passes."""
        HandlerClass = SimpleHTTPRequestHandler
        ServerClass = BaseHTTPServer.HTTPServer
        Protocol = "HTTP/1.0"
        server_address = (self.address, self.port)
        HandlerClass.protocol_version = Protocol
        httpd = ServerClass(server_address, HandlerClass)
        # handle_request() blocks until a request arrives, so the loop can
        # overrun the deadline by one request; acceptable for this helper.
        while time.time() < self.end:
            httpd.handle_request()

    def start(self):
        """Compute the deadline, daemonize, and run the HTTP server."""
        self.end = time.time() + self.ttl
        context = daemon.DaemonContext(
            working_directory=os.getcwd(),
            umask=0o002,
            pidfile=PIDLockFile(self.pid_file)
        )
        with context:
            self.run_http_server()
if __name__ == "__main__":
    # CLI: simple_http_daemon.py [port [pid_file [ttl_seconds]]]
    if sys.argv[1:]:
        port = int(sys.argv[1])
    else:
        port = 9001
    if sys.argv[2:]:
        pid = sys.argv[2]
    else:
        pid = '/var/run/simplehttpd.pid'
    if sys.argv[3:]:
        timeout = int(sys.argv[3])
    else:
        # sys.maxint (Python 2 only): effectively run forever.
        timeout = sys.maxint
    server = SimpleHTTPDaemon('0.0.0.0', port, pid, timeout)
    server.start()

View File

@ -1,118 +0,0 @@
# Copyright 2013 Mirantis, Inc.
#
# Licensed under the Apache License, Version 2.0 (the "License"); you may
# not use this file except in compliance with the License. You may obtain
# a copy of the License at
#
# http://www.apache.org/licenses/LICENSE-2.0
#
# Unless required by applicable law or agreed to in writing, software
# distributed under the License is distributed on an "AS IS" BASIS, WITHOUT
# WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. See the
# License for the specific language governing permissions and limitations
# under the License.
#!/usr/bin/env python
import logging
logging.basicConfig(level=logging.DEBUG)
logger = logging.getLogger()
import os
import sys
import threading
import argparse
import time
import tftpy
tftpy.setLogLevel(logging.WARNING)
class TClient(threading.Thread):
    """Daemon thread that downloads one file from a TFTP server."""

    def __init__(self, hostname, port, remote_filename, local_filename=None):
        logger.debug("Initializing TClient instance: hostname: '%s' port: %s "
                     "remote file: '%s' local file: '%s'",
                     hostname, port, remote_filename, local_filename)
        super(TClient, self).__init__()
        self.hostname = hostname
        self.port = port
        self.remote_filename = remote_filename
        # Without an explicit target path the downloaded data is discarded.
        self.local_filename = local_filename if local_filename else os.devnull
        self.daemon = True

    def run(self):
        # The TFTP client is created inside run() so the whole transfer
        # happens on this thread.
        self.client = tftpy.TftpClient(self.hostname, self.port)
        self.client.download(self.remote_filename, self.local_filename)

    def stat(self):
        """Return (bytes, duration, kbps) metrics of the finished transfer."""
        metrics = self.client.context.metrics
        return metrics.bytes, metrics.duration, metrics.kbps
def term_handler(signum, sigframe):
    """Signal handler: terminate the process (sys.exit raises SystemExit)."""
    raise SystemExit()
def parse_arguments():
    """Build the CLI parser and return [known_args, unknown_args]."""
    epilog_text = """
This script is for testing TFTP servers.
Example: ./tftp_client.py -a localhost -p 69 -f tftpfile -n 2
"""
    cli = argparse.ArgumentParser(epilog=epilog_text)
    cli.add_argument(
        '-a', '--host', dest='host', action='store', type=str,
        help='hostname where TFTP server listens', required=True
    )
    cli.add_argument(
        '-p', '--port', dest='port', action='store', type=int,
        help='port where TFTP server listens', required=True
    )
    cli.add_argument(
        '-f', '--file', dest='file', action='store', type=str,
        help='filename on TFTP server', required=True
    )
    cli.add_argument(
        '-o', '--output', dest='output', action='store', type=str,
        help='output path prefix for output files, '
             'will be appended with thread number. '
             '/dev/null will be used if not set', default=None
    )
    cli.add_argument(
        '-n', '--num_threads', dest='num_threads', action='store', type=int,
        help='number TFTP threads', required=True
    )
    # parse_known_args keeps unrecognized options instead of erroring out.
    known, unknown = cli.parse_known_args()
    return [known, unknown]
if __name__ == "__main__":
    params, other_params = parse_arguments()
    clients = []
    # Spawn one downloader thread per requested client (Python 2 xrange).
    for i in xrange(params.num_threads):
        logger.debug("Thread: %s", i)
        o = params.output
        if params.output:
            # Give every thread its own output file: <prefix>00000, ...
            o = "%s%05d" % (params.output, i)
        client = TClient(params.host, params.port, params.file, o)
        clients.append(client)
        logger.debug("===")
        clients[-1].start()
    try:
        # Poll once a second until all downloader threads finish
        # (isAlive is the Python 2 spelling of is_alive).
        while True:
            if not any(map(lambda c: c.isAlive(), clients)):
                break
            time.sleep(1)
    except KeyboardInterrupt:
        logger.debug("Interruption signal catched")
        sys.exit(0)
    except SystemExit:
        # Raised by term_handler on SIGTERM; fall through to the stats.
        logger.debug("TERM signal catched")
    for i, c in enumerate(clients):
        logger.debug("Statistics tftp client thread: %s", i)
        logger.info("Bytes: %s, Duration: %s, Speed: %s kbps" % c.stat())