Port in git sha changes from 0.10 line

Stop including git sha in version strings

We include it in pbr.json now. Including it in the version string is
contentious in the Python world, and it is debatable whether it provides value.

Write and read more complex git sha info

Instead of encoding the git sha into the version string, add it to
a metadata file. This lets us get out of the business of arguing
with pip and setuptools about version info. To make this convenient,
provide a command-line utility called "pbr" with subcommands to print
the metadata that we're now including in the egg-info dir.
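
Because the metadata lands in a plain pbr.json file inside the
egg-info dir, it can be read back with nothing more than
pkg_resources and json. A minimal sketch (the helper name is ours;
the shipped "pbr" tool wraps the same lookup with error handling):

    import json

    import pkg_resources

    def read_pbr_metadata(package_name):
        # pbr.json is written into the egg-info dir by the
        # egg_info.writers hook added in this change
        dist = pkg_resources.get_distribution(package_name)
        return json.loads(dist.get_metadata('pbr.json'))

    # prints something like {'git_version': '<short sha>', 'is_release': False}
    print(read_pbr_metadata('pbr'))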

Only import sphinx during hook processing

When pbr is imported to write the egg_info file via its entry point,
it causes sphinx to be imported as well. This has a cascading effect
when docutils is being installed on a system that already has pbr
installed. If some of those imports fail along the way, allow pbr to
continue working usefully, just without the Sphinx extensions
available. Eventually, once everything is installed, those extensions
will work again when the commands for build_sphinx, etc. are run
separately.
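
The guard amounts to attempting the import once at module load time
and degrading gracefully. Roughly the shape of what packaging.py now
does (trimmed to the essentials here; the real module also re-exports
LocalBuildLatex):

    try:
        # pbr.builddoc imports sphinx and re-raises any failure
        # as ImportError so this except clause catches it
        from pbr import builddoc
        _have_sphinx = True
        LocalBuildDoc = builddoc.LocalBuildDoc
    except ImportError:
        _have_sphinx = False
        LocalBuildDoc = None

    def have_sphinx():
        return _have_sphinx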

Also slip in a change to reorder the default list of environments
run by tox so that the testr database is created using a dbm format
available to all Python versions.

Integration test PBR commits

Make sure that if a PBR commit is being tested, we install and use
that source rather than the latest PBR release.

Change-Id: Ie121e795be2eef30822daaa5fe8ab1c2315577ae
(cherry picked from commit 65f4fafd90)
(cherry picked from commit cd7da23937)
Closes-Bug: #1403510
Co-Authored-By: Clark Boylan <clark.boylan@gmail.com>
Co-Authored-By: Doug Hellmann <doug@doughellmann.com>
Co-Authored-By: Jeremy Stanley <fungi@yuggoth.org>
Author: Monty Taylor, 2014-12-18 15:09:33 -08:00 (committed by Jeremy Stanley)
parent 1f5c9f71f9
commit c01b8dae1e
16 changed files with 779 additions and 552 deletions

pbr/builddoc.py (new file, 216 lines)

@ -0,0 +1,216 @@
# Copyright 2011 OpenStack LLC.
# Copyright 2012-2013 Hewlett-Packard Development Company, L.P.
# All Rights Reserved.
#
# Licensed under the Apache License, Version 2.0 (the "License"); you may
# not use this file except in compliance with the License. You may obtain
# a copy of the License at
#
# http://www.apache.org/licenses/LICENSE-2.0
#
# Unless required by applicable law or agreed to in writing, software
# distributed under the License is distributed on an "AS IS" BASIS, WITHOUT
# WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. See the
# License for the specific language governing permissions and limitations
# under the License.
from distutils import log
import os
import sys
try:
import cStringIO
except ImportError:
import io as cStringIO
try:
from sphinx import apidoc
from sphinx import application
from sphinx import config
from sphinx import setup_command
except Exception as e:
# NOTE(dhellmann): During the installation of docutils, setuptools
# tries to import pbr code to find the egg_info.writer hooks. That
# imports this module, which imports sphinx, which imports
# docutils, which is being installed. Because docutils uses 2to3
# to convert its code during installation under python 3, the
# import fails, but it fails with an error other than ImportError
# (today it's a NameError on StandardError, an exception base
# class). Convert the exception type here so it can be caught in
# packaging.py where we try to determine if we can import and use
# sphinx by importing this module. See bug #1403510 for details.
raise ImportError(str(e))
from pbr import git
from pbr import options
_rst_template = """%(heading)s
%(underline)s
.. automodule:: %(module)s
:members:
:undoc-members:
:show-inheritance:
"""
def _find_modules(arg, dirname, files):
for filename in files:
if filename.endswith('.py') and filename != '__init__.py':
arg["%s.%s" % (dirname.replace('/', '.'),
filename[:-3])] = True
class LocalBuildDoc(setup_command.BuildDoc):
command_name = 'build_sphinx'
builders = ['html', 'man']
def _get_source_dir(self):
option_dict = self.distribution.get_option_dict('build_sphinx')
if 'source_dir' in option_dict:
source_dir = os.path.join(option_dict['source_dir'][1], 'api')
else:
source_dir = 'doc/source/api'
if not os.path.exists(source_dir):
os.makedirs(source_dir)
return source_dir
def generate_autoindex(self, excluded_modules=None):
log.info("[pbr] Autodocumenting from %s"
% os.path.abspath(os.curdir))
modules = {}
source_dir = self._get_source_dir()
for pkg in self.distribution.packages:
if '.' not in pkg:
for dirpath, dirnames, files in os.walk(pkg):
_find_modules(modules, dirpath, files)
module_list = set(modules.keys())
if excluded_modules is not None:
module_list -= set(excluded_modules)
module_list = sorted(module_list)
autoindex_filename = os.path.join(source_dir, 'autoindex.rst')
with open(autoindex_filename, 'w') as autoindex:
autoindex.write(""".. toctree::
:maxdepth: 1
""")
for module in module_list:
output_filename = os.path.join(source_dir,
"%s.rst" % module)
heading = "The :mod:`%s` Module" % module
underline = "=" * len(heading)
values = dict(module=module, heading=heading,
underline=underline)
log.info("[pbr] Generating %s"
% output_filename)
with open(output_filename, 'w') as output_file:
output_file.write(_rst_template % values)
autoindex.write(" %s.rst\n" % module)
def _sphinx_tree(self):
source_dir = self._get_source_dir()
cmd = ['apidoc', '.', '-H', 'Modules', '-o', source_dir]
apidoc.main(cmd + self.autodoc_tree_excludes)
def _sphinx_run(self):
if not self.verbose:
status_stream = cStringIO.StringIO()
else:
status_stream = sys.stdout
confoverrides = {}
if self.version:
confoverrides['version'] = self.version
if self.release:
confoverrides['release'] = self.release
if self.today:
confoverrides['today'] = self.today
sphinx_config = config.Config(self.config_dir, 'conf.py', {}, [])
sphinx_config.init_values()
if self.builder == 'man' and len(sphinx_config.man_pages) == 0:
return
app = application.Sphinx(
self.source_dir, self.config_dir,
self.builder_target_dir, self.doctree_dir,
self.builder, confoverrides, status_stream,
freshenv=self.fresh_env, warningiserror=True)
try:
app.build(force_all=self.all_files)
except Exception as err:
from docutils import utils
if isinstance(err, utils.SystemMessage):
sys.stderr.write('reST markup error:\n')
sys.stderr.write(err.args[0].encode('ascii',
'backslashreplace'))
sys.stderr.write('\n')
else:
raise
if self.link_index:
src = app.config.master_doc + app.builder.out_suffix
dst = app.builder.get_outfilename('index')
os.symlink(src, dst)
def run(self):
option_dict = self.distribution.get_option_dict('pbr')
if git._git_is_installed():
git.write_git_changelog(option_dict=option_dict)
git.generate_authors(option_dict=option_dict)
tree_index = options.get_boolean_option(option_dict,
'autodoc_tree_index_modules',
'AUTODOC_TREE_INDEX_MODULES')
auto_index = options.get_boolean_option(option_dict,
'autodoc_index_modules',
'AUTODOC_INDEX_MODULES')
if not os.getenv('SPHINX_DEBUG'):
# NOTE(afazekas): These options can be used together,
# but they do a very similar thing in a different way
if tree_index:
self._sphinx_tree()
if auto_index:
self.generate_autoindex(
option_dict.get(
"autodoc_exclude_modules",
[None, ""])[1].split())
for builder in self.builders:
self.builder = builder
self.finalize_options()
self.project = self.distribution.get_name()
self.version = self.distribution.get_version()
self.release = self.distribution.get_version()
if options.get_boolean_option(option_dict,
'warnerrors', 'WARNERRORS'):
self._sphinx_run()
else:
setup_command.BuildDoc.run(self)
def initialize_options(self):
# Not a new style class, super keyword does not work.
setup_command.BuildDoc.initialize_options(self)
# NOTE(dstanek): exclude setup.py from the autodoc tree index
# builds because all projects will have an issue with it
self.autodoc_tree_excludes = ['setup.py']
def finalize_options(self):
# Not a new style class, super keyword does not work.
setup_command.BuildDoc.finalize_options(self)
# Allow builders to be configurable - as a comma separated list.
if not isinstance(self.builders, list) and self.builders:
self.builders = self.builders.split(',')
# NOTE(dstanek): check for autodoc tree exclusion overrides
# in the setup.cfg
opt = 'autodoc_tree_excludes'
option_dict = self.distribution.get_option_dict('pbr')
if opt in option_dict:
self.autodoc_tree_excludes = option_dict[opt][1]
self.ensure_string_list(opt)
class LocalBuildLatex(LocalBuildDoc):
builders = ['latex']
command_name = 'build_sphinx_latex'

pbr/cmd/__init__.py (new empty file)

pbr/cmd/main.py (new file, 110 lines)

@ -0,0 +1,110 @@
# Copyright 2014 Hewlett-Packard Development Company, L.P.
# All Rights Reserved.
#
# Licensed under the Apache License, Version 2.0 (the "License"); you may
# not use this file except in compliance with the License. You may obtain
# a copy of the License at
#
# http://www.apache.org/licenses/LICENSE-2.0
#
# Unless required by applicable law or agreed to in writing, software
# distributed under the License is distributed on an "AS IS" BASIS, WITHOUT
# WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. See the
# License for the specific language governing permissions and limitations
# under the License.
import argparse
import json
import sys
import pkg_resources
import pbr.version
def _get_metadata(package_name):
try:
return json.loads(
pkg_resources.get_distribution(
package_name).get_metadata('pbr.json'))
except pkg_resources.DistributionNotFound:
raise Exception('Package {0} not installed'.format(package_name))
except Exception:
return None
def get_sha(args):
sha = _get_info(args.name)['sha']
if sha:
print(sha)
def get_info(args):
print("{name}\t{version}\t{released}\t{sha}".format(
**_get_info(args.name)))
def _get_info(name):
metadata = _get_metadata(name)
version = pkg_resources.get_distribution(name).version
if metadata:
if metadata['is_release']:
released = 'released'
else:
released = 'pre-release'
sha = metadata['git_version']
else:
version_parts = version.split('.')
if version_parts[-1].startswith('g'):
sha = version_parts[-1][1:]
released = 'pre-release'
else:
sha = ""
released = "released"
for part in version_parts:
if not part.isdigit():
released = "pre-release"
return dict(name=name, version=version, sha=sha, released=released)
def freeze(args):
for dist in pkg_resources.working_set:
info = _get_info(dist.project_name)
output = "{name}=={version}".format(**info)
if info['sha']:
output += " # git sha {sha}".format(**info)
print(output)
def main():
parser = argparse.ArgumentParser(
description='pbr: Python Build Reasonableness')
parser.add_argument(
'-v', '--version', action='version',
version=str(pbr.version.VersionInfo('pbr')))
subparsers = parser.add_subparsers(
title='commands', description='valid commands', help='additional help')
cmd_sha = subparsers.add_parser('sha', help='print sha of package')
cmd_sha.set_defaults(func=get_sha)
cmd_sha.add_argument('name', help='package to print sha of')
cmd_sha = subparsers.add_parser(
'info', help='print version info for package')
cmd_sha.set_defaults(func=get_info)
cmd_sha.add_argument('name', help='package to print info of')
cmd_sha = subparsers.add_parser(
'freeze', help='print version info for all installed packages')
cmd_sha.set_defaults(func=freeze)
args = parser.parse_args()
try:
args.func(args)
except Exception as e:
print(e)
if __name__ == '__main__':
sys.exit(main())

pbr/git.py (new file, 276 lines)

@ -0,0 +1,276 @@
# Copyright 2011 OpenStack LLC.
# Copyright 2012-2013 Hewlett-Packard Development Company, L.P.
# All Rights Reserved.
#
# Licensed under the Apache License, Version 2.0 (the "License"); you may
# not use this file except in compliance with the License. You may obtain
# a copy of the License at
#
# http://www.apache.org/licenses/LICENSE-2.0
#
# Unless required by applicable law or agreed to in writing, software
# distributed under the License is distributed on an "AS IS" BASIS, WITHOUT
# WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. See the
# License for the specific language governing permissions and limitations
# under the License.
from __future__ import unicode_literals
import distutils.errors
from distutils import log
import io
import os
import re
import subprocess
import pkg_resources
from pbr import options
def _run_shell_command(cmd, throw_on_error=False, buffer=True, env=None):
if buffer:
out_location = subprocess.PIPE
err_location = subprocess.PIPE
else:
out_location = None
err_location = None
newenv = os.environ.copy()
if env:
newenv.update(env)
output = subprocess.Popen(cmd,
stdout=out_location,
stderr=err_location,
env=newenv)
out = output.communicate()
if output.returncode and throw_on_error:
raise distutils.errors.DistutilsError(
"%s returned %d" % (cmd, output.returncode))
if len(out) == 0 or not out[0] or not out[0].strip():
return ''
return out[0].strip().decode('utf-8')
def _run_git_command(cmd, git_dir, **kwargs):
if not isinstance(cmd, (list, tuple)):
cmd = [cmd]
return _run_shell_command(
['git', '--git-dir=%s' % git_dir] + cmd, **kwargs)
def _get_git_directory():
return _run_shell_command(['git', 'rev-parse', '--git-dir'])
def _git_is_installed():
try:
# We cannot use 'which git' as it may not be available
# in some distributions, so just try 'git --version'
# to see if we run into trouble
_run_shell_command(['git', '--version'])
except OSError:
return False
return True
def _get_highest_tag(tags):
"""Find the highest tag from a list.
Pass in a list of tag strings and this will return the highest
(latest) as sorted by the pkg_resources version parser.
"""
return max(tags, key=pkg_resources.parse_version)
def _find_git_files(dirname='', git_dir=None):
"""Behave like a file finder entrypoint plugin.
We don't actually use the entrypoints system for this because it runs
at absurd times. We only want to do this when we are building an sdist.
"""
file_list = []
if git_dir is None and _git_is_installed():
git_dir = _get_git_directory()
if git_dir:
log.info("[pbr] In git context, generating filelist from git")
file_list = _run_git_command(['ls-files', '-z'], git_dir)
file_list = file_list.split(b'\x00'.decode('utf-8'))
return [f for f in file_list if f]
def _get_raw_tag_info(git_dir):
describe = _run_git_command(['describe', '--always'], git_dir)
if "-" in describe:
return describe.rsplit("-", 2)[-2]
if "." in describe:
return 0
return None
def get_is_release(git_dir):
return _get_raw_tag_info(git_dir) == 0
def _run_git_functions():
git_dir = _get_git_directory()
if git_dir and _git_is_installed():
return git_dir
return None
def get_git_short_sha(git_dir=None):
"""Return the short sha for this repo, if it exists."""
if not git_dir:
git_dir = _run_git_functions()
if git_dir:
return _run_git_command(
['log', '-n1', '--pretty=format:%h'], git_dir)
return None
def _iter_changelog(changelog):
"""Convert a oneline log iterator to formatted strings.
:param changelog: An iterator of one line log entries like
that given by _iter_log_oneline.
:return: An iterator over (release, formatted changelog) tuples.
"""
first_line = True
current_release = None
yield current_release, "CHANGES\n=======\n\n"
for hash, tags, msg in changelog:
if tags:
current_release = _get_highest_tag(tags)
underline = len(current_release) * '-'
if not first_line:
yield current_release, '\n'
yield current_release, (
"%(tag)s\n%(underline)s\n\n" %
dict(tag=current_release, underline=underline))
if not msg.startswith("Merge "):
if msg.endswith("."):
msg = msg[:-1]
yield current_release, "* %(msg)s\n" % dict(msg=msg)
first_line = False
def _iter_log_oneline(git_dir=None, option_dict=None):
"""Iterate over --oneline log entries if possible.
This parses the output into a structured form but does not apply
presentation logic to the output - making it suitable for different
uses.
:return: An iterator of (hash, tags_set, 1st_line) tuples, or None if
changelog generation is disabled / not available.
"""
if not option_dict:
option_dict = {}
should_skip = options.get_boolean_option(option_dict, 'skip_changelog',
'SKIP_WRITE_GIT_CHANGELOG')
if should_skip:
return
if git_dir is None:
git_dir = _get_git_directory()
if not git_dir:
return
return _iter_log_inner(git_dir)
def _iter_log_inner(git_dir):
"""Iterate over --oneline log entries.
This parses the output into a structured form but does not apply
presentation logic to the output - making it suitable for different
uses.
:return: An iterator of (hash, tags_set, 1st_line) tuples.
"""
log.info('[pbr] Generating ChangeLog')
log_cmd = ['log', '--oneline', '--decorate']
changelog = _run_git_command(log_cmd, git_dir)
for line in changelog.split('\n'):
line_parts = line.split()
if len(line_parts) < 2:
continue
# Tags are in a list contained in ()'s. If a commit
# subject that is tagged happens to have ()'s in it
# this will fail
if line_parts[1].startswith('(') and ')' in line:
msg = line.split(')')[1].strip()
else:
msg = " ".join(line_parts[1:])
if "tag:" in line:
tags = set([
tag.split(",")[0]
for tag in line.split(")")[0].split("tag: ")[1:]])
else:
tags = set()
yield line_parts[0], tags, msg
def write_git_changelog(git_dir=None, dest_dir=os.path.curdir,
option_dict=dict(), changelog=None):
"""Write a changelog based on the git changelog."""
if not changelog:
changelog = _iter_log_oneline(git_dir=git_dir, option_dict=option_dict)
if changelog:
changelog = _iter_changelog(changelog)
if not changelog:
return
log.info('[pbr] Writing ChangeLog')
new_changelog = os.path.join(dest_dir, 'ChangeLog')
# If there's already a ChangeLog and it's not writable, just use it
if (os.path.exists(new_changelog)
and not os.access(new_changelog, os.W_OK)):
return
with io.open(new_changelog, "w", encoding="utf-8") as changelog_file:
for release, content in changelog:
changelog_file.write(content)
def generate_authors(git_dir=None, dest_dir='.', option_dict=dict()):
"""Create AUTHORS file using git commits."""
should_skip = options.get_boolean_option(option_dict, 'skip_authors',
'SKIP_GENERATE_AUTHORS')
if should_skip:
return
old_authors = os.path.join(dest_dir, 'AUTHORS.in')
new_authors = os.path.join(dest_dir, 'AUTHORS')
# If there's already an AUTHORS file and it's not writable, just use it
if (os.path.exists(new_authors)
and not os.access(new_authors, os.W_OK)):
return
log.info('[pbr] Generating AUTHORS')
ignore_emails = '(jenkins@review|infra@lists|jenkins@openstack)'
if git_dir is None:
git_dir = _get_git_directory()
if git_dir:
authors = []
# don't include jenkins email address in AUTHORS file
git_log_cmd = ['log', '--format=%aN <%aE>']
authors += _run_git_command(git_log_cmd, git_dir).split('\n')
authors = [a for a in authors if not re.search(ignore_emails, a)]
# get all co-authors from commit messages
co_authors_out = _run_git_command('log', git_dir)
co_authors = re.findall('Co-authored-by:.+', co_authors_out,
re.MULTILINE)
co_authors = [signed.split(":", 1)[1].strip()
for signed in co_authors if signed]
authors += co_authors
authors = sorted(set(authors))
with open(new_authors, 'wb') as new_authors_fh:
if os.path.exists(old_authors):
with open(old_authors, "rb") as old_authors_fh:
new_authors_fh.write(old_authors_fh.read())
new_authors_fh.write(('\n'.join(authors) + '\n')
.encode('utf-8'))

(modified file)

@ -18,6 +18,7 @@ import os
from setuptools.command import easy_install
from pbr.hooks import base
from pbr import options
from pbr import packaging
@ -44,8 +45,8 @@ class CommandsConfig(base.BaseConfig):
easy_install.get_script_args = packaging.override_get_script_args
if packaging.have_sphinx():
self.add_command('pbr.packaging.LocalBuildDoc')
self.add_command('pbr.packaging.LocalBuildLatex')
self.add_command('pbr.builddoc.LocalBuildDoc')
self.add_command('pbr.builddoc.LocalBuildLatex')
if os.path.exists('.testr.conf') and packaging.have_testr():
# There is a .testr.conf file. We want to use it.
@ -54,7 +55,7 @@ class CommandsConfig(base.BaseConfig):
# We seem to still have nose configured
self.add_command('pbr.packaging.NoseTest')
use_egg = packaging.get_boolean_option(
use_egg = options.get_boolean_option(
self.pbr_config, 'use-egg', 'PBR_USE_EGG')
# We always want non-egg install unless explicitly requested
if 'manpages' in self.pbr_config or not use_egg:

pbr/options.py (new file, 48 lines)

@ -0,0 +1,48 @@
# Licensed under the Apache License, Version 2.0 (the "License");
# you may not use this file except in compliance with the License.
# You may obtain a copy of the License at
#
# http://www.apache.org/licenses/LICENSE-2.0
#
# Unless required by applicable law or agreed to in writing, software
# distributed under the License is distributed on an "AS IS" BASIS,
# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or
# implied.
# See the License for the specific language governing permissions and
# limitations under the License.
#
# Copyright (C) 2013 Association of Universities for Research in Astronomy
# (AURA)
#
# Redistribution and use in source and binary forms, with or without
# modification, are permitted provided that the following conditions are met:
#
# 1. Redistributions of source code must retain the above copyright
# notice, this list of conditions and the following disclaimer.
#
# 2. Redistributions in binary form must reproduce the above
# copyright notice, this list of conditions and the following
# disclaimer in the documentation and/or other materials provided
# with the distribution.
#
# 3. The name of AURA and its representatives may not be used to
# endorse or promote products derived from this software without
# specific prior written permission.
#
# THIS SOFTWARE IS PROVIDED BY AURA ``AS IS'' AND ANY EXPRESS OR IMPLIED
# WARRANTIES, INCLUDING, BUT NOT LIMITED TO, THE IMPLIED WARRANTIES OF
# MERCHANTABILITY AND FITNESS FOR A PARTICULAR PURPOSE ARE
# DISCLAIMED. IN NO EVENT SHALL AURA BE LIABLE FOR ANY DIRECT, INDIRECT,
# INCIDENTAL, SPECIAL, EXEMPLARY, OR CONSEQUENTIAL DAMAGES (INCLUDING,
# BUT NOT LIMITED TO, PROCUREMENT OF SUBSTITUTE GOODS OR SERVICES; LOSS
import os
TRUE_VALUES = ('true', '1', 'yes')
def get_boolean_option(option_dict, option_name, env_name):
return ((option_name in option_dict
and option_dict[option_name][1].lower() in TRUE_VALUES) or
str(os.getenv(env_name)).lower() in TRUE_VALUES)

(modified file)

@ -21,21 +21,15 @@ Utilities with minimum-depends for use in setup.py
from __future__ import unicode_literals
from distutils.command import install as du_install
import distutils.errors
from distutils import log
import email
import functools
import io
import itertools
import json
import os
import platform
import re
import subprocess
import sys
try:
import cStringIO
except ImportError:
import io as cStringIO
import pkg_resources
from setuptools.command import easy_install
@ -45,9 +39,10 @@ from setuptools.command import install_scripts
from setuptools.command import sdist
from pbr import extra_files
from pbr import git
from pbr import options
from pbr import version
TRUE_VALUES = ('true', '1', 'yes')
REQUIREMENTS_FILES = ('requirements.txt', 'tools/pip-requires')
TEST_REQUIREMENTS_FILES = ('test-requirements.txt', 'tools/test-requires')
@ -92,7 +87,7 @@ def append_text_list(config, key, text_list):
def _pip_install(links, requires, root=None, option_dict=dict()):
if get_boolean_option(
if options.get_boolean_option(
option_dict, 'skip_pip_install', 'SKIP_PIP_INSTALL'):
return
cmd = [sys.executable, '-m', 'pip.__init__', 'install']
@ -103,7 +98,7 @@ def _pip_install(links, requires, root=None, option_dict=dict()):
cmd.append(link)
# NOTE(ociuhandu): popen on Windows does not accept unicode strings
_run_shell_command(
git._run_shell_command(
cmd + requires,
throw_on_error=True, buffer=False, env=dict(PIP_USE_WHEEL=b"true"))
@ -196,247 +191,6 @@ def parse_dependency_links(requirements_files=None):
return dependency_links
def _run_git_command(cmd, git_dir, **kwargs):
if not isinstance(cmd, (list, tuple)):
cmd = [cmd]
return _run_shell_command(
['git', '--git-dir=%s' % git_dir] + cmd, **kwargs)
def _run_shell_command(cmd, throw_on_error=False, buffer=True, env=None):
if buffer:
out_location = subprocess.PIPE
err_location = subprocess.PIPE
else:
out_location = None
err_location = None
newenv = os.environ.copy()
if env:
newenv.update(env)
output = subprocess.Popen(cmd,
stdout=out_location,
stderr=err_location,
env=newenv)
out = output.communicate()
if output.returncode and throw_on_error:
raise distutils.errors.DistutilsError(
"%s returned %d" % (cmd, output.returncode))
if len(out) == 0 or not out[0] or not out[0].strip():
return ''
return out[0].strip().decode('utf-8')
def _get_git_directory():
return _run_shell_command(['git', 'rev-parse', '--git-dir'])
def _git_is_installed():
try:
# We cannot use 'which git' as it may not be available
# in some distributions, So just try 'git --version'
# to see if we run into trouble
_run_shell_command(['git', '--version'])
except OSError:
return False
return True
def _get_highest_tag(tags):
"""Find the highest tag from a list.
Pass in a list of tag strings and this will return the highest
(latest) as sorted by the pkg_resources version parser.
"""
return max(tags, key=pkg_resources.parse_version)
def get_boolean_option(option_dict, option_name, env_name):
return ((option_name in option_dict
and option_dict[option_name][1].lower() in TRUE_VALUES) or
str(os.getenv(env_name)).lower() in TRUE_VALUES)
def _iter_changelog(changelog):
"""Convert a oneline log iterator to formatted strings.
:param changelog: An iterator of one line log entries like
that given by _iter_log_oneline.
:return: An iterator over (release, formatted changelog) tuples.
"""
first_line = True
current_release = None
yield current_release, "CHANGES\n=======\n\n"
for hash, tags, msg in changelog:
if tags:
current_release = _get_highest_tag(tags)
underline = len(current_release) * '-'
if not first_line:
yield current_release, '\n'
yield current_release, (
"%(tag)s\n%(underline)s\n\n" %
dict(tag=current_release, underline=underline))
if not msg.startswith("Merge "):
if msg.endswith("."):
msg = msg[:-1]
yield current_release, "* %(msg)s\n" % dict(msg=msg)
first_line = False
def _iter_log_oneline(git_dir=None, option_dict=None):
"""Iterate over --oneline log entries if possible.
This parses the output into a structured form but does not apply
presentation logic to the output - making it suitable for different
uses.
:return: An iterator of (hash, tags_set, 1st_line) tuples, or None if
changelog generation is disabled / not available.
"""
if not option_dict:
option_dict = {}
should_skip = get_boolean_option(option_dict, 'skip_changelog',
'SKIP_WRITE_GIT_CHANGELOG')
if should_skip:
return
if git_dir is None:
git_dir = _get_git_directory()
if not git_dir:
return
return _iter_log_inner(git_dir)
def _iter_log_inner(git_dir):
"""Iterate over --oneline log entries.
This parses the output intro a structured form but does not apply
presentation logic to the output - making it suitable for different
uses.
:return: An iterator of (hash, tags_set, 1st_line) tuples.
"""
log.info('[pbr] Generating ChangeLog')
log_cmd = ['log', '--oneline', '--decorate']
changelog = _run_git_command(log_cmd, git_dir)
for line in changelog.split('\n'):
line_parts = line.split()
if len(line_parts) < 2:
continue
# Tags are in a list contained in ()'s. If a commit
# subject that is tagged happens to have ()'s in it
# this will fail
if line_parts[1].startswith('(') and ')' in line:
msg = line.split(')')[1].strip()
else:
msg = " ".join(line_parts[1:])
if "tag:" in line:
tags = set([
tag.split(",")[0]
for tag in line.split(")")[0].split("tag: ")[1:]])
else:
tags = set()
yield line_parts[0], tags, msg
def write_git_changelog(git_dir=None, dest_dir=os.path.curdir,
option_dict=dict(), changelog=None):
"""Write a changelog based on the git changelog."""
if not changelog:
changelog = _iter_log_oneline(git_dir=git_dir, option_dict=option_dict)
if changelog:
changelog = _iter_changelog(changelog)
if not changelog:
return
log.info('[pbr] Writing ChangeLog')
new_changelog = os.path.join(dest_dir, 'ChangeLog')
# If there's already a ChangeLog and it's not writable, just use it
if (os.path.exists(new_changelog)
and not os.access(new_changelog, os.W_OK)):
return
with io.open(new_changelog, "w", encoding="utf-8") as changelog_file:
for release, content in changelog:
changelog_file.write(content)
def generate_authors(git_dir=None, dest_dir='.', option_dict=dict()):
"""Create AUTHORS file using git commits."""
should_skip = get_boolean_option(option_dict, 'skip_authors',
'SKIP_GENERATE_AUTHORS')
if should_skip:
return
old_authors = os.path.join(dest_dir, 'AUTHORS.in')
new_authors = os.path.join(dest_dir, 'AUTHORS')
# If there's already an AUTHORS file and it's not writable, just use it
if (os.path.exists(new_authors)
and not os.access(new_authors, os.W_OK)):
return
log.info('[pbr] Generating AUTHORS')
ignore_emails = '(jenkins@review|infra@lists|jenkins@openstack)'
if git_dir is None:
git_dir = _get_git_directory()
if git_dir:
authors = []
# don't include jenkins email address in AUTHORS file
git_log_cmd = ['log', '--format=%aN <%aE>']
authors += _run_git_command(git_log_cmd, git_dir).split('\n')
authors = [a for a in authors if not re.search(ignore_emails, a)]
# get all co-authors from commit messages
co_authors_out = _run_git_command('log', git_dir)
co_authors = re.findall('Co-authored-by:.+', co_authors_out,
re.MULTILINE)
co_authors = [signed.split(":", 1)[1].strip()
for signed in co_authors if signed]
authors += co_authors
authors = sorted(set(authors))
with open(new_authors, 'wb') as new_authors_fh:
if os.path.exists(old_authors):
with open(old_authors, "rb") as old_authors_fh:
new_authors_fh.write(old_authors_fh.read())
new_authors_fh.write(('\n'.join(authors) + '\n')
.encode('utf-8'))
def _find_git_files(dirname='', git_dir=None):
"""Behave like a file finder entrypoint plugin.
We don't actually use the entrypoints system for this because it runs
at absurd times. We only want to do this when we are building an sdist.
"""
file_list = []
if git_dir is None and _git_is_installed():
git_dir = _get_git_directory()
if git_dir:
log.info("[pbr] In git context, generating filelist from git")
file_list = _run_git_command(['ls-files', '-z'], git_dir)
file_list = file_list.split(b'\x00'.decode('utf-8'))
return [f for f in file_list if f]
_rst_template = """%(heading)s
%(underline)s
.. automodule:: %(module)s
:members:
:undoc-members:
:show-inheritance:
"""
def _find_modules(arg, dirname, files):
for filename in files:
if filename.endswith('.py') and filename != '__init__.py':
arg["%s.%s" % (dirname.replace('/', '.'),
filename[:-3])] = True
class LocalInstall(install.install):
"""Runs python setup.py install in a sensible manner.
@ -643,10 +397,10 @@ class LocalManifestMaker(egg_info.manifest_maker):
self.filelist.append(self.template)
self.filelist.append(self.manifest)
self.filelist.extend(extra_files.get_extra_files())
should_skip = get_boolean_option(option_dict, 'skip_git_sdist',
'SKIP_GIT_SDIST')
should_skip = options.get_boolean_option(option_dict, 'skip_git_sdist',
'SKIP_GIT_SDIST')
if not should_skip:
rcfiles = _find_git_files()
rcfiles = git._find_git_files()
if rcfiles:
self.filelist.extend(rcfiles)
elif os.path.exists(self.manifest):
@ -689,176 +443,25 @@ class LocalSDist(sdist.sdist):
def run(self):
option_dict = self.distribution.get_option_dict('pbr')
changelog = _iter_log_oneline(option_dict=option_dict)
changelog = git._iter_log_oneline(option_dict=option_dict)
if changelog:
changelog = _iter_changelog(changelog)
write_git_changelog(option_dict=option_dict, changelog=changelog)
generate_authors(option_dict=option_dict)
changelog = git._iter_changelog(changelog)
git.write_git_changelog(option_dict=option_dict, changelog=changelog)
git.generate_authors(option_dict=option_dict)
# sdist.sdist is an old style class, can't use super()
sdist.sdist.run(self)
try:
from sphinx import apidoc
from sphinx import application
from sphinx import config
from sphinx import setup_command
class LocalBuildDoc(setup_command.BuildDoc):
command_name = 'build_sphinx'
builders = ['html', 'man']
def _get_source_dir(self):
option_dict = self.distribution.get_option_dict('build_sphinx')
if 'source_dir' in option_dict:
source_dir = os.path.join(option_dict['source_dir'][1], 'api')
else:
source_dir = 'doc/source/api'
if not os.path.exists(source_dir):
os.makedirs(source_dir)
return source_dir
def generate_autoindex(self, excluded_modules=None):
log.info("[pbr] Autodocumenting from %s"
% os.path.abspath(os.curdir))
modules = {}
source_dir = self._get_source_dir()
for pkg in self.distribution.packages:
if '.' not in pkg:
for dirpath, dirnames, files in os.walk(pkg):
_find_modules(modules, dirpath, files)
module_list = set(modules.keys())
if excluded_modules is not None:
module_list -= set(excluded_modules)
module_list = sorted(module_list)
autoindex_filename = os.path.join(source_dir, 'autoindex.rst')
with open(autoindex_filename, 'w') as autoindex:
autoindex.write(""".. toctree::
:maxdepth: 1
""")
for module in module_list:
output_filename = os.path.join(source_dir,
"%s.rst" % module)
heading = "The :mod:`%s` Module" % module
underline = "=" * len(heading)
values = dict(module=module, heading=heading,
underline=underline)
log.info("[pbr] Generating %s"
% output_filename)
with open(output_filename, 'w') as output_file:
output_file.write(_rst_template % values)
autoindex.write(" %s.rst\n" % module)
def _sphinx_tree(self):
source_dir = self._get_source_dir()
cmd = ['apidoc', '.', '-H', 'Modules', '-o', source_dir]
apidoc.main(cmd + self.autodoc_tree_excludes)
def _sphinx_run(self):
if not self.verbose:
status_stream = cStringIO.StringIO()
else:
status_stream = sys.stdout
confoverrides = {}
if self.version:
confoverrides['version'] = self.version
if self.release:
confoverrides['release'] = self.release
if self.today:
confoverrides['today'] = self.today
sphinx_config = config.Config(self.config_dir, 'conf.py', {}, [])
sphinx_config.init_values()
if self.builder == 'man' and len(sphinx_config.man_pages) == 0:
return
app = application.Sphinx(
self.source_dir, self.config_dir,
self.builder_target_dir, self.doctree_dir,
self.builder, confoverrides, status_stream,
freshenv=self.fresh_env, warningiserror=True)
try:
app.build(force_all=self.all_files)
except Exception as err:
from docutils import utils
if isinstance(err, utils.SystemMessage):
sys.stder.write('reST markup error:\n')
sys.stderr.write(err.args[0].encode('ascii',
'backslashreplace'))
sys.stderr.write('\n')
else:
raise
if self.link_index:
src = app.config.master_doc + app.builder.out_suffix
dst = app.builder.get_outfilename('index')
os.symlink(src, dst)
def run(self):
option_dict = self.distribution.get_option_dict('pbr')
if _git_is_installed():
write_git_changelog(option_dict=option_dict)
generate_authors(option_dict=option_dict)
tree_index = get_boolean_option(option_dict,
'autodoc_tree_index_modules',
'AUTODOC_TREE_INDEX_MODULES')
auto_index = get_boolean_option(option_dict,
'autodoc_index_modules',
'AUTODOC_INDEX_MODULES')
if not os.getenv('SPHINX_DEBUG'):
# NOTE(afazekas): These options can be used together,
# but they do a very similar thing in a different way
if tree_index:
self._sphinx_tree()
if auto_index:
self.generate_autoindex(
option_dict.get(
"autodoc_exclude_modules",
[None, ""])[1].split())
for builder in self.builders:
self.builder = builder
self.finalize_options()
self.project = self.distribution.get_name()
self.version = self.distribution.get_version()
self.release = self.distribution.get_version()
if get_boolean_option(option_dict, 'warnerrors', 'WARNERRORS'):
self._sphinx_run()
else:
setup_command.BuildDoc.run(self)
def initialize_options(self):
# Not a new style class, super keyword does not work.
setup_command.BuildDoc.initialize_options(self)
# NOTE(dstanek): exclude setup.py from the autodoc tree index
# builds because all projects will have an issue with it
self.autodoc_tree_excludes = ['setup.py']
def finalize_options(self):
# Not a new style class, super keyword does not work.
setup_command.BuildDoc.finalize_options(self)
# Allow builders to be configurable - as a comma separated list.
if not isinstance(self.builders, list) and self.builders:
self.builders = self.builders.split(',')
# NOTE(dstanek): check for autodoc tree exclusion overrides
# in the setup.cfg
opt = 'autodoc_tree_excludes'
option_dict = self.distribution.get_option_dict('pbr')
if opt in option_dict:
self.autodoc_tree_excludes = option_dict[opt][1]
self.ensure_string_list(opt)
class LocalBuildLatex(LocalBuildDoc):
builders = ['latex']
command_name = 'build_sphinx_latex'
from pbr import builddoc
_have_sphinx = True
# Import the symbols from their new home so the package API stays
# compatible.
LocalBuildDoc = builddoc.LocalBuildDoc
LocalBuildLatex = builddoc.LocalBuildLatex
except ImportError:
_have_sphinx = False
LocalBuildDoc = None
LocalBuildLatex = None
def have_sphinx():
@ -878,7 +481,7 @@ def _get_increment_kwargs(git_dir, tag):
version_spec = tag + "..HEAD"
else:
version_spec = "HEAD"
changelog = _run_git_command(['log', version_spec], git_dir)
changelog = git._run_git_command(['log', version_spec], git_dir)
header_len = len(' sem-ver:')
commands = [line[header_len:].strip() for line in changelog.split('\n')
if line.lower().startswith(' sem-ver:')]
@ -902,6 +505,19 @@ def _get_increment_kwargs(git_dir, tag):
return result
def write_pbr_json(cmd, basename, filename):
git_dir = git._run_git_functions()
if not git_dir:
return
values = dict()
git_version = git.get_git_short_sha(git_dir)
is_release = git.get_is_release(git_dir)
if git_version is not None:
values['git_version'] = git_version
values['is_release'] = is_release
cmd.write_file('pbr', filename, json.dumps(values))
def _get_revno_and_last_tag(git_dir):
"""Return the commit data about the most recent tag.
@ -909,7 +525,7 @@ def _get_revno_and_last_tag(git_dir):
tags then we fall back to counting commits since the beginning
of time.
"""
changelog = _iter_log_oneline(git_dir=git_dir)
changelog = git._iter_log_oneline(git_dir=git_dir)
row_count = 0
for row_count, (ignored, tag_set, ignored) in enumerate(changelog):
version_tags = set()
@ -938,8 +554,6 @@ def _get_version_from_git_target(git_dir, target_version):
exception is raised.
:return: A semver version object.
"""
sha = _run_git_command(
['log', '-n1', '--pretty=format:%h'], git_dir)
tag, distance = _get_revno_and_last_tag(git_dir)
last_semver = version.SemanticVersion.from_pip_string(tag or '0')
if distance == 0:
@ -955,9 +569,9 @@ def _get_version_from_git_target(git_dir, target_version):
if distance == 0:
return last_semver
if target_version is not None:
return target_version.to_dev(distance, sha)
return target_version.to_dev(distance)
else:
return new_version.to_dev(distance, sha)
return new_version.to_dev(distance)
def _get_version_from_git(pre_version=None):
@ -972,10 +586,10 @@ def _get_version_from_git(pre_version=None):
:param pre_version: If supplied use this as the target version rather than
inferring one from the last tag + commit messages.
"""
git_dir = _get_git_directory()
if git_dir and _git_is_installed():
git_dir = git._get_git_directory()
if git_dir and git._git_is_installed():
try:
tagged = _run_git_command(
tagged = git._run_git_command(
['describe', '--exact-match'], git_dir,
throw_on_error=True).replace('-', '.')
target_version = version.SemanticVersion.from_pip_string(tagged)

(modified file)

@ -48,7 +48,7 @@ import fixtures
import testresources
import testtools
from pbr import packaging
from pbr import options
class DiveDir(fixtures.Fixture):
@ -81,10 +81,10 @@ class BaseTestCase(testtools.TestCase, testresources.ResourcedTestCase):
if test_timeout > 0:
self.useFixture(fixtures.Timeout(test_timeout, gentle=True))
if os.environ.get('OS_STDOUT_CAPTURE') in packaging.TRUE_VALUES:
if os.environ.get('OS_STDOUT_CAPTURE') in options.TRUE_VALUES:
stdout = self.useFixture(fixtures.StringStream('stdout')).stream
self.useFixture(fixtures.MonkeyPatch('sys.stdout', stdout))
if os.environ.get('OS_STDERR_CAPTURE') in packaging.TRUE_VALUES:
if os.environ.get('OS_STDERR_CAPTURE') in options.TRUE_VALUES:
stderr = self.useFixture(fixtures.StringStream('stderr')).stream
self.useFixture(fixtures.MonkeyPatch('sys.stderr', stderr))
self.log_fixture = self.useFixture(

(modified file)

@ -47,6 +47,7 @@ import mock
import testscenarios
from testtools import matchers
from pbr import git
from pbr import packaging
from pbr.tests import base
@ -189,16 +190,16 @@ class TestPackagingInPlainDirectory(base.BaseTestCase):
class TestPresenceOfGit(base.BaseTestCase):
def testGitIsInstalled(self):
with mock.patch.object(packaging,
with mock.patch.object(git,
'_run_shell_command') as _command:
_command.return_value = 'git version 1.8.4.1'
self.assertEqual(True, packaging._git_is_installed())
self.assertEqual(True, git._git_is_installed())
def testGitIsNotInstalled(self):
with mock.patch.object(packaging,
with mock.patch.object(git,
'_run_shell_command') as _command:
_command.side_effect = OSError
self.assertEqual(False, packaging._git_is_installed())
self.assertEqual(False, git._git_is_installed())
class TestNestedRequirements(base.BaseTestCase):
@ -259,14 +260,14 @@ class TestVersions(base.BaseTestCase):
self.repo.tag('1.2.3')
self.repo.commit('Sem-Ver: api-break')
version = packaging._get_version_from_git()
self.assertThat(version, matchers.StartsWith('2.0.0.dev1.g'))
self.assertThat(version, matchers.StartsWith('2.0.0.dev1'))
def test_capitalized_headers_partial(self):
self.repo.commit()
self.repo.tag('1.2.3')
self.repo.commit('Sem-ver: api-break')
version = packaging._get_version_from_git()
self.assertThat(version, matchers.StartsWith('2.0.0.dev1.g'))
self.assertThat(version, matchers.StartsWith('2.0.0.dev1'))
def test_tagged_version_has_tag_version(self):
self.repo.commit()
@ -279,28 +280,28 @@ class TestVersions(base.BaseTestCase):
self.repo.tag('1.2.3')
self.repo.commit()
version = packaging._get_version_from_git()
self.assertThat(version, matchers.StartsWith('1.2.4.dev1.g'))
self.assertThat(version, matchers.StartsWith('1.2.4.dev1'))
def test_untagged_version_minor_bump(self):
self.repo.commit()
self.repo.tag('1.2.3')
self.repo.commit('sem-ver: deprecation')
version = packaging._get_version_from_git()
self.assertThat(version, matchers.StartsWith('1.3.0.dev1.g'))
self.assertThat(version, matchers.StartsWith('1.3.0.dev1'))
def test_untagged_version_major_bump(self):
self.repo.commit()
self.repo.tag('1.2.3')
self.repo.commit('sem-ver: api-break')
version = packaging._get_version_from_git()
self.assertThat(version, matchers.StartsWith('2.0.0.dev1.g'))
self.assertThat(version, matchers.StartsWith('2.0.0.dev1'))
def test_untagged_version_has_dev_version_preversion(self):
self.repo.commit()
self.repo.tag('1.2.3')
self.repo.commit()
version = packaging._get_version_from_git('1.2.5')
self.assertThat(version, matchers.StartsWith('1.2.5.dev1.g'))
self.assertThat(version, matchers.StartsWith('1.2.5.dev1'))
def test_preversion_too_low_simple(self):
# That is, the target version is either already released or not high
@ -359,32 +360,32 @@ class TestVersions(base.BaseTestCase):
# when the tree is tagged and its wrong:
self.repo.tag('badver')
version = packaging._get_version_from_git()
self.assertThat(version, matchers.StartsWith('1.0.1.dev1.g'))
self.assertThat(version, matchers.StartsWith('1.0.1.dev1'))
# When the tree isn't tagged, we also fall through.
self.repo.commit()
version = packaging._get_version_from_git()
self.assertThat(version, matchers.StartsWith('1.0.1.dev2.g'))
self.assertThat(version, matchers.StartsWith('1.0.1.dev2'))
# We don't fall through x.y versions
self.repo.commit()
self.repo.tag('1.2')
self.repo.commit()
self.repo.tag('badver2')
version = packaging._get_version_from_git()
self.assertThat(version, matchers.StartsWith('1.2.1.dev1.g'))
self.assertThat(version, matchers.StartsWith('1.2.1.dev1'))
# Or x.y.z versions
self.repo.commit()
self.repo.tag('1.2.3')
self.repo.commit()
self.repo.tag('badver3')
version = packaging._get_version_from_git()
self.assertThat(version, matchers.StartsWith('1.2.4.dev1.g'))
self.assertThat(version, matchers.StartsWith('1.2.4.dev1'))
# Or alpha/beta/pre versions
self.repo.commit()
self.repo.tag('1.2.4.0a1')
self.repo.commit()
self.repo.tag('badver4')
version = packaging._get_version_from_git()
self.assertThat(version, matchers.StartsWith('1.2.4.dev1.g'))
self.assertThat(version, matchers.StartsWith('1.2.4.dev1'))
def test_valid_tag_honoured(self):
# Fix for bug 1370608 - we converted any target into a 'dev version'

(modified file)

@ -30,6 +30,8 @@ except ImportError:
import fixtures
import testscenarios
from pbr import git
from pbr import options
from pbr import packaging
from pbr.tests import base
@ -40,35 +42,35 @@ class SkipFileWrites(base.BaseTestCase):
('changelog_option_true',
dict(option_key='skip_changelog', option_value='True',
env_key='SKIP_WRITE_GIT_CHANGELOG', env_value=None,
pkg_func=packaging.write_git_changelog, filename='ChangeLog')),
pkg_func=git.write_git_changelog, filename='ChangeLog')),
('changelog_option_false',
dict(option_key='skip_changelog', option_value='False',
env_key='SKIP_WRITE_GIT_CHANGELOG', env_value=None,
pkg_func=packaging.write_git_changelog, filename='ChangeLog')),
pkg_func=git.write_git_changelog, filename='ChangeLog')),
('changelog_env_true',
dict(option_key='skip_changelog', option_value='False',
env_key='SKIP_WRITE_GIT_CHANGELOG', env_value='True',
pkg_func=packaging.write_git_changelog, filename='ChangeLog')),
pkg_func=git.write_git_changelog, filename='ChangeLog')),
('changelog_both_true',
dict(option_key='skip_changelog', option_value='True',
env_key='SKIP_WRITE_GIT_CHANGELOG', env_value='True',
pkg_func=packaging.write_git_changelog, filename='ChangeLog')),
pkg_func=git.write_git_changelog, filename='ChangeLog')),
('authors_option_true',
dict(option_key='skip_authors', option_value='True',
env_key='SKIP_GENERATE_AUTHORS', env_value=None,
pkg_func=packaging.generate_authors, filename='AUTHORS')),
pkg_func=git.generate_authors, filename='AUTHORS')),
('authors_option_false',
dict(option_key='skip_authors', option_value='False',
env_key='SKIP_GENERATE_AUTHORS', env_value=None,
pkg_func=packaging.generate_authors, filename='AUTHORS')),
pkg_func=git.generate_authors, filename='AUTHORS')),
('authors_env_true',
dict(option_key='skip_authors', option_value='False',
env_key='SKIP_GENERATE_AUTHORS', env_value='True',
pkg_func=packaging.generate_authors, filename='AUTHORS')),
pkg_func=git.generate_authors, filename='AUTHORS')),
('authors_both_true',
dict(option_key='skip_authors', option_value='True',
env_key='SKIP_GENERATE_AUTHORS', env_value='True',
pkg_func=packaging.generate_authors, filename='AUTHORS')),
pkg_func=git.generate_authors, filename='AUTHORS')),
]
def setUp(self):
@ -94,7 +96,7 @@ class SkipFileWrites(base.BaseTestCase):
option_dict=self.option_dict)
self.assertEqual(
not os.path.exists(self.filename),
(self.option_value.lower() in packaging.TRUE_VALUES
(self.option_value.lower() in options.TRUE_VALUES
or self.env_value is not None))
_changelog_content = """04316fe (review/monty_taylor/27519) Make python
@ -127,8 +129,8 @@ class GitLogsTest(base.BaseTestCase):
"stdout": BytesIO(_changelog_content.encode('utf-8'))
}))
packaging.write_git_changelog(git_dir=self.git_dir,
dest_dir=self.temp_path)
git.write_git_changelog(git_dir=self.git_dir,
dest_dir=self.temp_path)
with open(os.path.join(self.temp_path, "ChangeLog"), "r") as ch_fh:
changelog_contents = ch_fh.read()
@ -169,14 +171,14 @@ class GitLogsTest(base.BaseTestCase):
return cmd_map[" ".join(cmd)]
self.useFixture(fixtures.MonkeyPatch(
"pbr.packaging._run_shell_command",
"pbr.git._run_shell_command",
_fake_run_shell_command))
with open(os.path.join(self.temp_path, "AUTHORS.in"), "w") as auth_fh:
auth_fh.write("%s\n" % author_old)
packaging.generate_authors(git_dir=self.git_dir,
dest_dir=self.temp_path)
git.generate_authors(git_dir=self.git_dir,
dest_dir=self.temp_path)
with open(os.path.join(self.temp_path, "AUTHORS"), "r") as auth_fh:
authors = auth_fh.read()

(modified file)

@ -36,10 +36,9 @@ class TestSemanticVersion(base.BaseTestCase):
pre_base2 = version.SemanticVersion(1, 2, 3, 'a', 4)
pre_type = version.SemanticVersion(1, 2, 3, 'b', 4)
pre_serial = version.SemanticVersion(1, 2, 3, 'a', 5)
dev_base = version.SemanticVersion(1, 2, 3, dev_count=6, githash='6')
dev_base2 = version.SemanticVersion(1, 2, 3, dev_count=6, githash='6')
dev_count = version.SemanticVersion(1, 2, 3, dev_count=7, githash='6')
githash = version.SemanticVersion(1, 2, 3, dev_count=6, githash='7')
dev_base = version.SemanticVersion(1, 2, 3, dev_count=6)
dev_base2 = version.SemanticVersion(1, 2, 3, dev_count=6)
dev_count = version.SemanticVersion(1, 2, 3, dev_count=7)
self.assertEqual(base, base2)
self.assertNotEqual(base, major)
self.assertNotEqual(base, minor)
@ -47,15 +46,12 @@ class TestSemanticVersion(base.BaseTestCase):
self.assertNotEqual(base, pre_type)
self.assertNotEqual(base, pre_serial)
self.assertNotEqual(base, dev_count)
self.assertNotEqual(base, githash)
self.assertEqual(pre_base, pre_base2)
self.assertNotEqual(pre_base, pre_type)
self.assertNotEqual(pre_base, pre_serial)
self.assertNotEqual(pre_base, dev_count)
self.assertNotEqual(pre_base, githash)
self.assertEqual(dev_base, dev_base2)
self.assertNotEqual(dev_base, dev_count)
self.assertNotEqual(dev_base, githash)
simple = version.SemanticVersion(1)
explicit_minor = version.SemanticVersion(1, 0)
explicit_patch = version.SemanticVersion(1, 0, 0)
@ -72,9 +68,8 @@ class TestSemanticVersion(base.BaseTestCase):
pre_beta = version.SemanticVersion(1, 2, 3, 'b', 3)
pre_rc = version.SemanticVersion(1, 2, 3, 'rc', 2)
pre_serial = version.SemanticVersion(1, 2, 3, 'a', 5)
dev_base = version.SemanticVersion(1, 2, 3, dev_count=6, githash='6')
dev_count = version.SemanticVersion(1, 2, 3, dev_count=7, githash='6')
githash = version.SemanticVersion(1, 2, 3, dev_count=6, githash='7')
dev_base = version.SemanticVersion(1, 2, 3, dev_count=6)
dev_count = version.SemanticVersion(1, 2, 3, dev_count=7)
self.assertThat(base, matchers.LessThan(major))
self.assertThat(major, matchers.GreaterThan(base))
self.assertThat(base, matchers.LessThan(minor))
@ -97,7 +92,6 @@ class TestSemanticVersion(base.BaseTestCase):
self.assertRaises(TypeError, operator.lt, dev_base, pre_alpha)
self.assertThat(dev_base, matchers.LessThan(dev_count))
self.assertThat(dev_count, matchers.GreaterThan(dev_base))
self.assertRaises(TypeError, operator.lt, dev_base, githash)
def test_from_pip_string_legacy_alpha(self):
expected = version.SemanticVersion(
@ -136,37 +130,6 @@ class TestSemanticVersion(base.BaseTestCase):
parsed = from_pip_string('2014.2.b2')
self.assertEqual(expected, parsed)
def test_from_pip_string_legacy_dev(self):
expected = version.SemanticVersion(
0, 10, 1, dev_count=3, githash='83bef74')
parsed = from_pip_string('0.10.1.3.g83bef74')
self.assertEqual(expected, parsed)
def test_from_pip_string_legacy_corner_case_dev(self):
# If the last tag is missing, or if the last tag has less than 3
# components, we need to 0 extend on parsing.
expected = version.SemanticVersion(
0, 0, 0, dev_count=1, githash='83bef74')
parsed = from_pip_string('0.0.g83bef74')
self.assertEqual(expected, parsed)
def test_from_pip_string_legacy_short_dev(self):
# If the last tag is missing, or if the last tag has less than 3
# components, we need to 0 extend on parsing.
expected = version.SemanticVersion(
0, 0, 0, dev_count=1, githash='83bef74')
parsed = from_pip_string('0.g83bef74')
self.assertEqual(expected, parsed)
def test_from_pip_string_dev_missing_patch_version(self):
expected = version.SemanticVersion(
2014, 2, dev_count=21, githash='c4c8d0b')
parsed = from_pip_string('2014.2.dev21.gc4c8d0b')
self.assertEqual(expected, parsed)
def test_from_pip_string_pure_git_hash(self):
self.assertRaises(ValueError, from_pip_string, '6eed5ae')
def test_final_version(self):
semver = version.SemanticVersion(1, 2, 3)
self.assertEqual((1, 2, 3, 'final', 0), semver.version_tuple())
@ -183,13 +146,13 @@ class TestSemanticVersion(base.BaseTestCase):
self.assertEqual(semver, from_pip_string("1.0.0"))
def test_dev_version(self):
semver = version.SemanticVersion(1, 2, 4, dev_count=5, githash='12')
semver = version.SemanticVersion(1, 2, 4, dev_count=5)
self.assertEqual((1, 2, 4, 'dev', 4), semver.version_tuple())
self.assertEqual("1.2.4", semver.brief_string())
self.assertEqual("1.2.4~dev5+g12", semver.debian_string())
self.assertEqual("1.2.4.dev5.g12", semver.release_string())
self.assertEqual("1.2.3.dev5+g12", semver.rpm_string())
self.assertEqual(semver, from_pip_string("1.2.4.dev5.g12"))
self.assertEqual("1.2.4~dev5", semver.debian_string())
self.assertEqual("1.2.4.dev5", semver.release_string())
self.assertEqual("1.2.3.dev5", semver.rpm_string())
self.assertEqual(semver, from_pip_string("1.2.4.dev5"))
def test_dev_no_git_version(self):
semver = version.SemanticVersion(1, 2, 4, dev_count=5)
@ -211,7 +174,7 @@ class TestSemanticVersion(base.BaseTestCase):
def test_alpha_dev_version(self):
self.assertRaises(
ValueError, version.SemanticVersion, 1, 2, 4, 'a', 1, 5, '12')
ValueError, version.SemanticVersion, 1, 2, 4, 'a', 1, '12')
def test_alpha_version(self):
semver = version.SemanticVersion(1, 2, 4, 'a', 1)
@ -251,7 +214,7 @@ class TestSemanticVersion(base.BaseTestCase):
def test_beta_dev_version(self):
self.assertRaises(
ValueError, version.SemanticVersion, 1, 2, 4, 'b', 1, 5, '12')
ValueError, version.SemanticVersion, 1, 2, 4, 'b', 5, '12')
def test_beta_version(self):
semver = version.SemanticVersion(1, 2, 4, 'b', 1)
@ -312,7 +275,7 @@ class TestSemanticVersion(base.BaseTestCase):
def test_rc_dev_version(self):
self.assertRaises(
ValueError, version.SemanticVersion, 1, 2, 4, 'rc', 1, 5, '12')
ValueError, version.SemanticVersion, 1, 2, 4, 'rc', 1, '12')
def test_rc_version(self):
semver = version.SemanticVersion(1, 2, 4, 'rc', 1)
@ -325,17 +288,17 @@ class TestSemanticVersion(base.BaseTestCase):
def test_to_dev(self):
self.assertEqual(
version.SemanticVersion(1, 2, 3, dev_count=1, githash='foo'),
version.SemanticVersion(1, 2, 3).to_dev(1, 'foo'))
version.SemanticVersion(1, 2, 3, dev_count=1),
version.SemanticVersion(1, 2, 3).to_dev(1))
self.assertEqual(
version.SemanticVersion(1, 2, 3, dev_count=1, githash='foo'),
version.SemanticVersion(1, 2, 3, 'rc', 1).to_dev(1, 'foo'))
version.SemanticVersion(1, 2, 3, dev_count=1),
version.SemanticVersion(1, 2, 3, 'rc', 1).to_dev(1))
def test_to_release(self):
self.assertEqual(
version.SemanticVersion(1, 2, 3),
version.SemanticVersion(
1, 2, 3, dev_count=1, githash='foo').to_release())
1, 2, 3, dev_count=1).to_release())
self.assertEqual(
version.SemanticVersion(1, 2, 3),
version.SemanticVersion(1, 2, 3, 'rc', 1).to_release())

(modified file)

@ -354,8 +354,8 @@ def setup_cfg_to_setup_kwargs(config):
elif arg == 'cmdclass':
cmdclass = {}
dist = Distribution()
for cls in in_cfg_value:
cls = resolve_name(cls)
for cls_name in in_cfg_value:
cls = resolve_name(cls_name)
cmd = cls(dist)
cmdclass[cmd.get_command_name()] = cls
in_cfg_value = cmdclass

(modified file)

@ -38,8 +38,9 @@ class SemanticVersion(object):
See the pbr doc 'semver' for details on the semantics.
"""
def __init__(self, major, minor=0, patch=0, prerelease_type=None,
prerelease=None, dev_count=None, githash=None):
def __init__(
self, major, minor=0, patch=0, prerelease_type=None,
prerelease=None, dev_count=None):
"""Create a SemanticVersion.
:param major: Major component of the version.
@ -50,12 +51,11 @@ class SemanticVersion(object):
:param prerelease: For prerelease versions, what number prerelease.
Defaults to 0.
:param dev_count: How many commits since the last release.
:param githash: What tree hash is this version for.
:raises: ValueError if both a prerelease version and dev_count or
githash are supplied. This is because semver (see the pbr semver
documentation) does not permit both a prerelease version and a dev
marker at the same time.
:raises: ValueError if both a prerelease version and dev_count is
supplied. This is because semver (see the pbr semver documentation)
does not permit both a prerelease version and a dev marker at the same
time.
"""
self._major = major
self._minor = minor
@ -65,7 +65,6 @@ class SemanticVersion(object):
if self._prerelease_type and not self._prerelease:
self._prerelease = 0
self._dev_count = dev_count
self._githash = githash
if prerelease_type is not None and dev_count is not None:
raise ValueError(
"invalid version: cannot have prerelease and dev strings %s %s"
@ -108,13 +107,8 @@ class SemanticVersion(object):
if other._dev_count:
if self._dev_count < other._dev_count:
return True
elif self._dev_count > other._dev_count:
else:
return False
elif self._githash == other._githash:
# == it not <
return False
raise TypeError(
"same version with different hash has no defined order")
elif other._prerelease_type:
raise TypeError(
"ordering pre-release with dev builds is undefined")
@ -189,7 +183,6 @@ class SemanticVersion(object):
dev_count = None
prerelease_type = None
prerelease = None
githash = None
def _parse_type(segment):
# Discard leading digits (the 0 in 0a1)
@ -226,19 +219,13 @@ class SemanticVersion(object):
component = remainder[0]
if component.startswith('dev'):
dev_count = int(component[3:])
elif component.startswith('g'):
# git hash - so use a dev_count of 1 as we have to have one
dev_count = 1
githash = component[1:]
else:
raise ValueError(
'Unknown remainder %r in %r'
% (remainder, version_string))
if len(remainder) > 1:
githash = remainder[1][1:]
return SemanticVersion(
major, minor, patch, prerelease_type=prerelease_type,
prerelease=prerelease, dev_count=dev_count, githash=githash)
prerelease=prerelease, dev_count=dev_count)
def brief_string(self):
"""Return the short version minus any alpha/beta tags."""
@ -250,7 +237,7 @@ class SemanticVersion(object):
This translates the PEP440/semver precedence rules into Debian version
sorting operators.
"""
return self._long_version("~", "+g")
return self._long_version("~")
def decrement(self, minor=False, major=False):
"""Return a decremented SemanticVersion.
@ -323,14 +310,13 @@ class SemanticVersion(object):
new_major, new_minor, new_patch,
new_prerelease_type, new_prerelease)
def _long_version(self, pre_separator, hash_separator, rc_marker=""):
def _long_version(self, pre_separator, rc_marker=""):
"""Construct a long string version of this semver.
:param pre_separator: What separator to use between components
that sort before rather than after. If None, use . and lower the
version number of the component to preserve sorting. (Used for
rpm support)
:param hash_separator: What separator to use to append the git hash.
"""
if ((self._prerelease_type or self._dev_count)
and pre_separator is None):
@ -346,9 +332,6 @@ class SemanticVersion(object):
segments.append(pre_separator)
segments.append('dev')
segments.append(self._dev_count)
if self._githash:
segments.append(hash_separator)
segments.append(self._githash)
return "".join(str(s) for s in segments)
def release_string(self):
@ -356,7 +339,7 @@ class SemanticVersion(object):
This including suffixes indicating VCS status.
"""
return self._long_version(".", ".g", "0")
return self._long_version(".", "0")
def rpm_string(self):
"""Return the version number to use when building an RPM package.
@ -366,17 +349,15 @@ class SemanticVersion(object):
~ operator in dpkg), we show all prerelease versions as being versions
of the release before.
"""
return self._long_version(None, "+g")
return self._long_version(None)
def to_dev(self, dev_count, githash):
def to_dev(self, dev_count):
"""Return a development version of this semver.
:param dev_count: The number of commits since the last release.
:param githash: The git hash of the tree with this version.
"""
return SemanticVersion(
self._major, self._minor, self._patch, dev_count=dev_count,
githash=githash)
self._major, self._minor, self._patch, dev_count=dev_count)
def to_release(self):
"""Discard any pre-release or dev metadata.

(modified file)

@ -38,6 +38,10 @@ distutils.setup_keywords =
pbr = pbr.core:pbr
distutils.commands =
testr = pbr.testr_command:Testr
egg_info.writers =
pbr.json = pbr.packaging:write_pbr_json
console_scripts =
pbr = pbr.cmd.main:main
[build_sphinx]
all_files = 1

(modified file)

@ -6,6 +6,11 @@ function mkvenv {
rm -rf $venv
virtualenv $venv
$venv/bin/pip install -U pip wheel
# If a change to PBR is being tested, preinstall from that source tree
if [ -n "$PBR_CHANGE" ] ; then
$venv/bin/pip install $pbrsdistdir
fi
}
# BASE should be a directory with a subdir called "new" and in that
@ -35,6 +40,11 @@ pbrsdistdir=$tmpdir/pbrsdist
git clone $REPODIR/pbr $pbrsdistdir
cd $pbrsdistdir
# Flag whether a change to PBR is being tested
if git fetch $ZUUL_URL/$ZUUL_PROJECT $ZUUL_REF ; then
PBR_CHANGE=1
fi
eptest=$tmpdir/eptest
mkdir $eptest
cd $eptest

(modified file)

@ -7,7 +7,8 @@ envlist = py33,py34,py26,py27,pypy,pep8,docs
usedevelop = True
install_command = pip install {opts} {packages}
setenv = VIRTUAL_ENV={envdir}
deps = -r{toxinidir}/requirements.txt
deps = .
-r{toxinidir}/requirements.txt
-r{toxinidir}/test-requirements.txt
commands =
python setup.py testr --testr-args='{posargs}'