Merge branch 'stable-2.14' into stable-2.15
* stable-2.14:
  - Document that the build works with Python 2 or 3
  - merge_jars.py: Fix for python 3 compatibility
  - project.py: decode byte output from check_result
  - license and doc: Add support for python3
  - Bazel: Make build tool chain python 3 compatible

Change-Id: Ibe8a97c021b5b2bcaa2bd075ae28888e5762a4d6
This commit is contained in:
commit
f12894007f
|
@ -3,7 +3,7 @@
|
||||||
[[installation]]
|
[[installation]]
|
||||||
== Installation
|
== Installation
|
||||||
|
|
||||||
You need to use Python 2, Java 8, and Node.js for building gerrit.
|
You need to use Python (2 or 3), Java 8, and Node.js for building gerrit.
|
||||||
|
|
||||||
You can install Bazel from bazel.io:
|
You can install Bazel from bazel.io:
|
||||||
https://www.bazel.io/versions/master/docs/install.html
|
https://www.bazel.io/versions/master/docs/install.html
|
||||||
|
|
|
@ -229,12 +229,16 @@ opts.add_option('--no-searchbox', action="store_false", dest='searchbox',
|
||||||
options, _ = opts.parse_args()
|
options, _ = opts.parse_args()
|
||||||
|
|
||||||
try:
|
try:
|
||||||
out_file = open(options.out, 'w')
|
try:
|
||||||
src_file = open(options.src, 'r')
|
out_file = open(options.out, 'w', errors='ignore')
|
||||||
|
src_file = open(options.src, 'r', errors='ignore')
|
||||||
|
except TypeError:
|
||||||
|
out_file = open(options.out, 'w')
|
||||||
|
src_file = open(options.src, 'r')
|
||||||
last_line = ''
|
last_line = ''
|
||||||
ignore_next_line = False
|
ignore_next_line = False
|
||||||
last_title = ''
|
last_title = ''
|
||||||
for line in src_file.xreadlines():
|
for line in src_file:
|
||||||
if PAT_GERRIT.match(last_line):
|
if PAT_GERRIT.match(last_line):
|
||||||
# Case of "GERRIT\n------" at the footer
|
# Case of "GERRIT\n------" at the footer
|
||||||
out_file.write(GERRIT_UPLINK)
|
out_file.write(GERRIT_UPLINK)
|
||||||
|
|
|
@ -113,8 +113,13 @@ for n in sorted(graph.keys()):
|
||||||
print()
|
print()
|
||||||
print("[[%s_license]]" % safename)
|
print("[[%s_license]]" % safename)
|
||||||
print("----")
|
print("----")
|
||||||
with open(n[2:].replace(":", "/")) as fd:
|
filename = n[2:].replace(":", "/")
|
||||||
copyfileobj(fd, stdout)
|
try:
|
||||||
|
with open(filename, errors='ignore') as fd:
|
||||||
|
copyfileobj(fd, stdout)
|
||||||
|
except TypeError:
|
||||||
|
with open(filename) as fd:
|
||||||
|
copyfileobj(fd, stdout)
|
||||||
print()
|
print()
|
||||||
print("----")
|
print("----")
|
||||||
print()
|
print()
|
||||||
|
|
|
@ -57,7 +57,7 @@ def retrieve_ext_location():
|
||||||
return check_output(['bazel', 'info', 'output_base']).strip()
|
return check_output(['bazel', 'info', 'output_base']).strip()
|
||||||
|
|
||||||
def gen_bazel_path():
|
def gen_bazel_path():
|
||||||
bazel = check_output(['which', 'bazel']).strip()
|
bazel = check_output(['which', 'bazel']).strip().decode('UTF-8')
|
||||||
with open(path.join(ROOT, ".bazel_path"), 'w') as fd:
|
with open(path.join(ROOT, ".bazel_path"), 'w') as fd:
|
||||||
fd.write("bazel=%s\n" % bazel)
|
fd.write("bazel=%s\n" % bazel)
|
||||||
fd.write("PATH=%s\n" % environ["PATH"])
|
fd.write("PATH=%s\n" % environ["PATH"])
|
||||||
|
@ -262,7 +262,7 @@ def gen_factorypath(ext):
|
||||||
doc.writexml(fd, addindent='\t', newl='\n', encoding='UTF-8')
|
doc.writexml(fd, addindent='\t', newl='\n', encoding='UTF-8')
|
||||||
|
|
||||||
try:
|
try:
|
||||||
ext_location = retrieve_ext_location()
|
ext_location = retrieve_ext_location().decode("utf-8")
|
||||||
gen_project(args.project_name)
|
gen_project(args.project_name)
|
||||||
gen_classpath(ext_location)
|
gen_classpath(ext_location)
|
||||||
gen_factorypath(ext_location)
|
gen_factorypath(ext_location)
|
||||||
|
|
|
@ -40,7 +40,7 @@ def hash_bower_component(hash_obj, path):
|
||||||
if f == '.bower.json':
|
if f == '.bower.json':
|
||||||
continue
|
continue
|
||||||
p = os.path.join(root, f)
|
p = os.path.join(root, f)
|
||||||
hash_obj.update(p[len(path)+1:])
|
hash_obj.update(p[len(path)+1:].encode("utf-8"))
|
||||||
hash_obj.update(open(p).read())
|
hash_obj.update(open(p, "rb").read())
|
||||||
|
|
||||||
return hash_obj
|
return hash_obj
|
||||||
|
|
|
@ -68,7 +68,7 @@ def ignore_deps(info):
|
||||||
deps = info.get('dependencies')
|
deps = info.get('dependencies')
|
||||||
if deps:
|
if deps:
|
||||||
with open(os.path.join('.bowerrc'), 'w') as f:
|
with open(os.path.join('.bowerrc'), 'w') as f:
|
||||||
json.dump({'ignoredDependencies': deps.keys()}, f)
|
json.dump({'ignoredDependencies': list(deps.keys())}, f)
|
||||||
|
|
||||||
|
|
||||||
def cache_entry(name, package, version, sha1):
|
def cache_entry(name, package, version, sha1):
|
||||||
|
|
|
@ -36,7 +36,7 @@ def is_bundled(tar):
|
||||||
def bundle_dependencies():
|
def bundle_dependencies():
|
||||||
with open('package.json') as f:
|
with open('package.json') as f:
|
||||||
package = json.load(f)
|
package = json.load(f)
|
||||||
package['bundledDependencies'] = package['dependencies'].keys()
|
package['bundledDependencies'] = list(package['dependencies'].keys())
|
||||||
with open('package.json', 'w') as f:
|
with open('package.json', 'w') as f:
|
||||||
json.dump(package, f)
|
json.dump(package, f)
|
||||||
|
|
||||||
|
|
|
@ -39,14 +39,12 @@ try:
|
||||||
continue
|
continue
|
||||||
elif n.startswith(SERVICES):
|
elif n.startswith(SERVICES):
|
||||||
# Concatenate all provider configuration files.
|
# Concatenate all provider configuration files.
|
||||||
myfile = inzip.open(n, 'r')
|
services[n] += inzip.read(n).decode("UTF-8")
|
||||||
myfile = io.TextIOWrapper(myfile, encoding='iso-8859-1', newline='')
|
|
||||||
services[n] += myfile.read()
|
|
||||||
continue
|
continue
|
||||||
outzip.writestr(info, inzip.read(n))
|
outzip.writestr(info, inzip.read(n))
|
||||||
seen.add(n)
|
seen.add(n)
|
||||||
|
|
||||||
for n, v in services.items():
|
for n, v in list(services.items()):
|
||||||
outzip.writestr(n, v)
|
outzip.writestr(n, v)
|
||||||
except Exception as err:
|
except Exception as err:
|
||||||
exit('Failed to merge jars: %s' % err)
|
exit('Failed to merge jars: %s' % err)
|
||||||
|
|
Loading…
Reference in New Issue