Revert "upload-logs-swift: Create a download script"

This reverts commit acde44818d and
the testing part of b3f417a6e6.

We'd like to obtain more consensus on the download script before
we commit to this.  In particular, the new zuul manifest file may
make it possible to do this without adding the feature to the
log upload roles.

Change-Id: I959c44b4dac6cad6d1b3d82ba6bc0949c9c759ff
This commit is contained in:
James E. Blair 2020-03-26 15:58:18 -07:00 committed by Andreas Jaeger
parent 2ae8616306
commit 4f13f7c07f
6 changed files with 9 additions and 248 deletions

View File

@@ -76,17 +76,3 @@ This uploads logs to an OpenStack Object Store (Swift) container.
More details can be found at
:zuul:rolevar:`set-zuul-log-path-fact.zuul_log_path_shard_build`.
.. zuul:rolevar:: zuul_log_include_download_script
:default: False
Generate a script from ``zuul_log_download_template`` in the root
directory of the uploaded logs to facilitate easy bulk download.
.. zuul:rolevar:: zuul_log_download_template
:default: templates/download-logs.sh.j2
Path to template file if ``zuul_log_include_download_script`` is
set. See the sample file for parameters available to the template.
The file will be placed in the root of the uploaded logs (with
``.j2`` suffix removed).

View File

@@ -2,5 +2,3 @@ zuul_log_partition: false
zuul_log_container: logs
zuul_log_container_public: true
zuul_log_create_indexes: true
zuul_log_include_download_script: false
zuul_log_download_template: '{{ role_path }}/templates/download-logs.sh.j2'

View File

@@ -1,87 +0,0 @@
#!/bin/bash
# Download all logs
#
# To use this file
#
# curl "http://fakebaseurl.com/download-logs.sh" | bash
#
# Logs will be copied in a temporary directory as described in the
# output. Set DOWNLOAD_DIR to an empty directory if you wish to
# override this.
#
# NOTE(review): this looks like a test fixture — BASE_URL carries the fake
# value the j2 template was rendered with; confirm against the test job
# before reusing it as a real script.
BASE_URL=http://fakebaseurl.com
# Print a message prefixed with an ISO-8601 timestamp.
function log {
echo "$(date -Iseconds) | $@"
}
# Fetch one relative path from BASE_URL into the same relative path under
# the current directory, creating missing parent dirs (--create-dirs).
function save_file {
local file="$1"
curl -s --compressed --create-dirs -o "${file}" "${BASE_URL}/${file}"
# Using --compressed we will send an Accept-Encoding: gzip header
# and the data will come to us across the network compressed.
# However, sometimes things like OpenStack's log server will send
# .gz files (as stored on its disk) uncompressed, so we check if
# this really looks like an ASCII file and rename for clarity.
if [[ "${file}" == *.gz ]]; then
local type=$(file "${file}")
if [[ "${type}" =~ "ASCII text" ]] || [[ "${type}" =~ "Unicode text" ]]; then
local new_name=${file%.gz}
log "Renaming to ${new_name}"
mv "${file}" "${new_name}"
fi
fi
}
# Honor a caller-supplied DOWNLOAD_DIR; otherwise create a fresh temp dir.
if [[ -z "${DOWNLOAD_DIR}" ]]; then
DOWNLOAD_DIR=$(mktemp -d --tmpdir zuul-logs.XXXXXX)
fi
log "Saving logs to ${DOWNLOAD_DIR}"
pushd "${DOWNLOAD_DIR}" > /dev/null
# One log/save_file pair per file recorded at upload time; counters are
# zero-padded to four digits by the generating template.
log "Getting ${BASE_URL}/job-output.json [ 0001 / 0011 ]"
save_file "job-output.json"
log "Getting ${BASE_URL}/Ꮓບບξ-unicode.txt [ 0002 / 0011 ]"
save_file "Ꮓບບξ-unicode.txt"
log "Getting ${BASE_URL}/controller/compressed.gz [ 0003 / 0011 ]"
save_file "controller/compressed.gz"
log "Getting ${BASE_URL}/controller/cpu-load.svg [ 0004 / 0011 ]"
save_file "controller/cpu-load.svg"
log "Getting ${BASE_URL}/controller/journal.xz [ 0005 / 0011 ]"
save_file "controller/journal.xz"
log "Getting ${BASE_URL}/controller/service_log.txt [ 0006 / 0011 ]"
save_file "controller/service_log.txt"
log "Getting ${BASE_URL}/controller/syslog [ 0007 / 0011 ]"
save_file "controller/syslog"
log "Getting ${BASE_URL}/controller/subdir/foo::3.txt [ 0008 / 0011 ]"
save_file "controller/subdir/foo::3.txt"
log "Getting ${BASE_URL}/controller/subdir/subdir.txt [ 0009 / 0011 ]"
save_file "controller/subdir/subdir.txt"
log "Getting ${BASE_URL}/zuul-info/inventory.yaml [ 0010 / 0011 ]"
save_file "zuul-info/inventory.yaml"
log "Getting ${BASE_URL}/zuul-info/zuul-info.controller.txt [ 0011 / 0011 ]"
save_file "zuul-info/zuul-info.controller.txt"
popd >/dev/null
log "Download complete!"

View File

@@ -30,7 +30,6 @@ import io
import logging
import mimetypes
import os
import jinja2
try:
import queue as queuelib
except ImportError:
@@ -55,7 +54,6 @@ import requests.exceptions
import requestsexceptions
import keystoneauth1.exceptions
from ansible.module_utils._text import to_text
from ansible.module_utils.basic import AnsibleModule
try:
@@ -267,15 +265,13 @@ class FileDetail():
to push to swift.
"""
def __init__(self, full_path, relative_path,
filename=None, is_index=False):
def __init__(self, full_path, relative_path, filename=None):
"""
Args:
full_path (str): The absolute path to the file on disk.
relative_path (str): The relative path from the artifacts source
used for links.
filename (str): An optional alternate filename in links.
is_index (bool): Is this file an index
"""
# Make FileNotFoundError exception to be compatible with python2
try:
@@ -289,7 +285,6 @@ class FileDetail():
else:
self.filename = filename
self.relative_path = relative_path
self.is_index = is_index
if self.full_path and os.path.isfile(self.full_path):
mime_guess, encoding = mimetypes.guess_type(self.full_path)
@@ -310,8 +305,7 @@
def __repr__(self):
t = 'Folder' if self.folder else 'File'
return '<%s %s%s>' % (t, self.relative_path,
' (index)' if self.is_index else '')
return '<%s %s>' % (t, self.relative_path)
class FileList(Sequence):
@@ -417,7 +411,6 @@ class Indexer():
FileList
- make_indexes() : make index.html in folders
- make_download_script() : make a script to download all logs
"""
def __init__(self, file_list):
'''
@@ -537,8 +530,7 @@
if full_path:
filename = os.path.basename(full_path)
relative_name = os.path.join(folder, filename)
indexes[folder] = FileDetail(full_path, relative_name,
is_index=True)
indexes[folder] = FileDetail(full_path, relative_name)
# This appends the index file at the end of the group of files
# for each directory.
@@ -561,41 +553,6 @@
new_list.reverse()
self.file_list.file_list = new_list
def make_download_script(self, base_url, download_template):
'''Make a download script from template
Note since you need the base_url, it really only makes sense
to call this after the Uploader() is initalised.
Args:
base_url (str): The base URL to prefix
download_template (str): Path to a jinja2 template
Return:
None; a file with the same name as the template (stripped of
.j2 if present) is added to self.file_list for upload.
'''
# Prune the list to just be files, no indexes (this should run
# before indexing anyway)
download_files = [f for f in self.file_list
if not f.folder and not f.is_index]
output_filename = os.path.basename(download_template[:-3]
if download_template.endswith('.j2')
else download_template)
output = os.path.join(self.file_list.get_tempdir(), output_filename)
with open(download_template) as f, open(output, 'wb') as output:
logging.debug("Writing template %s" % output.name)
template = jinja2.Template(f.read())
rendered = template.stream(
base_url=base_url.rstrip('/'),
# jinja wants unicode input
file_list=[to_text(f.relative_path) for f in download_files])
rendered.dump(output, encoding='utf-8')
download_script = FileDetail(output.name, output_filename)
self.file_list.file_list.append(download_script)
class GzipFilter():
chunk_size = 16384
@@ -813,7 +770,7 @@ class Uploader():
def run(cloud, container, files,
indexes=True, parent_links=True, topdir_parent_link=False,
partition=False, footer='index_footer.html', delete_after=15552000,
prefix=None, public=True, dry_run=False, download_template=''):
prefix=None, public=True, dry_run=False):
if prefix:
prefix = prefix.lstrip('/')
@@ -829,16 +786,8 @@ def run(cloud, container, files,
for file_path in files:
file_list.add(file_path)
# Upload.
uploader = Uploader(cloud, container, prefix, delete_after,
public, dry_run)
indexer = Indexer(file_list)
# (Possibly) make download script
if download_template:
indexer.make_download_script(uploader.url, download_template)
# (Possibly) make indexes.
if indexes:
indexer.make_indexes(create_parent_links=parent_links,
@@ -849,6 +798,9 @@ def run(cloud, container, files,
for x in file_list:
logging.debug(x)
# Upload.
uploader = Uploader(cloud, container, prefix, delete_after,
public, dry_run)
uploader.upload(file_list)
return uploader.url
@@ -867,7 +819,6 @@ def ansible_main():
footer=dict(type='str'),
delete_after=dict(type='int'),
prefix=dict(type='str'),
download_template=dict(type='str'),
)
)
@@ -882,8 +833,7 @@ def ansible_main():
footer=p.get('footer'),
delete_after=p.get('delete_after', 15552000),
prefix=p.get('prefix'),
public=p.get('public'),
download_template=p.get('download_template'))
public=p.get('public'))
except (keystoneauth1.exceptions.http.HttpError,
requests.exceptions.RequestException):
s = "Error uploading to %s.%s" % (cloud.name, cloud.config.region_name)
@@ -925,9 +875,6 @@ def cli_main():
'upload. Default is 6 months (15552000 seconds) '
'and if set to 0 X-Delete-After will not be set',
type=int)
parser.add_argument('--download-template', default='',
help='Path to a Jinja2 template that will be filled '
'out to create an automatic download script')
parser.add_argument('--prefix',
help='Prepend this path to the object names when '
'uploading')
@@ -965,8 +912,7 @@ def cli_main():
delete_after=args.delete_after,
prefix=args.prefix,
public=not args.no_public,
dry_run=args.dry_run,
download_template=args.download_template)
dry_run=args.dry_run)
print(url)

View File

@@ -16,12 +16,6 @@
tags:
- skip_ansible_lint
- name: Set download template
set_fact:
download_template: "{{ zuul_log_download_template }}"
when:
- zuul_log_include_download_script
- name: Upload logs to swift
delegate_to: localhost
zuul_swift_upload:
@@ -34,27 +28,8 @@
files:
- "{{ zuul.executor.log_root }}/"
delete_after: "{{ zuul_log_delete_after | default(omit) }}"
download_template: "{{ download_template | default(omit) }}"
register: upload_results
- name: Get download script name
set_fact:
download_script: "{{ zuul_log_download_template | basename | regex_replace('\\.j2$') }}"
when:
- zuul_log_include_download_script
- name: Set download template artifact
zuul_return:
data:
zuul:
artifacts:
- name: Download all logs
url: '{{ download_script }}'
metadata:
command: 'curl "{{ upload_results.url }}/{{ download_script }}" | bash'
when:
- zuul_log_include_download_script
- name: Return log URL to Zuul
delegate_to: localhost
zuul_return:

View File

@@ -1,57 +0,0 @@
#!/bin/bash
# Download all logs
#
# To use this file
#
# curl "{{ base_url }}/download-logs.sh" | bash
#
# Logs will be copied in a temporary directory as described in the
# output. Set DOWNLOAD_DIR to an empty directory if you wish to
# override this.
#
{# base_url is passed in by make_download_script() at render time. #}
BASE_URL={{ base_url }}
# Print a message prefixed with an ISO-8601 timestamp.
function log {
echo "$(date -Iseconds) | $@"
}
# Fetch one relative path from BASE_URL into the same relative path under
# the current directory, creating missing parent dirs (--create-dirs).
function save_file {
local file="$1"
curl -s --compressed --create-dirs -o "${file}" "${BASE_URL}/${file}"
# Using --compressed we will send an Accept-Encoding: gzip header
# and the data will come to us across the network compressed.
# However, sometimes things like OpenStack's log server will send
# .gz files (as stored on its disk) uncompressed, so we check if
# this really looks like an ASCII file and rename for clarity.
if [[ "${file}" == *.gz ]]; then
local type=$(file "${file}")
if [[ "${type}" =~ "ASCII text" ]] || [[ "${type}" =~ "Unicode text" ]]; then
local new_name=${file%.gz}
log "Renaming to ${new_name}"
mv "${file}" "${new_name}"
fi
fi
}
# Honor a caller-supplied DOWNLOAD_DIR; otherwise create a fresh temp dir.
if [[ -z "${DOWNLOAD_DIR}" ]]; then
DOWNLOAD_DIR=$(mktemp -d --tmpdir zuul-logs.XXXXXX)
fi
log "Saving logs to ${DOWNLOAD_DIR}"
pushd "${DOWNLOAD_DIR}" > /dev/null
{# file_list is the list of relative file paths to fetch; emit one
   log/save_file pair per file with a zero-padded progress counter. #}
{% set total_files = file_list | length %}
{% for file in file_list %}
log "Getting ${BASE_URL}/{{ '%-80s'|format(file) }} [ {{ '%04d'|format(loop.index) }} / {{ '%04d'|format(total_files) }} ]"
save_file "{{ file }}"
{% endfor %}
popd >/dev/null
log "Download complete!"