Support for URLs instead of local log files, and simple caching.
This commit is contained in:
parent
2a27eaba64
commit
183efe3b13
|
@ -48,4 +48,5 @@ The previous example would produce something like this::
|
||||||
Exit code: 0
|
Exit code: 0
|
||||||
Stdout: 'ha-2cdba01d-e4\nha-44dca3a9-44\nha-499d3db7-97\nha-55a19f5e-ef\nha-b2d04f15-f2\nha-b5b271a1-d8\nha-fa58d644-81\nint-br-enp7s0\nint-br-ex\nqr-34b826df-97\nqr-5d5ea109-48\nqr-6adcffbf-09\nqr-743ccaa6-7e\nqr-79b33879-32\nqr-c12e6e06-ff\nqr-dc662767-61\n'
|
Stdout: 'ha-2cdba01d-e4\nha-44dca3a9-44\nha-499d3db7-97\nha-55a19f5e-ef\nha-b2d04f15-f2\nha-b5b271a1-d8\nha-fa58d644-81\nint-br-enp7s0\nint-br-ex\nqr-34b826df-97\nqr-5d5ea109-48\nqr-6adcffbf-09\nqr-743ccaa6-7e\nqr-79b33879-32\nqr-c12e6e06-ff\nqr-dc662767-61\n'
|
||||||
|
|
||||||
|
References to HTTP URLs instead of local files are also supported. Files
|
||||||
|
will be cached locally to avoid re-downloading on next runs.
|
||||||
|
|
|
@ -1,14 +1,48 @@
|
||||||
|
from __future__ import print_function

import errno
import hashlib
import os
import sys
import tempfile
import urllib2

from datetime import datetime
|
|
||||||
|
|
||||||
EXTRALINES_PADDING = " " * 40
|
EXTRALINES_PADDING = " " * 40
|
||||||
|
CACHE_DIR = "%s/oslogmerger-cache/" % tempfile.gettempdir()
|
||||||
|
|
||||||
|
|
||||||
class OpenStackLog:
|
class OpenStackLog:
|
||||||
def __init__(self, filename):
|
def __init__(self, filename):
|
||||||
self._file = open(filename, 'r')
|
self._open(filename)
|
||||||
|
|
||||||
|
def _open(self, filename):
    """Open *filename* for reading.

    Local paths are opened directly; http:// or https:// URLs are first
    fetched into the local cache (see _cached_download) and the cached
    copy is opened instead.
    """
    self._filename = filename
    # Original only matched "http://", so https URLs were silently
    # treated as local paths; urllib2.urlopen handles both schemes.
    if filename.startswith(("http://", "https://")):
        filename = self._cached_download(filename)
    self._file = open(filename, 'r')
|
||||||
|
|
||||||
|
def _url_cache_path(self, url):
    """Return the local cache file path used to store *url*.

    The path is CACHE_DIR plus the md5 hex digest of the URL, giving each
    distinct URL a stable, filesystem-safe name.
    """
    md5 = hashlib.md5()
    # hashlib requires bytes on Python 3; encoding is a no-op for the
    # ASCII URLs handled here and keeps the Python 2 digest unchanged.
    md5.update(url.encode('utf-8'))
    return CACHE_DIR + md5.hexdigest() + ".log"
|
||||||
|
|
||||||
|
def _ensure_cache_dir(self):
    """Create CACHE_DIR if it does not exist yet.

    Uses EAFP instead of an exists() pre-check so two concurrent runs
    racing to create the directory cannot crash each other.
    """
    try:
        os.makedirs(CACHE_DIR)
    except OSError as e:
        # Directory appeared between runs/processes: not an error.
        if e.errno != errno.EEXIST:
            raise
|
||||||
|
|
||||||
|
def _cached_download(self, url):
    """Download *url* into the cache directory and return the local path.

    If the URL is already cached the cached path is returned without
    touching the network. The download is written to a temporary file and
    renamed into place, so an interrupted transfer never leaves a partial
    file that a later run would mistake for a valid cache entry. Handles
    are closed even when the transfer raises.
    """
    self._ensure_cache_dir()
    path = self._url_cache_path(url)
    if os.path.isfile(path):
        print("CACHED: %s at %s" % (url, path), file=sys.stderr)
        return path
    print("DOWNLOADING: %s to %s" % (url, path), file=sys.stderr)
    http_in = urllib2.urlopen(url)
    try:
        tmp_path = path + ".tmp"
        file_out = open(tmp_path, 'w')
        try:
            file_out.write(http_in.read())
        finally:
            file_out.close()
        # Atomic publish: the final path only ever holds complete data.
        os.rename(tmp_path, path)
    finally:
        http_in.close()
    return path
|
||||||
|
|
||||||
def _extract_with_date(self, line):
|
def _extract_with_date(self, line):
|
||||||
try:
|
try:
|
||||||
|
@ -50,7 +84,7 @@ class OpenStackLog:
|
||||||
|
|
||||||
|
|
||||||
def help():
|
def help():
|
||||||
print """oslogmerger tool
|
print ("""oslogmerger tool
|
||||||
|
|
||||||
usage instructions:
|
usage instructions:
|
||||||
oslogmerger /path/log_file1[:ALIAS] /path/log_file2[:ALIAS2] ..
|
oslogmerger /path/log_file1[:ALIAS] /path/log_file2[:ALIAS2] ..
|
||||||
|
@ -65,12 +99,11 @@ alias. Use the aliases if you want shorter line lengths.
|
||||||
Y-m-d H:M:S.mmm PID LOG-LEVEL ............
|
Y-m-d H:M:S.mmm PID LOG-LEVEL ............
|
||||||
Y-m-d H:M:S.mmm PID LOG-LEVEL ............
|
Y-m-d H:M:S.mmm PID LOG-LEVEL ............
|
||||||
[ extra line info ..... ]
|
[ extra line info ..... ]
|
||||||
"""
|
""")
|
||||||
|
|
||||||
|
|
||||||
|
|
||||||
def process_logs(files):
|
def process_logs(files):
|
||||||
if len(files)==0:
|
if len(files) == 0:
|
||||||
help()
|
help()
|
||||||
return 1
|
return 1
|
||||||
all_entries = []
|
all_entries = []
|
||||||
|
@ -80,26 +113,33 @@ def process_logs(files):
|
||||||
# check if filename has an alias for log output, in the form of
|
# check if filename has an alias for log output, in the form of
|
||||||
# /path/filename:alias
|
# /path/filename:alias
|
||||||
filename_and_alias = filename.split(':')
|
filename_and_alias = filename.split(':')
|
||||||
if len(filename_and_alias) > 1:
|
filename = filename_and_alias[0]
|
||||||
filename_alias[filename_and_alias[0]] = (
|
alias = filename_and_alias[1:]
|
||||||
"[%s]" % filename_and_alias[1])
|
|
||||||
|
if filename == 'http' and alias and alias[0].startswith('//'):
|
||||||
|
filename = filename_and_alias[0] + ':' + filename_and_alias[1]
|
||||||
|
alias = filename_and_alias[2:]
|
||||||
|
|
||||||
|
if alias:
|
||||||
|
filename_alias[filename] = "[%s]" % alias[0]
|
||||||
else:
|
else:
|
||||||
filename_alias[filename] = filename
|
filename_alias[filename] = filename
|
||||||
|
|
||||||
# read the log
|
# read the log
|
||||||
oslog = OpenStackLog(filename_and_alias[0])
|
oslog = OpenStackLog(filename)
|
||||||
for entry in oslog.log_entries():
|
for entry in oslog.log_entries():
|
||||||
all_entries.append(entry)
|
all_entries.append(entry)
|
||||||
|
|
||||||
sorted_entries = sorted(all_entries, key=lambda log_entry: log_entry[0])
|
sorted_entries = sorted(all_entries, key=lambda log_entry: log_entry[0])
|
||||||
for entry in sorted_entries:
|
for entry in sorted_entries:
|
||||||
(date_object, filename, pid, level, rest) = entry
|
(date_object, filename, pid, level, rest) = entry
|
||||||
print ' '.join(
|
print (' '.join(
|
||||||
[date_object.strftime("%Y-%m-%d %H:%M:%S.%f"),
|
[date_object.strftime("%Y-%m-%d %H:%M:%S.%f"),
|
||||||
filename_alias[filename], pid,
|
filename_alias[filename], pid,
|
||||||
level, rest]).rstrip('\n')
|
level, rest]).rstrip('\n'))
|
||||||
return 0
|
return 0
|
||||||
|
|
||||||
|
|
||||||
def main():
    """Command-line entry point: merge the logs named on argv and exit."""
    log_files = sys.argv[1:]
    sys.exit(process_logs(log_files))
|
||||||
|
|
||||||
|
|
2
setup.py
2
setup.py
|
@ -11,7 +11,7 @@ with open(path.join(here, 'README.rst'), encoding='utf-8') as f:
|
||||||
setup(
|
setup(
|
||||||
name='oslogmerger',
|
name='oslogmerger',
|
||||||
|
|
||||||
version='1.0.2',
|
version='1.0.3',
|
||||||
description='Openstack Log merge tool',
|
description='Openstack Log merge tool',
|
||||||
long_description=long_description,
|
long_description=long_description,
|
||||||
|
|
||||||
|
|
Loading…
Reference in New Issue