From 2e2c83a52765aad347176366074ec3c94366ad10 Mon Sep 17 00:00:00 2001
From: Yatin Kumbhare
Date: Mon, 30 May 2016 22:45:58 +0530
Subject: [PATCH] Keep py3.X compatibility for urllib

Change-Id: Iba10637688ada66f2e3003cd87bbba7d4db4abc7
Closes-Bug: #1280105
---
 tools/check_logs.py        | 8 ++++----
 tools/find_stack_traces.py | 8 ++++----
 2 files changed, 8 insertions(+), 8 deletions(-)

diff --git a/tools/check_logs.py b/tools/check_logs.py
index e34dec355a..caad85c588 100755
--- a/tools/check_logs.py
+++ b/tools/check_logs.py
@@ -20,8 +20,8 @@ import gzip
 import os
 import re
 import six
+import six.moves.urllib.request as urlreq
 import sys
-import urllib2
 
 import yaml
 
@@ -67,9 +67,9 @@ def process_files(file_specs, url_specs, whitelists):
                 logs_with_errors.append(name)
     for (name, url) in url_specs:
         whitelist = whitelists.get(name, [])
-        req = urllib2.Request(url)
+        req = urlreq.Request(url)
         req.add_header('Accept-Encoding', 'gzip')
-        page = urllib2.urlopen(req)
+        page = urlreq.urlopen(req)
         buf = six.StringIO(page.read())
         f = gzip.GzipFile(fileobj=buf)
         if scan_content(name, f.read().splitlines(), regexp, whitelist):
@@ -95,7 +95,7 @@ def scan_content(name, content, regexp, whitelist):
 
 
 def collect_url_logs(url):
-    page = urllib2.urlopen(url)
+    page = urlreq.urlopen(url)
     content = page.read()
     logs = re.findall('(screen-[\w-]+\.txt\.gz)', content)
     return logs
diff --git a/tools/find_stack_traces.py b/tools/find_stack_traces.py
index 49a42feb6e..f2da27aa0e 100755
--- a/tools/find_stack_traces.py
+++ b/tools/find_stack_traces.py
@@ -19,8 +19,8 @@ import gzip
 import pprint
 import re
 import six
+import six.moves.urllib.request as urlreq
 import sys
-import urllib2
 
 
 pp = pprint.PrettyPrinter()
@@ -65,9 +65,9 @@ class StackTrace(object):
 
 def hunt_for_stacktrace(url):
     """Return TRACE or ERROR lines out of logs."""
-    req = urllib2.Request(url)
+    req = urlreq.Request(url)
     req.add_header('Accept-Encoding', 'gzip')
-    page = urllib2.urlopen(req)
+    page = urlreq.urlopen(req)
     buf = six.StringIO(page.read())
     f = gzip.GzipFile(fileobj=buf)
     content = f.read()
@@ -105,7 +105,7 @@ def log_url(url, log):
 
 
 def collect_logs(url):
-    page = urllib2.urlopen(url)
+    page = urlreq.urlopen(url)
     content = page.read()
     logs = re.findall('(screen-[\w-]+\.txt\.gz)', content)
     return logs
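
As a quick reference, below is a minimal sketch of the six.moves.urllib.request
pattern both hunks switch to; it runs under Python 2 and Python 3. The URL and
the io.BytesIO handling of the gzip payload are assumptions made for this
sketch only, not code taken from the tempest tools.

    import gzip
    import io

    import six.moves.urllib.request as urlreq

    # Hypothetical log URL, used only for illustration.
    url = 'http://logs.example.org/screen-n-cpu.txt.gz'

    req = urlreq.Request(url)
    req.add_header('Accept-Encoding', 'gzip')
    page = urlreq.urlopen(req)

    # urlopen().read() returns bytes on Python 3, so this sketch wraps the
    # payload in io.BytesIO before handing it to gzip.GzipFile.
    buf = io.BytesIO(page.read())
    content = gzip.GzipFile(fileobj=buf).read()
    print(content.splitlines()[:5])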