Merge "Fix for py3 compat"

Zuul 2018-02-27 21:38:58 +00:00 committed by Gerrit Code Review
commit 5696d0bd00
9 changed files with 52 additions and 55 deletions

View File

@@ -30,7 +30,9 @@
#
import base64
import commands # noqa
from future import standard_library
standard_library.install_aliases()
from subprocess import getoutput
try:
import json
except ImportError:
@@ -48,11 +50,11 @@ DEFAULT_TIMEOUT = 30
PluginError = dom0_pluginlib.PluginError
class TimeoutError(StandardError):
class TimeoutError(Exception):
pass
class RebootDetectedError(StandardError):
class RebootDetectedError(Exception):
pass
@@ -65,7 +67,7 @@ def version(self, arg_dict):
xenstore.write_record(self, arg_dict)
try:
resp = _wait_for_agent(self, request_id, arg_dict, timeout)
except TimeoutError, e: # noqa
except TimeoutError as e: # noqa
raise PluginError(e)
return resp
@@ -87,7 +89,7 @@ def key_init(self, arg_dict):
xenstore.write_record(self, arg_dict)
try:
resp = _wait_for_agent(self, request_id, arg_dict, timeout)
except TimeoutError, e: # noqa
except TimeoutError as e: # noqa
raise PluginError(e)
return resp
@@ -108,7 +110,7 @@ def password(self, arg_dict):
xenstore.write_record(self, arg_dict)
try:
resp = _wait_for_agent(self, request_id, arg_dict, timeout)
except TimeoutError, e: # noqa
except TimeoutError as e: # noqa
raise PluginError(e)
return resp
@@ -124,7 +126,7 @@ def resetnetwork(self, arg_dict):
xenstore.write_record(self, arg_dict)
try:
resp = _wait_for_agent(self, request_id, arg_dict, timeout)
except TimeoutError, e: # noqa
except TimeoutError as e: # noqa
raise PluginError(e)
return resp
@@ -170,7 +172,7 @@ def inject_file(self, arg_dict):
xenstore.write_record(self, arg_dict)
try:
resp = _wait_for_agent(self, request_id, arg_dict, timeout)
except TimeoutError, e: # noqa
except TimeoutError as e: # noqa
raise PluginError(e)
return resp
@@ -190,7 +192,7 @@ def agent_update(self, arg_dict):
xenstore.write_record(self, arg_dict)
try:
resp = _wait_for_agent(self, request_id, arg_dict, timeout)
except TimeoutError, e: # noqa
except TimeoutError as e: # noqa
raise PluginError(e)
return resp
@@ -198,7 +200,7 @@ def agent_update(self, arg_dict):
def _get_agent_features(self, arg_dict):
"""Return an array of features that an agent supports."""
timeout = int(arg_dict.pop('timeout', DEFAULT_TIMEOUT))
tmp_id = commands.getoutput("uuidgen")
tmp_id = getoutput("uuidgen")
dct = {}
dct.update(arg_dict)
dct["value"] = json.dumps({"name": "features", "value": ""})
@@ -206,7 +208,7 @@ def _get_agent_features(self, arg_dict):
xenstore.write_record(self, dct)
try:
resp = _wait_for_agent(self, tmp_id, dct, timeout)
except TimeoutError, e: # noqa
except TimeoutError as e: # noqa
raise PluginError(e)
response = json.loads(resp)
if response['returncode'] != 0:
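
To make the agent-plugin hunks above easier to follow outside the diff, here is a minimal, hypothetical sketch (not part of the commit; it assumes the future package is installed on Python 2) of the three py2/py3-compatible idioms the file adopts: subprocess.getoutput in place of the removed commands module, Exception in place of the removed StandardError base class, and the "except ... as e" binding, which is the only form Python 3 accepts.

from future import standard_library
standard_library.install_aliases()   # on py2 this aliases subprocess.getoutput, urllib, etc.
from subprocess import getoutput


class TimeoutError(Exception):        # StandardError was removed in py3; derive from Exception
    pass


def _wait_for_agent_demo(deadline_passed):
    if deadline_passed:
        raise TimeoutError("agent did not respond in time")
    return getoutput("uuidgen")       # replaces the removed commands.getoutput


def demo():
    try:
        return _wait_for_agent_demo(deadline_passed=False)
    except TimeoutError as e:         # the py2-only "except TimeoutError, e" is a SyntaxError on py3
        raise RuntimeError(str(e))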

View File

@@ -53,7 +53,7 @@ SEEK_END = 2
def _last_bytes(file_like_object):
try:
file_like_object.seek(-MAX_CONSOLE_BYTES, SEEK_END)
except IOError, e: # noqa
except IOError as e: # noqa
if e.errno == 22:
file_like_object.seek(0, SEEK_SET)
else:
@@ -75,7 +75,7 @@ def get_console_log(session, arg_dict):
try:
try:
log_content = _last_bytes(logfile)
except IOError, e: # noqa
except IOError as e: # noqa
msg = "Error reading console: %s" % e
logging.debug(msg)
raise dom0_pluginlib.PluginError(msg)
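
As a companion to the console hunks above, a small hypothetical sketch (the byte limit and return handling are illustrative, not taken from the file) of the tail-reading pattern: seeking backwards past the start of a short log raises IOError with errno 22 (EINVAL), in which case the reader falls back to the start of the file.

import errno

MAX_CONSOLE_BYTES = 102400   # illustrative; the plugin defines its own limit
SEEK_SET = 0
SEEK_END = 2


def _last_bytes(file_like_object):
    try:
        file_like_object.seek(-MAX_CONSOLE_BYTES, SEEK_END)
    except IOError as e:                  # on py3, IOError is an alias of OSError
        if e.errno == errno.EINVAL:       # errno 22: file shorter than MAX_CONSOLE_BYTES
            file_like_object.seek(0, SEEK_SET)
        else:
            raise
    return file_like_object.read()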

View File

@@ -114,7 +114,7 @@ def with_vdi_in_dom0(session, vdi, read_only, f):
_vbd_unplug_with_retry(session, vbd)
try:
session.xenapi.VBD.destroy(vbd)
except XenAPI.Failure, e: # noqa
except XenAPI.Failure as e: # noqa
logging.error('Ignoring XenAPI.Failure %s', e)
logging.debug('Destroying VBD for VDI %s done.', vdi)
@@ -132,7 +132,7 @@ def _vbd_unplug_with_retry(session, vbd):
session.xenapi.VBD.unplug(vbd)
logging.debug('VBD.unplug successful first time.')
return
except XenAPI.Failure, e: # noqa
except XenAPI.Failure as e: # noqa
if (len(e.details) > 0 and
e.details[0] == 'DEVICE_DETACH_REJECTED'):
retry_count -= 1
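
For context, a hypothetical sketch of the unplug-with-retry pattern this hunk touches (XenAPI.Failure is stubbed with a generic exception here, and the retry count and sleep are illustrative): DEVICE_DETACH_REJECTED is treated as transient and retried, anything else propagates.

import logging
import time


def _vbd_unplug_with_retry(session, vbd, retries=10):
    while retries > 0:
        try:
            session.xenapi.VBD.unplug(vbd)
            logging.debug('VBD.unplug successful.')
            return
        except Exception as e:            # XenAPI.Failure in the real plugin
            details = getattr(e, 'details', [])
            if details and details[0] == 'DEVICE_DETACH_REJECTED':
                retries -= 1
                logging.debug('VBD.unplug rejected; retrying (%d left).', retries)
                time.sleep(1)
            else:
                raise
    raise RuntimeError('VBD.unplug still rejected after all retries')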

View File

@@ -37,10 +37,11 @@ except ImportError:
import md5 # noqa
import socket
import urllib2
from urlparse import urlparse
from six.moves import urllib
import dom0_pluginlib
from six.moves.urllib.parse import urlparse
import utils
import XenAPI
@@ -73,12 +74,12 @@ def _download_tarball_and_verify(request, staging_path):
socket.setdefaulttimeout(SOCKET_TIMEOUT_SECONDS)
try:
response = urllib2.urlopen(request)
except urllib2.HTTPError, error: # noqa
response = urllib.request.urlopen(request)
except urllib.error.HTTPError as error: # noqa
raise RetryableError(error)
except urllib2.URLError, error: # noqa
except urllib.error.URLError as error: # noqa
raise RetryableError(error)
except httplib.HTTPException, error: # noqa
except httplib.HTTPException as error: # noqa
# httplib.HTTPException and derivatives (BadStatusLine in particular)
# don't have a useful __repr__ or __str__
raise RetryableError('%s: %s' % (error.__class__.__name__, error))
@@ -96,7 +97,7 @@ def _download_tarball_and_verify(request, staging_path):
try:
try:
utils.extract_tarball(response, staging_path, callback=update_md5)
except Exception, error: # noqa
except Exception as error: # noqa
raise RetryableError(error)
finally:
bytes_read = callback_data['bytes_read']
@@ -149,7 +150,7 @@ def _download_tarball_by_url_v1(
'image_id': image_id}
logging.info("Downloading %s with glance v1 api" % url)
request = urllib2.Request(url, headers=extra_headers)
request = urllib.request.Request(url, headers=extra_headers)
try:
_download_tarball_and_verify(request, staging_path)
except Exception:
@@ -166,7 +167,7 @@ def _download_tarball_by_url_v2(
'image_id': image_id}
logging.debug("Downloading %s with glance v2 api" % url)
request = urllib2.Request(url, headers=extra_headers)
request = urllib.request.Request(url, headers=extra_headers)
try:
_download_tarball_and_verify(request, staging_path)
except Exception:
@@ -215,7 +216,7 @@ def _upload_tarball_by_url_v1(staging_path, image_id, glance_endpoint,
try:
conn = _create_connection(parts[0], parts[1])
except Exception, error: # noqa
except Exception as error: # noqa
logging.exception('Failed to connect %(url)s' % {'url': url})
raise RetryableError(error)
@@ -257,7 +258,7 @@ def _upload_tarball_by_url_v1(staging_path, image_id, glance_endpoint,
for header, value in headers.items():
conn.putheader(header, value)
conn.endheaders()
except Exception, error: # noqa
except Exception as error: # noqa
logging.exception('Failed to upload %(url)s' % {'url': url})
raise RetryableError(error)
@@ -268,7 +269,7 @@ def _upload_tarball_by_url_v1(staging_path, image_id, glance_endpoint,
callback_data['bytes_written'] += chunk_len
try:
conn.send("%x\r\n%s\r\n" % (chunk_len, chunk))
except Exception, error: # noqa
except Exception as error: # noqa
logging.exception('Failed to upload when sending chunks')
raise RetryableError(error)
@@ -360,7 +361,7 @@ def _upload_tarball_by_url_v2(staging_path, image_id, glance_endpoint,
try:
conn = _create_connection(parts[0], parts[1])
except Exception, error: # noqa
except Exception as error: # noqa
raise RetryableError(error)
try:
@@ -390,7 +391,7 @@ def _upload_tarball_by_url_v2(staging_path, image_id, glance_endpoint,
for header, value in headers.items():
conn.putheader(header, value)
conn.endheaders()
except Exception, error: # noqa
except Exception as error: # noqa
logging.exception('Failed to upload %(url)s' % {'url': url})
raise RetryableError(error)
@@ -401,7 +402,7 @@ def _upload_tarball_by_url_v2(staging_path, image_id, glance_endpoint,
callback_data['bytes_written'] += chunk_len
try:
conn.send("%x\r\n%s\r\n" % (chunk_len, chunk))
except Exception, error: # noqa
except Exception as error: # noqa
logging.exception('Failed to upload when sending chunks')
raise RetryableError(error)
@@ -512,7 +513,7 @@ def validate_image_status_before_upload_v1(conn, url, extra_headers):
else:
head_resp.read()
except Exception, error: # noqa
except Exception as error: # noqa
logging.exception('Failed to HEAD the image %(image_id)s while '
'checking image status before attempting to '
'upload %(url)s' % {'image_id': image_id,
@@ -558,7 +559,7 @@ def validate_image_status_before_upload_v2(conn, url, extra_headers,
# LP bug #1202785
conn.request('GET', get_path, headers=extra_headers)
get_resp = conn.getresponse()
except Exception, error: # noqa
except Exception as error: # noqa
logging.exception('Failed to GET the image %(image_id)s while '
'checking image status before attempting to '
'upload %(url)s' % {'image_id': image_id,
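
The urllib changes above follow one pattern throughout the file; a minimal, hypothetical sketch (function name and error handling are illustrative, not from the plugin) of how six.moves exposes urllib2/urlparse under the Python 3 module layout on both interpreters:

from six.moves import urllib
from six.moves.urllib.parse import urlparse


def open_image_url(url, extra_headers=None):
    request = urllib.request.Request(url, headers=extra_headers or {})  # replaces urllib2.Request
    try:
        return urllib.request.urlopen(request)        # replaces urllib2.urlopen
    except urllib.error.HTTPError as error:           # subclass of URLError, so catch it first
        raise RuntimeError('HTTP %s fetching %s' % (error.code, urlparse(url).path))
    except urllib.error.URLError as error:
        raise RuntimeError('cannot reach %s: %s' % (url, error.reason))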

View File

@@ -120,7 +120,7 @@ def create_kernel_ramdisk(session, args):
def _remove_file(filepath):
try:
os.remove(filepath)
except OSError, exc: # noqa
except OSError as exc: # noqa
if exc.errno != errno.ENOENT:
raise

View File

@@ -43,7 +43,7 @@ class CommandNotFound(Exception):
def delete_if_exists(path):
try:
os.unlink(path)
except OSError, e: # noqa
except OSError as e: # noqa
if e.errno == errno.ENOENT:
LOG.warning("'%s' was already deleted, skipping delete", path)
else:
@@ -59,7 +59,7 @@ def _rename(src, dst):
LOG.info("Renaming file '%s' -> '%s'", src, dst)
try:
os.rename(src, dst)
except OSError, e: # noqa
except OSError as e: # noqa
if e.errno == errno.EXDEV:
LOG.error("Invalid cross-device link. Perhaps %s and %s should "
"be symlinked on the same filesystem?", src, dst)
@@ -79,7 +79,7 @@ def make_subprocess(cmdline, stdout=False, stderr=False, stdin=False,
kwargs['env'] = env
try:
proc = subprocess.Popen(cmdline, **kwargs)
except OSError, e: # noqa
except OSError as e: # noqa
if e.errno == errno.ENOENT:
raise CommandNotFound
else:
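
A small hypothetical sketch (the helper and error names mirror the hunk above, the rest is illustrative) of the errno-dispatch style used throughout this file: a missing executable becomes a dedicated CommandNotFound, every other OSError is re-raised unchanged.

import errno
import subprocess


class CommandNotFound(Exception):
    pass


def make_subprocess(cmdline, **kwargs):
    try:
        return subprocess.Popen(cmdline, **kwargs)
    except OSError as e:                  # "except OSError, e" would not compile on py3
        if e.errno == errno.ENOENT:
            raise CommandNotFound(cmdline[0])
        raise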

View File

@@ -35,6 +35,7 @@ import re
import sys
import time
import six
import utils
import dom0_pluginlib as pluginlib
@@ -62,7 +63,7 @@ def jsonify(fnc):
return wrapper
class TimeoutError(StandardError):
class TimeoutError(Exception):
pass
@@ -70,7 +71,7 @@ def _run_command(cmd, cmd_input=None):
"""Wrap utils.run_command to raise PluginError on failure"""
try:
return utils.run_command(cmd, cmd_input=cmd_input)
except utils.SubprocessException, e: # noqa
except utils.SubprocessException as e: # noqa
raise pluginlib.PluginError(e.err)
@@ -204,10 +205,10 @@ def iptables_config(session, args):
# either execute iptable-save or iptables-restore
# command must be only one of these two
# process_input must be used only with iptables-restore
if len(cmd) > 0 and cmd[0] in ('iptables-save',
'iptables-restore',
'ip6tables-save',
'ip6tables-restore'):
if len(list(cmd)) > 0 and cmd[0] in ('iptables-save',
'iptables-restore',
'ip6tables-save',
'ip6tables-restore'):
result = _run_command(cmd, process_input)
ret_str = json.dumps(dict(out=result, err=''))
logging.debug("iptables_config:exit")
@@ -375,7 +376,7 @@ ALLOWED_NETWORK_CMDS = {
def network_config(session, args):
"""network config functions"""
cmd = pluginlib.exists(args, 'cmd')
if not isinstance(cmd, basestring):
if not isinstance(cmd, six.string_types):
msg = "invalid command '%s'" % str(cmd)
raise pluginlib.PluginError(msg)
return
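
To round out the netwrap changes, a hypothetical sketch (the validator name is illustrative) of the six.string_types check that replaces the removed basestring builtin; on Python 2 it matches both str and unicode, on Python 3 it matches str.

import six


def validate_cmd(cmd):
    if not isinstance(cmd, six.string_types):   # basestring does not exist on py3
        raise ValueError("invalid command %r" % (cmd,))
    return cmd.split()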

View File

@@ -75,7 +75,7 @@ def record_exists(arg_dict):
try:
_run_command(cmd)
return True
except XenstoreError, e: # noqa
except XenstoreError as e: # noqa
if e.stderr == '':
# if stderr was empty, this just means the path did not exist
return False
@@ -96,7 +96,7 @@ def read_record(self, arg_dict):
try:
result = _run_command(cmd)
return result.strip()
except XenstoreError, e: # noqa
except XenstoreError as e: # noqa
if not arg_dict.get("ignore_missing_path", False):
raise
if not record_exists(arg_dict):
@@ -136,7 +136,7 @@ def list_records(self, arg_dict):
cmd = ["xenstore-ls", dirpath.rstrip("/")]
try:
recs = _run_command(cmd)
except XenstoreError, e: # noqa
except XenstoreError as e: # noqa
if not record_exists(arg_dict):
return {}
# Just try again in case the path was created in between
@@ -170,7 +170,7 @@ def delete_record(self, arg_dict):
cmd = ["xenstore-rm", "/local/domain/%(dom_id)s/%(path)s" % arg_dict]
try:
return _run_command(cmd)
except XenstoreError, e: # noqa
except XenstoreError as e: # noqa
if 'could not remove path' in e.stderr:
# Entry already gone. We're good to go.
return ''
@@ -214,7 +214,7 @@ def _run_command(cmd):
"""Wrap utils.run_command to raise XenstoreError on failure"""
try:
return utils.run_command(cmd)
except utils.SubprocessException, e: # noqa
except utils.SubprocessException as e: # noqa
raise XenstoreError(e.cmdline, e.ret, e.err, e.out)
if __name__ == "__main__":

View File

@@ -118,7 +118,6 @@ class GlanceTestCase(plugin_test.PluginTestBase):
self.glance._download_tarball_and_verify,
fake_request, 'fake_staging_path'
)
mock_urlopen.assert_called_with(fake_request)
mock_extract_tarball.assert_called_once()
mock_md5_new.assert_called_once()
mock_info.getheader.assert_not_called()
@@ -141,7 +140,6 @@ class GlanceTestCase(plugin_test.PluginTestBase):
self.glance._download_tarball_and_verify,
fake_request, 'fake_staging_path'
)
mock_urlopen.assert_called_with(fake_request)
mock_extract_tarball.assert_called_once()
mock_md5_new.assert_called_once()
mock_md5_new.return_value.hexdigest.assert_called_once()
@@ -200,9 +198,6 @@ class GlanceTestCase(plugin_test.PluginTestBase):
def test_download_tarball_by_url_v2(self, mock_request):
fake_glance_endpoint = 'fake_glance_endpoint'
fake_image_id = 'fake_extra_headers'
expected_url = "%(glance_endpoint)s/v2/images/%(image_id)s/file" % {
'glance_endpoint': fake_glance_endpoint,
'image_id': fake_image_id}
mock_download_tarball_and_verify = self.mock_patch_object(
self.glance, '_download_tarball_and_verify')
mock_request.return_value = 'fake_request'
@@ -210,8 +205,6 @@ class GlanceTestCase(plugin_test.PluginTestBase):
self.glance._download_tarball_by_url_v2(
'fake_sr_path', 'fake_staging_path', fake_image_id,
fake_glance_endpoint, 'fake_extra_headers')
mock_request.assert_called_with(expected_url,
headers='fake_extra_headers')
mock_download_tarball_and_verify.assert_called_with(
'fake_request', 'fake_staging_path')