Fix invalid escape sequence warnings

Starting with Python 3.6, invalid escape sequences in string literals
are deprecated[1]. This also affects automatic style checkers such as
pycodestyle, which since version 2.4.0 complains about invalid escape
sequences (W605)[2].
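
For instance, a regular expression written as a plain string literal now
triggers the warning when the module is compiled (a minimal illustration,
not code from this tree):

    import re

    # '\d' is not a recognized string escape; since Python 3.6 compiling
    # this literal emits "DeprecationWarning: invalid escape sequence \d"
    # (when warnings are enabled), and pycodestyle >= 2.4.0 flags it
    # as W605.
    pattern = re.compile('\d+')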

Let's fix all of those warnings at once by using raw strings where
possible and doubling the backslash where not.
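
Both spellings yield the same pattern; a raw string keeps the regex
readable, while doubling the backslash works where a raw string is
awkward (a sketch, not code from this change):

    import re

    # Raw string: the backslash is passed through untouched, so the
    # regex engine sees \d+ exactly as written.
    digits_raw = re.compile(r'\d+')

    # Doubled backslash: the literal contains a single backslash,
    # producing the identical pattern.
    digits_escaped = re.compile('\\d+')

    assert digits_raw.pattern == digits_escaped.pattern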

Footnotes:
1 - https://docs.python.org/3/whatsnew/3.6.html#deprecated-python-behavior
2 - https://github.com/PyCQA/pycodestyle/pull/676

Change-Id: I009a366fd8342edfd30890df6fe8e1fca88bf3cc
Signed-off-by: Krzysztof Opasiak <k.opasiak@samsung.com>
Krzysztof Opasiak 2018-05-17 18:47:16 +02:00
parent f51e726f64
commit 4860f523d4
26 changed files with 64 additions and 62 deletions

View File

@ -72,12 +72,12 @@ class SnippetWriter(object):
def _indent_xml(self, my_string):
my_string = my_string.encode("utf-8")
# convert to plain string without indents and spaces
my_re = re.compile('>\s+([^\s])', re.DOTALL)
my_string = my_re.sub('>\g<1>', my_string)
my_re = re.compile(r'>\s+([^\s])', re.DOTALL)
my_string = my_re.sub(r'>\g<1>', my_string)
my_string = xml.dom.minidom.parseString(my_string).toprettyxml()
# remove line breaks
my_re = re.compile('>\n\s+([^<>\s].*?)\n\s+</', re.DOTALL)
my_string = my_re.sub('>\g<1></', my_string)
my_re = re.compile(r'>\n\s+([^<>\s].*?)\n\s+</', re.DOTALL)
my_string = my_re.sub(r'>\g<1></', my_string)
return my_string
def output_request(self, url, output_headers, body, content_type, method,

View File

@ -174,12 +174,12 @@ class ExampleClient(object):
def _indent_xml(self, my_string):
my_string = my_string.encode("utf-8")
# convert to plain string without indents and spaces
my_re = re.compile('>\s+([^\s])', re.DOTALL)
my_string = my_re.sub('>\g<1>', my_string)
my_re = re.compile(r'>\s+([^\s])', re.DOTALL)
my_string = my_re.sub(r'>\g<1>', my_string)
my_string = xml.dom.minidom.parseString(my_string).toprettyxml()
# remove line breaks
my_re = re.compile('>\n\s+([^<>\s].*?)\n\s+</', re.DOTALL)
my_string = my_re.sub('>\g<1></', my_string)
my_re = re.compile(r'>\n\s+([^<>\s].*?)\n\s+</', re.DOTALL)
my_string = my_re.sub(r'>\g<1></', my_string)
return my_string
def output_request(self, url, output_headers, body, content_type, method,

View File

@ -54,7 +54,7 @@ if CONFIG.values.get('rabbit_runs_locally', False) == True:
shell=False)
for line in iter(proc.stdout.readline, ""):
print("LIST QUEUES:" + line)
m = re.search("""%s\s+([0-9]+)""" % queue_name, line)
m = re.search(r"%s\s+([0-9]+)" % queue_name, line)
if m:
return int(m.group(1))
return None

View File

@ -120,9 +120,9 @@ class Service(object):
proc = start_proc(["/usr/bin/pmap", "-d", str(pid)],
shell=False)
for line in iter(proc.stdout.readline, ""):
m = re.search("""mapped\:\s([0-9]+)K\s+"""
"""writeable/private:\s([0-9]+)K\s+"""
"""shared:\s+([0-9]+)K""", line)
m = re.search(r"mapped\:\s([0-9]+)K\s+"
r"writeable/private:\s([0-9]+)K\s+"
r"shared:\s+([0-9]+)K", line)
if m:
return MemoryInfo(int(m.group(1)), int(m.group(2)),
int(m.group(3)))

View File

@ -79,7 +79,7 @@ def check_dependencies():
print('Installing virtualenv via easy_install...'),
if not (run_command(['which', 'easy_install']) and
run_command(['easy_install', 'virtualenv'])):
die('ERROR: virtualenv not found.\n\Trove development'
die('ERROR: virtualenv not found.\nTrove development'
' requires virtualenv, please install it using your'
' favorite package management tool')
print('done.')

View File

@ -205,7 +205,8 @@ class LintRunner(object):
def __init__(self):
self.config = Config()
self.idline = re.compile("^[*]* Module .*")
self.detail = re.compile("(\S+):(\d+): \[(\S+)\((\S+)\), (\S+)?] (.*)")
self.detail = re.compile(r"(\S+):(\d+): \[(\S+)\((\S+)\),"
r" (\S+)?] (.*)")
def dolint(self, filename):
exceptions = set()

View File

@ -17,8 +17,8 @@
url_ref = {
"type": "string",
"minLength": 8,
"pattern": 'http[s]?://(?:[a-zA-Z]|[0-9]|[$-_@.&+]|[!*\(\),]'
'|(?:%[0-9a-fA-F][0-9a-fA-F]))+'
"pattern": r'http[s]?://(?:[a-zA-Z]|[0-9]|[$-_@.&+]|[!*\(\),]'
r'|(?:%[0-9a-fA-F][0-9a-fA-F]))+'
}
boolean_string = {
@ -80,7 +80,7 @@ volume_size = {
host_string = {
"type": "string",
"minLength": 1,
"pattern": "^[%]?[\w(-).]*[%]?$"
"pattern": r"^[%]?[\w(-).]*[%]?$"
}
name_string = {

View File

@ -28,7 +28,7 @@ class CassandraSchema(models.DatastoreSchema):
return 32
def _is_valid_schema_name(self, value):
return not any(c in value for c in '/\. "$')
return not any(c in value for c in r'/\. "$')
class CassandraUser(models.DatastoreUser):

View File

@ -27,7 +27,7 @@ class MongoDBSchema(models.DatastoreSchema):
def _is_valid_schema_name(self, value):
# check against the invalid character set from
# http://docs.mongodb.org/manual/reference/limits
return not any(c in value for c in '/\. "$')
return not any(c in value for c in r'/\. "$')
class MongoDBUser(models.DatastoreUser):

View File

@ -31,7 +31,7 @@ class MySQLSchema(models.DatastoreSchema):
# Defaults
__charset__ = "utf8"
__collation__ = "utf8_general_ci"
dbname = re.compile("^[A-Za-z0-9_-]+[\s\?\#\@]*[A-Za-z0-9_-]+$")
dbname = re.compile(r"^[A-Za-z0-9_-]+[\s\?\#\@]*[A-Za-z0-9_-]+$")
# Complete list of acceptable values
collation = mysql_settings.collation
@ -121,7 +121,7 @@ class MySQLSchema(models.DatastoreSchema):
class MySQLUser(models.DatastoreUser):
"""Represents a MySQL User and its associated properties."""
not_supported_chars = re.compile("^\s|\s$|'|\"|;|`|,|/|\\\\")
not_supported_chars = re.compile(r"""^\s|\s$|'|"|;|`|,|/|\\""")
def _is_valid_string(self, value):
if (not value or

View File

@ -197,16 +197,16 @@ class Request(base_wsgi.Request):
@utils.cached_property
def accept_version(self):
accept_header = self.headers.get('ACCEPT', "")
accept_version_re = re.compile(".*?application/vnd.openstack.trove"
"(\+.+?)?;"
"version=(?P<version_no>\d+\.?\d*)")
accept_version_re = re.compile(r".*?application/vnd.openstack.trove"
r"(\+.+?)?;"
r"version=(?P<version_no>\d+\.?\d*)")
match = accept_version_re.search(accept_header)
return match.group("version_no") if match else None
@utils.cached_property
def url_version(self):
versioned_url_re = re.compile("/v(?P<version_no>\d+\.?\d*)")
versioned_url_re = re.compile(r"/v(?P<version_no>\d+\.?\d*)")
match = versioned_url_re.search(self.path)
return match.group("version_no") if match else None

View File

@ -295,7 +295,7 @@ class ImportOverrideStrategy(ConfigurationOverrideStrategy):
within their set got applied.
"""
FILE_NAME_PATTERN = '%s-([0-9]+)-%s\.%s$'
FILE_NAME_PATTERN = r'%s-([0-9]+)-%s\.%s$'
def __init__(self, revision_dir, revision_ext):
"""

View File

@ -108,7 +108,7 @@ def to_bytes(value):
"""Convert numbers with a byte suffix to bytes.
"""
if isinstance(value, six.string_types):
pattern = re.compile('^(\d+)([K,M,G]{1})$')
pattern = re.compile(r'^(\d+)([K,M,G]{1})$')
match = pattern.match(value)
if match:
value = match.group(1)

View File

@ -43,8 +43,8 @@ cmd_update_sysctl_conf = ('echo "vm.swappiness = 0" | sudo tee -a '
'/etc/sysctl.conf')
cmd_reset_pwd = 'sudo /opt/couchbase/bin/cbreset_password %(IP)s:8091'
pwd_file = COUCHBASE_CONF_DIR + SECRET_KEY
cmd_get_password_from_config = """sudo /opt/couchbase/bin/erl -noinput -eval \
'case file:read_file("/opt/couchbase/var/lib/couchbase/config/config.dat") \
of {ok, B} -> io:format("~p~n", [binary_to_term(B)]) end.' \
-run init stop | grep '\[{"root",\[{password,' | awk -F\\" '{print $4}'
"""
cmd_get_password_from_config = (
r"""sudo /opt/couchbase/bin/erl -noinput -eval 'case file:read_file("""
r""""/opt/couchbase/var/lib/couchbase/config/config.dat") of {ok, B} ->"""
r"""io:format("~p~n", [binary_to_term(B)]) end.' -run init stop"""
r""" | grep '\[{"root",\[{password,' | awk -F\" '{print $4}'""")

View File

@ -61,11 +61,11 @@ RESTORE_OFFLINE_DB = (
"db2 restore database %(dbname)s from " + DB2_BACKUP_DIR)
GET_DB_SIZE = (
"db2 +o connect to %(dbname)s;"
"db2 call get_dbsize_info\(?, ?, ?, -1\) | "
r"db2 call get_dbsize_info\(?, ?, ?, -1\) | "
"grep -A1 'DATABASESIZE' | grep 'Parameter Value' | sed 's/.*[:]//' |"
" tr -d '\n'; db2 +o connect reset")
GET_DB_NAMES = ("find /home/db2inst1/db2inst1/backup/ -type f -name '*.001' |"
" grep -Po \"(?<=backup/)[^.']*(?=\.)\"")
" grep -Po \"(?<=backup/)[^.']*(?=\\.)\"")
GET_DBM_CONFIGURATION = "db2 get dbm configuration > %(dbm_config)s"
UPDATE_DBM_CONFIGURATION = ("db2 update database manager configuration using "
"%(parameter)s %(value)s")

View File

@ -445,7 +445,7 @@ class PgSqlApp(object):
"""
r = operating_system.read_file(self.pgsql_recovery_config,
as_root=True)
regexp = re.compile("host=(\d+.\d+.\d+.\d+) ")
regexp = re.compile(r"host=(\d+.\d+.\d+.\d+) ")
m = regexp.search(r)
return m.group(1)

View File

@ -100,7 +100,7 @@ class VSqlError(object):
stderr looks like: "ERROR 3117: Division by zero"
:param stderr: string from executing statement via vsql
"""
parse = re.match("^(ERROR|WARNING) (\d+): (.+)$", stderr)
parse = re.match(r"^(ERROR|WARNING) (\d+): (.+)$", stderr)
if not parse:
raise ValueError(_("VSql stderr %(msg)s not recognized.")
% {'msg': stderr})

View File

@ -202,7 +202,7 @@ class RedhatPackagerMixin(RPMPackagerMixin):
"""
cmd = "sudo yum --color=never -y install %s" % " ".join(packages)
output_expects = ['\[sudo\] password for .*:',
output_expects = [r'\[sudo\] password for .*:',
'No package (.*) available.',
('file .* from install of .* conflicts with file'
' from package (.*?)\r\n'),
@ -243,7 +243,7 @@ class RedhatPackagerMixin(RPMPackagerMixin):
"""
cmd = "sudo yum --color=never -y remove %s" % package_name
LOG.debug("Running package remove command: %s", cmd)
output_expects = ['\[sudo\] password for .*:',
output_expects = [r'\[sudo\] password for .*:',
'No Packages marked for removal',
'Removed:']
i, match = self.pexpect_run(cmd, output_expects, time_out)
@ -395,7 +395,7 @@ class DebianPackagerMixin(BasePackagerMixin):
def pkg_version(self, package_name):
std_out = getoutput("apt-cache", "policy", package_name)
for line in std_out.split("\n"):
m = re.match("\s+Installed: (.*)", line)
m = re.match(r"\s+Installed: (.*)", line)
if m:
version = m.group(1)
if version == "(none)":

View File

@ -102,8 +102,8 @@ class NodetoolSnapshot(base.BackupRunner):
snapshot_name)
snapshot_files = operating_system.list_files_in_directory(
data_dir, recursive=True, include_dirs=False,
pattern='.*/snapshots/%s/.*\.%s' % (snapshot_name,
self._SNAPSHOT_EXTENSION),
pattern=r'.*/snapshots/%s/.*\.%s' % (snapshot_name,
self._SNAPSHOT_EXTENSION),
as_root=True)
num_snapshot_files = len(snapshot_files)
LOG.debug('Found %(num)d snapshot (*.%(ext)s) files.',

View File

@ -125,8 +125,8 @@ class PgBaseBackup(base.BackupRunner, PgBaseBackupUtil):
operating_system.chmod(
metadata_file, FileMode(add=[stat.S_IROTH]), as_root=True)
start_re = re.compile("START WAL LOCATION: (.*) \(file (.*)\)")
stop_re = re.compile("STOP WAL LOCATION: (.*) \(file (.*)\)")
start_re = re.compile(r"START WAL LOCATION: (.*) \(file (.*)\)")
stop_re = re.compile(r"STOP WAL LOCATION: (.*) \(file (.*)\)")
checkpt_re = re.compile("CHECKPOINT LOCATION: (.*)")
label_re = re.compile("LABEL: (.*)")

View File

@ -85,7 +85,8 @@ class InnoBackupEx(base.BackupRunner):
def metadata(self):
LOG.debug('Getting metadata from backup.')
meta = {}
lsn = re.compile("The latest check point \(for incremental\): '(\d+)'")
lsn = re.compile(r"The latest check point \(for incremental\): "
r"'(\d+)'")
with open('/tmp/innobackupex.log', 'r') as backup_log:
output = backup_log.read()
match = lsn.search(output)

View File

@ -37,7 +37,7 @@ class PgDump(base.RestoreRunner):
base_restore_cmd = 'psql -U os_admin'
IGNORED_ERROR_PATTERNS = [
re.compile("ERROR:\s*role \"postgres\" already exists"),
re.compile(r'ERROR:\s*role "postgres" already exists'),
]
def restore(self):
@ -96,7 +96,7 @@ class PgBaseBackup(base.RestoreRunner):
base_restore_cmd = ''
IGNORED_ERROR_PATTERNS = [
re.compile("ERROR:\s*role \"postgres\" already exists"),
re.compile(r'ERROR:\s*role "postgres" already exists'),
]
def __init__(self, *args, **kwargs):

View File

@ -67,7 +67,7 @@ class HostsBeforeInstanceCreation(object):
@test(depends_on=[test_empty_index_host_list])
def test_empty_index_host_list_single(self):
self.host.name = self.host.name.replace(".", "\.")
self.host.name = self.host.name.replace(".", r"\.")
result = self.client.hosts.get(self.host)
assert_not_equal(result, None,
"Get host should not be empty for: %s" % self.host)
@ -126,7 +126,7 @@ class HostsMgmtCommands(object):
@test(depends_on=[test_index_host_list])
def test_index_host_list_single(self):
self.host.name = self.host.name.replace(".", "\.")
self.host.name = self.host.name.replace(".", r"\.")
result = self.client.hosts.get(self.host)
assert_not_equal(result, None,
"list hosts should not be empty: %s" % str(result))

View File

@ -66,7 +66,7 @@ class FakeGuest(object):
self.version += 1
def _check_username(self, username):
unsupported_chars = re.compile("^\s|\s$|'|\"|;|`|,|/|\\\\")
unsupported_chars = re.compile(r"""^\s|\s$|'|"|;|`|,|/|\\""")
if (not username or
unsupported_chars.search(username) or
("%r" % username).find("\\") != -1):

View File

@ -1146,35 +1146,35 @@ class TestOperatingSystem(trove_testtools.TestCase):
# Only '*.txt' in the top directory.
self._assert_list_files(
root_path, False, '.*\.txt$', False, all_paths, 3)
root_path, False, r'.*\.txt$', False, all_paths, 3)
# Only '*.txt' (including directories) in the top directory.
self._assert_list_files(
root_path, False, '.*\.txt$', True, all_paths, 3)
root_path, False, r'.*\.txt$', True, all_paths, 3)
# Only '*.txt' recursive.
self._assert_list_files(
root_path, True, '.*\.txt$', True, all_paths, 9)
root_path, True, r'.*\.txt$', True, all_paths, 9)
# Only '*.txt' (including directories) recursive.
self._assert_list_files(
root_path, True, '.*\.txt$', False, all_paths, 9)
root_path, True, r'.*\.txt$', False, all_paths, 9)
# Only extension-less files in the top directory.
self._assert_list_files(
root_path, False, '[^\.]*$', False, all_paths, 3)
root_path, False, r'[^\.]*$', False, all_paths, 3)
# Only extension-less files recursive.
self._assert_list_files(
root_path, True, '[^\.]*$', False, all_paths, 9)
root_path, True, r'[^\.]*$', False, all_paths, 9)
# Non-existing extension in the top directory.
self._assert_list_files(
root_path, False, '.*\.bak$', False, all_paths, 0)
root_path, False, r'.*\.bak$', False, all_paths, 0)
# Non-existing extension recursive.
self._assert_list_files(
root_path, True, '.*\.bak$', False, all_paths, 0)
root_path, True, r'.*\.bak$', False, all_paths, 0)
finally:
try:
os.remove(root_path)

View File

@ -73,10 +73,10 @@ class SqlAlchemyConnection(object):
@staticmethod
def _exception_is_permissions_issue(msg):
"""Assert message cited a permissions issue and not something else."""
pos_error = re.compile(".*Host '[\w\.]*' is not allowed to connect to "
"this MySQL server.*")
pos_error = re.compile(r".*Host '[\w\.]*' is not allowed to connect "
"to this MySQL server.*")
pos_error1 = re.compile(".*Access denied for user "
"'[\w\*\!\@\#\^\&]*'@'[\w\.]*'.*")
r"'[\w\*\!\@\#\^\&]*'@'[\w\.]*'.*")
if (pos_error.match(msg) or pos_error1.match(msg)):
return True
@ -130,7 +130,7 @@ class PexpectMySqlConnection(object):
cmd = '%s %s' % (tests.SSH_CMD, ssh_args)
self.proc = pexpect.spawn(cmd)
print(cmd)
self.proc.expect(":~\$", timeout=self.TIME_OUT)
self.proc.expect(r":~\$", timeout=self.TIME_OUT)
cmd2 = "mysql --host '%s' -u '%s' '-p%s'\n" % \
(self.host, self.user, self.password)
print(cmd2)
@ -152,7 +152,7 @@ class PexpectMySqlConnection(object):
self.proc.close()
def execute(self, cmd):
self.proc.send(cmd + "\G\n")
self.proc.send(cmd + "\\G\n")
outcome = self.proc.expect(['Empty set', 'mysql>'],
timeout=self.TIME_OUT)
if outcome == 0: