Merge branch 'master' into feature/losf

Change-Id: Ib46bf7612c62163fbce471c530ff375d19f45bbd
Tim Burke 2019-06-13 09:31:55 -07:00
commit b1ad1bcec9
24 changed files with 800 additions and 599 deletions


@ -4,7 +4,7 @@
# this is required for the docs build jobs
sphinx>=1.6.2,<2.0.0;python_version=='2.7' # BSD
sphinx>=1.6.2;python_version>='3.4' # BSD
openstackdocstheme>=1.11.0 # Apache-2.0
openstackdocstheme>=1.30.0 # Apache-2.0
reno>=1.8.0 # Apache-2.0
os-api-ref>=1.0.0 # Apache-2.0
python-keystoneclient!=2.1.0,>=2.0.0 # Apache-2.0


@ -12,7 +12,7 @@ chardet==3.0.4
cliff==2.11.0
cmd2==0.8.1
coverage==3.6
cryptography==1.8.2
cryptography==2.0.2
debtcollector==1.19.0
dnspython==1.14.0
docutils==0.11
@ -46,7 +46,7 @@ netifaces==0.8
nose==1.3.7
nosehtmloutput==0.0.3
nosexcover==1.0.10
openstackdocstheme==1.11.0
openstackdocstheme==1.30.0
os-api-ref==1.0.0
os-testr==0.8.0
oslo.config==4.0.0


@ -14,7 +14,7 @@ urllib3>=1.21.1,<1.25
six>=1.9.0
xattr>=0.4;sys_platform!='win32' # MIT
PyECLib>=1.3.1 # BSD
cryptography!=2.0,>=1.8.2 # BSD/Apache-2.0
cryptography>=2.0.2 # BSD/Apache-2.0
ipaddress>=1.0.16;python_version<'3.3' # PSF
# grpcio will fail to work with eventlet starting with grpcio 1.3.5.
# see this for a similar issue with gevent: https://github.com/grpc/grpc/issues/4629 and https://github.com/gevent/gevent/issues/786


@ -434,20 +434,21 @@ class SigV4Mixin(object):
raise InvalidRequest(msg)
else:
hashed_payload = self.headers['X-Amz-Content-SHA256']
if self.content_length == 0:
if hashed_payload != sha256().hexdigest():
raise BadDigest(
'The X-Amz-Content-SHA256 you specified did not match '
'what we received.')
elif self.content_length:
self.environ['wsgi.input'] = HashingInput(
self.environ['wsgi.input'],
self.content_length,
sha256,
hashed_payload)
# else, not provided -- Swift will kick out a 411 Length Required
# which will get translated back to an S3-style response in
# S3Request._swift_error_codes
if hashed_payload != 'UNSIGNED-PAYLOAD':
if self.content_length == 0:
if hashed_payload != sha256().hexdigest():
raise BadDigest(
'The X-Amz-Content-SHA256 you specified did not '
'match what we received.')
elif self.content_length:
self.environ['wsgi.input'] = HashingInput(
self.environ['wsgi.input'],
self.content_length,
sha256,
hashed_payload)
# else, length not provided -- Swift will kick out a
# 411 Length Required which will get translated back
# to an S3-style response in S3Request._swift_error_codes
cr.append(swob.wsgi_to_bytes(hashed_payload))
return b'\n'.join(cr)
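
For context, a minimal standalone sketch of the validation this hunk now guards (check_payload_hash is a hypothetical helper for illustration, not the s3api implementation): a client sending 'UNSIGNED-PAYLOAD' has opted out of payload signing, so the hash comparison is skipped entirely.

import hashlib

def check_payload_hash(declared, body):
    # 'UNSIGNED-PAYLOAD' means the client opted out of payload signing,
    # so there is nothing to verify (hypothetical helper, for illustration)
    if declared == 'UNSIGNED-PAYLOAD':
        return True
    return declared == hashlib.sha256(body).hexdigest()

# The well-known SHA-256 of an empty body, matched when content_length == 0:
assert hashlib.sha256(b'').hexdigest() == (
    'e3b0c44298fc1c149afbf4c8996fb92427ae41e4649b934ca495991b7852b855')
assert check_payload_hash('UNSIGNED-PAYLOAD', b'anything')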


@ -199,12 +199,7 @@ def _check_symlink_header(req):
# validation first, here.
error_body = 'X-Symlink-Target header must be of the form ' \
'<container name>/<object name>'
try:
if wsgi_unquote(req.headers[TGT_OBJ_SYMLINK_HDR]).startswith('/'):
raise HTTPPreconditionFailed(
body=error_body,
request=req, content_type='text/plain')
except TypeError:
if wsgi_unquote(req.headers[TGT_OBJ_SYMLINK_HDR]).startswith('/'):
raise HTTPPreconditionFailed(
body=error_body,
request=req, content_type='text/plain')
@ -216,14 +211,9 @@ def _check_symlink_header(req):
req.headers[TGT_OBJ_SYMLINK_HDR] = wsgi_quote('%s/%s' % (container, obj))
# Check account format if it exists
try:
account = check_account_format(
req, wsgi_unquote(req.headers[TGT_ACCT_SYMLINK_HDR])) \
if TGT_ACCT_SYMLINK_HDR in req.headers else None
except TypeError:
raise HTTPPreconditionFailed(
body='Account name cannot contain slashes',
request=req, content_type='text/plain')
account = check_account_format(
req, wsgi_unquote(req.headers[TGT_ACCT_SYMLINK_HDR])) \
if TGT_ACCT_SYMLINK_HDR in req.headers else None
# Extract request path
_junk, req_acc, req_cont, req_obj = req.split_path(4, 4, True)
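
A small stdlib-only illustration (hypothetical values) of why the header value round-trips through percent-encoding as above: WSGI headers carry native strings, while container and object names may contain multi-byte UTF-8.

from urllib.parse import quote, unquote

target = u'\u30b0\u30e9\u30d6\u30eb/\u30a2\u30ba\u30ec\u30f3'
quoted = quote(target.encode('utf8'))       # safe to carry in a header
assert unquote(quoted) == target            # recovered on the way back out
assert not unquote(quoted).startswith('/')  # the leading-slash check above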


@ -649,7 +649,8 @@ class StrAnonymizer(str):
def __new__(cls, data, method, salt):
method = method.lower()
if method not in hashlib.algorithms_guaranteed:
if method not in (hashlib.algorithms if six.PY2 else
hashlib.algorithms_guaranteed):
raise ValueError('Unsupported hashing method: %r' % method)
s = str.__new__(cls, data or '')
s.method = method
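
For reference, a quick demonstration of the attribute the new branch prefers: hashlib.algorithms exists only on py2, while algorithms_guaranteed (py2.7.9+ and py3.2+) lists the digests available on every platform, hence the six.PY2 switch above.

import hashlib

assert 'sha256' in hashlib.algorithms_guaranteed
assert 'sha257' not in hashlib.algorithms_guaranteed  # -> ValueError above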


@ -462,7 +462,7 @@ class ObjectReconstructor(Daemon):
if resp_frag_index not in buckets[timestamp]:
buckets[timestamp][resp_frag_index] = resp
if len(buckets[timestamp]) >= job['policy'].ec_ndata:
responses = buckets[timestamp].values()
responses = list(buckets[timestamp].values())
self.logger.debug(
'Reconstruct frag #%s with frag indexes %s'
% (fi_to_rebuild, list(buckets[timestamp])))
@ -508,15 +508,15 @@ class ObjectReconstructor(Daemon):
"""
def _get_one_fragment(resp):
buff = ''
buff = []
remaining_bytes = policy.fragment_size
while remaining_bytes:
chunk = resp.read(remaining_bytes)
if not chunk:
break
remaining_bytes -= len(chunk)
buff += chunk
return buff
buff.append(chunk)
return b''.join(buff)
def fragment_payload_iter():
# We need a fragment from each connection, so best to
@ -849,7 +849,8 @@ class ObjectReconstructor(Daemon):
success, _ = ssync_sender(
self, node, job, suffixes)()
# let the remote end know to rehash its suffixes
self.rehash_remote(node, job, suffixes)
if success:
self.rehash_remote(node, job, suffixes)
# update stats for this attempt
self.suffix_sync += len(suffixes)
self.logger.update_stats('suffix.syncs', len(suffixes))
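
The _get_one_fragment() change above swaps repeated string concatenation for a list of byte chunks joined once. A minimal runnable sketch of that pattern (read_exactly is a hypothetical name):

import io

def read_exactly(fp, n):
    # accumulate chunks in a list and join once; bytes += bytes would
    # copy the whole buffer on every iteration
    buff = []
    remaining = n
    while remaining:
        chunk = fp.read(remaining)
        if not chunk:
            break
        remaining -= len(chunk)
        buff.append(chunk)
    return b''.join(buff)

assert read_exactly(io.BytesIO(b'abcdef'), 4) == b'abcd'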


@ -15,6 +15,7 @@
import eventlet.greenio
import eventlet.wsgi
from six.moves import urllib
from swift.common import exceptions
@ -35,7 +36,7 @@ def decode_missing(line):
:py:func:`~swift.obj.ssync_sender.encode_missing`
"""
result = {}
parts = line.split()
parts = line.decode('ascii').split()
result['object_hash'] = urllib.parse.unquote(parts[0])
t_data = urllib.parse.unquote(parts[1])
result['ts_data'] = Timestamp(t_data)
@ -129,7 +130,17 @@ class Receiver(object):
# raised during processing because otherwise the sender could send for
# quite some time before realizing it was all in vain.
self.disconnect = True
self.initialize_request()
try:
self.initialize_request()
except swob.HTTPException:
# Old (pre-0.18.0) eventlet would try to drain the request body
# in a way that's prone to blowing up when the client has
# disconnected. Trick it into skipping that so we don't trip
# ValueError: invalid literal for int() with base 16
# in tests. Note we disconnect shortly after receiving a non-200
# response in the sender code, so this is not *so* crazy to do.
request.environ['wsgi.input'].chunked_input = False
raise
def __call__(self):
"""
@ -151,7 +162,7 @@ class Receiver(object):
try:
# Need to send something to trigger wsgi to return response
# headers and kick off the ssync exchange.
yield '\r\n'
yield b'\r\n'
# If semaphore is in use, try to acquire it, non-blocking, and
# return a 503 if it fails.
if self.app.replication_semaphore:
@ -176,21 +187,22 @@ class Receiver(object):
'%s/%s/%s SSYNC LOCK TIMEOUT: %s' % (
self.request.remote_addr, self.device, self.partition,
err))
yield ':ERROR: %d %r\n' % (0, str(err))
yield (':ERROR: %d %r\n' % (0, str(err))).encode('utf8')
except exceptions.MessageTimeout as err:
self.app.logger.error(
'%s/%s/%s TIMEOUT in ssync.Receiver: %s' % (
self.request.remote_addr, self.device, self.partition,
err))
yield ':ERROR: %d %r\n' % (408, str(err))
yield (':ERROR: %d %r\n' % (408, str(err))).encode('utf8')
except swob.HTTPException as err:
body = ''.join(err({}, lambda *args: None))
yield ':ERROR: %d %r\n' % (err.status_int, body)
body = b''.join(err({}, lambda *args: None))
yield (':ERROR: %d %r\n' % (
err.status_int, body)).encode('utf8')
except Exception as err:
self.app.logger.exception(
'%s/%s/%s EXCEPTION in ssync.Receiver' %
(self.request.remote_addr, self.device, self.partition))
yield ':ERROR: %d %r\n' % (0, str(err))
yield (':ERROR: %d %r\n' % (0, str(err))).encode('utf8')
except Exception:
self.app.logger.exception('EXCEPTION in ssync.Receiver')
if self.disconnect:
@ -335,7 +347,7 @@ class Receiver(object):
with exceptions.MessageTimeout(
self.app.client_timeout, 'missing_check start'):
line = self.fp.readline(self.app.network_chunk_size)
if line.strip() != ':MISSING_CHECK: START':
if line.strip() != b':MISSING_CHECK: START':
raise Exception(
'Looking for :MISSING_CHECK: START got %r' % line[:1024])
object_hashes = []
@ -343,16 +355,16 @@ class Receiver(object):
with exceptions.MessageTimeout(
self.app.client_timeout, 'missing_check line'):
line = self.fp.readline(self.app.network_chunk_size)
if not line or line.strip() == ':MISSING_CHECK: END':
if not line or line.strip() == b':MISSING_CHECK: END':
break
want = self._check_missing(line)
if want:
object_hashes.append(want)
yield ':MISSING_CHECK: START\r\n'
yield b':MISSING_CHECK: START\r\n'
if object_hashes:
yield '\r\n'.join(object_hashes)
yield '\r\n'
yield ':MISSING_CHECK: END\r\n'
yield b'\r\n'.join(hsh.encode('ascii') for hsh in object_hashes)
yield b'\r\n'
yield b':MISSING_CHECK: END\r\n'
def updates(self):
"""
@ -395,7 +407,7 @@ class Receiver(object):
with exceptions.MessageTimeout(
self.app.client_timeout, 'updates start'):
line = self.fp.readline(self.app.network_chunk_size)
if line.strip() != ':UPDATES: START':
if line.strip() != b':UPDATES: START':
raise Exception('Looking for :UPDATES: START got %r' % line[:1024])
successes = 0
failures = 0
@ -403,10 +415,10 @@ class Receiver(object):
with exceptions.MessageTimeout(
self.app.client_timeout, 'updates line'):
line = self.fp.readline(self.app.network_chunk_size)
if not line or line.strip() == ':UPDATES: END':
if not line or line.strip() == b':UPDATES: END':
break
# Read first line METHOD PATH of subrequest.
method, path = line.strip().split(' ', 1)
method, path = swob.bytes_to_wsgi(line.strip()).split(' ', 1)
subreq = swob.Request.blank(
'/%s/%s%s' % (self.device, self.partition, path),
environ={'REQUEST_METHOD': method})
@ -422,7 +434,7 @@ class Receiver(object):
line = line.strip()
if not line:
break
header, value = line.split(':', 1)
header, value = swob.bytes_to_wsgi(line).split(':', 1)
header = header.strip().lower()
value = value.strip()
subreq.headers[header] = value
@ -495,5 +507,5 @@ class Receiver(object):
raise swob.HTTPInternalServerError(
'ERROR: With :UPDATES: %d failures to %d successes' %
(failures, successes))
yield ':UPDATES: START\r\n'
yield ':UPDATES: END\r\n'
yield b':UPDATES: START\r\n'
yield b':UPDATES: END\r\n'
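
The pattern running through this file: under py3 a WSGI app_iter must yield bytes, so fixed protocol lines become byte literals and interpolated error lines are explicitly encoded. A tiny check of the shape (values made up):

err_line = (':ERROR: %d %r\n' % (408, 'timed out')).encode('utf8')
assert err_line == b":ERROR: 408 'timed out'\n"
assert b':MISSING_CHECK: START\r\n'.strip() == b':MISSING_CHECK: START'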


@ -40,7 +40,7 @@ def encode_missing(object_hash, ts_data, ts_meta=None, ts_ctype=None):
if ts_ctype and ts_ctype != ts_data:
delta = ts_ctype.raw - ts_data.raw
msg = '%s,t:%x' % (msg, delta)
return msg
return msg.encode('ascii')
def decode_wanted(parts):
@ -52,7 +52,7 @@ def decode_wanted(parts):
:py:func:`~swift.obj.ssync_receiver.encode_wanted`
"""
wanted = {}
key_map = dict(d='data', m='meta')
key_map = {'d': 'data', 'm': 'meta'}
if parts:
# receiver specified data and/or meta wanted, so use those as
# conditions for sending PUT and/or POST subrequests
@ -72,7 +72,7 @@ def decode_wanted(parts):
class SsyncBufferedHTTPResponse(bufferedhttp.BufferedHTTPResponse, object):
def __init__(self, *args, **kwargs):
super(SsyncBufferedHTTPResponse, self).__init__(*args, **kwargs)
self.ssync_response_buffer = ''
self.ssync_response_buffer = b''
self.ssync_response_chunk_left = 0
def readline(self, size=1024):
@ -84,13 +84,13 @@ class SsyncBufferedHTTPResponse(bufferedhttp.BufferedHTTPResponse, object):
taken from Python's httplib itself.
"""
data = self.ssync_response_buffer
self.ssync_response_buffer = ''
while '\n' not in data and len(data) < size:
self.ssync_response_buffer = b''
while b'\n' not in data and len(data) < size:
if self.ssync_response_chunk_left == -1: # EOF-already indicator
break
if self.ssync_response_chunk_left == 0:
line = self.fp.readline()
i = line.find(';')
i = line.find(b';')
if i >= 0:
line = line[:i] # strip chunk-extensions
try:
@ -114,9 +114,9 @@ class SsyncBufferedHTTPResponse(bufferedhttp.BufferedHTTPResponse, object):
if self.ssync_response_chunk_left == 0:
self.fp.read(2) # discard the trailing \r\n
data += chunk
if '\n' in data:
data, self.ssync_response_buffer = data.split('\n', 1)
data += '\n'
if b'\n' in data:
data, self.ssync_response_buffer = data.split(b'\n', 1)
data += b'\n'
return data
@ -263,8 +263,8 @@ class Sender(object):
# First, send our list.
with exceptions.MessageTimeout(
self.daemon.node_timeout, 'missing_check start'):
msg = ':MISSING_CHECK: START\r\n'
connection.send('%x\r\n%s\r\n' % (len(msg), msg))
msg = b':MISSING_CHECK: START\r\n'
connection.send(b'%x\r\n%s\r\n' % (len(msg), msg))
hash_gen = self.df_mgr.yield_hashes(
self.job['device'], self.job['partition'],
self.job['policy'], self.suffixes,
@ -279,12 +279,12 @@ class Sender(object):
with exceptions.MessageTimeout(
self.daemon.node_timeout,
'missing_check send line'):
msg = '%s\r\n' % encode_missing(object_hash, **timestamps)
connection.send('%x\r\n%s\r\n' % (len(msg), msg))
msg = b'%s\r\n' % encode_missing(object_hash, **timestamps)
connection.send(b'%x\r\n%s\r\n' % (len(msg), msg))
with exceptions.MessageTimeout(
self.daemon.node_timeout, 'missing_check end'):
msg = ':MISSING_CHECK: END\r\n'
connection.send('%x\r\n%s\r\n' % (len(msg), msg))
msg = b':MISSING_CHECK: END\r\n'
connection.send(b'%x\r\n%s\r\n' % (len(msg), msg))
# Now, retrieve the list of what they want.
while True:
with exceptions.MessageTimeout(
@ -293,9 +293,14 @@ class Sender(object):
if not line:
raise exceptions.ReplicationException('Early disconnect')
line = line.strip()
if line == ':MISSING_CHECK: START':
if line == b':MISSING_CHECK: START':
break
elif line:
if not six.PY2:
try:
line = line.decode('ascii')
except UnicodeDecodeError:
pass
raise exceptions.ReplicationException(
'Unexpected response: %r' % line[:1024])
while True:
@ -305,9 +310,9 @@ class Sender(object):
if not line:
raise exceptions.ReplicationException('Early disconnect')
line = line.strip()
if line == ':MISSING_CHECK: END':
if line == b':MISSING_CHECK: END':
break
parts = line.split()
parts = line.decode('ascii').split()
if parts:
send_map[parts[0]] = decode_wanted(parts[1:])
return available_map, send_map
@ -323,8 +328,8 @@ class Sender(object):
# First, send all our subrequests based on the send_map.
with exceptions.MessageTimeout(
self.daemon.node_timeout, 'updates start'):
msg = ':UPDATES: START\r\n'
connection.send('%x\r\n%s\r\n' % (len(msg), msg))
msg = b':UPDATES: START\r\n'
connection.send(b'%x\r\n%s\r\n' % (len(msg), msg))
for object_hash, want in send_map.items():
object_hash = urllib.parse.unquote(object_hash)
try:
@ -360,8 +365,8 @@ class Sender(object):
pass
with exceptions.MessageTimeout(
self.daemon.node_timeout, 'updates end'):
msg = ':UPDATES: END\r\n'
connection.send('%x\r\n%s\r\n' % (len(msg), msg))
msg = b':UPDATES: END\r\n'
connection.send(b'%x\r\n%s\r\n' % (len(msg), msg))
# Now, read their response for any issues.
while True:
with exceptions.MessageTimeout(
@ -370,9 +375,14 @@ class Sender(object):
if not line:
raise exceptions.ReplicationException('Early disconnect')
line = line.strip()
if line == ':UPDATES: START':
if line == b':UPDATES: START':
break
elif line:
if not six.PY2:
try:
line = line.decode('ascii')
except UnicodeDecodeError:
pass
raise exceptions.ReplicationException(
'Unexpected response: %r' % line[:1024])
while True:
@ -382,20 +392,30 @@ class Sender(object):
if not line:
raise exceptions.ReplicationException('Early disconnect')
line = line.strip()
if line == ':UPDATES: END':
if line == b':UPDATES: END':
break
elif line:
if not six.PY2:
try:
line = line.decode('ascii')
except UnicodeDecodeError:
pass
raise exceptions.ReplicationException(
'Unexpected response: %r' % line[:1024])
def send_subrequest(self, connection, method, url_path, headers, df):
msg = ['%s %s' % (method, url_path)]
msg = [b'%s %s' % (method.encode('ascii'), url_path.encode('utf8'))]
for key, value in sorted(headers.items()):
msg.append('%s: %s' % (key, value))
msg = '\r\n'.join(msg) + '\r\n\r\n'
if six.PY2:
msg.append(b'%s: %s' % (key, value))
else:
msg.append(b'%s: %s' % (
key.encode('utf8', 'surrogateescape'),
str(value).encode('utf8', 'surrogateescape')))
msg = b'\r\n'.join(msg) + b'\r\n\r\n'
with exceptions.MessageTimeout(self.daemon.node_timeout,
'send_%s' % method.lower()):
connection.send('%x\r\n%s\r\n' % (len(msg), msg))
connection.send(b'%x\r\n%s\r\n' % (len(msg), msg))
if df:
bytes_read = 0
@ -404,7 +424,7 @@ class Sender(object):
with exceptions.MessageTimeout(self.daemon.node_timeout,
'send_%s chunk' %
method.lower()):
connection.send('%x\r\n%s\r\n' % (len(chunk), chunk))
connection.send(b'%x\r\n%s\r\n' % (len(chunk), chunk))
if bytes_read != df.content_length:
# Since we may now have partial state on the receiver we have
# to prevent the receiver finalising what may well be a bad or
@ -450,7 +470,7 @@ class Sender(object):
try:
with exceptions.MessageTimeout(
self.daemon.node_timeout, 'disconnect'):
connection.send('0\r\n\r\n')
connection.send(b'0\r\n\r\n')
except (Exception, exceptions.Timeout):
pass # We're okay with the above failing.
connection.close()
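
All the connection.send() calls above hand-roll HTTP chunked transfer encoding, now with bytes %-formatting (supported since Python 3.5, PEP 461). A sketch of the framing:

def frame_chunk(msg):
    # each chunk is '<hex length>\r\n<payload>\r\n'; b'0\r\n\r\n' terminates
    return b'%x\r\n%s\r\n' % (len(msg), msg)

assert frame_chunk(b':UPDATES: END\r\n') == b'f\r\n:UPDATES: END\r\n\r\n'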


@ -594,13 +594,25 @@ def _get_info_from_memcache(app, env, account, container=None):
info = memcache.get(cache_key)
if info and six.PY2:
# Get back to native strings
new_info = {}
for key in info:
new_key = key.encode("utf-8") if isinstance(
key, six.text_type) else key
if isinstance(info[key], six.text_type):
info[key] = info[key].encode("utf-8")
new_info[new_key] = info[key].encode("utf-8")
elif isinstance(info[key], dict):
new_info[new_key] = {}
for subkey, value in info[key].items():
new_subkey = subkey.encode("utf-8") if isinstance(
subkey, six.text_type) else subkey
if isinstance(value, six.text_type):
info[key][subkey] = value.encode("utf-8")
new_info[new_key][new_subkey] = \
value.encode("utf-8")
else:
new_info[new_key][new_subkey] = value
else:
new_info[new_key] = info[key]
info = new_info
if info:
env.setdefault('swift.infocache', {})[cache_key] = info
return info
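
The fix above builds a fresh new_info dict instead of mutating info while iterating over it, and encodes keys as well as values. A simplified, hypothetical sketch of the same shape, modelling py2's unicode-to-native-str conversion as str-to-bytes so it runs anywhere:

def to_native(info, encode=lambda s: s.encode('utf-8')):
    # build a new dict rather than mutating while iterating; handle one
    # level of nesting, as in the real info structure
    new_info = {}
    for key, value in info.items():
        new_key = encode(key) if isinstance(key, str) else key
        if isinstance(value, dict):
            new_info[new_key] = {
                (encode(k) if isinstance(k, str) else k):
                (encode(v) if isinstance(v, str) else v)
                for k, v in value.items()}
        elif isinstance(value, str):
            new_info[new_key] = encode(value)
        else:
            new_info[new_key] = value
    return new_info

assert to_native({u'status': u'200', u'meta': {u'n': 1}}) == \
    {b'status': b'200', b'meta': {b'n': 1}}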


@ -40,42 +40,40 @@ class TestDloEnv(BaseEnv):
if not cont.create():
raise ResponseError(cls.conn.response)
# avoid getting a prefix that stops halfway through an encoded
# character
prefix = Utils.create_name().decode("utf-8")[:10].encode("utf-8")
prefix = Utils.create_name(10)
cls.segment_prefix = prefix
for letter in ('a', 'b', 'c', 'd', 'e'):
file_item = cls.container.file("%s/seg_lower%s" % (prefix, letter))
file_item.write(letter * 10)
file_item.write(letter.encode('ascii') * 10)
file_item = cls.container.file(
"%s/seg_upper_%%ff%s" % (prefix, letter))
file_item.write(letter.upper() * 10)
file_item.write(letter.upper().encode('ascii') * 10)
for letter in ('f', 'g', 'h', 'i', 'j'):
file_item = cls.container2.file("%s/seg_lower%s" %
(prefix, letter))
file_item.write(letter * 10)
file_item.write(letter.encode('ascii') * 10)
man1 = cls.container.file("man1")
man1.write('man1-contents',
man1.write(b'man1-contents',
hdrs={"X-Object-Manifest": "%s/%s/seg_lower" %
(cls.container.name, prefix)})
man2 = cls.container.file("man2")
man2.write('man2-contents',
man2.write(b'man2-contents',
hdrs={"X-Object-Manifest": "%s/%s/seg_upper_%%25ff" %
(cls.container.name, prefix)})
manall = cls.container.file("manall")
manall.write('manall-contents',
manall.write(b'manall-contents',
hdrs={"X-Object-Manifest": "%s/%s/seg" %
(cls.container.name, prefix)})
mancont2 = cls.container.file("mancont2")
mancont2.write(
'mancont2-contents',
b'mancont2-contents',
hdrs={"X-Object-Manifest": "%s/%s/seg_lower" %
(cls.container2.name, prefix)})


@ -48,7 +48,7 @@ class TestSloEnv(BaseEnv):
('e', 1)):
seg_name = "seg_%s" % letter
file_item = container.file(seg_name)
file_item.write(letter * size)
file_item.write(letter.encode('ascii') * size)
seg_info[seg_name] = {
'size_bytes': size,
'etag': file_item.md5,
@ -93,24 +93,26 @@ class TestSloEnv(BaseEnv):
file_item.write(
json.dumps([seg_info['seg_a'], seg_info['seg_b'],
seg_info['seg_c'], seg_info['seg_d'],
seg_info['seg_e']]),
seg_info['seg_e']]).encode('ascii'),
parms={'multipart-manifest': 'put'})
cls.container.file('seg_with_%ff_funky_name').write('z' * 10)
cls.container.file('seg_with_%ff_funky_name').write(b'z' * 10)
# Put the same manifest in the container2
file_item = cls.container2.file("manifest-abcde")
file_item.write(
json.dumps([seg_info['seg_a'], seg_info['seg_b'],
seg_info['seg_c'], seg_info['seg_d'],
seg_info['seg_e']]),
seg_info['seg_e']]).encode('ascii'),
parms={'multipart-manifest': 'put'})
file_item = cls.container.file('manifest-cd')
cd_json = json.dumps([seg_info['seg_c'], seg_info['seg_d']])
cd_json = json.dumps([
seg_info['seg_c'], seg_info['seg_d']]).encode('ascii')
file_item.write(cd_json, parms={'multipart-manifest': 'put'})
cd_etag = hashlib.md5(seg_info['seg_c']['etag'] +
seg_info['seg_d']['etag']).hexdigest()
cd_etag = hashlib.md5((
seg_info['seg_c']['etag'] + seg_info['seg_d']['etag']
).encode('ascii')).hexdigest()
file_item = cls.container.file("manifest-bcd-submanifest")
file_item.write(
@ -119,10 +121,10 @@ class TestSloEnv(BaseEnv):
'size_bytes': (seg_info['seg_c']['size_bytes'] +
seg_info['seg_d']['size_bytes']),
'path': '/%s/%s' % (cls.container.name,
'manifest-cd')}]),
'manifest-cd')}]).encode('ascii'),
parms={'multipart-manifest': 'put'})
bcd_submanifest_etag = hashlib.md5(
seg_info['seg_b']['etag'] + cd_etag).hexdigest()
bcd_submanifest_etag = hashlib.md5((
seg_info['seg_b']['etag'] + cd_etag).encode('ascii')).hexdigest()
file_item = cls.container.file("manifest-abcde-submanifest")
file_item.write(
@ -134,11 +136,11 @@ class TestSloEnv(BaseEnv):
seg_info['seg_d']['size_bytes']),
'path': '/%s/%s' % (cls.container.name,
'manifest-bcd-submanifest')},
seg_info['seg_e']]),
seg_info['seg_e']]).encode('ascii'),
parms={'multipart-manifest': 'put'})
abcde_submanifest_etag = hashlib.md5(
abcde_submanifest_etag = hashlib.md5((
seg_info['seg_a']['etag'] + bcd_submanifest_etag +
seg_info['seg_e']['etag']).hexdigest()
seg_info['seg_e']['etag']).encode('ascii')).hexdigest()
abcde_submanifest_size = (seg_info['seg_a']['size_bytes'] +
seg_info['seg_b']['size_bytes'] +
seg_info['seg_c']['size_bytes'] +
@ -162,12 +164,13 @@ class TestSloEnv(BaseEnv):
'size_bytes': abcde_submanifest_size,
'path': '/%s/%s' % (cls.container.name,
'manifest-abcde-submanifest'),
'range': '3145727-3145728'}]), # 'cd'
'range': '3145727-3145728'}]).encode('ascii'), # 'cd'
parms={'multipart-manifest': 'put'})
ranged_manifest_etag = hashlib.md5(
ranged_manifest_etag = hashlib.md5((
abcde_submanifest_etag + ':3145727-4194304;' +
abcde_submanifest_etag + ':524288-1572863;' +
abcde_submanifest_etag + ':3145727-3145728;').hexdigest()
abcde_submanifest_etag + ':3145727-3145728;'
).encode('ascii')).hexdigest()
ranged_manifest_size = 2 * 1024 * 1024 + 4
file_item = cls.container.file("ranged-submanifest")
@ -187,7 +190,7 @@ class TestSloEnv(BaseEnv):
'size_bytes': ranged_manifest_size,
'path': '/%s/%s' % (cls.container.name,
'ranged-manifest'),
'range': '-3'}]),
'range': '-3'}]).encode('ascii'),
parms={'multipart-manifest': 'put'})
file_item = cls.container.file("manifest-db")
@ -197,7 +200,7 @@ class TestSloEnv(BaseEnv):
'size_bytes': None},
{'path': seg_info['seg_b']['path'], 'etag': None,
'size_bytes': None},
]), parms={'multipart-manifest': 'put'})
]).encode('ascii'), parms={'multipart-manifest': 'put'})
file_item = cls.container.file("ranged-manifest-repeated-segment")
file_item.write(
@ -208,20 +211,20 @@ class TestSloEnv(BaseEnv):
'size_bytes': None},
{'path': seg_info['seg_b']['path'], 'etag': None,
'size_bytes': None, 'range': '-1048578'},
]), parms={'multipart-manifest': 'put'})
]).encode('ascii'), parms={'multipart-manifest': 'put'})
file_item = cls.container.file("mixed-object-data-manifest")
file_item.write(
json.dumps([
{'data': base64.b64encode('APRE' * 8)},
{'data': base64.b64encode(b'APRE' * 8).decode('ascii')},
{'path': seg_info['seg_a']['path']},
{'data': base64.b64encode('APOS' * 16)},
{'data': base64.b64encode(b'APOS' * 16).decode('ascii')},
{'path': seg_info['seg_b']['path']},
{'data': base64.b64encode('BPOS' * 32)},
{'data': base64.b64encode('CPRE' * 64)},
{'data': base64.b64encode(b'BPOS' * 32).decode('ascii')},
{'data': base64.b64encode(b'CPRE' * 64).decode('ascii')},
{'path': seg_info['seg_c']['path']},
{'data': base64.b64encode('CPOS' * 8)},
]), parms={'multipart-manifest': 'put'}
{'data': base64.b64encode(b'CPOS' * 8).decode('ascii')},
]).encode('ascii'), parms={'multipart-manifest': 'put'}
)
file_item = cls.container.file("nested-data-manifest")
@ -229,7 +232,7 @@ class TestSloEnv(BaseEnv):
json.dumps([
{'path': '%s/%s' % (cls.container.name,
"mixed-object-data-manifest")}
]), parms={'multipart-manifest': 'put'}
]).encode('ascii'), parms={'multipart-manifest': 'put'}
)
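
These .encode('ascii') / .decode('ascii') shims exist because on py3 json.dumps() returns str and refuses bytes values, while base64.b64encode() takes and returns bytes. A minimal demonstration:

import base64
import json

payload = base64.b64encode(b'APRE' * 8).decode('ascii')  # bytes -> str for JSON
doc = json.dumps([{'data': payload}]).encode('ascii')    # str -> bytes for write()
assert isinstance(doc, bytes)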


@ -18,6 +18,7 @@ import hmac
import unittest2
import itertools
import hashlib
import six
import time
from six.moves import urllib
@ -35,8 +36,9 @@ from test.functional.test_tempurl import TestContainerTempurlEnv, \
TestTempurlEnv
from test.functional.swift_test_client import ResponseError
import test.functional as tf
from test.unit import group_by_byte
TARGET_BODY = 'target body'
TARGET_BODY = b'target body'
def setUpModule():
@ -76,7 +78,7 @@ class TestSymlinkEnv(BaseEnv):
@classmethod
def _make_request(cls, url, token, parsed, conn, method,
container, obj='', headers=None, body='',
container, obj='', headers=None, body=b'',
query_args=None):
headers = headers or {}
headers.update({'X-Auth-Token': token})
@ -179,7 +181,7 @@ class TestSymlink(Base):
self.env.tearDown()
def _make_request(self, url, token, parsed, conn, method,
container, obj='', headers=None, body='',
container, obj='', headers=None, body=b'',
query_args=None, allow_redirects=True):
headers = headers or {}
headers.update({'X-Auth-Token': token})
@ -195,7 +197,7 @@ class TestSymlink(Base):
return resp
def _make_request_with_symlink_get(self, url, token, parsed, conn, method,
container, obj, headers=None, body=''):
container, obj, headers=None, body=b''):
resp = self._make_request(
url, token, parsed, conn, method, container, obj, headers, body,
query_args='symlink=get')
@ -245,7 +247,7 @@ class TestSymlink(Base):
self._make_request_with_symlink_get, method='GET',
container=link_cont, obj=link_obj, use_account=use_account)
self.assertEqual(resp.status, 200)
self.assertEqual(resp.content, '')
self.assertEqual(resp.content, b'')
self.assertEqual(resp.getheader('content-length'), str(0))
self.assertTrue(resp.getheader('x-symlink-target'))
@ -333,7 +335,7 @@ class TestSymlink(Base):
container=self.env.link_cont, obj=link_obj,
headers=headers)
self.assertEqual(resp.status, 206)
self.assertEqual(resp.content, 'body')
self.assertEqual(resp.content, b'body')
def test_create_symlink_before_target(self):
link_obj = uuid4().hex
@ -431,7 +433,7 @@ class TestSymlink(Base):
container=container,
obj=too_many_chain_link)
self.assertEqual(resp.status, 409)
self.assertEqual(resp.content, '')
self.assertEqual(resp.content, b'')
# try to GET to target object via too_many_chain_link
resp = retry(self._make_request, method='GET',
@ -440,7 +442,7 @@ class TestSymlink(Base):
self.assertEqual(resp.status, 409)
self.assertEqual(
resp.content,
'Too many levels of symbolic links, maximum allowed is %d' %
b'Too many levels of symbolic links, maximum allowed is %d' %
symloop_max)
# However, HEAD/GET to the (just) link is still ok
@ -522,7 +524,7 @@ class TestSymlink(Base):
resp = retry(self._make_request, method='PUT',
container=container, obj=too_many_recursion_manifest,
body=manifest,
body=manifest.encode('ascii'),
query_args='multipart-manifest=put')
self.assertEqual(resp.status, 201) # sanity
@ -533,8 +535,8 @@ class TestSymlink(Base):
# N.B. This error message is from slo middleware that uses default.
self.assertEqual(
resp.content,
'<html><h1>Conflict</h1><p>There was a conflict when trying to'
' complete your request.</p></html>')
b'<html><h1>Conflict</h1><p>There was a conflict when trying to'
b' complete your request.</p></html>')
def test_symlink_put_missing_target_container(self):
link_obj = uuid4().hex
@ -546,8 +548,8 @@ class TestSymlink(Base):
headers=headers)
self.assertEqual(resp.status, 412)
self.assertEqual(resp.content,
'X-Symlink-Target header must be of the form'
' <container name>/<object name>')
b'X-Symlink-Target header must be of the form'
b' <container name>/<object name>')
def test_symlink_put_non_zero_length(self):
link_obj = uuid4().hex
@ -559,7 +561,7 @@ class TestSymlink(Base):
self.assertEqual(resp.status, 400)
self.assertEqual(resp.content,
'Symlink requests require a zero byte body')
b'Symlink requests require a zero byte body')
def test_symlink_target_itself(self):
link_obj = uuid4().hex
@ -569,7 +571,7 @@ class TestSymlink(Base):
container=self.env.link_cont, obj=link_obj,
headers=headers)
self.assertEqual(resp.status, 400)
self.assertEqual(resp.content, 'Symlink cannot target itself')
self.assertEqual(resp.content, b'Symlink cannot target itself')
def test_symlink_target_each_other(self):
symloop_max = cluster_info['symlink']['symloop_max']
@ -599,7 +601,7 @@ class TestSymlink(Base):
self.assertEqual(resp.status, 409)
self.assertEqual(
resp.content,
'Too many levels of symbolic links, maximum allowed is %d' %
b'Too many levels of symbolic links, maximum allowed is %d' %
symloop_max)
def test_symlink_put_copy_from(self):
@ -828,7 +830,7 @@ class TestSymlink(Base):
obj=self.env.tgt_obj, headers=headers, allow_redirects=False)
self.assertEqual(resp.status, 400)
self.assertEqual(resp.content,
'A PUT request is required to set a symlink target')
b'A PUT request is required to set a symlink target')
def test_overwrite_symlink(self):
link_obj = uuid4().hex
@ -1010,41 +1012,39 @@ class TestSymlinkSlo(Base):
self.file_symlink.write(hdrs={'X-Symlink-Target':
'%s/%s' % (self.env.container.name,
'manifest-abcde')})
file_contents = self.file_symlink.read()
self.assertEqual(4 * 1024 * 1024 + 1, len(file_contents))
self.assertEqual('a', file_contents[0])
self.assertEqual('a', file_contents[1024 * 1024 - 1])
self.assertEqual('b', file_contents[1024 * 1024])
self.assertEqual('d', file_contents[-2])
self.assertEqual('e', file_contents[-1])
self.assertEqual([
(b'a', 1024 * 1024),
(b'b', 1024 * 1024),
(b'c', 1024 * 1024),
(b'd', 1024 * 1024),
(b'e', 1),
], group_by_byte(self.file_symlink.read()))
def test_symlink_target_slo_nested_manifest(self):
self.file_symlink.write(hdrs={'X-Symlink-Target':
'%s/%s' % (self.env.container.name,
'manifest-abcde-submanifest')})
file_contents = self.file_symlink.read()
self.assertEqual(4 * 1024 * 1024 + 1, len(file_contents))
self.assertEqual('a', file_contents[0])
self.assertEqual('a', file_contents[1024 * 1024 - 1])
self.assertEqual('b', file_contents[1024 * 1024])
self.assertEqual('d', file_contents[-2])
self.assertEqual('e', file_contents[-1])
self.assertEqual([
(b'a', 1024 * 1024),
(b'b', 1024 * 1024),
(b'c', 1024 * 1024),
(b'd', 1024 * 1024),
(b'e', 1),
], group_by_byte(self.file_symlink.read()))
def test_slo_get_ranged_manifest(self):
self.file_symlink.write(hdrs={'X-Symlink-Target':
'%s/%s' % (self.env.container.name,
'ranged-manifest')})
grouped_file_contents = [
(char, sum(1 for _char in grp))
for char, grp in itertools.groupby(self.file_symlink.read())]
self.assertEqual([
('c', 1),
('d', 1024 * 1024),
('e', 1),
('a', 512 * 1024),
('b', 512 * 1024),
('c', 1),
('d', 1)], grouped_file_contents)
(b'c', 1),
(b'd', 1024 * 1024),
(b'e', 1),
(b'a', 512 * 1024),
(b'b', 512 * 1024),
(b'c', 1),
(b'd', 1),
], group_by_byte(self.file_symlink.read()))
def test_slo_ranged_get(self):
self.file_symlink.write(hdrs={'X-Symlink-Target':
@ -1052,10 +1052,11 @@ class TestSymlinkSlo(Base):
'manifest-abcde')})
file_contents = self.file_symlink.read(size=1024 * 1024 + 2,
offset=1024 * 1024 - 1)
self.assertEqual('a', file_contents[0])
self.assertEqual('b', file_contents[1])
self.assertEqual('b', file_contents[-2])
self.assertEqual('c', file_contents[-1])
self.assertEqual([
(b'a', 1),
(b'b', 1024 * 1024),
(b'c', 1),
], group_by_byte(file_contents))
class TestSymlinkSloEnv(TestSloEnv):
@ -1081,7 +1082,7 @@ class TestSymlinkSloEnv(TestSloEnv):
file_item = cls.container.file("manifest-linkto-ab")
file_item.write(
json.dumps([cls.link_seg_info['linkto_seg_a'],
cls.link_seg_info['linkto_seg_b']]),
cls.link_seg_info['linkto_seg_b']]).encode('ascii'),
parms={'multipart-manifest': 'put'})
@ -1106,18 +1107,18 @@ class TestSymlinkToSloSegments(Base):
def test_slo_get_simple_manifest_with_links(self):
file_item = self.env.container.file("manifest-linkto-ab")
file_contents = file_item.read()
self.assertEqual(2 * 1024 * 1024, len(file_contents))
self.assertEqual('a', file_contents[0])
self.assertEqual('a', file_contents[1024 * 1024 - 1])
self.assertEqual('b', file_contents[1024 * 1024])
self.assertEqual([
(b'a', 1024 * 1024),
(b'b', 1024 * 1024),
], group_by_byte(file_item.read()))
def test_slo_container_listing(self):
# the listing object size should equal the sum of the size of the
# segments, not the size of the manifest body
file_item = self.env.container.file(Utils.create_name())
file_item.write(
json.dumps([self.env.link_seg_info['linkto_seg_a']]),
json.dumps([
self.env.link_seg_info['linkto_seg_a']]).encode('ascii'),
parms={'multipart-manifest': 'put'})
# The container listing has the etag of the actual manifest object
@ -1182,8 +1183,10 @@ class TestSymlinkToSloSegments(Base):
def test_slo_etag_is_hash_of_etags(self):
expected_hash = hashlib.md5()
expected_hash.update(hashlib.md5('a' * 1024 * 1024).hexdigest())
expected_hash.update(hashlib.md5('b' * 1024 * 1024).hexdigest())
expected_hash.update(hashlib.md5(
b'a' * 1024 * 1024).hexdigest().encode('ascii'))
expected_hash.update(hashlib.md5(
b'b' * 1024 * 1024).hexdigest().encode('ascii'))
expected_etag = expected_hash.hexdigest()
file_item = self.env.container.file('manifest-linkto-ab')
@ -1194,8 +1197,10 @@ class TestSymlinkToSloSegments(Base):
file_item.copy(self.env.container.name, "copied-abcde")
copied = self.env.container.file("copied-abcde")
copied_contents = copied.read(parms={'multipart-manifest': 'get'})
self.assertEqual(2 * 1024 * 1024, len(copied_contents))
self.assertEqual([
(b'a', 1024 * 1024),
(b'b', 1024 * 1024),
], group_by_byte(copied.read(parms={'multipart-manifest': 'get'})))
def test_slo_copy_the_manifest(self):
# first just perform some tests of the contents of the manifest itself
@ -1270,31 +1275,44 @@ class TestSymlinkDlo(Base):
'%s/%s' % (self.env.container.name,
'man1')})
file_contents = file_symlink.read()
self.assertEqual(
file_contents,
"aaaaaaaaaabbbbbbbbbbccccccccccddddddddddeeeeeeeeee")
self.assertEqual([
(b'a', 10),
(b'b', 10),
(b'c', 10),
(b'd', 10),
(b'e', 10),
], group_by_byte(file_symlink.read()))
link_obj = uuid4().hex
file_symlink = self.env.container.file(link_obj)
file_symlink.write(hdrs={'X-Symlink-Target':
'%s/%s' % (self.env.container.name,
'man2')})
file_contents = file_symlink.read()
self.assertEqual(
file_contents,
"AAAAAAAAAABBBBBBBBBBCCCCCCCCCCDDDDDDDDDDEEEEEEEEEE")
self.assertEqual([
(b'A', 10),
(b'B', 10),
(b'C', 10),
(b'D', 10),
(b'E', 10),
], group_by_byte(file_symlink.read()))
link_obj = uuid4().hex
file_symlink = self.env.container.file(link_obj)
file_symlink.write(hdrs={'X-Symlink-Target':
'%s/%s' % (self.env.container.name,
'manall')})
file_contents = file_symlink.read()
self.assertEqual(
file_contents,
("aaaaaaaaaabbbbbbbbbbccccccccccddddddddddeeeeeeeeee" +
"AAAAAAAAAABBBBBBBBBBCCCCCCCCCCDDDDDDDDDDEEEEEEEEEE"))
self.assertEqual([
(b'a', 10),
(b'b', 10),
(b'c', 10),
(b'd', 10),
(b'e', 10),
(b'A', 10),
(b'B', 10),
(b'C', 10),
(b'D', 10),
(b'E', 10),
], group_by_byte(file_symlink.read()))
def test_get_manifest_document_itself(self):
link_obj = uuid4().hex
@ -1303,7 +1321,7 @@ class TestSymlinkDlo(Base):
'%s/%s' % (self.env.container.name,
'man1')})
file_contents = file_symlink.read(parms={'multipart-manifest': 'get'})
self.assertEqual(file_contents, "man1-contents")
self.assertEqual(file_contents, b"man1-contents")
self.assertEqual(file_symlink.info()['x_object_manifest'],
"%s/%s/seg_lower" %
(self.env.container.name, self.env.segment_prefix))
@ -1314,11 +1332,15 @@ class TestSymlinkDlo(Base):
file_symlink.write(hdrs={'X-Symlink-Target':
'%s/%s' % (self.env.container.name,
'man1')})
file_contents = file_symlink.read(size=25, offset=8)
self.assertEqual(file_contents, "aabbbbbbbbbbccccccccccddd")
self.assertEqual([
(b'a', 2),
(b'b', 10),
(b'c', 10),
(b'd', 3),
], group_by_byte(file_symlink.read(size=25, offset=8)))
file_contents = file_symlink.read(size=1, offset=47)
self.assertEqual(file_contents, "e")
self.assertEqual(file_contents, b"e")
def test_get_range_out_of_range(self):
link_obj = uuid4().hex
@ -1373,7 +1395,7 @@ class TestSymlinkTargetObjectComparison(Base):
if self.env.expect_body:
self.assertTrue(body)
else:
self.assertEqual('', body)
self.assertEqual(b'', body)
self.assert_status(200)
self.assert_header('etag', md5)
@ -1395,7 +1417,7 @@ class TestSymlinkTargetObjectComparison(Base):
if self.env.expect_body:
self.assertTrue(body)
else:
self.assertEqual('', body)
self.assertEqual(b'', body)
self.assert_status(200)
self.assert_header('etag', md5)
@ -1417,7 +1439,7 @@ class TestSymlinkTargetObjectComparison(Base):
if self.env.expect_body:
self.assertTrue(body)
else:
self.assertEqual('', body)
self.assertEqual(b'', body)
self.assert_status(200)
self.assert_header('etag', md5)
@ -1440,7 +1462,7 @@ class TestSymlinkTargetObjectComparison(Base):
if self.env.expect_body:
self.assertTrue(body)
else:
self.assertEqual('', body)
self.assertEqual(b'', body)
self.assert_status(200)
self.assert_header('etag', md5)
@ -1464,7 +1486,7 @@ class TestSymlinkTargetObjectComparison(Base):
if self.env.expect_body:
self.assertTrue(body)
else:
self.assertEqual('', body)
self.assertEqual(b'', body)
self.assert_status(200)
self.assert_header('etag', md5)
self.assertTrue(file_symlink.info(hdrs=hdrs, parms=self.env.parms))
@ -1493,7 +1515,7 @@ class TestSymlinkTargetObjectComparison(Base):
if self.env.expect_body:
self.assertTrue(body)
else:
self.assertEqual('', body)
self.assertEqual(b'', body)
self.assert_status(200)
self.assert_header('etag', md5)
self.assertTrue(file_symlink.info(hdrs=hdrs, parms=self.env.parms))
@ -1521,7 +1543,7 @@ class TestSymlinkTargetObjectComparison(Base):
if self.env.expect_body:
self.assertTrue(body)
else:
self.assertEqual('', body)
self.assertEqual(b'', body)
self.assert_status(200)
self.assert_header('etag', md5)
@ -1600,7 +1622,7 @@ class TestSymlinkComparison(TestSymlinkTargetObjectComparison):
hdrs = {'If-Modified-Since': put_target_last_modified}
body = file_symlink.read(hdrs=hdrs, parms=self.env.parms)
self.assertEqual('', body)
self.assertEqual(b'', body)
self.assert_status(200)
self.assert_header('etag', md5)
@ -1613,7 +1635,7 @@ class TestSymlinkComparison(TestSymlinkTargetObjectComparison):
hdrs = {'If-Unmodified-Since': last_modified}
body = file_symlink.read(hdrs=hdrs, parms=self.env.parms)
self.assertEqual('', body)
self.assertEqual(b'', body)
self.assert_status(200)
self.assert_header('etag', md5)
@ -1644,9 +1666,14 @@ class TestSymlinkAccountTempurl(Base):
self.env.tempurl_key)
def tempurl_parms(self, method, expires, path, key):
path = urllib.parse.unquote(path)
if not six.PY2:
method = method.encode('utf8')
path = path.encode('utf8')
key = key.encode('utf8')
sig = hmac.new(
key,
'%s\n%s\n%s' % (method, expires, urllib.parse.unquote(path)),
b'%s\n%d\n%s' % (method, expires, path),
self.digest).hexdigest()
return {'temp_url_sig': sig, 'temp_url_expires': str(expires)}
@ -1662,7 +1689,7 @@ class TestSymlinkAccountTempurl(Base):
# try to create symlink object
try:
new_sym.write(
'', {'x-symlink-target': 'cont/foo'}, parms=put_parms,
b'', {'x-symlink-target': 'cont/foo'}, parms=put_parms,
cfg={'no_auth_token': True})
except ResponseError as e:
self.assertEqual(e.status, 400)
@ -1672,9 +1699,9 @@ class TestSymlinkAccountTempurl(Base):
def test_GET_symlink_inside_container(self):
tgt_obj = self.env.container.file(Utils.create_name())
sym = self.env.container.file(Utils.create_name())
tgt_obj.write("target object body")
tgt_obj.write(b"target object body")
sym.write(
'',
b'',
{'x-symlink-target': '%s/%s' % (self.env.container.name, tgt_obj)})
expires = int(time.time()) + 86400
@ -1684,18 +1711,18 @@ class TestSymlinkAccountTempurl(Base):
contents = sym.read(parms=get_parms, cfg={'no_auth_token': True})
self.assert_status([200])
self.assertEqual(contents, "target object body")
self.assertEqual(contents, b"target object body")
def test_GET_symlink_outside_container(self):
tgt_obj = self.env.container.file(Utils.create_name())
tgt_obj.write("target object body")
tgt_obj.write(b"target object body")
container2 = self.env.account.container(Utils.create_name())
container2.create()
sym = container2.file(Utils.create_name())
sym.write(
'',
b'',
{'x-symlink-target': '%s/%s' % (self.env.container.name, tgt_obj)})
expires = int(time.time()) + 86400
@ -1706,7 +1733,7 @@ class TestSymlinkAccountTempurl(Base):
# cross container tempurl works fine for account tempurl key
contents = sym.read(parms=get_parms, cfg={'no_auth_token': True})
self.assert_status([200])
self.assertEqual(contents, "target object body")
self.assertEqual(contents, b"target object body")
class TestSymlinkContainerTempurl(Base):
@ -1737,9 +1764,14 @@ class TestSymlinkContainerTempurl(Base):
'temp_url_expires': str(expires)}
def tempurl_sig(self, method, expires, path, key):
path = urllib.parse.unquote(path)
if not six.PY2:
method = method.encode('utf8')
path = path.encode('utf8')
key = key.encode('utf8')
return hmac.new(
key,
'%s\n%s\n%s' % (method, expires, urllib.parse.unquote(path)),
b'%s\n%d\n%s' % (method, expires, path),
self.digest).hexdigest()
def test_PUT_symlink(self):
@ -1756,7 +1788,7 @@ class TestSymlinkContainerTempurl(Base):
# try to create symlink object, should fail
try:
new_sym.write(
'', {'x-symlink-target': 'cont/foo'}, parms=put_parms,
b'', {'x-symlink-target': 'cont/foo'}, parms=put_parms,
cfg={'no_auth_token': True})
except ResponseError as e:
self.assertEqual(e.status, 400)
@ -1766,9 +1798,9 @@ class TestSymlinkContainerTempurl(Base):
def test_GET_symlink_inside_container(self):
tgt_obj = self.env.container.file(Utils.create_name())
sym = self.env.container.file(Utils.create_name())
tgt_obj.write("target object body")
tgt_obj.write(b"target object body")
sym.write(
'',
b'',
{'x-symlink-target': '%s/%s' % (self.env.container.name, tgt_obj)})
expires = int(time.time()) + 86400
@ -1780,18 +1812,18 @@ class TestSymlinkContainerTempurl(Base):
contents = sym.read(parms=parms, cfg={'no_auth_token': True})
self.assert_status([200])
self.assertEqual(contents, "target object body")
self.assertEqual(contents, b"target object body")
def test_GET_symlink_outside_container(self):
tgt_obj = self.env.container.file(Utils.create_name())
tgt_obj.write("target object body")
tgt_obj.write(b"target object body")
container2 = self.env.account.container(Utils.create_name())
container2.create()
sym = container2.file(Utils.create_name())
sym.write(
'',
b'',
{'x-symlink-target': '%s/%s' % (self.env.container.name, tgt_obj)})
expires = int(time.time()) + 86400
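
The tempurl helpers above now encode method, path, and key before HMAC-ing, since py3's hmac requires bytes; the b'%d' interpolation works via PEP 461. A standalone sketch (sha1 and the values are assumptions for illustration):

import hashlib
import hmac
import time

method, path, key = 'GET', u'/v1/AUTH_test/c/o', u'secret'
expires = int(time.time()) + 86400
sig = hmac.new(
    key.encode('utf8'),
    b'%s\n%d\n%s' % (method.encode('utf8'), expires, path.encode('utf8')),
    hashlib.sha1).hexdigest()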


@ -82,9 +82,9 @@ class TestTempurlEnv(TestTempurlBaseEnv):
raise ResponseError(cls.conn.response)
cls.obj = cls.container.file(Utils.create_name())
cls.obj.write("obj contents")
cls.obj.write(b"obj contents")
cls.other_obj = cls.container.file(Utils.create_name())
cls.other_obj.write("other obj contents")
cls.other_obj.write(b"other obj contents")
class TestTempurl(Base):
@ -437,9 +437,9 @@ class TestContainerTempurlEnv(BaseEnv):
raise ResponseError(cls.conn.response)
cls.obj = cls.container.file(Utils.create_name())
cls.obj.write("obj contents")
cls.obj.write(b"obj contents")
cls.other_obj = cls.container.file(Utils.create_name())
cls.other_obj.write("other obj contents")
cls.other_obj.write(b"other obj contents")
class TestContainerTempurl(Base):


@ -1424,3 +1424,13 @@ def attach_fake_replication_rpc(rpc, replicate_hook=None, errors=None):
return resp
return FakeReplConnection
def group_by_byte(contents):
# This looks a little funny, but iterating through a byte string on py3
# yields a sequence of ints, not a sequence of single-byte byte strings
# as it did on py2.
byte_iter = (contents[i:i + 1] for i in range(len(contents)))
return [
(char, sum(1 for _ in grp))
for char, grp in itertools.groupby(byte_iter)]
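
Usage of the helper, showing the pitfall it works around (assumes the group_by_byte defined above):

# iterating bytes directly on py3 yields ints, so groupby would compare
# 97, 97, 98 rather than b'a', b'a', b'b'; slicing keeps 1-byte bytes
assert list(b'ab') == [97, 98]
assert group_by_byte(b'aaab') == [(b'a', 3), (b'b', 1)]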


@ -524,9 +524,37 @@ class TestS3ApiObj(S3ApiTestCase):
req.content_type = 'text/plain'
status, headers, body = self.call_s3api(req)
self.assertEqual(status.split()[0], '400')
self.assertEqual(self._get_error_code(body), 'BadDigest')
@s3acl
def test_object_PUT_v4_unsigned_payload(self):
req = Request.blank(
'/bucket/object',
environ={'REQUEST_METHOD': 'PUT'},
headers={
'Authorization':
'AWS4-HMAC-SHA256 '
'Credential=test:tester/%s/us-east-1/s3/aws4_request, '
'SignedHeaders=host;x-amz-date, '
'Signature=hmac' % (
self.get_v4_amz_date_header().split('T', 1)[0]),
'x-amz-date': self.get_v4_amz_date_header(),
'x-amz-storage-class': 'STANDARD',
'x-amz-content-sha256': 'UNSIGNED-PAYLOAD',
'Date': self.get_date_header()},
body=self.object_body)
req.date = datetime.now()
req.content_type = 'text/plain'
status, headers, body = self.call_s3api(req)
self.assertEqual(status.split()[0], '200')
# Check that s3api returns an etag header.
self.assertEqual(headers['etag'],
'"%s"' % self.response_headers['etag'])
_, _, headers = self.swift.calls_with_headers[-1]
# No way to determine ETag to send
self.assertNotIn('etag', headers)
def test_object_PUT_headers(self):
content_md5 = binascii.b2a_base64(binascii.a2b_hex(self.etag)).strip()
if not six.PY2:

View File

@ -415,6 +415,9 @@ class TestSymlinkMiddleware(TestSymlinkMiddlewareBase):
do_test(
{'X-Symlink-Target': 'cont/obj',
'X-Symlink-Target-Account': target})
do_test(
{'X-Symlink-Target': 'cont/obj',
'X-Symlink-Target-Account': swob.wsgi_quote(target)})
def test_check_symlink_header_invalid_format(self):
def do_test(headers, status, err_msg):
@ -456,26 +459,25 @@ class TestSymlinkMiddleware(TestSymlinkMiddlewareBase):
'412 Precondition Failed',
b'Account name cannot contain slashes')
# with multi-bytes
target = u'/\u30b0\u30e9\u30d6\u30eb/\u30a2\u30ba\u30ec\u30f3'
target = swob.bytes_to_wsgi(target.encode('utf8'))
do_test(
{'X-Symlink-Target':
u'/\u30b0\u30e9\u30d6\u30eb/\u30a2\u30ba\u30ec\u30f3'},
{'X-Symlink-Target': target},
'412 Precondition Failed',
b'X-Symlink-Target header must be of the '
b'form <container name>/<object name>')
target = u'/\u30b0\u30e9\u30d6\u30eb/\u30a2\u30ba\u30ec\u30f3'
target = swob.bytes_to_wsgi(target.encode('utf8'))
do_test(
{'X-Symlink-Target': swob.wsgi_quote(target)},
'412 Precondition Failed',
b'X-Symlink-Target header must be of the '
b'form <container name>/<object name>')
account = u'\u30b0\u30e9\u30d6\u30eb/\u30a2\u30ba\u30ec\u30f3'
account = swob.bytes_to_wsgi(account.encode('utf8'))
do_test(
{'X-Symlink-Target': 'c/o',
'X-Symlink-Target-Account': account},
'412 Precondition Failed',
b'Account name cannot contain slashes')
account = swob.bytes_to_wsgi(account.encode('utf8'))
do_test(
{'X-Symlink-Target': 'c/o',
'X-Symlink-Target-Account': swob.wsgi_quote(account)},


@ -3565,6 +3565,22 @@ cluster_dfw1 = http://dfw1.host/v1/
self.assertRaises(ValueError, utils.StrAnonymizer,
'Swift is great!', 'sha257', '')
def test_str_anonymizer_python_maddness(self):
with mock.patch('swift.common.utils.hashlib') as mocklib:
if six.PY2:
# python <2.7.9 doesn't have algorithms_guaranteed, but
# our if block short-circuits before we explode
mocklib.algorithms = hashlib.algorithms
mocklib.algorithms_guaranteed.side_effect = AttributeError()
else:
# python 3 doesn't have algorithms, but our if block
# short-circuits before we explode
mocklib.algorithms.side_effect = AttributeError()
mocklib.algorithms_guaranteed = hashlib.algorithms_guaranteed
utils.StrAnonymizer('Swift is great!', 'sha1', '')
self.assertRaises(ValueError, utils.StrAnonymizer,
'Swift is great!', 'sha257', '')
def test_str_format_time(self):
dt = utils.StrFormatTime(10000.123456789)
self.assertEqual(str(dt), '10000.123456789')


@ -4113,6 +4113,11 @@ class TestObjectReconstructor(BaseTestObjectReconstructor):
(node['replication_ip'], '/%s/0' % node['device']),
(node['replication_ip'], '/%s/0/123-abc' % node['device']),
])
# the first (primary sync_to) node's rehash_remote will be skipped
first_node = part_nodes[0]
expected_suffix_calls.remove(
(first_node['replication_ip'], '/%s/0/123-abc'
% first_node['device']))
ssync_calls = []
with mock_ssync_sender(ssync_calls,


@ -25,6 +25,7 @@ from six.moves import urllib
from swift.common.exceptions import DiskFileNotExist, DiskFileError, \
DiskFileDeleted, DiskFileExpired
from swift.common import swob
from swift.common import utils
from swift.common.storage_policy import POLICIES, EC_POLICY
from swift.common.utils import Timestamp
@ -92,8 +93,8 @@ class TestBaseSsync(BaseTest):
def make_send_wrapper(send):
def wrapped_send(msg):
_msg = msg.split('\r\n', 1)[1]
_msg = _msg.rsplit('\r\n', 1)[0]
_msg = msg.split(b'\r\n', 1)[1]
_msg = _msg.rsplit(b'\r\n', 1)[0]
add_trace('tx', _msg)
send(msg)
return wrapped_send
@ -118,7 +119,7 @@ class TestBaseSsync(BaseTest):
def _get_object_data(self, path, **kwargs):
# return data for given path
if path not in self.obj_data:
self.obj_data[path] = '%s___data' % path
self.obj_data[path] = b'%s___data' % path.encode('ascii')
return self.obj_data[path]
def _create_ondisk_files(self, df_mgr, obj_name, policy, timestamp,
@ -162,7 +163,7 @@ class TestBaseSsync(BaseTest):
for k, v in tx_df.get_metadata().items():
if k == 'X-Object-Sysmeta-Ec-Frag-Index':
# if tx_df had a frag_index then rx_df should also have one
self.assertTrue(k in rx_metadata)
self.assertIn(k, rx_metadata)
self.assertEqual(frag_index, int(rx_metadata.pop(k)))
elif k == 'ETag' and not same_etag:
self.assertNotEqual(v, rx_metadata.pop(k, None))
@ -174,7 +175,7 @@ class TestBaseSsync(BaseTest):
self.assertFalse(rx_metadata)
expected_body = self._get_object_data(tx_df._name,
frag_index=frag_index)
actual_body = ''.join([chunk for chunk in rx_df.reader()])
actual_body = b''.join([chunk for chunk in rx_df.reader()])
self.assertEqual(expected_body, actual_body)
def _analyze_trace(self, trace):
@ -194,22 +195,22 @@ class TestBaseSsync(BaseTest):
def rx_missing(results, line):
self.assertEqual('rx', line[0])
parts = line[1].split('\r\n')
parts = line[1].split(b'\r\n')
for part in parts:
results['rx_missing'].append(part)
def tx_updates(results, line):
self.assertEqual('tx', line[0])
subrequests = results['tx_updates']
if line[1].startswith(('PUT', 'DELETE', 'POST')):
parts = line[1].split('\r\n')
if line[1].startswith((b'PUT', b'DELETE', b'POST')):
parts = [swob.bytes_to_wsgi(l) for l in line[1].split(b'\r\n')]
method, path = parts[0].split()
subreq = {'method': method, 'path': path, 'req': line[1],
'headers': parts[1:]}
subrequests.append(subreq)
else:
self.assertTrue(subrequests)
body = (subrequests[-1]).setdefault('body', '')
body = (subrequests[-1]).setdefault('body', b'')
body += line[1]
subrequests[-1]['body'] = body
@ -221,14 +222,14 @@ class TestBaseSsync(BaseTest):
results.setdefault('unexpected', []).append(line)
# each trace line is a tuple of ([tx|rx], msg)
handshakes = iter([(('tx', ':MISSING_CHECK: START'), tx_missing),
(('tx', ':MISSING_CHECK: END'), unexpected),
(('rx', ':MISSING_CHECK: START'), rx_missing),
(('rx', ':MISSING_CHECK: END'), unexpected),
(('tx', ':UPDATES: START'), tx_updates),
(('tx', ':UPDATES: END'), unexpected),
(('rx', ':UPDATES: START'), rx_updates),
(('rx', ':UPDATES: END'), unexpected)])
handshakes = iter([(('tx', b':MISSING_CHECK: START'), tx_missing),
(('tx', b':MISSING_CHECK: END'), unexpected),
(('rx', b':MISSING_CHECK: START'), rx_missing),
(('rx', b':MISSING_CHECK: END'), unexpected),
(('tx', b':UPDATES: START'), tx_updates),
(('tx', b':UPDATES: END'), unexpected),
(('rx', b':UPDATES: START'), rx_updates),
(('rx', b':UPDATES: END'), unexpected)])
expect_handshake = next(handshakes)
phases = ('tx_missing', 'rx_missing', 'tx_updates', 'rx_updates')
results = dict((k, []) for k in phases)
@ -319,7 +320,8 @@ class TestBaseSsyncEC(TestBaseSsync):
# for EC policies obj_data maps obj path -> list of frag archives
if path not in self.obj_data:
# make unique frag archives for each object name
data = path * 2 * (self.policy.ec_ndata + self.policy.ec_nparity)
data = path.encode('ascii') * 2 * (
self.policy.ec_ndata + self.policy.ec_nparity)
self.obj_data[path] = encode_frag_archive_bodies(
self.policy, data)
return self.obj_data[path][frag_index]
@ -740,7 +742,7 @@ class TestSsyncEC(TestBaseSsyncEC):
self.device, self.partition, suffixes, policy)
rx_hashes = rx_df_mgr.get_hashes(
self.device, self.partition, suffixes, policy)
self.assertEqual(suffixes, tx_hashes.keys()) # sanity
self.assertEqual(suffixes, list(tx_hashes.keys())) # sanity
self.assertEqual(tx_hashes, rx_hashes)
# sanity check - run ssync again and expect no sync activity
@ -763,7 +765,7 @@ class FakeResponse(object):
}
self.frag_index = frag_index
self.obj_data = obj_data
self.data = ''
self.data = b''
self.length = length
def init(self, path):
@ -779,7 +781,7 @@ class FakeResponse(object):
if isinstance(self.data, Exception):
raise self.data
val = self.data
self.data = ''
self.data = b''
return val if self.length is None else val[:self.length]
@ -1011,7 +1013,7 @@ class TestSsyncECReconstructorSyncJob(TestBaseSsyncEC):
self.assertEqual(
self._get_object_data(synced_obj_path,
frag_index=self.rx_node_index),
''.join([d for d in df.reader()]))
b''.join([d for d in df.reader()]))
except DiskFileNotExist:
msgs.append('Missing rx diskfile for %r' % obj_name)
@ -1057,7 +1059,7 @@ class TestSsyncECReconstructorSyncJob(TestBaseSsyncEC):
self.assertEqual(
self._get_object_data(df._name,
frag_index=self.rx_node_index),
''.join([d for d in df.reader()]))
b''.join([d for d in df.reader()]))
except DiskFileNotExist:
msgs.append('Missing rx diskfile for %r' % obj_name)
if msgs:
@ -1499,7 +1501,7 @@ class TestSsyncReplication(TestBaseSsync):
def _legacy_check_missing(self, line):
# reproduces behavior of 'legacy' ssync receiver missing_checks()
parts = line.split()
parts = line.decode('ascii').split()
object_hash = urllib.parse.unquote(parts[0])
timestamp = urllib.parse.unquote(parts[1])
want = False
@ -1562,14 +1564,14 @@ class TestSsyncReplication(TestBaseSsync):
# o1 on tx only with two meta files
name = 'o1'
t1 = self.ts_iter.next()
t1 = next(self.ts_iter)
tx_objs[name] = self._create_ondisk_files(tx_df_mgr, name, policy, t1)
t1_type = self.ts_iter.next()
t1_type = next(self.ts_iter)
metadata_1 = {'X-Timestamp': t1_type.internal,
'Content-Type': 'text/test',
'Content-Type-Timestamp': t1_type.internal}
tx_objs[name][0].write_metadata(metadata_1)
t1_meta = self.ts_iter.next()
t1_meta = next(self.ts_iter)
metadata_2 = {'X-Timestamp': t1_meta.internal,
'X-Object-Meta-Test': name}
tx_objs[name][0].write_metadata(metadata_2)
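
All the ts_iter.next() calls in this file become next(ts_iter): py3 renamed the iterator protocol method to __next__, and the next() builtin (available since 2.6) works on both. For example:

it = iter([1, 2, 3])
assert next(it) == 1  # replaces it.next(), which raises AttributeError on py3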
@ -1579,14 +1581,14 @@ class TestSsyncReplication(TestBaseSsync):
# o2 on tx with two meta files, rx has .data and newest .meta but is
# missing latest content-type
name = 'o2'
t2 = self.ts_iter.next()
t2 = next(self.ts_iter)
tx_objs[name] = self._create_ondisk_files(tx_df_mgr, name, policy, t2)
t2_type = self.ts_iter.next()
t2_type = next(self.ts_iter)
metadata_1 = {'X-Timestamp': t2_type.internal,
'Content-Type': 'text/test',
'Content-Type-Timestamp': t2_type.internal}
tx_objs[name][0].write_metadata(metadata_1)
t2_meta = self.ts_iter.next()
t2_meta = next(self.ts_iter)
metadata_2 = {'X-Timestamp': t2_meta.internal,
'X-Object-Meta-Test': name}
tx_objs[name][0].write_metadata(metadata_2)
@ -1597,14 +1599,14 @@ class TestSsyncReplication(TestBaseSsync):
# o3 on tx with two meta files, rx has .data and one .meta but does
# have latest content-type so nothing to sync
name = 'o3'
t3 = self.ts_iter.next()
t3 = next(self.ts_iter)
tx_objs[name] = self._create_ondisk_files(tx_df_mgr, name, policy, t3)
t3_type = self.ts_iter.next()
t3_type = next(self.ts_iter)
metadata_1 = {'X-Timestamp': t3_type.internal,
'Content-Type': 'text/test',
'Content-Type-Timestamp': t3_type.internal}
tx_objs[name][0].write_metadata(metadata_1)
t3_meta = self.ts_iter.next()
t3_meta = next(self.ts_iter)
metadata_2 = {'X-Timestamp': t3_meta.internal,
'X-Object-Meta-Test': name}
tx_objs[name][0].write_metadata(metadata_2)
@ -1619,10 +1621,10 @@ class TestSsyncReplication(TestBaseSsync):
# .data and two .meta having latest content-type so nothing to sync
# i.e. o4 is the reverse of o3 scenario
name = 'o4'
t4 = self.ts_iter.next()
t4 = next(self.ts_iter)
tx_objs[name] = self._create_ondisk_files(tx_df_mgr, name, policy, t4)
t4_type = self.ts_iter.next()
t4_meta = self.ts_iter.next()
t4_type = next(self.ts_iter)
t4_meta = next(self.ts_iter)
metadata_2b = {'X-Timestamp': t4_meta.internal,
'X-Object-Meta-Test': name,
'Content-Type': 'text/test',
@ -1640,10 +1642,10 @@ class TestSsyncReplication(TestBaseSsync):
# o5 on tx with one meta file having latest content-type, rx has
# .data and no .meta
name = 'o5'
t5 = self.ts_iter.next()
t5 = next(self.ts_iter)
tx_objs[name] = self._create_ondisk_files(tx_df_mgr, name, policy, t5)
t5_type = self.ts_iter.next()
t5_meta = self.ts_iter.next()
t5_type = next(self.ts_iter)
t5_meta = next(self.ts_iter)
metadata = {'X-Timestamp': t5_meta.internal,
'X-Object-Meta-Test': name,
'Content-Type': 'text/test',

File diff suppressed because it is too large

View File

@ -57,14 +57,16 @@ class FakeResponse(ssync_sender.SsyncBufferedHTTPResponse):
def __init__(self, chunk_body=''):
self.status = 200
self.close_called = False
if not six.PY2:
chunk_body = chunk_body.encode('ascii')
if chunk_body:
self.fp = six.StringIO(
'%x\r\n%s\r\n0\r\n\r\n' % (len(chunk_body), chunk_body))
self.ssync_response_buffer = ''
self.fp = six.BytesIO(
b'%x\r\n%s\r\n0\r\n\r\n' % (len(chunk_body), chunk_body))
self.ssync_response_buffer = b''
self.ssync_response_chunk_left = 0
def read(self, *args, **kwargs):
return ''
return b''
def close(self):
self.close_called = True
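The fp wrapping above fakes a chunked HTTP body; for reference, the single-chunk framing it builds looks like this (illustrative snippet; bytes %-formatting, as used above, needs Python 2 or 3.5+):

    body = b':MISSING_CHECK: START\r\n'
    frame = b'%x\r\n%s\r\n0\r\n\r\n' % (len(body), body)
    # frame == b'17\r\n:MISSING_CHECK: START\r\n\r\n0\r\n\r\n'
    # i.e. <hex size>\r\n<payload>\r\n, then the terminating zero chunk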
@ -487,9 +489,9 @@ class TestSender(BaseTest):
self.assertTrue(success)
found_post = found_put = False
for chunk in connection.sent:
if 'POST' in chunk:
if b'POST' in chunk:
found_post = True
if 'PUT' in chunk:
if b'PUT' in chunk:
found_put = True
self.assertFalse(found_post)
self.assertTrue(found_put)
@ -677,56 +679,56 @@ class TestSender(BaseTest):
def test_readline_newline_in_buffer(self):
response = FakeResponse()
response.ssync_response_buffer = 'Has a newline already.\r\nOkay.'
self.assertEqual(response.readline(), 'Has a newline already.\r\n')
self.assertEqual(response.ssync_response_buffer, 'Okay.')
response.ssync_response_buffer = b'Has a newline already.\r\nOkay.'
self.assertEqual(response.readline(), b'Has a newline already.\r\n')
self.assertEqual(response.ssync_response_buffer, b'Okay.')
def test_readline_buffer_exceeds_network_chunk_size_somehow(self):
response = FakeResponse()
response.ssync_response_buffer = '1234567890'
self.assertEqual(response.readline(size=2), '1234567890')
self.assertEqual(response.ssync_response_buffer, '')
response.ssync_response_buffer = b'1234567890'
self.assertEqual(response.readline(size=2), b'1234567890')
self.assertEqual(response.ssync_response_buffer, b'')
def test_readline_at_start_of_chunk(self):
response = FakeResponse()
response.fp = six.StringIO('2\r\nx\n\r\n')
self.assertEqual(response.readline(), 'x\n')
response.fp = six.BytesIO(b'2\r\nx\n\r\n')
self.assertEqual(response.readline(), b'x\n')
def test_readline_chunk_with_extension(self):
response = FakeResponse()
response.fp = six.StringIO(
'2 ; chunk=extension\r\nx\n\r\n')
self.assertEqual(response.readline(), 'x\n')
response.fp = six.BytesIO(
b'2 ; chunk=extension\r\nx\n\r\n')
self.assertEqual(response.readline(), b'x\n')
def test_readline_broken_chunk(self):
response = FakeResponse()
response.fp = six.StringIO('q\r\nx\n\r\n')
response.fp = six.BytesIO(b'q\r\nx\n\r\n')
self.assertRaises(
exceptions.ReplicationException, response.readline)
self.assertTrue(response.close_called)
def test_readline_terminated_chunk(self):
response = FakeResponse()
response.fp = six.StringIO('b\r\nnot enough')
response.fp = six.BytesIO(b'b\r\nnot enough')
self.assertRaises(
exceptions.ReplicationException, response.readline)
self.assertTrue(response.close_called)
def test_readline_all(self):
response = FakeResponse()
response.fp = six.StringIO('2\r\nx\n\r\n0\r\n\r\n')
self.assertEqual(response.readline(), 'x\n')
self.assertEqual(response.readline(), '')
self.assertEqual(response.readline(), '')
response.fp = six.BytesIO(b'2\r\nx\n\r\n0\r\n\r\n')
self.assertEqual(response.readline(), b'x\n')
self.assertEqual(response.readline(), b'')
self.assertEqual(response.readline(), b'')
def test_readline_all_trailing_not_newline_termed(self):
response = FakeResponse()
response.fp = six.StringIO(
'2\r\nx\n\r\n3\r\n123\r\n0\r\n\r\n')
self.assertEqual(response.readline(), 'x\n')
self.assertEqual(response.readline(), '123')
self.assertEqual(response.readline(), '')
self.assertEqual(response.readline(), '')
response.fp = six.BytesIO(
b'2\r\nx\n\r\n3\r\n123\r\n0\r\n\r\n')
self.assertEqual(response.readline(), b'x\n')
self.assertEqual(response.readline(), b'123')
self.assertEqual(response.readline(), b'')
self.assertEqual(response.readline(), b'')
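The readline tests above pin down how the response buffering is expected to behave; a minimal sketch of that behavior (hypothetical, much simplified from SsyncBufferedHTTPResponse.readline) might be:

    import io

    def readline_chunked(fp, buf=b''):
        """Return (line, leftover); the line keeps its trailing newline."""
        while b'\n' not in buf:
            size_line = fp.readline()          # e.g. b'2 ; chunk=ext\r\n'
            chunk_size = int(size_line.split(b';')[0].strip(), 16)
            if chunk_size == 0:                # end of stream: flush buffer
                return buf, b''
            buf += fp.read(chunk_size)
            fp.read(2)                         # discard the b'\r\n' trailer
        line, _, rest = buf.partition(b'\n')
        return line + b'\n', rest

    fp = io.BytesIO(b'2\r\nx\n\r\n3\r\n123\r\n0\r\n\r\n')
    readline_chunked(fp)   # (b'x\n', b'')
    readline_chunked(fp)   # (b'123', b''), trailing data with no newline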
def test_missing_check_timeout(self):
connection = FakeConnection()
@ -761,9 +763,9 @@ class TestSender(BaseTest):
available_map, send_map = self.sender.missing_check(connection,
response)
self.assertEqual(
''.join(connection.sent),
'17\r\n:MISSING_CHECK: START\r\n\r\n'
'15\r\n:MISSING_CHECK: END\r\n\r\n')
b''.join(connection.sent),
b'17\r\n:MISSING_CHECK: START\r\n\r\n'
b'15\r\n:MISSING_CHECK: END\r\n\r\n')
self.assertEqual(send_map, {})
self.assertEqual(available_map, {})
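The hex prefixes in the expected wire bytes are just the chunk sizes of the protocol markers:

    >>> format(len(':MISSING_CHECK: START\r\n'), 'x')
    '17'
    >>> format(len(':MISSING_CHECK: END\r\n'), 'x')
    '15'

and similarly '11'/'f' for the :UPDATES: markers further down.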
@ -804,14 +806,14 @@ class TestSender(BaseTest):
available_map, send_map = self.sender.missing_check(connection,
response)
self.assertEqual(
''.join(connection.sent),
'17\r\n:MISSING_CHECK: START\r\n\r\n'
'33\r\n9d41d8cd98f00b204e9800998ecf0abc 1380144470.00000\r\n\r\n'
'3b\r\n9d41d8cd98f00b204e9800998ecf0def 1380144472.22222 '
'm:186a0\r\n\r\n'
'3f\r\n9d41d8cd98f00b204e9800998ecf1def 1380144474.44444 '
'm:186a0,t:4\r\n\r\n'
'15\r\n:MISSING_CHECK: END\r\n\r\n')
b''.join(connection.sent),
b'17\r\n:MISSING_CHECK: START\r\n\r\n'
b'33\r\n9d41d8cd98f00b204e9800998ecf0abc 1380144470.00000\r\n\r\n'
b'3b\r\n9d41d8cd98f00b204e9800998ecf0def 1380144472.22222 '
b'm:186a0\r\n\r\n'
b'3f\r\n9d41d8cd98f00b204e9800998ecf1def 1380144474.44444 '
b'm:186a0,t:4\r\n\r\n'
b'15\r\n:MISSING_CHECK: END\r\n\r\n')
self.assertEqual(send_map, {})
candidates = [('9d41d8cd98f00b204e9800998ecf0abc',
dict(ts_data=Timestamp(1380144470.00000))),
@ -853,10 +855,10 @@ class TestSender(BaseTest):
exc = err
self.assertEqual(str(exc), 'Early disconnect')
self.assertEqual(
''.join(connection.sent),
'17\r\n:MISSING_CHECK: START\r\n\r\n'
'33\r\n9d41d8cd98f00b204e9800998ecf0abc 1380144470.00000\r\n\r\n'
'15\r\n:MISSING_CHECK: END\r\n\r\n')
b''.join(connection.sent),
b'17\r\n:MISSING_CHECK: START\r\n\r\n'
b'33\r\n9d41d8cd98f00b204e9800998ecf0abc 1380144470.00000\r\n\r\n'
b'15\r\n:MISSING_CHECK: END\r\n\r\n')
def test_missing_check_far_end_disconnect2(self):
def yield_hashes(device, partition, policy, suffixes=None, **kwargs):
@ -888,10 +890,10 @@ class TestSender(BaseTest):
exc = err
self.assertEqual(str(exc), 'Early disconnect')
self.assertEqual(
''.join(connection.sent),
'17\r\n:MISSING_CHECK: START\r\n\r\n'
'33\r\n9d41d8cd98f00b204e9800998ecf0abc 1380144470.00000\r\n\r\n'
'15\r\n:MISSING_CHECK: END\r\n\r\n')
b''.join(connection.sent),
b'17\r\n:MISSING_CHECK: START\r\n\r\n'
b'33\r\n9d41d8cd98f00b204e9800998ecf0abc 1380144470.00000\r\n\r\n'
b'15\r\n:MISSING_CHECK: END\r\n\r\n')
def test_missing_check_far_end_unexpected(self):
def yield_hashes(device, partition, policy, suffixes=None, **kwargs):
@ -922,10 +924,10 @@ class TestSender(BaseTest):
exc = err
self.assertEqual(str(exc), "Unexpected response: 'OH HAI'")
self.assertEqual(
''.join(connection.sent),
'17\r\n:MISSING_CHECK: START\r\n\r\n'
'33\r\n9d41d8cd98f00b204e9800998ecf0abc 1380144470.00000\r\n\r\n'
'15\r\n:MISSING_CHECK: END\r\n\r\n')
b''.join(connection.sent),
b'17\r\n:MISSING_CHECK: START\r\n\r\n'
b'33\r\n9d41d8cd98f00b204e9800998ecf0abc 1380144470.00000\r\n\r\n'
b'15\r\n:MISSING_CHECK: END\r\n\r\n')
def test_missing_check_send_map(self):
def yield_hashes(device, partition, policy, suffixes=None, **kwargs):
@ -956,10 +958,10 @@ class TestSender(BaseTest):
available_map, send_map = self.sender.missing_check(connection,
response)
self.assertEqual(
''.join(connection.sent),
'17\r\n:MISSING_CHECK: START\r\n\r\n'
'33\r\n9d41d8cd98f00b204e9800998ecf0abc 1380144470.00000\r\n\r\n'
'15\r\n:MISSING_CHECK: END\r\n\r\n')
b''.join(connection.sent),
b'17\r\n:MISSING_CHECK: START\r\n\r\n'
b'33\r\n9d41d8cd98f00b204e9800998ecf0abc 1380144470.00000\r\n\r\n'
b'15\r\n:MISSING_CHECK: END\r\n\r\n')
self.assertEqual(send_map, {'0123abc': {'data': True, 'meta': True}})
self.assertEqual(available_map,
dict([('9d41d8cd98f00b204e9800998ecf0abc',
@ -1016,9 +1018,9 @@ class TestSender(BaseTest):
':UPDATES: END\r\n'))
self.sender.updates(connection, response, {})
self.assertEqual(
''.join(connection.sent),
'11\r\n:UPDATES: START\r\n\r\n'
'f\r\n:UPDATES: END\r\n\r\n')
b''.join(connection.sent),
b'11\r\n:UPDATES: START\r\n\r\n'
b'f\r\n:UPDATES: END\r\n\r\n')
def test_updates_unexpected_response_lines1(self):
connection = FakeConnection()
@ -1034,9 +1036,9 @@ class TestSender(BaseTest):
exc = err
self.assertEqual(str(exc), "Unexpected response: 'abc'")
self.assertEqual(
''.join(connection.sent),
'11\r\n:UPDATES: START\r\n\r\n'
'f\r\n:UPDATES: END\r\n\r\n')
b''.join(connection.sent),
b'11\r\n:UPDATES: START\r\n\r\n'
b'f\r\n:UPDATES: END\r\n\r\n')
def test_updates_unexpected_response_lines2(self):
connection = FakeConnection()
@ -1052,9 +1054,9 @@ class TestSender(BaseTest):
exc = err
self.assertEqual(str(exc), "Unexpected response: 'abc'")
self.assertEqual(
''.join(connection.sent),
'11\r\n:UPDATES: START\r\n\r\n'
'f\r\n:UPDATES: END\r\n\r\n')
b''.join(connection.sent),
b'11\r\n:UPDATES: START\r\n\r\n'
b'f\r\n:UPDATES: END\r\n\r\n')
def test_updates_is_deleted(self):
device = 'dev'
@ -1086,9 +1088,9 @@ class TestSender(BaseTest):
# note that the delete line isn't actually sent since we mock
# send_delete; send_delete is tested separately.
self.assertEqual(
''.join(connection.sent),
'11\r\n:UPDATES: START\r\n\r\n'
'f\r\n:UPDATES: END\r\n\r\n')
b''.join(connection.sent),
b'11\r\n:UPDATES: START\r\n\r\n'
b'f\r\n:UPDATES: END\r\n\r\n')
def test_update_send_delete(self):
device = 'dev'
@ -1113,13 +1115,13 @@ class TestSender(BaseTest):
':UPDATES: END\r\n'))
self.sender.updates(connection, response, send_map)
self.assertEqual(
''.join(connection.sent),
'11\r\n:UPDATES: START\r\n\r\n'
'30\r\n'
'DELETE /a/c/o\r\n'
'X-Timestamp: %s\r\n\r\n\r\n'
'f\r\n:UPDATES: END\r\n\r\n'
% delete_timestamp
b''.join(connection.sent),
b'11\r\n:UPDATES: START\r\n\r\n'
b'30\r\n'
b'DELETE /a/c/o\r\n'
b'X-Timestamp: %s\r\n\r\n\r\n'
b'f\r\n:UPDATES: END\r\n\r\n'
% delete_timestamp.encode('ascii')
)
def test_updates_put(self):
@ -1166,9 +1168,9 @@ class TestSender(BaseTest):
# note that the put line isn't actually sent since we mock send_put;
# send_put is tested separately.
self.assertEqual(
''.join(connection.sent),
'11\r\n:UPDATES: START\r\n\r\n'
'f\r\n:UPDATES: END\r\n\r\n')
b''.join(connection.sent),
b'11\r\n:UPDATES: START\r\n\r\n'
b'f\r\n:UPDATES: END\r\n\r\n')
def test_updates_post(self):
ts_iter = make_timestamp_iter()
@ -1213,9 +1215,9 @@ class TestSender(BaseTest):
# note that the post line isn't actually sent since we mock send_post;
# send_post is tested separately.
self.assertEqual(
''.join(connection.sent),
'11\r\n:UPDATES: START\r\n\r\n'
'f\r\n:UPDATES: END\r\n\r\n')
b''.join(connection.sent),
b'11\r\n:UPDATES: START\r\n\r\n'
b'f\r\n:UPDATES: END\r\n\r\n')
def test_updates_put_and_post(self):
ts_iter = make_timestamp_iter()
@ -1265,9 +1267,9 @@ class TestSender(BaseTest):
self.assertIsInstance(df, diskfile.DiskFile)
self.assertEqual(expected, df.get_metadata())
self.assertEqual(
''.join(connection.sent),
'11\r\n:UPDATES: START\r\n\r\n'
'f\r\n:UPDATES: END\r\n\r\n')
b''.join(connection.sent),
b'11\r\n:UPDATES: START\r\n\r\n'
b'f\r\n:UPDATES: END\r\n\r\n')
def test_updates_storage_policy_index(self):
device = 'dev'
@ -1328,9 +1330,9 @@ class TestSender(BaseTest):
exc = err
self.assertEqual(str(exc), 'Early disconnect')
self.assertEqual(
''.join(connection.sent),
'11\r\n:UPDATES: START\r\n\r\n'
'f\r\n:UPDATES: END\r\n\r\n')
b''.join(connection.sent),
b'11\r\n:UPDATES: START\r\n\r\n'
b'f\r\n:UPDATES: END\r\n\r\n')
def test_updates_read_response_unexp_start(self):
connection = FakeConnection()
@ -1346,9 +1348,9 @@ class TestSender(BaseTest):
exc = err
self.assertEqual(str(exc), "Unexpected response: 'anything else'")
self.assertEqual(
''.join(connection.sent),
'11\r\n:UPDATES: START\r\n\r\n'
'f\r\n:UPDATES: END\r\n\r\n')
b''.join(connection.sent),
b'11\r\n:UPDATES: START\r\n\r\n'
b'f\r\n:UPDATES: END\r\n\r\n')
def test_updates_read_response_timeout_end(self):
connection = FakeConnection()
@ -1360,7 +1362,7 @@ class TestSender(BaseTest):
def delayed_readline(*args, **kwargs):
rv = orig_readline(*args, **kwargs)
if rv == ':UPDATES: END\r\n':
if rv == b':UPDATES: END\r\n':
eventlet.sleep(1)
return rv
@ -1382,9 +1384,9 @@ class TestSender(BaseTest):
exc = err
self.assertEqual(str(exc), 'Early disconnect')
self.assertEqual(
''.join(connection.sent),
'11\r\n:UPDATES: START\r\n\r\n'
'f\r\n:UPDATES: END\r\n\r\n')
b''.join(connection.sent),
b'11\r\n:UPDATES: START\r\n\r\n'
b'f\r\n:UPDATES: END\r\n\r\n')
def test_updates_read_response_unexp_end(self):
connection = FakeConnection()
@ -1400,9 +1402,9 @@ class TestSender(BaseTest):
exc = err
self.assertEqual(str(exc), "Unexpected response: 'anything else'")
self.assertEqual(
''.join(connection.sent),
'11\r\n:UPDATES: START\r\n\r\n'
'f\r\n:UPDATES: END\r\n\r\n')
b''.join(connection.sent),
b'11\r\n:UPDATES: START\r\n\r\n'
b'f\r\n:UPDATES: END\r\n\r\n')
def test_send_delete_timeout(self):
connection = FakeConnection()
@ -1421,11 +1423,11 @@ class TestSender(BaseTest):
self.sender.send_delete(connection, '/a/c/o',
utils.Timestamp('1381679759.90941'))
self.assertEqual(
''.join(connection.sent),
'30\r\n'
'DELETE /a/c/o\r\n'
'X-Timestamp: 1381679759.90941\r\n'
'\r\n\r\n')
b''.join(connection.sent),
b'30\r\n'
b'DELETE /a/c/o\r\n'
b'X-Timestamp: 1381679759.90941\r\n'
b'\r\n\r\n')
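As a sanity check of the expected 0x30 prefix, the framed DELETE subrequest is 48 bytes (an illustrative calculation, not code from the sender):

    sub = (b'DELETE /a/c/o\r\n'
           b'X-Timestamp: 1381679759.90941\r\n'
           b'\r\n')
    assert format(len(sub), 'x') == '30'   # 48 bytes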
def test_send_put_initial_timeout(self):
df = self._make_open_diskfile()
@ -1465,19 +1467,20 @@ class TestSender(BaseTest):
def _check_send_put(self, obj_name, meta_value):
ts_iter = make_timestamp_iter()
t1 = next(ts_iter)
body = 'test'
body = b'test'
extra_metadata = {'Some-Other-Header': 'value',
u'Unicode-Meta-Name': meta_value}
df = self._make_open_diskfile(obj=obj_name, body=body,
timestamp=t1,
extra_metadata=extra_metadata)
expected = dict(df.get_metadata())
expected['body'] = body
expected['body'] = body if six.PY2 else body.decode('ascii')
expected['chunk_size'] = len(body)
expected['meta'] = meta_value
wire_meta = meta_value if six.PY2 else meta_value.encode('utf8')
path = six.moves.urllib.parse.quote(expected['name'])
expected['path'] = path
expected['length'] = format(145 + len(path) + len(meta_value), 'x')
expected['length'] = format(145 + len(path) + len(wire_meta), 'x')
# .meta file metadata is not included in expected for data only PUT
t2 = next(ts_iter)
metadata = {'X-Timestamp': t2.internal, 'X-Object-Meta-Fruit': 'kiwi'}
@ -1485,8 +1488,7 @@ class TestSender(BaseTest):
df.open()
connection = FakeConnection()
self.sender.send_put(connection, path, df)
self.assertEqual(
''.join(connection.sent),
expected = (
'%(length)s\r\n'
'PUT %(path)s\r\n'
'Content-Length: %(Content-Length)s\r\n'
@ -1498,13 +1500,20 @@ class TestSender(BaseTest):
'\r\n'
'%(chunk_size)s\r\n'
'%(body)s\r\n' % expected)
if not six.PY2:
expected = expected.encode('utf8')
self.assertEqual(b''.join(connection.sent), expected)
def test_send_put(self):
self._check_send_put('o', 'meta')
def test_send_put_unicode(self):
self._check_send_put(
'o_with_caract\xc3\xa8res_like_in_french', 'm\xc3\xa8ta')
if six.PY2:
self._check_send_put(
'o_with_caract\xc3\xa8res_like_in_french', 'm\xc3\xa8ta')
else:
self._check_send_put(
'o_with_caract\u00e8res_like_in_french', 'm\u00e8ta')
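The two branches exercise the same object name: the py2 str literal is simply the UTF-8 encoding of the py3 unicode literal, e.g.:

    >>> u'caract\u00e8res'.encode('utf8')
    b'caract\xc3\xa8res'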
def _check_send_post(self, obj_name, meta_value):
ts_iter = make_timestamp_iter()
@ -1522,39 +1531,46 @@ class TestSender(BaseTest):
'X-Timestamp': ts_1.internal}
df.write_metadata(newer_metadata)
path = six.moves.urllib.parse.quote(df.read_metadata()['name'])
length = format(61 + len(path) + len(meta_value), 'x')
wire_meta = meta_value if six.PY2 else meta_value.encode('utf8')
length = format(61 + len(path) + len(wire_meta), 'x')
connection = FakeConnection()
with df.open():
self.sender.send_post(connection, path, df)
self.assertEqual(
''.join(connection.sent),
'%s\r\n'
'POST %s\r\n'
'X-Object-Meta-Foo: %s\r\n'
'X-Timestamp: %s\r\n'
'\r\n'
'\r\n' % (length, path, meta_value, ts_1.internal))
b''.join(connection.sent),
b'%s\r\n'
b'POST %s\r\n'
b'X-Object-Meta-Foo: %s\r\n'
b'X-Timestamp: %s\r\n'
b'\r\n'
b'\r\n' % (length.encode('ascii'), path.encode('ascii'),
wire_meta,
ts_1.internal.encode('ascii')))
def test_send_post(self):
self._check_send_post('o', 'meta')
def test_send_post_unicode(self):
self._check_send_post(
'o_with_caract\xc3\xa8res_like_in_french', 'm\xc3\xa8ta')
if six.PY2:
self._check_send_post(
'o_with_caract\xc3\xa8res_like_in_french', 'm\xc3\xa8ta')
else:
self._check_send_post(
'o_with_caract\u00e8res_like_in_french', 'm\u00e8ta')
def test_disconnect_timeout(self):
connection = FakeConnection()
connection.send = lambda d: eventlet.sleep(1)
self.sender.daemon.node_timeout = 0.01
self.sender.disconnect(connection)
self.assertEqual(''.join(connection.sent), '')
self.assertEqual(b''.join(connection.sent), b'')
self.assertTrue(connection.closed)
def test_disconnect(self):
connection = FakeConnection()
self.sender.disconnect(connection)
self.assertEqual(''.join(connection.sent), '0\r\n\r\n')
self.assertEqual(b''.join(connection.sent), b'0\r\n\r\n')
self.assertTrue(connection.closed)
@ -1571,34 +1587,34 @@ class TestModuleMethods(unittest.TestCase):
# equal data and meta timestamps -> legacy single timestamp string
expected = '%s %s' % (object_hash, t_data.internal)
self.assertEqual(
expected,
expected.encode('ascii'),
ssync_sender.encode_missing(object_hash, t_data, ts_meta=t_data))
# newer meta timestamp -> hex data delta encoded as extra message part
expected = '%s %s m:%x' % (object_hash, t_data.internal, d_meta_data)
self.assertEqual(
expected,
expected.encode('ascii'),
ssync_sender.encode_missing(object_hash, t_data, ts_meta=t_meta))
# newer meta timestamp -> hex data delta encoded as extra message part
# content type timestamp equals data timestamp -> no delta
expected = '%s %s m:%x' % (object_hash, t_data.internal, d_meta_data)
self.assertEqual(
expected,
expected.encode('ascii'),
ssync_sender.encode_missing(object_hash, t_data, t_meta, t_data))
# content type timestamp newer data timestamp -> delta encoded
expected = ('%s %s m:%x,t:%x'
% (object_hash, t_data.internal, d_meta_data, d_type_data))
self.assertEqual(
expected,
expected.encode('ascii'),
ssync_sender.encode_missing(object_hash, t_data, t_meta, t_type))
# content type timestamp equal to meta timestamp -> delta encoded
expected = ('%s %s m:%x,t:%x'
% (object_hash, t_data.internal, d_meta_data, d_type_data))
self.assertEqual(
expected,
expected.encode('ascii'),
ssync_sender.encode_missing(object_hash, t_data, t_meta, t_type))
# test encode and decode functions invert
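A hedged sketch of the encoding these expectations pin down (the real encode_missing lives in swift/obj/ssync_sender.py and also url-quotes the hash and timestamp; delta units assumed to be Timestamp raw resolution):

    def encode_missing(object_hash, t_data, ts_meta=None, ts_ctype=None):
        msg = '%s %s' % (object_hash, t_data.internal)
        extra = []
        if ts_meta and ts_meta != t_data:
            # hex offsets from the data timestamp
            extra.append('m:%x' % (ts_meta.raw - t_data.raw))
            if ts_ctype and ts_ctype != t_data:
                extra.append('t:%x' % (ts_ctype.raw - t_data.raw))
        if extra:
            msg += ' ' + ','.join(extra)
        return msg.encode('ascii')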

View File

@ -168,7 +168,8 @@ class FakeCache(FakeMemcache):
super(FakeCache, self).__init__()
if pre_cached:
self.store.update(pre_cached)
self.stub = stub
# Fake a json roundtrip
self.stub = json.loads(json.dumps(stub))
def get(self, key):
return self.stub or self.store.get(key)
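The json round-trip mirrors what a real cache hit does to the stub: all text comes back as unicode and tuples become lists. For example, under py2 (illustrative):

    >>> import json
    >>> json.loads(json.dumps({'versions': 'v'}))
    {u'versions': u'v'}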
@ -370,18 +371,27 @@ class TestFuncs(unittest.TestCase):
def test_get_container_info_cache(self):
cache_stub = {
'status': 404, 'bytes': 3333, 'object_count': 10,
'versions': u"\u1F4A9"}
'versions': u"\U0001F4A9",
'meta': {u'some-\N{SNOWMAN}': u'non-ascii meta \U0001F334'}}
req = Request.blank("/v1/account/cont",
environ={'swift.cache': FakeCache(cache_stub)})
resp = get_container_info(req.environ, FakeApp())
self.assertEqual([(k, type(k)) for k in resp],
[(k, str) for k in resp])
self.assertEqual(resp['storage_policy'], 0)
self.assertEqual(resp['bytes'], 3333)
self.assertEqual(resp['object_count'], 10)
self.assertEqual(resp['status'], 404)
if six.PY3:
self.assertEqual(resp['versions'], u'\u1f4a9')
self.assertEqual(resp['versions'], u'\U0001f4a9')
else:
self.assertEqual(resp['versions'], "\xe1\xbd\x8a\x39")
self.assertEqual(resp['versions'], "\xf0\x9f\x92\xa9")
for subdict in resp.values():
if isinstance(subdict, dict):
self.assertEqual([(k, type(k), v, type(v))
for k, v in subdict.items()],
[(k, str, v, str)
for k, v in subdict.items()])
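The py2/py3 expectations above differ only by encoding; U+1F4A9 is four UTF-8 bytes:

    >>> u'\U0001F4A9'.encode('utf8')
    b'\xf0\x9f\x92\xa9'

so py2, where header values travel as byte strings, sees the raw bytes while py3 sees the decoded code point.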
def test_get_container_info_env(self):
cache_key = get_cache_key("account", "cont")

View File

@ -99,6 +99,9 @@ commands =
test/unit/obj/test_expirer.py \
test/unit/obj/test_replicator.py \
test/unit/obj/test_server.py \
test/unit/obj/test_ssync.py \
test/unit/obj/test_ssync_receiver.py \
test/unit/obj/test_ssync_sender.py \
test/unit/obj/test_updater.py \
test/unit/proxy}
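Since the file list above sits inside {posargs: ...} it is only a default; a single module can be run under the py3 unit-test env with something like `tox -e py3 -- test/unit/obj/test_ssync.py` (env name assumed here; use whichever python3 env this tox.ini defines).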
@ -129,6 +132,7 @@ basepython = python3
commands =
pip install -U eventlet@git+https://github.com/eventlet/eventlet.git
nosetests {posargs: \
test/functional/test_symlink.py \
test/functional/tests.py}
[testenv:func-encryption]