From 82e446a8a0c0fd6a81f06717b76ed3d1be26a281 Mon Sep 17 00:00:00 2001 From: Tim Burke Date: Mon, 20 May 2019 11:44:21 -0700 Subject: [PATCH 1/8] s3api: Allow clients to upload with UNSIGNED-PAYLOAD (Some versions of?) awscli/boto3 will do v4 signatures but send a Content-MD5 for end-to-end validation. Since a X-Amz-Content-SHA256 is still required to calculate signatures, it uses UNSIGNED-PAYLOAD similar to how signatures work for pre-signed URLs. Look for UNSIGNED-PAYLOAD and skip SHA256 validation if set. Change-Id: I571c16c196dae4e4f8fb41904c8850d0054b1fe9 Related-Change: I61eb12455c37376be4d739eee55a5f439216f0e9 --- swift/common/middleware/s3api/s3request.py | 29 +++++++++--------- test/unit/common/middleware/s3api/test_obj.py | 30 ++++++++++++++++++- 2 files changed, 44 insertions(+), 15 deletions(-) diff --git a/swift/common/middleware/s3api/s3request.py b/swift/common/middleware/s3api/s3request.py index 6173ecd03c..1caca1f34b 100644 --- a/swift/common/middleware/s3api/s3request.py +++ b/swift/common/middleware/s3api/s3request.py @@ -434,20 +434,21 @@ class SigV4Mixin(object): raise InvalidRequest(msg) else: hashed_payload = self.headers['X-Amz-Content-SHA256'] - if self.content_length == 0: - if hashed_payload != sha256().hexdigest(): - raise BadDigest( - 'The X-Amz-Content-SHA56 you specified did not match ' - 'what we received.') - elif self.content_length: - self.environ['wsgi.input'] = HashingInput( - self.environ['wsgi.input'], - self.content_length, - sha256, - hashed_payload) - # else, not provided -- Swift will kick out a 411 Length Required - # which will get translated back to a S3-style response in - # S3Request._swift_error_codes + if hashed_payload != 'UNSIGNED-PAYLOAD': + if self.content_length == 0: + if hashed_payload != sha256().hexdigest(): + raise BadDigest( + 'The X-Amz-Content-SHA56 you specified did not ' + 'match what we received.') + elif self.content_length: + self.environ['wsgi.input'] = HashingInput( + self.environ['wsgi.input'], + self.content_length, + sha256, + hashed_payload) + # else, length not provided -- Swift will kick out a + # 411 Length Required which will get translated back + # to a S3-style response in S3Request._swift_error_codes cr.append(swob.wsgi_to_bytes(hashed_payload)) return b'\n'.join(cr) diff --git a/test/unit/common/middleware/s3api/test_obj.py b/test/unit/common/middleware/s3api/test_obj.py index 5a300f803f..cbe8c42c9a 100644 --- a/test/unit/common/middleware/s3api/test_obj.py +++ b/test/unit/common/middleware/s3api/test_obj.py @@ -524,9 +524,37 @@ class TestS3ApiObj(S3ApiTestCase): req.content_type = 'text/plain' status, headers, body = self.call_s3api(req) self.assertEqual(status.split()[0], '400') - print(body) self.assertEqual(self._get_error_code(body), 'BadDigest') + @s3acl + def test_object_PUT_v4_unsigned_payload(self): + req = Request.blank( + '/bucket/object', + environ={'REQUEST_METHOD': 'PUT'}, + headers={ + 'Authorization': + 'AWS4-HMAC-SHA256 ' + 'Credential=test:tester/%s/us-east-1/s3/aws4_request, ' + 'SignedHeaders=host;x-amz-date, ' + 'Signature=hmac' % ( + self.get_v4_amz_date_header().split('T', 1)[0]), + 'x-amz-date': self.get_v4_amz_date_header(), + 'x-amz-storage-class': 'STANDARD', + 'x-amz-content-sha256': 'UNSIGNED-PAYLOAD', + 'Date': self.get_date_header()}, + body=self.object_body) + req.date = datetime.now() + req.content_type = 'text/plain' + status, headers, body = self.call_s3api(req) + self.assertEqual(status.split()[0], '200') + # Check that s3api returns an etag header. 
+ self.assertEqual(headers['etag'], + '"%s"' % self.response_headers['etag']) + + _, _, headers = self.swift.calls_with_headers[-1] + # No way to determine ETag to send + self.assertNotIn('etag', headers) + def test_object_PUT_headers(self): content_md5 = binascii.b2a_base64(binascii.a2b_hex(self.etag)).strip() if not six.PY2: From 2e35376c6d6afb5aa2a36081861bab011c8c95c3 Mon Sep 17 00:00:00 2001 From: Tim Burke Date: Thu, 30 May 2019 11:55:58 -0700 Subject: [PATCH 2/8] py3: symlink follow-up - Have the unit tests use WSGI strings, like a real system. - Port the func tests. Change-Id: I3a6f409208de45ebf9f55f7f59e4fe6ac6fbe163 --- swift/common/middleware/symlink.py | 18 +- test/functional/test_dlo.py | 18 +- test/functional/test_slo.py | 55 ++--- test/functional/test_symlink.py | 226 +++++++++++--------- test/functional/test_tempurl.py | 8 +- test/unit/__init__.py | 10 + test/unit/common/middleware/test_symlink.py | 12 +- tox.ini | 1 + 8 files changed, 192 insertions(+), 156 deletions(-) diff --git a/swift/common/middleware/symlink.py b/swift/common/middleware/symlink.py index fbf035a4af..9b6172aeea 100644 --- a/swift/common/middleware/symlink.py +++ b/swift/common/middleware/symlink.py @@ -199,12 +199,7 @@ def _check_symlink_header(req): # validation first, here. error_body = 'X-Symlink-Target header must be of the form ' \ '/' - try: - if wsgi_unquote(req.headers[TGT_OBJ_SYMLINK_HDR]).startswith('/'): - raise HTTPPreconditionFailed( - body=error_body, - request=req, content_type='text/plain') - except TypeError: + if wsgi_unquote(req.headers[TGT_OBJ_SYMLINK_HDR]).startswith('/'): raise HTTPPreconditionFailed( body=error_body, request=req, content_type='text/plain') @@ -216,14 +211,9 @@ def _check_symlink_header(req): req.headers[TGT_OBJ_SYMLINK_HDR] = wsgi_quote('%s/%s' % (container, obj)) # Check account format if it exists - try: - account = check_account_format( - req, wsgi_unquote(req.headers[TGT_ACCT_SYMLINK_HDR])) \ - if TGT_ACCT_SYMLINK_HDR in req.headers else None - except TypeError: - raise HTTPPreconditionFailed( - body='Account name cannot contain slashes', - request=req, content_type='text/plain') + account = check_account_format( + req, wsgi_unquote(req.headers[TGT_ACCT_SYMLINK_HDR])) \ + if TGT_ACCT_SYMLINK_HDR in req.headers else None # Extract request path _junk, req_acc, req_cont, req_obj = req.split_path(4, 4, True) diff --git a/test/functional/test_dlo.py b/test/functional/test_dlo.py index 5018d246ec..daeda94b53 100644 --- a/test/functional/test_dlo.py +++ b/test/functional/test_dlo.py @@ -40,42 +40,40 @@ class TestDloEnv(BaseEnv): if not cont.create(): raise ResponseError(cls.conn.response) - # avoid getting a prefix that stops halfway through an encoded - # character - prefix = Utils.create_name().decode("utf-8")[:10].encode("utf-8") + prefix = Utils.create_name(10) cls.segment_prefix = prefix for letter in ('a', 'b', 'c', 'd', 'e'): file_item = cls.container.file("%s/seg_lower%s" % (prefix, letter)) - file_item.write(letter * 10) + file_item.write(letter.encode('ascii') * 10) file_item = cls.container.file( "%s/seg_upper_%%ff%s" % (prefix, letter)) - file_item.write(letter.upper() * 10) + file_item.write(letter.upper().encode('ascii') * 10) for letter in ('f', 'g', 'h', 'i', 'j'): file_item = cls.container2.file("%s/seg_lower%s" % (prefix, letter)) - file_item.write(letter * 10) + file_item.write(letter.encode('ascii') * 10) man1 = cls.container.file("man1") - man1.write('man1-contents', + man1.write(b'man1-contents', hdrs={"X-Object-Manifest": "%s/%s/seg_lower" % 
(cls.container.name, prefix)}) man2 = cls.container.file("man2") - man2.write('man2-contents', + man2.write(b'man2-contents', hdrs={"X-Object-Manifest": "%s/%s/seg_upper_%%25ff" % (cls.container.name, prefix)}) manall = cls.container.file("manall") - manall.write('manall-contents', + manall.write(b'manall-contents', hdrs={"X-Object-Manifest": "%s/%s/seg" % (cls.container.name, prefix)}) mancont2 = cls.container.file("mancont2") mancont2.write( - 'mancont2-contents', + b'mancont2-contents', hdrs={"X-Object-Manifest": "%s/%s/seg_lower" % (cls.container2.name, prefix)}) diff --git a/test/functional/test_slo.py b/test/functional/test_slo.py index 4ab2bda0a0..72dec5b185 100644 --- a/test/functional/test_slo.py +++ b/test/functional/test_slo.py @@ -48,7 +48,7 @@ class TestSloEnv(BaseEnv): ('e', 1)): seg_name = "seg_%s" % letter file_item = container.file(seg_name) - file_item.write(letter * size) + file_item.write(letter.encode('ascii') * size) seg_info[seg_name] = { 'size_bytes': size, 'etag': file_item.md5, @@ -93,24 +93,26 @@ class TestSloEnv(BaseEnv): file_item.write( json.dumps([seg_info['seg_a'], seg_info['seg_b'], seg_info['seg_c'], seg_info['seg_d'], - seg_info['seg_e']]), + seg_info['seg_e']]).encode('ascii'), parms={'multipart-manifest': 'put'}) - cls.container.file('seg_with_%ff_funky_name').write('z' * 10) + cls.container.file('seg_with_%ff_funky_name').write(b'z' * 10) # Put the same manifest in the container2 file_item = cls.container2.file("manifest-abcde") file_item.write( json.dumps([seg_info['seg_a'], seg_info['seg_b'], seg_info['seg_c'], seg_info['seg_d'], - seg_info['seg_e']]), + seg_info['seg_e']]).encode('ascii'), parms={'multipart-manifest': 'put'}) file_item = cls.container.file('manifest-cd') - cd_json = json.dumps([seg_info['seg_c'], seg_info['seg_d']]) + cd_json = json.dumps([ + seg_info['seg_c'], seg_info['seg_d']]).encode('ascii') file_item.write(cd_json, parms={'multipart-manifest': 'put'}) - cd_etag = hashlib.md5(seg_info['seg_c']['etag'] + - seg_info['seg_d']['etag']).hexdigest() + cd_etag = hashlib.md5(( + seg_info['seg_c']['etag'] + seg_info['seg_d']['etag'] + ).encode('ascii')).hexdigest() file_item = cls.container.file("manifest-bcd-submanifest") file_item.write( @@ -119,10 +121,10 @@ class TestSloEnv(BaseEnv): 'size_bytes': (seg_info['seg_c']['size_bytes'] + seg_info['seg_d']['size_bytes']), 'path': '/%s/%s' % (cls.container.name, - 'manifest-cd')}]), + 'manifest-cd')}]).encode('ascii'), parms={'multipart-manifest': 'put'}) - bcd_submanifest_etag = hashlib.md5( - seg_info['seg_b']['etag'] + cd_etag).hexdigest() + bcd_submanifest_etag = hashlib.md5(( + seg_info['seg_b']['etag'] + cd_etag).encode('ascii')).hexdigest() file_item = cls.container.file("manifest-abcde-submanifest") file_item.write( @@ -134,11 +136,11 @@ class TestSloEnv(BaseEnv): seg_info['seg_d']['size_bytes']), 'path': '/%s/%s' % (cls.container.name, 'manifest-bcd-submanifest')}, - seg_info['seg_e']]), + seg_info['seg_e']]).encode('ascii'), parms={'multipart-manifest': 'put'}) - abcde_submanifest_etag = hashlib.md5( + abcde_submanifest_etag = hashlib.md5(( seg_info['seg_a']['etag'] + bcd_submanifest_etag + - seg_info['seg_e']['etag']).hexdigest() + seg_info['seg_e']['etag']).encode('ascii')).hexdigest() abcde_submanifest_size = (seg_info['seg_a']['size_bytes'] + seg_info['seg_b']['size_bytes'] + seg_info['seg_c']['size_bytes'] + @@ -162,12 +164,13 @@ class TestSloEnv(BaseEnv): 'size_bytes': abcde_submanifest_size, 'path': '/%s/%s' % (cls.container.name, 'manifest-abcde-submanifest'), - 'range': 
'3145727-3145728'}]), # 'cd' + 'range': '3145727-3145728'}]).encode('ascii'), # 'cd' parms={'multipart-manifest': 'put'}) - ranged_manifest_etag = hashlib.md5( + ranged_manifest_etag = hashlib.md5(( abcde_submanifest_etag + ':3145727-4194304;' + abcde_submanifest_etag + ':524288-1572863;' + - abcde_submanifest_etag + ':3145727-3145728;').hexdigest() + abcde_submanifest_etag + ':3145727-3145728;' + ).encode('ascii')).hexdigest() ranged_manifest_size = 2 * 1024 * 1024 + 4 file_item = cls.container.file("ranged-submanifest") @@ -187,7 +190,7 @@ class TestSloEnv(BaseEnv): 'size_bytes': ranged_manifest_size, 'path': '/%s/%s' % (cls.container.name, 'ranged-manifest'), - 'range': '-3'}]), + 'range': '-3'}]).encode('ascii'), parms={'multipart-manifest': 'put'}) file_item = cls.container.file("manifest-db") @@ -197,7 +200,7 @@ class TestSloEnv(BaseEnv): 'size_bytes': None}, {'path': seg_info['seg_b']['path'], 'etag': None, 'size_bytes': None}, - ]), parms={'multipart-manifest': 'put'}) + ]).encode('ascii'), parms={'multipart-manifest': 'put'}) file_item = cls.container.file("ranged-manifest-repeated-segment") file_item.write( @@ -208,20 +211,20 @@ class TestSloEnv(BaseEnv): 'size_bytes': None}, {'path': seg_info['seg_b']['path'], 'etag': None, 'size_bytes': None, 'range': '-1048578'}, - ]), parms={'multipart-manifest': 'put'}) + ]).encode('ascii'), parms={'multipart-manifest': 'put'}) file_item = cls.container.file("mixed-object-data-manifest") file_item.write( json.dumps([ - {'data': base64.b64encode('APRE' * 8)}, + {'data': base64.b64encode(b'APRE' * 8).decode('ascii')}, {'path': seg_info['seg_a']['path']}, - {'data': base64.b64encode('APOS' * 16)}, + {'data': base64.b64encode(b'APOS' * 16).decode('ascii')}, {'path': seg_info['seg_b']['path']}, - {'data': base64.b64encode('BPOS' * 32)}, - {'data': base64.b64encode('CPRE' * 64)}, + {'data': base64.b64encode(b'BPOS' * 32).decode('ascii')}, + {'data': base64.b64encode(b'CPRE' * 64).decode('ascii')}, {'path': seg_info['seg_c']['path']}, - {'data': base64.b64encode('CPOS' * 8)}, - ]), parms={'multipart-manifest': 'put'} + {'data': base64.b64encode(b'CPOS' * 8).decode('ascii')}, + ]).encode('ascii'), parms={'multipart-manifest': 'put'} ) file_item = cls.container.file("nested-data-manifest") @@ -229,7 +232,7 @@ class TestSloEnv(BaseEnv): json.dumps([ {'path': '%s/%s' % (cls.container.name, "mixed-object-data-manifest")} - ]), parms={'multipart-manifest': 'put'} + ]).encode('ascii'), parms={'multipart-manifest': 'put'} ) diff --git a/test/functional/test_symlink.py b/test/functional/test_symlink.py index 721b449741..2f3454bcaf 100755 --- a/test/functional/test_symlink.py +++ b/test/functional/test_symlink.py @@ -18,6 +18,7 @@ import hmac import unittest2 import itertools import hashlib +import six import time from six.moves import urllib @@ -35,8 +36,9 @@ from test.functional.test_tempurl import TestContainerTempurlEnv, \ TestTempurlEnv from test.functional.swift_test_client import ResponseError import test.functional as tf +from test.unit import group_by_byte -TARGET_BODY = 'target body' +TARGET_BODY = b'target body' def setUpModule(): @@ -76,7 +78,7 @@ class TestSymlinkEnv(BaseEnv): @classmethod def _make_request(cls, url, token, parsed, conn, method, - container, obj='', headers=None, body='', + container, obj='', headers=None, body=b'', query_args=None): headers = headers or {} headers.update({'X-Auth-Token': token}) @@ -179,7 +181,7 @@ class TestSymlink(Base): self.env.tearDown() def _make_request(self, url, token, parsed, conn, method, - 
container, obj='', headers=None, body='', + container, obj='', headers=None, body=b'', query_args=None, allow_redirects=True): headers = headers or {} headers.update({'X-Auth-Token': token}) @@ -195,7 +197,7 @@ class TestSymlink(Base): return resp def _make_request_with_symlink_get(self, url, token, parsed, conn, method, - container, obj, headers=None, body=''): + container, obj, headers=None, body=b''): resp = self._make_request( url, token, parsed, conn, method, container, obj, headers, body, query_args='symlink=get') @@ -245,7 +247,7 @@ class TestSymlink(Base): self._make_request_with_symlink_get, method='GET', container=link_cont, obj=link_obj, use_account=use_account) self.assertEqual(resp.status, 200) - self.assertEqual(resp.content, '') + self.assertEqual(resp.content, b'') self.assertEqual(resp.getheader('content-length'), str(0)) self.assertTrue(resp.getheader('x-symlink-target')) @@ -333,7 +335,7 @@ class TestSymlink(Base): container=self.env.link_cont, obj=link_obj, headers=headers) self.assertEqual(resp.status, 206) - self.assertEqual(resp.content, 'body') + self.assertEqual(resp.content, b'body') def test_create_symlink_before_target(self): link_obj = uuid4().hex @@ -431,7 +433,7 @@ class TestSymlink(Base): container=container, obj=too_many_chain_link) self.assertEqual(resp.status, 409) - self.assertEqual(resp.content, '') + self.assertEqual(resp.content, b'') # try to GET to target object via too_many_chain_link resp = retry(self._make_request, method='GET', @@ -440,7 +442,7 @@ class TestSymlink(Base): self.assertEqual(resp.status, 409) self.assertEqual( resp.content, - 'Too many levels of symbolic links, maximum allowed is %d' % + b'Too many levels of symbolic links, maximum allowed is %d' % symloop_max) # However, HEAD/GET to the (just) link is still ok @@ -522,7 +524,7 @@ class TestSymlink(Base): resp = retry(self._make_request, method='PUT', container=container, obj=too_many_recursion_manifest, - body=manifest, + body=manifest.encode('ascii'), query_args='multipart-manifest=put') self.assertEqual(resp.status, 201) # sanity @@ -533,8 +535,8 @@ class TestSymlink(Base): # N.B. This error message is from slo middleware that uses default. self.assertEqual( resp.content, - '

<html><h1>Conflict</h1><p>There was a conflict when trying to'
-            ' complete your request.</p></html>')
+        self.assertEqual(
+            resp.content,
+            b'<html><h1>Conflict</h1><p>There was a conflict when trying to'
+            b' complete your request.</p></html>
') def test_symlink_put_missing_target_container(self): link_obj = uuid4().hex @@ -546,8 +548,8 @@ class TestSymlink(Base): headers=headers) self.assertEqual(resp.status, 412) self.assertEqual(resp.content, - 'X-Symlink-Target header must be of the form' - ' /') + b'X-Symlink-Target header must be of the form' + b' /') def test_symlink_put_non_zero_length(self): link_obj = uuid4().hex @@ -559,7 +561,7 @@ class TestSymlink(Base): self.assertEqual(resp.status, 400) self.assertEqual(resp.content, - 'Symlink requests require a zero byte body') + b'Symlink requests require a zero byte body') def test_symlink_target_itself(self): link_obj = uuid4().hex @@ -569,7 +571,7 @@ class TestSymlink(Base): container=self.env.link_cont, obj=link_obj, headers=headers) self.assertEqual(resp.status, 400) - self.assertEqual(resp.content, 'Symlink cannot target itself') + self.assertEqual(resp.content, b'Symlink cannot target itself') def test_symlink_target_each_other(self): symloop_max = cluster_info['symlink']['symloop_max'] @@ -599,7 +601,7 @@ class TestSymlink(Base): self.assertEqual(resp.status, 409) self.assertEqual( resp.content, - 'Too many levels of symbolic links, maximum allowed is %d' % + b'Too many levels of symbolic links, maximum allowed is %d' % symloop_max) def test_symlink_put_copy_from(self): @@ -828,7 +830,7 @@ class TestSymlink(Base): obj=self.env.tgt_obj, headers=headers, allow_redirects=False) self.assertEqual(resp.status, 400) self.assertEqual(resp.content, - 'A PUT request is required to set a symlink target') + b'A PUT request is required to set a symlink target') def test_overwrite_symlink(self): link_obj = uuid4().hex @@ -1010,41 +1012,39 @@ class TestSymlinkSlo(Base): self.file_symlink.write(hdrs={'X-Symlink-Target': '%s/%s' % (self.env.container.name, 'manifest-abcde')}) - file_contents = self.file_symlink.read() - self.assertEqual(4 * 1024 * 1024 + 1, len(file_contents)) - self.assertEqual('a', file_contents[0]) - self.assertEqual('a', file_contents[1024 * 1024 - 1]) - self.assertEqual('b', file_contents[1024 * 1024]) - self.assertEqual('d', file_contents[-2]) - self.assertEqual('e', file_contents[-1]) + self.assertEqual([ + (b'a', 1024 * 1024), + (b'b', 1024 * 1024), + (b'c', 1024 * 1024), + (b'd', 1024 * 1024), + (b'e', 1), + ], group_by_byte(self.file_symlink.read())) def test_symlink_target_slo_nested_manifest(self): self.file_symlink.write(hdrs={'X-Symlink-Target': '%s/%s' % (self.env.container.name, 'manifest-abcde-submanifest')}) - file_contents = self.file_symlink.read() - self.assertEqual(4 * 1024 * 1024 + 1, len(file_contents)) - self.assertEqual('a', file_contents[0]) - self.assertEqual('a', file_contents[1024 * 1024 - 1]) - self.assertEqual('b', file_contents[1024 * 1024]) - self.assertEqual('d', file_contents[-2]) - self.assertEqual('e', file_contents[-1]) + self.assertEqual([ + (b'a', 1024 * 1024), + (b'b', 1024 * 1024), + (b'c', 1024 * 1024), + (b'd', 1024 * 1024), + (b'e', 1), + ], group_by_byte(self.file_symlink.read())) def test_slo_get_ranged_manifest(self): self.file_symlink.write(hdrs={'X-Symlink-Target': '%s/%s' % (self.env.container.name, 'ranged-manifest')}) - grouped_file_contents = [ - (char, sum(1 for _char in grp)) - for char, grp in itertools.groupby(self.file_symlink.read())] self.assertEqual([ - ('c', 1), - ('d', 1024 * 1024), - ('e', 1), - ('a', 512 * 1024), - ('b', 512 * 1024), - ('c', 1), - ('d', 1)], grouped_file_contents) + (b'c', 1), + (b'd', 1024 * 1024), + (b'e', 1), + (b'a', 512 * 1024), + (b'b', 512 * 1024), + (b'c', 1), + (b'd', 1), + ], 
group_by_byte(self.file_symlink.read())) def test_slo_ranged_get(self): self.file_symlink.write(hdrs={'X-Symlink-Target': @@ -1052,10 +1052,11 @@ class TestSymlinkSlo(Base): 'manifest-abcde')}) file_contents = self.file_symlink.read(size=1024 * 1024 + 2, offset=1024 * 1024 - 1) - self.assertEqual('a', file_contents[0]) - self.assertEqual('b', file_contents[1]) - self.assertEqual('b', file_contents[-2]) - self.assertEqual('c', file_contents[-1]) + self.assertEqual([ + (b'a', 1), + (b'b', 1024 * 1024), + (b'c', 1), + ], group_by_byte(file_contents)) class TestSymlinkSloEnv(TestSloEnv): @@ -1081,7 +1082,7 @@ class TestSymlinkSloEnv(TestSloEnv): file_item = cls.container.file("manifest-linkto-ab") file_item.write( json.dumps([cls.link_seg_info['linkto_seg_a'], - cls.link_seg_info['linkto_seg_b']]), + cls.link_seg_info['linkto_seg_b']]).encode('ascii'), parms={'multipart-manifest': 'put'}) @@ -1106,18 +1107,18 @@ class TestSymlinkToSloSegments(Base): def test_slo_get_simple_manifest_with_links(self): file_item = self.env.container.file("manifest-linkto-ab") - file_contents = file_item.read() - self.assertEqual(2 * 1024 * 1024, len(file_contents)) - self.assertEqual('a', file_contents[0]) - self.assertEqual('a', file_contents[1024 * 1024 - 1]) - self.assertEqual('b', file_contents[1024 * 1024]) + self.assertEqual([ + (b'a', 1024 * 1024), + (b'b', 1024 * 1024), + ], group_by_byte(file_item.read())) def test_slo_container_listing(self): # the listing object size should equal the sum of the size of the # segments, not the size of the manifest body file_item = self.env.container.file(Utils.create_name()) file_item.write( - json.dumps([self.env.link_seg_info['linkto_seg_a']]), + json.dumps([ + self.env.link_seg_info['linkto_seg_a']]).encode('ascii'), parms={'multipart-manifest': 'put'}) # The container listing has the etag of the actual manifest object @@ -1182,8 +1183,10 @@ class TestSymlinkToSloSegments(Base): def test_slo_etag_is_hash_of_etags(self): expected_hash = hashlib.md5() - expected_hash.update(hashlib.md5('a' * 1024 * 1024).hexdigest()) - expected_hash.update(hashlib.md5('b' * 1024 * 1024).hexdigest()) + expected_hash.update(hashlib.md5( + b'a' * 1024 * 1024).hexdigest().encode('ascii')) + expected_hash.update(hashlib.md5( + b'b' * 1024 * 1024).hexdigest().encode('ascii')) expected_etag = expected_hash.hexdigest() file_item = self.env.container.file('manifest-linkto-ab') @@ -1194,8 +1197,10 @@ class TestSymlinkToSloSegments(Base): file_item.copy(self.env.container.name, "copied-abcde") copied = self.env.container.file("copied-abcde") - copied_contents = copied.read(parms={'multipart-manifest': 'get'}) - self.assertEqual(2 * 1024 * 1024, len(copied_contents)) + self.assertEqual([ + (b'a', 1024 * 1024), + (b'b', 1024 * 1024), + ], group_by_byte(copied.read(parms={'multipart-manifest': 'get'}))) def test_slo_copy_the_manifest(self): # first just perform some tests of the contents of the manifest itself @@ -1270,31 +1275,44 @@ class TestSymlinkDlo(Base): '%s/%s' % (self.env.container.name, 'man1')}) - file_contents = file_symlink.read() - self.assertEqual( - file_contents, - "aaaaaaaaaabbbbbbbbbbccccccccccddddddddddeeeeeeeeee") + self.assertEqual([ + (b'a', 10), + (b'b', 10), + (b'c', 10), + (b'd', 10), + (b'e', 10), + ], group_by_byte(file_symlink.read())) link_obj = uuid4().hex file_symlink = self.env.container.file(link_obj) file_symlink.write(hdrs={'X-Symlink-Target': '%s/%s' % (self.env.container.name, 'man2')}) - file_contents = file_symlink.read() - self.assertEqual( - file_contents, - 
"AAAAAAAAAABBBBBBBBBBCCCCCCCCCCDDDDDDDDDDEEEEEEEEEE") + self.assertEqual([ + (b'A', 10), + (b'B', 10), + (b'C', 10), + (b'D', 10), + (b'E', 10), + ], group_by_byte(file_symlink.read())) link_obj = uuid4().hex file_symlink = self.env.container.file(link_obj) file_symlink.write(hdrs={'X-Symlink-Target': '%s/%s' % (self.env.container.name, 'manall')}) - file_contents = file_symlink.read() - self.assertEqual( - file_contents, - ("aaaaaaaaaabbbbbbbbbbccccccccccddddddddddeeeeeeeeee" + - "AAAAAAAAAABBBBBBBBBBCCCCCCCCCCDDDDDDDDDDEEEEEEEEEE")) + self.assertEqual([ + (b'a', 10), + (b'b', 10), + (b'c', 10), + (b'd', 10), + (b'e', 10), + (b'A', 10), + (b'B', 10), + (b'C', 10), + (b'D', 10), + (b'E', 10), + ], group_by_byte(file_symlink.read())) def test_get_manifest_document_itself(self): link_obj = uuid4().hex @@ -1303,7 +1321,7 @@ class TestSymlinkDlo(Base): '%s/%s' % (self.env.container.name, 'man1')}) file_contents = file_symlink.read(parms={'multipart-manifest': 'get'}) - self.assertEqual(file_contents, "man1-contents") + self.assertEqual(file_contents, b"man1-contents") self.assertEqual(file_symlink.info()['x_object_manifest'], "%s/%s/seg_lower" % (self.env.container.name, self.env.segment_prefix)) @@ -1314,11 +1332,15 @@ class TestSymlinkDlo(Base): file_symlink.write(hdrs={'X-Symlink-Target': '%s/%s' % (self.env.container.name, 'man1')}) - file_contents = file_symlink.read(size=25, offset=8) - self.assertEqual(file_contents, "aabbbbbbbbbbccccccccccddd") + self.assertEqual([ + (b'a', 2), + (b'b', 10), + (b'c', 10), + (b'd', 3), + ], group_by_byte(file_symlink.read(size=25, offset=8))) file_contents = file_symlink.read(size=1, offset=47) - self.assertEqual(file_contents, "e") + self.assertEqual(file_contents, b"e") def test_get_range_out_of_range(self): link_obj = uuid4().hex @@ -1373,7 +1395,7 @@ class TestSymlinkTargetObjectComparison(Base): if self.env.expect_body: self.assertTrue(body) else: - self.assertEqual('', body) + self.assertEqual(b'', body) self.assert_status(200) self.assert_header('etag', md5) @@ -1395,7 +1417,7 @@ class TestSymlinkTargetObjectComparison(Base): if self.env.expect_body: self.assertTrue(body) else: - self.assertEqual('', body) + self.assertEqual(b'', body) self.assert_status(200) self.assert_header('etag', md5) @@ -1417,7 +1439,7 @@ class TestSymlinkTargetObjectComparison(Base): if self.env.expect_body: self.assertTrue(body) else: - self.assertEqual('', body) + self.assertEqual(b'', body) self.assert_status(200) self.assert_header('etag', md5) @@ -1440,7 +1462,7 @@ class TestSymlinkTargetObjectComparison(Base): if self.env.expect_body: self.assertTrue(body) else: - self.assertEqual('', body) + self.assertEqual(b'', body) self.assert_status(200) self.assert_header('etag', md5) @@ -1464,7 +1486,7 @@ class TestSymlinkTargetObjectComparison(Base): if self.env.expect_body: self.assertTrue(body) else: - self.assertEqual('', body) + self.assertEqual(b'', body) self.assert_status(200) self.assert_header('etag', md5) self.assertTrue(file_symlink.info(hdrs=hdrs, parms=self.env.parms)) @@ -1493,7 +1515,7 @@ class TestSymlinkTargetObjectComparison(Base): if self.env.expect_body: self.assertTrue(body) else: - self.assertEqual('', body) + self.assertEqual(b'', body) self.assert_status(200) self.assert_header('etag', md5) self.assertTrue(file_symlink.info(hdrs=hdrs, parms=self.env.parms)) @@ -1521,7 +1543,7 @@ class TestSymlinkTargetObjectComparison(Base): if self.env.expect_body: self.assertTrue(body) else: - self.assertEqual('', body) + self.assertEqual(b'', body) 
self.assert_status(200) self.assert_header('etag', md5) @@ -1600,7 +1622,7 @@ class TestSymlinkComparison(TestSymlinkTargetObjectComparison): hdrs = {'If-Modified-Since': put_target_last_modified} body = file_symlink.read(hdrs=hdrs, parms=self.env.parms) - self.assertEqual('', body) + self.assertEqual(b'', body) self.assert_status(200) self.assert_header('etag', md5) @@ -1613,7 +1635,7 @@ class TestSymlinkComparison(TestSymlinkTargetObjectComparison): hdrs = {'If-Unmodified-Since': last_modified} body = file_symlink.read(hdrs=hdrs, parms=self.env.parms) - self.assertEqual('', body) + self.assertEqual(b'', body) self.assert_status(200) self.assert_header('etag', md5) @@ -1644,9 +1666,14 @@ class TestSymlinkAccountTempurl(Base): self.env.tempurl_key) def tempurl_parms(self, method, expires, path, key): + path = urllib.parse.unquote(path) + if not six.PY2: + method = method.encode('utf8') + path = path.encode('utf8') + key = key.encode('utf8') sig = hmac.new( key, - '%s\n%s\n%s' % (method, expires, urllib.parse.unquote(path)), + b'%s\n%d\n%s' % (method, expires, path), self.digest).hexdigest() return {'temp_url_sig': sig, 'temp_url_expires': str(expires)} @@ -1662,7 +1689,7 @@ class TestSymlinkAccountTempurl(Base): # try to create symlink object try: new_sym.write( - '', {'x-symlink-target': 'cont/foo'}, parms=put_parms, + b'', {'x-symlink-target': 'cont/foo'}, parms=put_parms, cfg={'no_auth_token': True}) except ResponseError as e: self.assertEqual(e.status, 400) @@ -1672,9 +1699,9 @@ class TestSymlinkAccountTempurl(Base): def test_GET_symlink_inside_container(self): tgt_obj = self.env.container.file(Utils.create_name()) sym = self.env.container.file(Utils.create_name()) - tgt_obj.write("target object body") + tgt_obj.write(b"target object body") sym.write( - '', + b'', {'x-symlink-target': '%s/%s' % (self.env.container.name, tgt_obj)}) expires = int(time.time()) + 86400 @@ -1684,18 +1711,18 @@ class TestSymlinkAccountTempurl(Base): contents = sym.read(parms=get_parms, cfg={'no_auth_token': True}) self.assert_status([200]) - self.assertEqual(contents, "target object body") + self.assertEqual(contents, b"target object body") def test_GET_symlink_outside_container(self): tgt_obj = self.env.container.file(Utils.create_name()) - tgt_obj.write("target object body") + tgt_obj.write(b"target object body") container2 = self.env.account.container(Utils.create_name()) container2.create() sym = container2.file(Utils.create_name()) sym.write( - '', + b'', {'x-symlink-target': '%s/%s' % (self.env.container.name, tgt_obj)}) expires = int(time.time()) + 86400 @@ -1706,7 +1733,7 @@ class TestSymlinkAccountTempurl(Base): # cross container tempurl works fine for account tempurl key contents = sym.read(parms=get_parms, cfg={'no_auth_token': True}) self.assert_status([200]) - self.assertEqual(contents, "target object body") + self.assertEqual(contents, b"target object body") class TestSymlinkContainerTempurl(Base): @@ -1737,9 +1764,14 @@ class TestSymlinkContainerTempurl(Base): 'temp_url_expires': str(expires)} def tempurl_sig(self, method, expires, path, key): + path = urllib.parse.unquote(path) + if not six.PY2: + method = method.encode('utf8') + path = path.encode('utf8') + key = key.encode('utf8') return hmac.new( key, - '%s\n%s\n%s' % (method, expires, urllib.parse.unquote(path)), + b'%s\n%d\n%s' % (method, expires, path), self.digest).hexdigest() def test_PUT_symlink(self): @@ -1756,7 +1788,7 @@ class TestSymlinkContainerTempurl(Base): # try to create symlink object, should fail try: new_sym.write( - '', 
{'x-symlink-target': 'cont/foo'}, parms=put_parms, + b'', {'x-symlink-target': 'cont/foo'}, parms=put_parms, cfg={'no_auth_token': True}) except ResponseError as e: self.assertEqual(e.status, 400) @@ -1766,9 +1798,9 @@ class TestSymlinkContainerTempurl(Base): def test_GET_symlink_inside_container(self): tgt_obj = self.env.container.file(Utils.create_name()) sym = self.env.container.file(Utils.create_name()) - tgt_obj.write("target object body") + tgt_obj.write(b"target object body") sym.write( - '', + b'', {'x-symlink-target': '%s/%s' % (self.env.container.name, tgt_obj)}) expires = int(time.time()) + 86400 @@ -1780,18 +1812,18 @@ class TestSymlinkContainerTempurl(Base): contents = sym.read(parms=parms, cfg={'no_auth_token': True}) self.assert_status([200]) - self.assertEqual(contents, "target object body") + self.assertEqual(contents, b"target object body") def test_GET_symlink_outside_container(self): tgt_obj = self.env.container.file(Utils.create_name()) - tgt_obj.write("target object body") + tgt_obj.write(b"target object body") container2 = self.env.account.container(Utils.create_name()) container2.create() sym = container2.file(Utils.create_name()) sym.write( - '', + b'', {'x-symlink-target': '%s/%s' % (self.env.container.name, tgt_obj)}) expires = int(time.time()) + 86400 diff --git a/test/functional/test_tempurl.py b/test/functional/test_tempurl.py index 5f490a3bea..aad65cd2c7 100644 --- a/test/functional/test_tempurl.py +++ b/test/functional/test_tempurl.py @@ -82,9 +82,9 @@ class TestTempurlEnv(TestTempurlBaseEnv): raise ResponseError(cls.conn.response) cls.obj = cls.container.file(Utils.create_name()) - cls.obj.write("obj contents") + cls.obj.write(b"obj contents") cls.other_obj = cls.container.file(Utils.create_name()) - cls.other_obj.write("other obj contents") + cls.other_obj.write(b"other obj contents") class TestTempurl(Base): @@ -437,9 +437,9 @@ class TestContainerTempurlEnv(BaseEnv): raise ResponseError(cls.conn.response) cls.obj = cls.container.file(Utils.create_name()) - cls.obj.write("obj contents") + cls.obj.write(b"obj contents") cls.other_obj = cls.container.file(Utils.create_name()) - cls.other_obj.write("other obj contents") + cls.other_obj.write(b"other obj contents") class TestContainerTempurl(Base): diff --git a/test/unit/__init__.py b/test/unit/__init__.py index 3ba497bec3..97c8016204 100644 --- a/test/unit/__init__.py +++ b/test/unit/__init__.py @@ -1424,3 +1424,13 @@ def attach_fake_replication_rpc(rpc, replicate_hook=None, errors=None): return resp return FakeReplConnection + + +def group_by_byte(contents): + # This looks a little funny, but iterating through a byte string on py3 + # yields a sequence of ints, not a sequence of single-byte byte strings + # as it did on py2. 
+ byte_iter = (contents[i:i + 1] for i in range(len(contents))) + return [ + (char, sum(1 for _ in grp)) + for char, grp in itertools.groupby(byte_iter)] diff --git a/test/unit/common/middleware/test_symlink.py b/test/unit/common/middleware/test_symlink.py index 005020e72d..faa2114ffd 100644 --- a/test/unit/common/middleware/test_symlink.py +++ b/test/unit/common/middleware/test_symlink.py @@ -415,6 +415,9 @@ class TestSymlinkMiddleware(TestSymlinkMiddlewareBase): do_test( {'X-Symlink-Target': 'cont/obj', 'X-Symlink-Target-Account': target}) + do_test( + {'X-Symlink-Target': 'cont/obj', + 'X-Symlink-Target-Account': swob.wsgi_quote(target)}) def test_check_symlink_header_invalid_format(self): def do_test(headers, status, err_msg): @@ -456,26 +459,25 @@ class TestSymlinkMiddleware(TestSymlinkMiddlewareBase): '412 Precondition Failed', b'Account name cannot contain slashes') # with multi-bytes + target = u'/\u30b0\u30e9\u30d6\u30eb/\u30a2\u30ba\u30ec\u30f3' + target = swob.bytes_to_wsgi(target.encode('utf8')) do_test( - {'X-Symlink-Target': - u'/\u30b0\u30e9\u30d6\u30eb/\u30a2\u30ba\u30ec\u30f3'}, + {'X-Symlink-Target': target}, '412 Precondition Failed', b'X-Symlink-Target header must be of the ' b'form /') - target = u'/\u30b0\u30e9\u30d6\u30eb/\u30a2\u30ba\u30ec\u30f3' - target = swob.bytes_to_wsgi(target.encode('utf8')) do_test( {'X-Symlink-Target': swob.wsgi_quote(target)}, '412 Precondition Failed', b'X-Symlink-Target header must be of the ' b'form /') account = u'\u30b0\u30e9\u30d6\u30eb/\u30a2\u30ba\u30ec\u30f3' + account = swob.bytes_to_wsgi(account.encode('utf8')) do_test( {'X-Symlink-Target': 'c/o', 'X-Symlink-Target-Account': account}, '412 Precondition Failed', b'Account name cannot contain slashes') - account = swob.bytes_to_wsgi(account.encode('utf8')) do_test( {'X-Symlink-Target': 'c/o', 'X-Symlink-Target-Account': swob.wsgi_quote(account)}, diff --git a/tox.ini b/tox.ini index cd10920533..9bba192162 100644 --- a/tox.ini +++ b/tox.ini @@ -129,6 +129,7 @@ basepython = python3 commands = pip install -U eventlet@git+https://github.com/eventlet/eventlet.git nosetests {posargs: \ + test/functional/test_symlink.py \ test/functional/tests.py} [testenv:func-encryption] From 37fa12cd83849a3ae8374ff07861d1d710d53174 Mon Sep 17 00:00:00 2001 From: Kuan-Lin Chen Date: Mon, 3 Jun 2019 18:39:51 +0800 Subject: [PATCH 3/8] Do not sync suffixes when remote rejects reconstructor sync The commit a0fcca1e makes reconstructor not sync suffixes when remote reject reconstructor revert. However, the exact same logic should be applied to SYNC job as well. REPLICATE requests aren't generally needed when using SSYC (which the reconstructor always does). If a ssync_sender fails to finish a sync the reconstructor should skip the REPLICATE call entirely and move on to the next partition without causing any useless remote IO. 
Change-Id: Ida50539e645ea7e2950ba668c7f031a8d10da787 Closes-Bug: #1665141 --- swift/obj/reconstructor.py | 3 ++- test/unit/obj/test_reconstructor.py | 5 +++++ 2 files changed, 7 insertions(+), 1 deletion(-) diff --git a/swift/obj/reconstructor.py b/swift/obj/reconstructor.py index 71ae06e5fc..e95000dc8f 100644 --- a/swift/obj/reconstructor.py +++ b/swift/obj/reconstructor.py @@ -850,7 +850,8 @@ class ObjectReconstructor(Daemon): success, _ = ssync_sender( self, node, job, suffixes)() # let remote end know to rehash it's suffixes - self.rehash_remote(node, job, suffixes) + if success: + self.rehash_remote(node, job, suffixes) # update stats for this attempt self.suffix_sync += len(suffixes) self.logger.update_stats('suffix.syncs', len(suffixes)) diff --git a/test/unit/obj/test_reconstructor.py b/test/unit/obj/test_reconstructor.py index 6da0ad09ee..b4e1555913 100644 --- a/test/unit/obj/test_reconstructor.py +++ b/test/unit/obj/test_reconstructor.py @@ -4113,6 +4113,11 @@ class TestObjectReconstructor(BaseTestObjectReconstructor): (node['replication_ip'], '/%s/0' % node['device']), (node['replication_ip'], '/%s/0/123-abc' % node['device']), ]) + # the first (primary sync_to) node's rehash_remote will be skipped + first_node = part_nodes[0] + expected_suffix_calls.remove( + (first_node['replication_ip'], '/%s/0/123-abc' + % first_node['device'])) ssync_calls = [] with mock_ssync_sender(ssync_calls, From 98637dc1e7a6ef5641079a6226d12bf106436b35 Mon Sep 17 00:00:00 2001 From: =?UTF-8?q?=E7=BF=9F=E5=B0=8F=E5=90=9B?= Date: Wed, 5 Jun 2019 12:35:00 +0800 Subject: [PATCH 4/8] Bump openstackdocstheme to 1.30.0 ...to pick up many improvements, including the return of table borders. Change-Id: I166211b690b08521171b489582fa419d756b1972 --- doc/requirements.txt | 2 +- lower-constraints.txt | 2 +- 2 files changed, 2 insertions(+), 2 deletions(-) diff --git a/doc/requirements.txt b/doc/requirements.txt index c5f6173b16..7c23cdc00f 100644 --- a/doc/requirements.txt +++ b/doc/requirements.txt @@ -4,7 +4,7 @@ # this is required for the docs build jobs sphinx>=1.6.2,<2.0.0;python_version=='2.7' # BSD sphinx>=1.6.2;python_version>='3.4' # BSD -openstackdocstheme>=1.11.0 # Apache-2.0 +openstackdocstheme>=1.30.0 # Apache-2.0 reno>=1.8.0 # Apache-2.0 os-api-ref>=1.0.0 # Apache-2.0 python-keystoneclient!=2.1.0,>=2.0.0 # Apache-2.0 diff --git a/lower-constraints.txt b/lower-constraints.txt index e30c1b53f1..7dc11b8200 100644 --- a/lower-constraints.txt +++ b/lower-constraints.txt @@ -46,7 +46,7 @@ netifaces==0.8 nose==1.3.7 nosehtmloutput==0.0.3 nosexcover==1.0.10 -openstackdocstheme==1.11.0 +openstackdocstheme==1.30.0 os-api-ref==1.0.0 os-testr==0.8.0 oslo.config==4.0.0 From d9cafca246bb15e706d9f7546e1f4bedda1b6c8b Mon Sep 17 00:00:00 2001 From: Tim Burke Date: Tue, 21 May 2019 18:04:05 -0700 Subject: [PATCH 5/8] py3: port ssync Change-Id: I63a502be13f5dcda2a457d38f2fc5f1ca469d562 --- swift/obj/reconstructor.py | 8 +- swift/obj/ssync_receiver.py | 52 ++-- swift/obj/ssync_sender.py | 80 +++-- test/unit/obj/test_ssync.py | 80 ++--- test/unit/obj/test_ssync_receiver.py | 432 +++++++++++++++------------ test/unit/obj/test_ssync_sender.py | 268 +++++++++-------- tox.ini | 3 + 7 files changed, 507 insertions(+), 416 deletions(-) diff --git a/swift/obj/reconstructor.py b/swift/obj/reconstructor.py index 71ae06e5fc..cb8172638f 100644 --- a/swift/obj/reconstructor.py +++ b/swift/obj/reconstructor.py @@ -463,7 +463,7 @@ class ObjectReconstructor(Daemon): if resp_frag_index not in buckets[timestamp]: 
buckets[timestamp][resp_frag_index] = resp if len(buckets[timestamp]) >= job['policy'].ec_ndata: - responses = buckets[timestamp].values() + responses = list(buckets[timestamp].values()) self.logger.debug( 'Reconstruct frag #%s with frag indexes %s' % (fi_to_rebuild, list(buckets[timestamp]))) @@ -509,15 +509,15 @@ class ObjectReconstructor(Daemon): """ def _get_one_fragment(resp): - buff = '' + buff = [] remaining_bytes = policy.fragment_size while remaining_bytes: chunk = resp.read(remaining_bytes) if not chunk: break remaining_bytes -= len(chunk) - buff += chunk - return buff + buff.append(chunk) + return b''.join(buff) def fragment_payload_iter(): # We need a fragment from each connections, so best to diff --git a/swift/obj/ssync_receiver.py b/swift/obj/ssync_receiver.py index ebce8569ba..3e39b8abbd 100644 --- a/swift/obj/ssync_receiver.py +++ b/swift/obj/ssync_receiver.py @@ -15,6 +15,7 @@ import eventlet.greenio +import eventlet.wsgi from six.moves import urllib from swift.common import exceptions @@ -35,7 +36,7 @@ def decode_missing(line): :py:func:`~swift.obj.ssync_sender.encode_missing` """ result = {} - parts = line.split() + parts = line.decode('ascii').split() result['object_hash'] = urllib.parse.unquote(parts[0]) t_data = urllib.parse.unquote(parts[1]) result['ts_data'] = Timestamp(t_data) @@ -129,7 +130,17 @@ class Receiver(object): # raised during processing because otherwise the sender could send for # quite some time before realizing it was all in vain. self.disconnect = True - self.initialize_request() + try: + self.initialize_request() + except swob.HTTPException: + # Old (pre-0.18.0) eventlet would try to drain the request body + # in a way that's prone to blowing up when the client has + # disconnected. Trick it into skipping that so we don't trip + # ValueError: invalid literal for int() with base 16 + # in tests. Note we disconnect shortly after receiving a non-200 + # response in the sender code, so this is not *so* crazy to do. + request.environ['wsgi.input'].chunked_input = False + raise def __call__(self): """ @@ -151,7 +162,7 @@ class Receiver(object): try: # Need to send something to trigger wsgi to return response # headers and kick off the ssync exchange. - yield '\r\n' + yield b'\r\n' # If semaphore is in use, try to acquire it, non-blocking, and # return a 503 if it fails. 
if self.app.replication_semaphore: @@ -176,21 +187,22 @@ class Receiver(object): '%s/%s/%s SSYNC LOCK TIMEOUT: %s' % ( self.request.remote_addr, self.device, self.partition, err)) - yield ':ERROR: %d %r\n' % (0, str(err)) + yield (':ERROR: %d %r\n' % (0, str(err))).encode('utf8') except exceptions.MessageTimeout as err: self.app.logger.error( '%s/%s/%s TIMEOUT in ssync.Receiver: %s' % ( self.request.remote_addr, self.device, self.partition, err)) - yield ':ERROR: %d %r\n' % (408, str(err)) + yield (':ERROR: %d %r\n' % (408, str(err))).encode('utf8') except swob.HTTPException as err: - body = ''.join(err({}, lambda *args: None)) - yield ':ERROR: %d %r\n' % (err.status_int, body) + body = b''.join(err({}, lambda *args: None)) + yield (':ERROR: %d %r\n' % ( + err.status_int, body)).encode('utf8') except Exception as err: self.app.logger.exception( '%s/%s/%s EXCEPTION in ssync.Receiver' % (self.request.remote_addr, self.device, self.partition)) - yield ':ERROR: %d %r\n' % (0, str(err)) + yield (':ERROR: %d %r\n' % (0, str(err))).encode('utf8') except Exception: self.app.logger.exception('EXCEPTION in ssync.Receiver') if self.disconnect: @@ -335,7 +347,7 @@ class Receiver(object): with exceptions.MessageTimeout( self.app.client_timeout, 'missing_check start'): line = self.fp.readline(self.app.network_chunk_size) - if line.strip() != ':MISSING_CHECK: START': + if line.strip() != b':MISSING_CHECK: START': raise Exception( 'Looking for :MISSING_CHECK: START got %r' % line[:1024]) object_hashes = [] @@ -343,16 +355,16 @@ class Receiver(object): with exceptions.MessageTimeout( self.app.client_timeout, 'missing_check line'): line = self.fp.readline(self.app.network_chunk_size) - if not line or line.strip() == ':MISSING_CHECK: END': + if not line or line.strip() == b':MISSING_CHECK: END': break want = self._check_missing(line) if want: object_hashes.append(want) - yield ':MISSING_CHECK: START\r\n' + yield b':MISSING_CHECK: START\r\n' if object_hashes: - yield '\r\n'.join(object_hashes) - yield '\r\n' - yield ':MISSING_CHECK: END\r\n' + yield b'\r\n'.join(hsh.encode('ascii') for hsh in object_hashes) + yield b'\r\n' + yield b':MISSING_CHECK: END\r\n' def updates(self): """ @@ -395,7 +407,7 @@ class Receiver(object): with exceptions.MessageTimeout( self.app.client_timeout, 'updates start'): line = self.fp.readline(self.app.network_chunk_size) - if line.strip() != ':UPDATES: START': + if line.strip() != b':UPDATES: START': raise Exception('Looking for :UPDATES: START got %r' % line[:1024]) successes = 0 failures = 0 @@ -403,10 +415,10 @@ class Receiver(object): with exceptions.MessageTimeout( self.app.client_timeout, 'updates line'): line = self.fp.readline(self.app.network_chunk_size) - if not line or line.strip() == ':UPDATES: END': + if not line or line.strip() == b':UPDATES: END': break # Read first line METHOD PATH of subrequest. 
- method, path = line.strip().split(' ', 1) + method, path = swob.bytes_to_wsgi(line.strip()).split(' ', 1) subreq = swob.Request.blank( '/%s/%s%s' % (self.device, self.partition, path), environ={'REQUEST_METHOD': method}) @@ -422,7 +434,7 @@ class Receiver(object): line = line.strip() if not line: break - header, value = line.split(':', 1) + header, value = swob.bytes_to_wsgi(line).split(':', 1) header = header.strip().lower() value = value.strip() subreq.headers[header] = value @@ -495,5 +507,5 @@ class Receiver(object): raise swob.HTTPInternalServerError( 'ERROR: With :UPDATES: %d failures to %d successes' % (failures, successes)) - yield ':UPDATES: START\r\n' - yield ':UPDATES: END\r\n' + yield b':UPDATES: START\r\n' + yield b':UPDATES: END\r\n' diff --git a/swift/obj/ssync_sender.py b/swift/obj/ssync_sender.py index da9ca63f1d..bcb16fb0e6 100644 --- a/swift/obj/ssync_sender.py +++ b/swift/obj/ssync_sender.py @@ -40,7 +40,7 @@ def encode_missing(object_hash, ts_data, ts_meta=None, ts_ctype=None): if ts_ctype and ts_ctype != ts_data: delta = ts_ctype.raw - ts_data.raw msg = '%s,t:%x' % (msg, delta) - return msg + return msg.encode('ascii') def decode_wanted(parts): @@ -52,7 +52,7 @@ def decode_wanted(parts): :py:func:`~swift.obj.ssync_receiver.encode_wanted` """ wanted = {} - key_map = dict(d='data', m='meta') + key_map = {'d': 'data', 'm': 'meta'} if parts: # receiver specified data and/or meta wanted, so use those as # conditions for sending PUT and/or POST subrequests @@ -72,7 +72,7 @@ def decode_wanted(parts): class SsyncBufferedHTTPResponse(bufferedhttp.BufferedHTTPResponse, object): def __init__(self, *args, **kwargs): super(SsyncBufferedHTTPResponse, self).__init__(*args, **kwargs) - self.ssync_response_buffer = '' + self.ssync_response_buffer = b'' self.ssync_response_chunk_left = 0 def readline(self, size=1024): @@ -84,13 +84,13 @@ class SsyncBufferedHTTPResponse(bufferedhttp.BufferedHTTPResponse, object): taken from Python's httplib itself. """ data = self.ssync_response_buffer - self.ssync_response_buffer = '' - while '\n' not in data and len(data) < size: + self.ssync_response_buffer = b'' + while b'\n' not in data and len(data) < size: if self.ssync_response_chunk_left == -1: # EOF-already indicator break if self.ssync_response_chunk_left == 0: line = self.fp.readline() - i = line.find(';') + i = line.find(b';') if i >= 0: line = line[:i] # strip chunk-extensions try: @@ -114,9 +114,9 @@ class SsyncBufferedHTTPResponse(bufferedhttp.BufferedHTTPResponse, object): if self.ssync_response_chunk_left == 0: self.fp.read(2) # discard the trailing \r\n data += chunk - if '\n' in data: - data, self.ssync_response_buffer = data.split('\n', 1) - data += '\n' + if b'\n' in data: + data, self.ssync_response_buffer = data.split(b'\n', 1) + data += b'\n' return data @@ -263,8 +263,8 @@ class Sender(object): # First, send our list. 
with exceptions.MessageTimeout( self.daemon.node_timeout, 'missing_check start'): - msg = ':MISSING_CHECK: START\r\n' - connection.send('%x\r\n%s\r\n' % (len(msg), msg)) + msg = b':MISSING_CHECK: START\r\n' + connection.send(b'%x\r\n%s\r\n' % (len(msg), msg)) hash_gen = self.df_mgr.yield_hashes( self.job['device'], self.job['partition'], self.job['policy'], self.suffixes, @@ -279,12 +279,12 @@ class Sender(object): with exceptions.MessageTimeout( self.daemon.node_timeout, 'missing_check send line'): - msg = '%s\r\n' % encode_missing(object_hash, **timestamps) - connection.send('%x\r\n%s\r\n' % (len(msg), msg)) + msg = b'%s\r\n' % encode_missing(object_hash, **timestamps) + connection.send(b'%x\r\n%s\r\n' % (len(msg), msg)) with exceptions.MessageTimeout( self.daemon.node_timeout, 'missing_check end'): - msg = ':MISSING_CHECK: END\r\n' - connection.send('%x\r\n%s\r\n' % (len(msg), msg)) + msg = b':MISSING_CHECK: END\r\n' + connection.send(b'%x\r\n%s\r\n' % (len(msg), msg)) # Now, retrieve the list of what they want. while True: with exceptions.MessageTimeout( @@ -293,9 +293,14 @@ class Sender(object): if not line: raise exceptions.ReplicationException('Early disconnect') line = line.strip() - if line == ':MISSING_CHECK: START': + if line == b':MISSING_CHECK: START': break elif line: + if not six.PY2: + try: + line = line.decode('ascii') + except UnicodeDecodeError: + pass raise exceptions.ReplicationException( 'Unexpected response: %r' % line[:1024]) while True: @@ -305,9 +310,9 @@ class Sender(object): if not line: raise exceptions.ReplicationException('Early disconnect') line = line.strip() - if line == ':MISSING_CHECK: END': + if line == b':MISSING_CHECK: END': break - parts = line.split() + parts = line.decode('ascii').split() if parts: send_map[parts[0]] = decode_wanted(parts[1:]) return available_map, send_map @@ -323,8 +328,8 @@ class Sender(object): # First, send all our subrequests based on the send_map. with exceptions.MessageTimeout( self.daemon.node_timeout, 'updates start'): - msg = ':UPDATES: START\r\n' - connection.send('%x\r\n%s\r\n' % (len(msg), msg)) + msg = b':UPDATES: START\r\n' + connection.send(b'%x\r\n%s\r\n' % (len(msg), msg)) for object_hash, want in send_map.items(): object_hash = urllib.parse.unquote(object_hash) try: @@ -360,8 +365,8 @@ class Sender(object): pass with exceptions.MessageTimeout( self.daemon.node_timeout, 'updates end'): - msg = ':UPDATES: END\r\n' - connection.send('%x\r\n%s\r\n' % (len(msg), msg)) + msg = b':UPDATES: END\r\n' + connection.send(b'%x\r\n%s\r\n' % (len(msg), msg)) # Now, read their response for any issues. 
while True: with exceptions.MessageTimeout( @@ -370,9 +375,14 @@ class Sender(object): if not line: raise exceptions.ReplicationException('Early disconnect') line = line.strip() - if line == ':UPDATES: START': + if line == b':UPDATES: START': break elif line: + if not six.PY2: + try: + line = line.decode('ascii') + except UnicodeDecodeError: + pass raise exceptions.ReplicationException( 'Unexpected response: %r' % line[:1024]) while True: @@ -382,20 +392,30 @@ class Sender(object): if not line: raise exceptions.ReplicationException('Early disconnect') line = line.strip() - if line == ':UPDATES: END': + if line == b':UPDATES: END': break elif line: + if not six.PY2: + try: + line = line.decode('ascii') + except UnicodeDecodeError: + pass raise exceptions.ReplicationException( 'Unexpected response: %r' % line[:1024]) def send_subrequest(self, connection, method, url_path, headers, df): - msg = ['%s %s' % (method, url_path)] + msg = [b'%s %s' % (method.encode('ascii'), url_path.encode('utf8'))] for key, value in sorted(headers.items()): - msg.append('%s: %s' % (key, value)) - msg = '\r\n'.join(msg) + '\r\n\r\n' + if six.PY2: + msg.append(b'%s: %s' % (key, value)) + else: + msg.append(b'%s: %s' % ( + key.encode('utf8', 'surrogateescape'), + str(value).encode('utf8', 'surrogateescape'))) + msg = b'\r\n'.join(msg) + b'\r\n\r\n' with exceptions.MessageTimeout(self.daemon.node_timeout, 'send_%s' % method.lower()): - connection.send('%x\r\n%s\r\n' % (len(msg), msg)) + connection.send(b'%x\r\n%s\r\n' % (len(msg), msg)) if df: bytes_read = 0 @@ -404,7 +424,7 @@ class Sender(object): with exceptions.MessageTimeout(self.daemon.node_timeout, 'send_%s chunk' % method.lower()): - connection.send('%x\r\n%s\r\n' % (len(chunk), chunk)) + connection.send(b'%x\r\n%s\r\n' % (len(chunk), chunk)) if bytes_read != df.content_length: # Since we may now have partial state on the receiver we have # to prevent the receiver finalising what may well be a bad or @@ -450,7 +470,7 @@ class Sender(object): try: with exceptions.MessageTimeout( self.daemon.node_timeout, 'disconnect'): - connection.send('0\r\n\r\n') + connection.send(b'0\r\n\r\n') except (Exception, exceptions.Timeout): pass # We're okay with the above failing. 
connection.close() diff --git a/test/unit/obj/test_ssync.py b/test/unit/obj/test_ssync.py index 1c6b1b20c7..19be0d4d72 100644 --- a/test/unit/obj/test_ssync.py +++ b/test/unit/obj/test_ssync.py @@ -25,6 +25,7 @@ from six.moves import urllib from swift.common.exceptions import DiskFileNotExist, DiskFileError, \ DiskFileDeleted, DiskFileExpired +from swift.common import swob from swift.common import utils from swift.common.storage_policy import POLICIES, EC_POLICY from swift.common.utils import Timestamp @@ -92,8 +93,8 @@ class TestBaseSsync(BaseTest): def make_send_wrapper(send): def wrapped_send(msg): - _msg = msg.split('\r\n', 1)[1] - _msg = _msg.rsplit('\r\n', 1)[0] + _msg = msg.split(b'\r\n', 1)[1] + _msg = _msg.rsplit(b'\r\n', 1)[0] add_trace('tx', _msg) send(msg) return wrapped_send @@ -118,7 +119,7 @@ class TestBaseSsync(BaseTest): def _get_object_data(self, path, **kwargs): # return data for given path if path not in self.obj_data: - self.obj_data[path] = '%s___data' % path + self.obj_data[path] = b'%s___data' % path.encode('ascii') return self.obj_data[path] def _create_ondisk_files(self, df_mgr, obj_name, policy, timestamp, @@ -162,7 +163,7 @@ class TestBaseSsync(BaseTest): for k, v in tx_df.get_metadata().items(): if k == 'X-Object-Sysmeta-Ec-Frag-Index': # if tx_df had a frag_index then rx_df should also have one - self.assertTrue(k in rx_metadata) + self.assertIn(k, rx_metadata) self.assertEqual(frag_index, int(rx_metadata.pop(k))) elif k == 'ETag' and not same_etag: self.assertNotEqual(v, rx_metadata.pop(k, None)) @@ -174,7 +175,7 @@ class TestBaseSsync(BaseTest): self.assertFalse(rx_metadata) expected_body = self._get_object_data(tx_df._name, frag_index=frag_index) - actual_body = ''.join([chunk for chunk in rx_df.reader()]) + actual_body = b''.join([chunk for chunk in rx_df.reader()]) self.assertEqual(expected_body, actual_body) def _analyze_trace(self, trace): @@ -194,22 +195,22 @@ class TestBaseSsync(BaseTest): def rx_missing(results, line): self.assertEqual('rx', line[0]) - parts = line[1].split('\r\n') + parts = line[1].split(b'\r\n') for part in parts: results['rx_missing'].append(part) def tx_updates(results, line): self.assertEqual('tx', line[0]) subrequests = results['tx_updates'] - if line[1].startswith(('PUT', 'DELETE', 'POST')): - parts = line[1].split('\r\n') + if line[1].startswith((b'PUT', b'DELETE', b'POST')): + parts = [swob.bytes_to_wsgi(l) for l in line[1].split(b'\r\n')] method, path = parts[0].split() subreq = {'method': method, 'path': path, 'req': line[1], 'headers': parts[1:]} subrequests.append(subreq) else: self.assertTrue(subrequests) - body = (subrequests[-1]).setdefault('body', '') + body = (subrequests[-1]).setdefault('body', b'') body += line[1] subrequests[-1]['body'] = body @@ -221,14 +222,14 @@ class TestBaseSsync(BaseTest): results.setdefault('unexpected', []).append(line) # each trace line is a tuple of ([tx|rx], msg) - handshakes = iter([(('tx', ':MISSING_CHECK: START'), tx_missing), - (('tx', ':MISSING_CHECK: END'), unexpected), - (('rx', ':MISSING_CHECK: START'), rx_missing), - (('rx', ':MISSING_CHECK: END'), unexpected), - (('tx', ':UPDATES: START'), tx_updates), - (('tx', ':UPDATES: END'), unexpected), - (('rx', ':UPDATES: START'), rx_updates), - (('rx', ':UPDATES: END'), unexpected)]) + handshakes = iter([(('tx', b':MISSING_CHECK: START'), tx_missing), + (('tx', b':MISSING_CHECK: END'), unexpected), + (('rx', b':MISSING_CHECK: START'), rx_missing), + (('rx', b':MISSING_CHECK: END'), unexpected), + (('tx', b':UPDATES: START'), 
tx_updates), + (('tx', b':UPDATES: END'), unexpected), + (('rx', b':UPDATES: START'), rx_updates), + (('rx', b':UPDATES: END'), unexpected)]) expect_handshake = next(handshakes) phases = ('tx_missing', 'rx_missing', 'tx_updates', 'rx_updates') results = dict((k, []) for k in phases) @@ -319,7 +320,8 @@ class TestBaseSsyncEC(TestBaseSsync): # for EC policies obj_data maps obj path -> list of frag archives if path not in self.obj_data: # make unique frag archives for each object name - data = path * 2 * (self.policy.ec_ndata + self.policy.ec_nparity) + data = path.encode('ascii') * 2 * ( + self.policy.ec_ndata + self.policy.ec_nparity) self.obj_data[path] = encode_frag_archive_bodies( self.policy, data) return self.obj_data[path][frag_index] @@ -740,7 +742,7 @@ class TestSsyncEC(TestBaseSsyncEC): self.device, self.partition, suffixes, policy) rx_hashes = rx_df_mgr.get_hashes( self.device, self.partition, suffixes, policy) - self.assertEqual(suffixes, tx_hashes.keys()) # sanity + self.assertEqual(suffixes, list(tx_hashes.keys())) # sanity self.assertEqual(tx_hashes, rx_hashes) # sanity check - run ssync again and expect no sync activity @@ -763,7 +765,7 @@ class FakeResponse(object): } self.frag_index = frag_index self.obj_data = obj_data - self.data = '' + self.data = b'' self.length = length def init(self, path): @@ -779,7 +781,7 @@ class FakeResponse(object): if isinstance(self.data, Exception): raise self.data val = self.data - self.data = '' + self.data = b'' return val if self.length is None else val[:self.length] @@ -1011,7 +1013,7 @@ class TestSsyncECReconstructorSyncJob(TestBaseSsyncEC): self.assertEqual( self._get_object_data(synced_obj_path, frag_index=self.rx_node_index), - ''.join([d for d in df.reader()])) + b''.join([d for d in df.reader()])) except DiskFileNotExist: msgs.append('Missing rx diskfile for %r' % obj_name) @@ -1057,7 +1059,7 @@ class TestSsyncECReconstructorSyncJob(TestBaseSsyncEC): self.assertEqual( self._get_object_data(df._name, frag_index=self.rx_node_index), - ''.join([d for d in df.reader()])) + b''.join([d for d in df.reader()])) except DiskFileNotExist: msgs.append('Missing rx diskfile for %r' % obj_name) if msgs: @@ -1499,7 +1501,7 @@ class TestSsyncReplication(TestBaseSsync): def _legacy_check_missing(self, line): # reproduces behavior of 'legacy' ssync receiver missing_checks() - parts = line.split() + parts = line.decode('ascii').split() object_hash = urllib.parse.unquote(parts[0]) timestamp = urllib.parse.unquote(parts[1]) want = False @@ -1562,14 +1564,14 @@ class TestSsyncReplication(TestBaseSsync): # o1 on tx only with two meta files name = 'o1' - t1 = self.ts_iter.next() + t1 = next(self.ts_iter) tx_objs[name] = self._create_ondisk_files(tx_df_mgr, name, policy, t1) - t1_type = self.ts_iter.next() + t1_type = next(self.ts_iter) metadata_1 = {'X-Timestamp': t1_type.internal, 'Content-Type': 'text/test', 'Content-Type-Timestamp': t1_type.internal} tx_objs[name][0].write_metadata(metadata_1) - t1_meta = self.ts_iter.next() + t1_meta = next(self.ts_iter) metadata_2 = {'X-Timestamp': t1_meta.internal, 'X-Object-Meta-Test': name} tx_objs[name][0].write_metadata(metadata_2) @@ -1579,14 +1581,14 @@ class TestSsyncReplication(TestBaseSsync): # o2 on tx with two meta files, rx has .data and newest .meta but is # missing latest content-type name = 'o2' - t2 = self.ts_iter.next() + t2 = next(self.ts_iter) tx_objs[name] = self._create_ondisk_files(tx_df_mgr, name, policy, t2) - t2_type = self.ts_iter.next() + t2_type = next(self.ts_iter) metadata_1 = 
{'X-Timestamp': t2_type.internal, 'Content-Type': 'text/test', 'Content-Type-Timestamp': t2_type.internal} tx_objs[name][0].write_metadata(metadata_1) - t2_meta = self.ts_iter.next() + t2_meta = next(self.ts_iter) metadata_2 = {'X-Timestamp': t2_meta.internal, 'X-Object-Meta-Test': name} tx_objs[name][0].write_metadata(metadata_2) @@ -1597,14 +1599,14 @@ class TestSsyncReplication(TestBaseSsync): # o3 on tx with two meta files, rx has .data and one .meta but does # have latest content-type so nothing to sync name = 'o3' - t3 = self.ts_iter.next() + t3 = next(self.ts_iter) tx_objs[name] = self._create_ondisk_files(tx_df_mgr, name, policy, t3) - t3_type = self.ts_iter.next() + t3_type = next(self.ts_iter) metadata_1 = {'X-Timestamp': t3_type.internal, 'Content-Type': 'text/test', 'Content-Type-Timestamp': t3_type.internal} tx_objs[name][0].write_metadata(metadata_1) - t3_meta = self.ts_iter.next() + t3_meta = next(self.ts_iter) metadata_2 = {'X-Timestamp': t3_meta.internal, 'X-Object-Meta-Test': name} tx_objs[name][0].write_metadata(metadata_2) @@ -1619,10 +1621,10 @@ class TestSsyncReplication(TestBaseSsync): # .data and two .meta having latest content-type so nothing to sync # i.e. o4 is the reverse of o3 scenario name = 'o4' - t4 = self.ts_iter.next() + t4 = next(self.ts_iter) tx_objs[name] = self._create_ondisk_files(tx_df_mgr, name, policy, t4) - t4_type = self.ts_iter.next() - t4_meta = self.ts_iter.next() + t4_type = next(self.ts_iter) + t4_meta = next(self.ts_iter) metadata_2b = {'X-Timestamp': t4_meta.internal, 'X-Object-Meta-Test': name, 'Content-Type': 'text/test', @@ -1640,10 +1642,10 @@ class TestSsyncReplication(TestBaseSsync): # o5 on tx with one meta file having latest content-type, rx has # .data and no .meta name = 'o5' - t5 = self.ts_iter.next() + t5 = next(self.ts_iter) tx_objs[name] = self._create_ondisk_files(tx_df_mgr, name, policy, t5) - t5_type = self.ts_iter.next() - t5_meta = self.ts_iter.next() + t5_type = next(self.ts_iter) + t5_meta = next(self.ts_iter) metadata = {'X-Timestamp': t5_meta.internal, 'X-Object-Meta-Test': name, 'Content-Type': 'text/test', diff --git a/test/unit/obj/test_ssync_receiver.py b/test/unit/obj/test_ssync_receiver.py index 1818cb6453..1cd9676887 100644 --- a/test/unit/obj/test_ssync_receiver.py +++ b/test/unit/obj/test_ssync_receiver.py @@ -13,7 +13,6 @@ # See the License for the specific language governing permissions and # limitations under the License. -import itertools import os import shutil import tempfile @@ -40,6 +39,12 @@ from test.unit import (debug_logger, patch_policies, make_timestamp_iter, from test.unit.obj.common import write_diskfile +if six.PY2: + UNPACK_ERR = b":ERROR: 0 'need more than 1 value to unpack'" +else: + UNPACK_ERR = b":ERROR: 0 'not enough values to unpack (expected 2, got 1)'" + + @unit.patch_policies() class TestReceiver(unittest.TestCase): @@ -92,7 +97,7 @@ class TestReceiver(unittest.TestCase): def body_lines(self, body): lines = [] - for line in body.split('\n'): + for line in body.split(b'\n'): line = line.strip() if line: lines.append(line) @@ -107,11 +112,19 @@ class TestReceiver(unittest.TestCase): req = swob.Request.blank( '/device/partition', environ={'REQUEST_METHOD': 'SSYNC'}) resp = req.get_response(self.controller) + if six.PY2: + last_line = ( + b":ERROR: 503 '
<html><h1>Service Unavailable</h1><p>The "
+                b"server is currently unavailable. Please try again at a "
+                b"later time.</p></html>'")
+        else:
+            last_line = (
+                b":ERROR: 503 b'<html><h1>Service Unavailable</h1><p>The "
+                b"server is currently unavailable. Please try again at a "
+                b"later time.</p></html>'")
         self.assertEqual(
             self.body_lines(resp.body),
-            [":ERROR: 503 '<html><h1>Service Unavailable</h1><p>The "
-             "server is currently unavailable. Please try again at a "
-             "later time.</p></html>
'"]) + [last_line]) self.assertEqual(resp.status_int, 200) self.assertFalse(self.controller.logger.error.called) self.assertFalse(self.controller.logger.exception.called) @@ -129,8 +142,8 @@ class TestReceiver(unittest.TestCase): resp = req.get_response(self.controller) self.assertEqual( self.body_lines(resp.body), - [':MISSING_CHECK: START', ':MISSING_CHECK: END', - ':UPDATES: START', ':UPDATES: END']) + [b':MISSING_CHECK: START', b':MISSING_CHECK: END', + b':UPDATES: START', b':UPDATES: END']) self.assertEqual(resp.status_int, 200) mocked_replication_lock.assert_called_once_with('sda1', POLICIES.legacy, @@ -147,8 +160,8 @@ class TestReceiver(unittest.TestCase): body_lines = [chunk.strip() for chunk in rcvr() if chunk.strip()] self.assertEqual( body_lines, - [':MISSING_CHECK: START', ':MISSING_CHECK: END', - ':UPDATES: START', ':UPDATES: END']) + [b':MISSING_CHECK: START', b':MISSING_CHECK: END', + b':UPDATES: START', b':UPDATES: END']) self.assertEqual(rcvr.policy, POLICIES[0]) def test_Receiver_with_storage_policy_index_header(self): @@ -166,8 +179,8 @@ class TestReceiver(unittest.TestCase): body_lines = [chunk.strip() for chunk in rcvr() if chunk.strip()] self.assertEqual( body_lines, - [':MISSING_CHECK: START', ':MISSING_CHECK: END', - ':UPDATES: START', ':UPDATES: END']) + [b':MISSING_CHECK: START', b':MISSING_CHECK: END', + b':UPDATES: START', b':UPDATES: END']) self.assertEqual(rcvr.policy, POLICIES[1]) self.assertIsNone(rcvr.frag_index) @@ -183,12 +196,10 @@ class TestReceiver(unittest.TestCase): ':MISSING_CHECK: END\r\n' ':UPDATES: START\r\n:UPDATES: END\r\n') self.controller.logger = mock.MagicMock() - try: + with self.assertRaises(HTTPException) as caught: ssync_receiver.Receiver(self.controller, req) - self.fail('Expected HTTPException to be raised.') - except HTTPException as err: - self.assertEqual('503 Service Unavailable', err.status) - self.assertEqual('No policy with index 2', err.body) + self.assertEqual('503 Service Unavailable', caught.exception.status) + self.assertEqual(b'No policy with index 2', caught.exception.body) @unit.patch_policies() def test_Receiver_with_only_frag_index_header(self): @@ -207,8 +218,8 @@ class TestReceiver(unittest.TestCase): body_lines = [chunk.strip() for chunk in rcvr() if chunk.strip()] self.assertEqual( body_lines, - [':MISSING_CHECK: START', ':MISSING_CHECK: END', - ':UPDATES: START', ':UPDATES: END']) + [b':MISSING_CHECK: START', b':MISSING_CHECK: END', + b':UPDATES: START', b':UPDATES: END']) self.assertEqual(rcvr.policy, POLICIES[1]) self.assertEqual(rcvr.frag_index, 7) @@ -229,8 +240,8 @@ class TestReceiver(unittest.TestCase): body_lines = [chunk.strip() for chunk in rcvr() if chunk.strip()] self.assertEqual( body_lines, - [':MISSING_CHECK: START', ':MISSING_CHECK: END', - ':UPDATES: START', ':UPDATES: END']) + [b':MISSING_CHECK: START', b':MISSING_CHECK: END', + b':UPDATES: START', b':UPDATES: END']) self.assertEqual(rcvr.policy, POLICIES[1]) # we used to require the reconstructor to send the frag_index twice as # two different headers because of evolutionary reasons, now we ignore @@ -255,8 +266,8 @@ class TestReceiver(unittest.TestCase): body_lines = [chunk.strip() for chunk in rcvr() if chunk.strip()] self.assertEqual( body_lines, - [':MISSING_CHECK: START', ':MISSING_CHECK: END', - ':UPDATES: START', ':UPDATES: END']) + [b':MISSING_CHECK: START', b':MISSING_CHECK: END', + b':UPDATES: START', b':UPDATES: END']) self.assertEqual(rcvr.policy, POLICIES[1]) self.assertEqual(rcvr.frag_index, 7) @@ -295,8 +306,8 @@ class 
TestReceiver(unittest.TestCase): body_lines = [chunk.strip() for chunk in rcvr() if chunk.strip()] self.assertEqual( body_lines, - [':MISSING_CHECK: START', ':MISSING_CHECK: END', - ':UPDATES: START', ':UPDATES: END']) + [b':MISSING_CHECK: START', b':MISSING_CHECK: END', + b':UPDATES: START', b':UPDATES: END']) self.assertEqual(rcvr.policy, POLICIES[1]) # node_index if provided should always match frag_index; but if they # differ, frag_index takes precedence @@ -319,7 +330,7 @@ class TestReceiver(unittest.TestCase): resp = req.get_response(self.controller) self.assertEqual( self.body_lines(resp.body), - [":ERROR: 0 '0.01 seconds: /somewhere/sda1'"]) + [b":ERROR: 0 '0.01 seconds: /somewhere/sda1'"]) self.controller.logger.debug.assert_called_once_with( 'None/sda1/1 SSYNC LOCK TIMEOUT: 0.01 seconds: ' '/somewhere/sda1') @@ -338,7 +349,7 @@ class TestReceiver(unittest.TestCase): body_lines1 = [] body_lines2 = [] - for chunk1, chunk2 in itertools.izip_longest(rcvr1(), rcvr2()): + for chunk1, chunk2 in six.moves.zip_longest(rcvr1(), rcvr2()): if chunk1 and chunk1.strip(): body_lines1.append(chunk1.strip()) if chunk2 and chunk2.strip(): @@ -354,23 +365,23 @@ class TestReceiver(unittest.TestCase): body_lines1, body_lines2 = _concurrent_ssync('/sda1/1', '/sda1/2') self.assertEqual( body_lines1, - [':MISSING_CHECK: START', ':MISSING_CHECK: END', - ':UPDATES: START', ':UPDATES: END']) + [b':MISSING_CHECK: START', b':MISSING_CHECK: END', + b':UPDATES: START', b':UPDATES: END']) self.assertEqual( body_lines2, - [':MISSING_CHECK: START', ':MISSING_CHECK: END', - ':UPDATES: START', ':UPDATES: END']) + [b':MISSING_CHECK: START', b':MISSING_CHECK: END', + b':UPDATES: START', b':UPDATES: END']) # It should not be possible to lock the same partition twice body_lines1, body_lines2 = _concurrent_ssync('/sda1/1', '/sda1/1') self.assertEqual( body_lines1, - [':MISSING_CHECK: START', ':MISSING_CHECK: END', - ':UPDATES: START', ':UPDATES: END']) + [b':MISSING_CHECK: START', b':MISSING_CHECK: END', + b':UPDATES: START', b':UPDATES: END']) self.assertRegexpMatches( - ''.join(body_lines2), - "^:ERROR: 0 '0\.0[0-9]+ seconds: " - "/.+/sda1/objects/1/.lock-replication'$") + b''.join(body_lines2), + b"^:ERROR: 0 '0\.0[0-9]+ seconds: " + b"/.+/sda1/objects/1/.lock-replication'$") def test_SSYNC_initial_path(self): with mock.patch.object( @@ -381,7 +392,7 @@ class TestReceiver(unittest.TestCase): resp = req.get_response(self.controller) self.assertEqual( self.body_lines(resp.body), - ["Invalid path: /device"]) + [b"Invalid path: /device"]) self.assertEqual(resp.status_int, 400) self.assertFalse(mocked_replication_semaphore.acquire.called) self.assertFalse(mocked_replication_semaphore.release.called) @@ -394,7 +405,7 @@ class TestReceiver(unittest.TestCase): resp = req.get_response(self.controller) self.assertEqual( self.body_lines(resp.body), - ["Invalid path: /device/"]) + [b"Invalid path: /device/"]) self.assertEqual(resp.status_int, 400) self.assertFalse(mocked_replication_semaphore.acquire.called) self.assertFalse(mocked_replication_semaphore.release.called) @@ -405,9 +416,12 @@ class TestReceiver(unittest.TestCase): req = swob.Request.blank( '/device/partition', environ={'REQUEST_METHOD': 'SSYNC'}) resp = req.get_response(self.controller) - self.assertEqual( - self.body_lines(resp.body), - [':ERROR: 0 "Looking for :MISSING_CHECK: START got \'\'"']) + if six.PY2: + got = b"''" + else: + got = b"b''" + self.assertEqual(self.body_lines(resp.body), [ + b':ERROR: 0 "Looking for :MISSING_CHECK: START got %s"' % got]) 
self.assertEqual(resp.status_int, 200) mocked_replication_semaphore.acquire.assert_called_once_with(0) mocked_replication_semaphore.release.assert_called_once_with() @@ -421,7 +435,7 @@ class TestReceiver(unittest.TestCase): resp = req.get_response(self.controller) self.assertEqual( self.body_lines(resp.body), - ["Invalid path: /device/partition/junk"]) + [b"Invalid path: /device/partition/junk"]) self.assertEqual(resp.status_int, 400) self.assertFalse(mocked_replication_semaphore.acquire.called) self.assertFalse(mocked_replication_semaphore.release.called) @@ -435,9 +449,12 @@ class TestReceiver(unittest.TestCase): req = swob.Request.blank( '/device/partition', environ={'REQUEST_METHOD': 'SSYNC'}) resp = req.get_response(self.controller) - self.assertEqual( - self.body_lines(resp.body), - [':ERROR: 0 "Looking for :MISSING_CHECK: START got \'\'"']) + if six.PY2: + got = b"''" + else: + got = b"b''" + self.assertEqual(self.body_lines(resp.body), [ + b':ERROR: 0 "Looking for :MISSING_CHECK: START got %s"' % got]) self.assertEqual(resp.status_int, 200) self.assertEqual([], mocks['ismount'].call_args_list) @@ -451,9 +468,9 @@ class TestReceiver(unittest.TestCase): resp = req.get_response(self.controller) self.assertEqual( self.body_lines(resp.body), - ["
<html><h1>Insufficient Storage</h1><p>There "
-             "was not enough space to save the resource. Drive: "
-             "device</p></html>"])
+            [b"<html><h1>Insufficient Storage</h1><p>There "
+             b"was not enough space to save the resource. Drive: "
+             b"device</p></html>
"]) self.assertEqual(resp.status_int, 507) self.assertEqual([mock.call(os.path.join( self.controller._diskfile_router[POLICIES.legacy].devices, @@ -464,9 +481,12 @@ class TestReceiver(unittest.TestCase): req = swob.Request.blank( '/device/partition', environ={'REQUEST_METHOD': 'SSYNC'}) resp = req.get_response(self.controller) - self.assertEqual( - self.body_lines(resp.body), - [':ERROR: 0 "Looking for :MISSING_CHECK: START got \'\'"']) + if six.PY2: + got = b"''" + else: + got = b"b''" + self.assertEqual(self.body_lines(resp.body), [ + b':ERROR: 0 "Looking for :MISSING_CHECK: START got %s"' % got]) self.assertEqual(resp.status_int, 200) self.assertEqual([mock.call(os.path.join( self.controller._diskfile_router[POLICIES.legacy].devices, @@ -474,10 +494,10 @@ class TestReceiver(unittest.TestCase): def test_SSYNC_Exception(self): - class _Wrapper(six.StringIO): + class _Wrapper(six.BytesIO): def __init__(self, value): - six.StringIO.__init__(self, value) + six.BytesIO.__init__(self, value) self.mock_socket = mock.MagicMock() def get_socket(self): @@ -498,8 +518,8 @@ class TestReceiver(unittest.TestCase): resp = req.get_response(self.controller) self.assertEqual( self.body_lines(resp.body), - [':MISSING_CHECK: START', ':MISSING_CHECK: END', - ":ERROR: 0 'Got no headers for Bad content is here'"]) + [b':MISSING_CHECK: START', b':MISSING_CHECK: END', + b":ERROR: 0 'Got no headers for Bad content is here'"]) self.assertEqual(resp.status_int, 200) mock_shutdown_safe.assert_called_once_with( mock_wsgi_input.mock_socket) @@ -509,10 +529,10 @@ class TestReceiver(unittest.TestCase): def test_SSYNC_Exception_Exception(self): - class _Wrapper(six.StringIO): + class _Wrapper(six.BytesIO): def __init__(self, value): - six.StringIO.__init__(self, value) + six.BytesIO.__init__(self, value) self.mock_socket = mock.MagicMock() def get_socket(self): @@ -535,7 +555,7 @@ class TestReceiver(unittest.TestCase): resp = req.get_response(self.controller) self.assertEqual( self.body_lines(resp.body), - [':MISSING_CHECK: START', ':MISSING_CHECK: END']) + [b':MISSING_CHECK: START', b':MISSING_CHECK: END']) self.assertEqual(resp.status_int, 200) mock_shutdown_safe.assert_called_once_with( mock_wsgi_input.mock_socket) @@ -545,15 +565,15 @@ class TestReceiver(unittest.TestCase): def test_MISSING_CHECK_timeout(self): - class _Wrapper(six.StringIO): + class _Wrapper(six.BytesIO): def __init__(self, value): - six.StringIO.__init__(self, value) + six.BytesIO.__init__(self, value) self.mock_socket = mock.MagicMock() def readline(self, sizehint=-1): - line = six.StringIO.readline(self) - if line.startswith('hash'): + line = six.BytesIO.readline(self) + if line.startswith(b'hash'): eventlet.sleep(0.1) return line @@ -578,7 +598,7 @@ class TestReceiver(unittest.TestCase): resp = req.get_response(self.controller) self.assertEqual( self.body_lines(resp.body), - [":ERROR: 408 '0.01 seconds: missing_check line'"]) + [b":ERROR: 408 '0.01 seconds: missing_check line'"]) self.assertEqual(resp.status_int, 200) self.assertTrue(mock_shutdown_safe.called) self.controller.logger.error.assert_called_once_with( @@ -587,15 +607,15 @@ class TestReceiver(unittest.TestCase): def test_MISSING_CHECK_other_exception(self): - class _Wrapper(six.StringIO): + class _Wrapper(six.BytesIO): def __init__(self, value): - six.StringIO.__init__(self, value) + six.BytesIO.__init__(self, value) self.mock_socket = mock.MagicMock() def readline(self, sizehint=-1): - line = six.StringIO.readline(self) - if line.startswith('hash'): + line = 
six.BytesIO.readline(self) + if line.startswith(b'hash'): raise Exception('test exception') return line @@ -620,7 +640,7 @@ class TestReceiver(unittest.TestCase): resp = req.get_response(self.controller) self.assertEqual( self.body_lines(resp.body), - [":ERROR: 0 'test exception'"]) + [b":ERROR: 0 'test exception'"]) self.assertEqual(resp.status_int, 200) self.assertTrue(mock_shutdown_safe.called) self.controller.logger.exception.assert_called_once_with( @@ -638,8 +658,8 @@ class TestReceiver(unittest.TestCase): resp = req.get_response(self.controller) self.assertEqual( self.body_lines(resp.body), - [':MISSING_CHECK: START', ':MISSING_CHECK: END', - ':UPDATES: START', ':UPDATES: END']) + [b':MISSING_CHECK: START', b':MISSING_CHECK: END', + b':UPDATES: START', b':UPDATES: END']) self.assertEqual(resp.status_int, 200) self.assertFalse(self.controller.logger.error.called) self.assertFalse(self.controller.logger.exception.called) @@ -658,11 +678,11 @@ class TestReceiver(unittest.TestCase): resp = req.get_response(self.controller) self.assertEqual( self.body_lines(resp.body), - [':MISSING_CHECK: START', - self.hash1 + ' dm', - self.hash2 + ' dm', - ':MISSING_CHECK: END', - ':UPDATES: START', ':UPDATES: END']) + [b':MISSING_CHECK: START', + (self.hash1 + ' dm').encode('ascii'), + (self.hash2 + ' dm').encode('ascii'), + b':MISSING_CHECK: END', + b':UPDATES: START', b':UPDATES: END']) self.assertEqual(resp.status_int, 200) self.assertFalse(self.controller.logger.error.called) self.assertFalse(self.controller.logger.exception.called) @@ -684,11 +704,11 @@ class TestReceiver(unittest.TestCase): resp = req.get_response(self.controller) self.assertEqual( self.body_lines(resp.body), - [':MISSING_CHECK: START', - self.hash1 + ' dm', - self.hash2 + ' dm', - ':MISSING_CHECK: END', - ':UPDATES: START', ':UPDATES: END']) + [b':MISSING_CHECK: START', + (self.hash1 + ' dm').encode('ascii'), + (self.hash2 + ' dm').encode('ascii'), + b':MISSING_CHECK: END', + b':UPDATES: START', b':UPDATES: END']) self.assertEqual(resp.status_int, 200) self.assertFalse(self.controller.logger.error.called) self.assertFalse(self.controller.logger.exception.called) @@ -717,10 +737,10 @@ class TestReceiver(unittest.TestCase): resp = req.get_response(self.controller) self.assertEqual( self.body_lines(resp.body), - [':MISSING_CHECK: START', - self.hash2 + ' dm', - ':MISSING_CHECK: END', - ':UPDATES: START', ':UPDATES: END']) + [b':MISSING_CHECK: START', + (self.hash2 + ' dm').encode('ascii'), + b':MISSING_CHECK: END', + b':UPDATES: START', b':UPDATES: END']) self.assertEqual(resp.status_int, 200) self.assertFalse(self.controller.logger.error.called) self.assertFalse(self.controller.logger.exception.called) @@ -747,10 +767,10 @@ class TestReceiver(unittest.TestCase): resp = req.get_response(self.controller) self.assertEqual( self.body_lines(resp.body), - [':MISSING_CHECK: START', - 'c2519f265f9633e74f9b2fe3b9bec27d m', - ':MISSING_CHECK: END', - ':UPDATES: START', ':UPDATES: END']) + [b':MISSING_CHECK: START', + b'c2519f265f9633e74f9b2fe3b9bec27d m', + b':MISSING_CHECK: END', + b':UPDATES: START', b':UPDATES: END']) self.assertEqual(resp.status_int, 200) self.assertFalse(self.controller.logger.error.called) self.assertFalse(self.controller.logger.exception.called) @@ -790,9 +810,9 @@ class TestReceiver(unittest.TestCase): resp = req.get_response(self.controller) self.assertEqual( self.body_lines(resp.body), - [':MISSING_CHECK: START', - ':MISSING_CHECK: END', - ':UPDATES: START', ':UPDATES: END']) + [b':MISSING_CHECK: START', + 
b':MISSING_CHECK: END', + b':UPDATES: START', b':UPDATES: END']) self.assertEqual(resp.status_int, 200) self.assertFalse(self.controller.logger.error.called) self.assertFalse(self.controller.logger.exception.called) @@ -833,10 +853,10 @@ class TestReceiver(unittest.TestCase): resp = req.get_response(self.controller) self.assertEqual( self.body_lines(resp.body), - [':MISSING_CHECK: START', - self.hash1 + ' dm', - ':MISSING_CHECK: END', - ':UPDATES: START', ':UPDATES: END']) + [b':MISSING_CHECK: START', + (self.hash1 + ' dm').encode('ascii'), + b':MISSING_CHECK: END', + b':UPDATES: START', b':UPDATES: END']) self.assertEqual(resp.status_int, 200) self.assertFalse(self.controller.logger.error.called) self.assertFalse(self.controller.logger.exception.called) @@ -855,10 +875,10 @@ class TestReceiver(unittest.TestCase): resp = req.get_response(self.controller) self.assertEqual( self.body_lines(resp.body), - [':MISSING_CHECK: START', - self.hash1 + ' dm', - ':MISSING_CHECK: END', - ':UPDATES: START', ':UPDATES: END']) + [b':MISSING_CHECK: START', + (self.hash1 + ' dm').encode('ascii'), + b':MISSING_CHECK: END', + b':UPDATES: START', b':UPDATES: END']) self.assertEqual(resp.status_int, 200) self.assertFalse(self.controller.logger.error.called) self.assertTrue(self.controller.logger.exception.called) @@ -894,10 +914,10 @@ class TestReceiver(unittest.TestCase): resp = req.get_response(self.controller) self.assertEqual( self.body_lines(resp.body), - [':MISSING_CHECK: START', - self.hash2 + ' dm', - ':MISSING_CHECK: END', - ':UPDATES: START', ':UPDATES: END']) + [b':MISSING_CHECK: START', + (self.hash2 + ' dm').encode('ascii'), + b':MISSING_CHECK: END', + b':UPDATES: START', b':UPDATES: END']) self.assertEqual(resp.status_int, 200) self.assertFalse(self.controller.logger.error.called) self.assertFalse(self.controller.logger.exception.called) @@ -928,10 +948,10 @@ class TestReceiver(unittest.TestCase): resp = req.get_response(self.controller) self.assertEqual( self.body_lines(resp.body), - [':MISSING_CHECK: START', - self.hash2 + ' dm', - ':MISSING_CHECK: END', - ':UPDATES: START', ':UPDATES: END']) + [b':MISSING_CHECK: START', + (self.hash2 + ' dm').encode('ascii'), + b':MISSING_CHECK: END', + b':UPDATES: START', b':UPDATES: END']) self.assertEqual(resp.status_int, 200) self.assertFalse(self.controller.logger.error.called) self.assertFalse(self.controller.logger.exception.called) @@ -968,10 +988,10 @@ class TestReceiver(unittest.TestCase): resp = req.get_response(self.controller) self.assertEqual( self.body_lines(resp.body), - [':MISSING_CHECK: START', - self.hash1 + ' d', - ':MISSING_CHECK: END', - ':UPDATES: START', ':UPDATES: END']) + [b':MISSING_CHECK: START', + (self.hash1 + ' d').encode('ascii'), + b':MISSING_CHECK: END', + b':UPDATES: START', b':UPDATES: END']) self.assertEqual(resp.status_int, 200) self.assertFalse(self.controller.logger.error.called) self.assertFalse(self.controller.logger.exception.called) @@ -1008,25 +1028,25 @@ class TestReceiver(unittest.TestCase): resp = req.get_response(self.controller) self.assertEqual( self.body_lines(resp.body), - [':MISSING_CHECK: START', - self.hash1 + ' m', - ':MISSING_CHECK: END', - ':UPDATES: START', ':UPDATES: END']) + [b':MISSING_CHECK: START', + (self.hash1 + ' m').encode('ascii'), + b':MISSING_CHECK: END', + b':UPDATES: START', b':UPDATES: END']) self.assertEqual(resp.status_int, 200) self.assertFalse(self.controller.logger.error.called) self.assertFalse(self.controller.logger.exception.called) def test_UPDATES_timeout(self): - class 
_Wrapper(six.StringIO): + class _Wrapper(six.BytesIO): def __init__(self, value): - six.StringIO.__init__(self, value) + six.BytesIO.__init__(self, value) self.mock_socket = mock.MagicMock() def readline(self, sizehint=-1): - line = six.StringIO.readline(self) - if line.startswith('DELETE'): + line = six.BytesIO.readline(self) + if line.startswith(b'DELETE'): eventlet.sleep(0.1) return line @@ -1053,8 +1073,8 @@ class TestReceiver(unittest.TestCase): resp = req.get_response(self.controller) self.assertEqual( self.body_lines(resp.body), - [':MISSING_CHECK: START', ':MISSING_CHECK: END', - ":ERROR: 408 '0.01 seconds: updates line'"]) + [b':MISSING_CHECK: START', b':MISSING_CHECK: END', + b":ERROR: 408 '0.01 seconds: updates line'"]) self.assertEqual(resp.status_int, 200) mock_shutdown_safe.assert_called_once_with( mock_wsgi_input.mock_socket) @@ -1065,15 +1085,15 @@ class TestReceiver(unittest.TestCase): def test_UPDATES_other_exception(self): - class _Wrapper(six.StringIO): + class _Wrapper(six.BytesIO): def __init__(self, value): - six.StringIO.__init__(self, value) + six.BytesIO.__init__(self, value) self.mock_socket = mock.MagicMock() def readline(self, sizehint=-1): - line = six.StringIO.readline(self) - if line.startswith('DELETE'): + line = six.BytesIO.readline(self) + if line.startswith(b'DELETE'): raise Exception('test exception') return line @@ -1100,8 +1120,8 @@ class TestReceiver(unittest.TestCase): resp = req.get_response(self.controller) self.assertEqual( self.body_lines(resp.body), - [':MISSING_CHECK: START', ':MISSING_CHECK: END', - ":ERROR: 0 'test exception'"]) + [b':MISSING_CHECK: START', b':MISSING_CHECK: END', + b":ERROR: 0 'test exception'"]) self.assertEqual(resp.status_int, 200) mock_shutdown_safe.assert_called_once_with( mock_wsgi_input.mock_socket) @@ -1111,10 +1131,10 @@ class TestReceiver(unittest.TestCase): def test_UPDATES_no_problems_no_hard_disconnect(self): - class _Wrapper(six.StringIO): + class _Wrapper(six.BytesIO): def __init__(self, value): - six.StringIO.__init__(self, value) + six.BytesIO.__init__(self, value) self.mock_socket = mock.MagicMock() def get_socket(self): @@ -1140,8 +1160,8 @@ class TestReceiver(unittest.TestCase): resp = req.get_response(self.controller) self.assertEqual( self.body_lines(resp.body), - [':MISSING_CHECK: START', ':MISSING_CHECK: END', - ':UPDATES: START', ':UPDATES: END']) + [b':MISSING_CHECK: START', b':MISSING_CHECK: END', + b':UPDATES: START', b':UPDATES: END']) self.assertEqual(resp.status_int, 200) self.assertFalse(mock_shutdown_safe.called) self.assertFalse(mock_wsgi_input.mock_socket.close.called) @@ -1157,8 +1177,8 @@ class TestReceiver(unittest.TestCase): resp = req.get_response(self.controller) self.assertEqual( self.body_lines(resp.body), - [':MISSING_CHECK: START', ':MISSING_CHECK: END', - ":ERROR: 0 'need more than 1 value to unpack'"]) + [b':MISSING_CHECK: START', b':MISSING_CHECK: END', + UNPACK_ERR]) self.assertEqual(resp.status_int, 200) self.controller.logger.exception.assert_called_once_with( 'None/device/partition EXCEPTION in ssync.Receiver') @@ -1179,8 +1199,8 @@ class TestReceiver(unittest.TestCase): resp = req.get_response(self.controller) self.assertEqual( self.body_lines(resp.body), - [':MISSING_CHECK: START', ':MISSING_CHECK: END', - ":ERROR: 0 'need more than 1 value to unpack'"]) + [b':MISSING_CHECK: START', b':MISSING_CHECK: END', + UNPACK_ERR]) self.assertEqual(resp.status_int, 200) self.controller.logger.exception.assert_called_once_with( 'None/device/partition EXCEPTION in ssync.Receiver') @@ 
-1196,8 +1216,8 @@ class TestReceiver(unittest.TestCase): resp = req.get_response(self.controller) self.assertEqual( self.body_lines(resp.body), - [':MISSING_CHECK: START', ':MISSING_CHECK: END', - ":ERROR: 0 'Got no headers for DELETE /a/c/o'"]) + [b':MISSING_CHECK: START', b':MISSING_CHECK: END', + b":ERROR: 0 'Got no headers for DELETE /a/c/o'"]) self.assertEqual(resp.status_int, 200) self.controller.logger.exception.assert_called_once_with( 'None/device/partition EXCEPTION in ssync.Receiver') @@ -1214,8 +1234,8 @@ class TestReceiver(unittest.TestCase): resp = req.get_response(self.controller) self.assertEqual( self.body_lines(resp.body), - [':MISSING_CHECK: START', ':MISSING_CHECK: END', - ":ERROR: 0 'need more than 1 value to unpack'"]) + [b':MISSING_CHECK: START', b':MISSING_CHECK: END', + UNPACK_ERR]) self.assertEqual(resp.status_int, 200) self.controller.logger.exception.assert_called_once_with( 'None/device/partition EXCEPTION in ssync.Receiver') @@ -1232,8 +1252,8 @@ class TestReceiver(unittest.TestCase): resp = req.get_response(self.controller) self.assertEqual( self.body_lines(resp.body), - [':MISSING_CHECK: START', ':MISSING_CHECK: END', - ":ERROR: 0 'need more than 1 value to unpack'"]) + [b':MISSING_CHECK: START', b':MISSING_CHECK: END', + UNPACK_ERR]) self.assertEqual(resp.status_int, 200) self.controller.logger.exception.assert_called_once_with( 'None/device/partition EXCEPTION in ssync.Receiver') @@ -1250,8 +1270,8 @@ class TestReceiver(unittest.TestCase): resp = req.get_response(self.controller) self.assertEqual( self.body_lines(resp.body), - [':MISSING_CHECK: START', ':MISSING_CHECK: END', - ':ERROR: 0 "invalid literal for int() with base 10: \'a\'"']) + [b':MISSING_CHECK: START', b':MISSING_CHECK: END', + b':ERROR: 0 "invalid literal for int() with base 10: \'a\'"']) self.assertEqual(resp.status_int, 200) self.controller.logger.exception.assert_called_once_with( 'None/device/partition EXCEPTION in ssync.Receiver') @@ -1268,8 +1288,8 @@ class TestReceiver(unittest.TestCase): resp = req.get_response(self.controller) self.assertEqual( self.body_lines(resp.body), - [':MISSING_CHECK: START', ':MISSING_CHECK: END', - ":ERROR: 0 'DELETE subrequest with content-length /a/c/o'"]) + [b':MISSING_CHECK: START', b':MISSING_CHECK: END', + b":ERROR: 0 'DELETE subrequest with content-length /a/c/o'"]) self.assertEqual(resp.status_int, 200) self.controller.logger.exception.assert_called_once_with( 'None/device/partition EXCEPTION in ssync.Receiver') @@ -1285,8 +1305,8 @@ class TestReceiver(unittest.TestCase): resp = req.get_response(self.controller) self.assertEqual( self.body_lines(resp.body), - [':MISSING_CHECK: START', ':MISSING_CHECK: END', - ":ERROR: 0 'No content-length sent for PUT /a/c/o'"]) + [b':MISSING_CHECK: START', b':MISSING_CHECK: END', + b":ERROR: 0 'No content-length sent for PUT /a/c/o'"]) self.assertEqual(resp.status_int, 200) self.controller.logger.exception.assert_called_once_with( 'None/device/partition EXCEPTION in ssync.Receiver') @@ -1303,8 +1323,8 @@ class TestReceiver(unittest.TestCase): resp = req.get_response(self.controller) self.assertEqual( self.body_lines(resp.body), - [':MISSING_CHECK: START', ':MISSING_CHECK: END', - ":ERROR: 0 'Early termination for PUT /a/c/o'"]) + [b':MISSING_CHECK: START', b':MISSING_CHECK: END', + b":ERROR: 0 'Early termination for PUT /a/c/o'"]) self.assertEqual(resp.status_int, 200) self.controller.logger.exception.assert_called_once_with( 'None/device/partition EXCEPTION in ssync.Receiver') @@ -1332,11 +1352,15 @@ class 
TestReceiver(unittest.TestCase): 'DELETE /a/c/o\r\n\r\n' 'DELETE /a/c/o\r\n\r\n') resp = req.get_response(self.controller) + if six.PY2: + final_line = (b":ERROR: 500 'ERROR: With :UPDATES: " + b"3 failures to 0 successes'") + else: + final_line = (b":ERROR: 500 b'ERROR: With :UPDATES: " + b"3 failures to 0 successes'") self.assertEqual( self.body_lines(resp.body), - [':MISSING_CHECK: START', ':MISSING_CHECK: END', - ":ERROR: 500 'ERROR: With :UPDATES: 3 failures to 0 " - "successes'"]) + [b':MISSING_CHECK: START', b':MISSING_CHECK: END', final_line]) self.assertEqual(resp.status_int, 200) self.assertFalse(self.controller.logger.exception.called) self.assertFalse(self.controller.logger.error.called) @@ -1363,8 +1387,8 @@ class TestReceiver(unittest.TestCase): resp = req.get_response(self.controller) self.assertEqual( self.body_lines(resp.body), - [':MISSING_CHECK: START', ':MISSING_CHECK: END', - ":ERROR: 0 'Too many 4 failures to 0 successes'"]) + [b':MISSING_CHECK: START', b':MISSING_CHECK: END', + b":ERROR: 0 'Too many 4 failures to 0 successes'"]) self.assertEqual(resp.status_int, 200) self.controller.logger.exception.assert_called_once_with( 'None/device/partition EXCEPTION in ssync.Receiver') @@ -1392,11 +1416,16 @@ class TestReceiver(unittest.TestCase): 'DELETE /a/c/o\r\n\r\n' ':UPDATES: END\r\n') resp = req.get_response(self.controller) + if six.PY2: + final_line = (b":ERROR: 500 'ERROR: With :UPDATES: " + b"4 failures to 3 successes'") + else: + final_line = (b":ERROR: 500 b'ERROR: With :UPDATES: " + b"4 failures to 3 successes'") self.assertEqual( self.body_lines(resp.body), - [':MISSING_CHECK: START', ':MISSING_CHECK: END', - ":ERROR: 500 'ERROR: With :UPDATES: 4 failures to 3 " - "successes'"]) + [b':MISSING_CHECK: START', b':MISSING_CHECK: END', + final_line]) self.assertEqual(resp.status_int, 200) self.assertFalse(self.controller.logger.exception.called) self.assertFalse(self.controller.logger.error.called) @@ -1424,8 +1453,8 @@ class TestReceiver(unittest.TestCase): resp = req.get_response(self.controller) self.assertEqual( self.body_lines(resp.body), - [':MISSING_CHECK: START', ':MISSING_CHECK: END', - ":ERROR: 0 'Too many 4 failures to 2 successes'"]) + [b':MISSING_CHECK: START', b':MISSING_CHECK: END', + b":ERROR: 0 'Too many 4 failures to 2 successes'"]) self.assertEqual(resp.status_int, 200) self.controller.logger.exception.assert_called_once_with( 'None/device/partition EXCEPTION in ssync.Receiver') @@ -1462,8 +1491,8 @@ class TestReceiver(unittest.TestCase): resp = req.get_response(self.controller) self.assertEqual( self.body_lines(resp.body), - [':MISSING_CHECK: START', ':MISSING_CHECK: END', - ':UPDATES: START', ':UPDATES: END']) + [b':MISSING_CHECK: START', b':MISSING_CHECK: END', + b':UPDATES: START', b':UPDATES: END']) self.assertEqual(resp.status_int, 200) self.assertFalse(self.controller.logger.exception.called) self.assertFalse(self.controller.logger.error.called) @@ -1525,8 +1554,8 @@ class TestReceiver(unittest.TestCase): resp = req.get_response(self.controller) self.assertEqual( self.body_lines(resp.body), - [':MISSING_CHECK: START', ':MISSING_CHECK: END', - ':UPDATES: START', ':UPDATES: END']) + [b':MISSING_CHECK: START', b':MISSING_CHECK: END', + b':UPDATES: START', b':UPDATES: END']) self.assertEqual(resp.status_int, 200) # verify diskfile has metadata permitted by replication headers @@ -1535,7 +1564,7 @@ class TestReceiver(unittest.TestCase): 'sda1', '0', 'a', 'c', 'o2', POLICIES.default) df.open() for chunk in df.reader(): - self.assertEqual('1', 
chunk) + self.assertEqual(b'1', chunk) expected = {'ETag': 'c4ca4238a0b923820dcc509a6f75849b', 'Content-Length': '1', 'Content-Type': 'text/plain', @@ -1569,8 +1598,8 @@ class TestReceiver(unittest.TestCase): resp = req.get_response(self.controller) self.assertEqual( self.body_lines(resp.body), - [':MISSING_CHECK: START', ':MISSING_CHECK: END', - ':UPDATES: START', ':UPDATES: END']) + [b':MISSING_CHECK: START', b':MISSING_CHECK: END', + b':UPDATES: START', b':UPDATES: END']) self.assertEqual(resp.status_int, 200) self.assertFalse(self.controller.logger.exception.called) self.assertFalse(self.controller.logger.error.called) @@ -1618,8 +1647,8 @@ class TestReceiver(unittest.TestCase): resp = req.get_response(self.controller) self.assertEqual( self.body_lines(resp.body), - [':MISSING_CHECK: START', ':MISSING_CHECK: END', - ':UPDATES: START', ':UPDATES: END']) + [b':MISSING_CHECK: START', b':MISSING_CHECK: END', + b':UPDATES: START', b':UPDATES: END']) self.assertEqual(resp.status_int, 200) self.assertFalse(self.controller.logger.exception.called) self.assertFalse(self.controller.logger.error.called) @@ -1639,7 +1668,7 @@ class TestReceiver(unittest.TestCase): 'X-Backend-Replication-Headers': ( 'content-length x-timestamp x-object-meta-test1 ' 'content-encoding specialty-header')}) - self.assertEqual(req.read_body, '1') + self.assertEqual(req.read_body, b'1') def test_UPDATES_PUT_with_storage_policy_and_node_index(self): # update router post policy patch @@ -1675,8 +1704,8 @@ class TestReceiver(unittest.TestCase): resp = req.get_response(self.controller) self.assertEqual( self.body_lines(resp.body), - [':MISSING_CHECK: START', ':MISSING_CHECK: END', - ':UPDATES: START', ':UPDATES: END']) + [b':MISSING_CHECK: START', b':MISSING_CHECK: END', + b':UPDATES: START', b':UPDATES: END']) self.assertEqual(resp.status_int, 200) self.assertFalse(self.controller.logger.exception.called) self.assertFalse(self.controller.logger.error.called) @@ -1697,7 +1726,7 @@ class TestReceiver(unittest.TestCase): 'X-Backend-Replication-Headers': ( 'content-length x-timestamp x-object-meta-test1 ' 'content-encoding specialty-header')}) - self.assertEqual(req.read_body, '1') + self.assertEqual(req.read_body, b'1') def test_UPDATES_DELETE(self): _DELETE_request = [None] @@ -1720,8 +1749,8 @@ class TestReceiver(unittest.TestCase): resp = req.get_response(self.controller) self.assertEqual( self.body_lines(resp.body), - [':MISSING_CHECK: START', ':MISSING_CHECK: END', - ':UPDATES: START', ':UPDATES: END']) + [b':MISSING_CHECK: START', b':MISSING_CHECK: END', + b':UPDATES: START', b':UPDATES: END']) self.assertEqual(resp.status_int, 200) self.assertFalse(self.controller.logger.exception.called) self.assertFalse(self.controller.logger.error.called) @@ -1756,8 +1785,8 @@ class TestReceiver(unittest.TestCase): resp = req.get_response(self.controller) self.assertEqual( self.body_lines(resp.body), - [':MISSING_CHECK: START', ':MISSING_CHECK: END', - ":ERROR: 0 'Invalid subrequest method BONK'"]) + [b':MISSING_CHECK: START', b':MISSING_CHECK: END', + b":ERROR: 0 'Invalid subrequest method BONK'"]) self.assertEqual(resp.status_int, 200) self.controller.logger.exception.assert_called_once_with( 'None/device/partition EXCEPTION in ssync.Receiver') @@ -1832,8 +1861,8 @@ class TestReceiver(unittest.TestCase): resp = req.get_response(self.controller) self.assertEqual( self.body_lines(resp.body), - [':MISSING_CHECK: START', ':MISSING_CHECK: END', - ':UPDATES: START', ':UPDATES: END']) + [b':MISSING_CHECK: START', b':MISSING_CHECK: END', + 
b':UPDATES: START', b':UPDATES: END']) self.assertEqual(resp.status_int, 200) self.assertFalse(self.controller.logger.exception.called) self.assertFalse(self.controller.logger.error.called) @@ -1854,7 +1883,7 @@ class TestReceiver(unittest.TestCase): 'X-Backend-Replication-Headers': ( 'content-length x-timestamp x-object-meta-test1 ' 'content-encoding specialty-header')}) - self.assertEqual(req.read_body, '1') + self.assertEqual(req.read_body, b'1') req = _requests.pop(0) self.assertEqual(req.method, 'DELETE') self.assertEqual(req.path, '/device/partition/a/c/o2') @@ -1876,7 +1905,7 @@ class TestReceiver(unittest.TestCase): 'X-Backend-Replication': 'True', 'X-Backend-Replication-Headers': ( 'content-length x-timestamp')}) - self.assertEqual(req.read_body, '123') + self.assertEqual(req.read_body, b'123') req = _requests.pop(0) self.assertEqual(req.method, 'PUT') self.assertEqual(req.path, '/device/partition/a/c/o4') @@ -1889,7 +1918,7 @@ class TestReceiver(unittest.TestCase): 'X-Backend-Replication': 'True', 'X-Backend-Replication-Headers': ( 'content-length x-timestamp')}) - self.assertEqual(req.read_body, '1\r\n4') + self.assertEqual(req.read_body, b'1\r\n4') req = _requests.pop(0) self.assertEqual(req.method, 'DELETE') self.assertEqual(req.path, '/device/partition/a/c/o5') @@ -1920,7 +1949,7 @@ class TestReceiver(unittest.TestCase): 'X-Backend-Replication': 'True', 'X-Backend-Replication-Headers': ( 'content-length x-timestamp')}) - self.assertEqual(req.read_body, '1234567') + self.assertEqual(req.read_body, b'1234567') req = _requests.pop(0) self.assertEqual(req.method, 'POST') self.assertEqual(req.path, '/device/partition/a/c/o7') @@ -1952,13 +1981,13 @@ class TestReceiver(unittest.TestCase): request.read_body = request.environ['wsgi.input'].read(2) return swob.HTTPInternalServerError() - class _IgnoreReadlineHint(six.StringIO): + class _IgnoreReadlineHint(six.BytesIO): def __init__(self, value): - six.StringIO.__init__(self, value) + six.BytesIO.__init__(self, value) def readline(self, hint=-1): - return six.StringIO.readline(self) + return six.BytesIO.readline(self) self.controller.PUT = _PUT self.controller.network_chunk_size = 2 @@ -1980,10 +2009,15 @@ class TestReceiver(unittest.TestCase): '1') req.environ['wsgi.input'] = _IgnoreReadlineHint(req.body) resp = req.get_response(self.controller) + if six.PY2: + final_line = (b":ERROR: 500 'ERROR: With :UPDATES: " + b"2 failures to 0 successes'") + else: + final_line = (b":ERROR: 500 b'ERROR: With :UPDATES: " + b"2 failures to 0 successes'") self.assertEqual( self.body_lines(resp.body), - [':MISSING_CHECK: START', ':MISSING_CHECK: END', - ":ERROR: 500 'ERROR: With :UPDATES: 2 failures to 0 successes'"]) + [b':MISSING_CHECK: START', b':MISSING_CHECK: END', final_line]) self.assertEqual(resp.status_int, 200) self.assertFalse(self.controller.logger.exception.called) self.assertFalse(self.controller.logger.error.called) @@ -2001,7 +2035,7 @@ class TestReceiver(unittest.TestCase): 'X-Backend-Replication': 'True', 'X-Backend-Replication-Headers': ( 'content-length x-timestamp')}) - self.assertEqual(req.read_body, '12') + self.assertEqual(req.read_body, b'12') req = _requests.pop(0) self.assertEqual(req.path, '/device/partition/a/c/o2') self.assertEqual(req.content_length, 1) @@ -2013,7 +2047,7 @@ class TestReceiver(unittest.TestCase): 'X-Backend-Replication': 'True', 'X-Backend-Replication-Headers': ( 'content-length x-timestamp')}) - self.assertEqual(req.read_body, '1') + self.assertEqual(req.read_body, b'1') self.assertEqual(_requests, []) 
@@ -2132,7 +2166,7 @@ class TestSsyncRxServer(unittest.TestCase): resp = self.connection.getresponse() self.assertEqual(400, resp.status) error_msg = resp.read() - self.assertIn("Invalid X-Backend-Ssync-Frag-Index 'None'", error_msg) + self.assertIn(b"Invalid X-Backend-Ssync-Frag-Index 'None'", error_msg) resp.close() # sanity check that the receiver did not proceed to missing_check self.assertFalse(mock_missing_check.called) @@ -2154,7 +2188,8 @@ class TestModuleMethods(unittest.TestCase): ts_meta=t_data, ts_data=t_data, ts_ctype=t_data) - self.assertEqual(expected, ssync_receiver.decode_missing(msg)) + self.assertEqual(expected, + ssync_receiver.decode_missing(msg.encode('ascii'))) # hex meta delta encoded as extra message part msg = '%s %s m:%x' % (object_hash, t_data.internal, d_meta_data) @@ -2162,7 +2197,8 @@ class TestModuleMethods(unittest.TestCase): ts_data=t_data, ts_meta=t_meta, ts_ctype=t_data) - self.assertEqual(expected, ssync_receiver.decode_missing(msg)) + self.assertEqual(expected, + ssync_receiver.decode_missing(msg.encode('ascii'))) # hex content type delta encoded in extra message part msg = '%s %s t:%x,m:%x' % (object_hash, t_data.internal, @@ -2172,13 +2208,13 @@ class TestModuleMethods(unittest.TestCase): ts_meta=t_meta, ts_ctype=t_ctype) self.assertEqual( - expected, ssync_receiver.decode_missing(msg)) + expected, ssync_receiver.decode_missing(msg.encode('ascii'))) # order of subparts does not matter msg = '%s %s m:%x,t:%x' % (object_hash, t_data.internal, d_meta_data, d_ctype_data) self.assertEqual( - expected, ssync_receiver.decode_missing(msg)) + expected, ssync_receiver.decode_missing(msg.encode('ascii'))) # hex content type delta may be zero msg = '%s %s t:0,m:%x' % (object_hash, t_data.internal, d_meta_data) @@ -2187,7 +2223,7 @@ class TestModuleMethods(unittest.TestCase): ts_meta=t_meta, ts_ctype=t_data) self.assertEqual( - expected, ssync_receiver.decode_missing(msg)) + expected, ssync_receiver.decode_missing(msg.encode('ascii'))) # unexpected zero delta is tolerated msg = '%s %s m:0' % (object_hash, t_data.internal) @@ -2195,7 +2231,8 @@ class TestModuleMethods(unittest.TestCase): ts_meta=t_data, ts_data=t_data, ts_ctype=t_data) - self.assertEqual(expected, ssync_receiver.decode_missing(msg)) + self.assertEqual(expected, + ssync_receiver.decode_missing(msg.encode('ascii'))) # unexpected subparts in timestamp delta part are tolerated msg = '%s %s c:12345,m:%x,junk' % (object_hash, @@ -2206,7 +2243,7 @@ class TestModuleMethods(unittest.TestCase): ts_data=t_data, ts_ctype=t_data) self.assertEqual( - expected, ssync_receiver.decode_missing(msg)) + expected, ssync_receiver.decode_missing(msg.encode('ascii'))) # extra message parts tolerated msg = '%s %s m:%x future parts' % (object_hash, @@ -2216,7 +2253,8 @@ class TestModuleMethods(unittest.TestCase): ts_meta=t_meta, ts_data=t_data, ts_ctype=t_data) - self.assertEqual(expected, ssync_receiver.decode_missing(msg)) + self.assertEqual(expected, + ssync_receiver.decode_missing(msg.encode('ascii'))) def test_encode_wanted(self): ts_iter = make_timestamp_iter() diff --git a/test/unit/obj/test_ssync_sender.py b/test/unit/obj/test_ssync_sender.py index 38b2792c39..5bbd0c07a2 100644 --- a/test/unit/obj/test_ssync_sender.py +++ b/test/unit/obj/test_ssync_sender.py @@ -57,14 +57,16 @@ class FakeResponse(ssync_sender.SsyncBufferedHTTPResponse): def __init__(self, chunk_body=''): self.status = 200 self.close_called = False + if not six.PY2: + chunk_body = chunk_body.encode('ascii') if chunk_body: - self.fp = six.StringIO( - 
'%x\r\n%s\r\n0\r\n\r\n' % (len(chunk_body), chunk_body)) - self.ssync_response_buffer = '' + self.fp = six.BytesIO( + b'%x\r\n%s\r\n0\r\n\r\n' % (len(chunk_body), chunk_body)) + self.ssync_response_buffer = b'' self.ssync_response_chunk_left = 0 def read(self, *args, **kwargs): - return '' + return b'' def close(self): self.close_called = True @@ -487,9 +489,9 @@ class TestSender(BaseTest): self.assertTrue(success) found_post = found_put = False for chunk in connection.sent: - if 'POST' in chunk: + if b'POST' in chunk: found_post = True - if 'PUT' in chunk: + if b'PUT' in chunk: found_put = True self.assertFalse(found_post) self.assertTrue(found_put) @@ -677,56 +679,56 @@ class TestSender(BaseTest): def test_readline_newline_in_buffer(self): response = FakeResponse() - response.ssync_response_buffer = 'Has a newline already.\r\nOkay.' - self.assertEqual(response.readline(), 'Has a newline already.\r\n') - self.assertEqual(response.ssync_response_buffer, 'Okay.') + response.ssync_response_buffer = b'Has a newline already.\r\nOkay.' + self.assertEqual(response.readline(), b'Has a newline already.\r\n') + self.assertEqual(response.ssync_response_buffer, b'Okay.') def test_readline_buffer_exceeds_network_chunk_size_somehow(self): response = FakeResponse() - response.ssync_response_buffer = '1234567890' - self.assertEqual(response.readline(size=2), '1234567890') - self.assertEqual(response.ssync_response_buffer, '') + response.ssync_response_buffer = b'1234567890' + self.assertEqual(response.readline(size=2), b'1234567890') + self.assertEqual(response.ssync_response_buffer, b'') def test_readline_at_start_of_chunk(self): response = FakeResponse() - response.fp = six.StringIO('2\r\nx\n\r\n') - self.assertEqual(response.readline(), 'x\n') + response.fp = six.BytesIO(b'2\r\nx\n\r\n') + self.assertEqual(response.readline(), b'x\n') def test_readline_chunk_with_extension(self): response = FakeResponse() - response.fp = six.StringIO( - '2 ; chunk=extension\r\nx\n\r\n') - self.assertEqual(response.readline(), 'x\n') + response.fp = six.BytesIO( + b'2 ; chunk=extension\r\nx\n\r\n') + self.assertEqual(response.readline(), b'x\n') def test_readline_broken_chunk(self): response = FakeResponse() - response.fp = six.StringIO('q\r\nx\n\r\n') + response.fp = six.BytesIO(b'q\r\nx\n\r\n') self.assertRaises( exceptions.ReplicationException, response.readline) self.assertTrue(response.close_called) def test_readline_terminated_chunk(self): response = FakeResponse() - response.fp = six.StringIO('b\r\nnot enough') + response.fp = six.BytesIO(b'b\r\nnot enough') self.assertRaises( exceptions.ReplicationException, response.readline) self.assertTrue(response.close_called) def test_readline_all(self): response = FakeResponse() - response.fp = six.StringIO('2\r\nx\n\r\n0\r\n\r\n') - self.assertEqual(response.readline(), 'x\n') - self.assertEqual(response.readline(), '') - self.assertEqual(response.readline(), '') + response.fp = six.BytesIO(b'2\r\nx\n\r\n0\r\n\r\n') + self.assertEqual(response.readline(), b'x\n') + self.assertEqual(response.readline(), b'') + self.assertEqual(response.readline(), b'') def test_readline_all_trailing_not_newline_termed(self): response = FakeResponse() - response.fp = six.StringIO( - '2\r\nx\n\r\n3\r\n123\r\n0\r\n\r\n') - self.assertEqual(response.readline(), 'x\n') - self.assertEqual(response.readline(), '123') - self.assertEqual(response.readline(), '') - self.assertEqual(response.readline(), '') + response.fp = six.BytesIO( + b'2\r\nx\n\r\n3\r\n123\r\n0\r\n\r\n') + 
self.assertEqual(response.readline(), b'x\n') + self.assertEqual(response.readline(), b'123') + self.assertEqual(response.readline(), b'') + self.assertEqual(response.readline(), b'') def test_missing_check_timeout(self): connection = FakeConnection() @@ -761,9 +763,9 @@ class TestSender(BaseTest): available_map, send_map = self.sender.missing_check(connection, response) self.assertEqual( - ''.join(connection.sent), - '17\r\n:MISSING_CHECK: START\r\n\r\n' - '15\r\n:MISSING_CHECK: END\r\n\r\n') + b''.join(connection.sent), + b'17\r\n:MISSING_CHECK: START\r\n\r\n' + b'15\r\n:MISSING_CHECK: END\r\n\r\n') self.assertEqual(send_map, {}) self.assertEqual(available_map, {}) @@ -804,14 +806,14 @@ class TestSender(BaseTest): available_map, send_map = self.sender.missing_check(connection, response) self.assertEqual( - ''.join(connection.sent), - '17\r\n:MISSING_CHECK: START\r\n\r\n' - '33\r\n9d41d8cd98f00b204e9800998ecf0abc 1380144470.00000\r\n\r\n' - '3b\r\n9d41d8cd98f00b204e9800998ecf0def 1380144472.22222 ' - 'm:186a0\r\n\r\n' - '3f\r\n9d41d8cd98f00b204e9800998ecf1def 1380144474.44444 ' - 'm:186a0,t:4\r\n\r\n' - '15\r\n:MISSING_CHECK: END\r\n\r\n') + b''.join(connection.sent), + b'17\r\n:MISSING_CHECK: START\r\n\r\n' + b'33\r\n9d41d8cd98f00b204e9800998ecf0abc 1380144470.00000\r\n\r\n' + b'3b\r\n9d41d8cd98f00b204e9800998ecf0def 1380144472.22222 ' + b'm:186a0\r\n\r\n' + b'3f\r\n9d41d8cd98f00b204e9800998ecf1def 1380144474.44444 ' + b'm:186a0,t:4\r\n\r\n' + b'15\r\n:MISSING_CHECK: END\r\n\r\n') self.assertEqual(send_map, {}) candidates = [('9d41d8cd98f00b204e9800998ecf0abc', dict(ts_data=Timestamp(1380144470.00000))), @@ -853,10 +855,10 @@ class TestSender(BaseTest): exc = err self.assertEqual(str(exc), 'Early disconnect') self.assertEqual( - ''.join(connection.sent), - '17\r\n:MISSING_CHECK: START\r\n\r\n' - '33\r\n9d41d8cd98f00b204e9800998ecf0abc 1380144470.00000\r\n\r\n' - '15\r\n:MISSING_CHECK: END\r\n\r\n') + b''.join(connection.sent), + b'17\r\n:MISSING_CHECK: START\r\n\r\n' + b'33\r\n9d41d8cd98f00b204e9800998ecf0abc 1380144470.00000\r\n\r\n' + b'15\r\n:MISSING_CHECK: END\r\n\r\n') def test_missing_check_far_end_disconnect2(self): def yield_hashes(device, partition, policy, suffixes=None, **kwargs): @@ -888,10 +890,10 @@ class TestSender(BaseTest): exc = err self.assertEqual(str(exc), 'Early disconnect') self.assertEqual( - ''.join(connection.sent), - '17\r\n:MISSING_CHECK: START\r\n\r\n' - '33\r\n9d41d8cd98f00b204e9800998ecf0abc 1380144470.00000\r\n\r\n' - '15\r\n:MISSING_CHECK: END\r\n\r\n') + b''.join(connection.sent), + b'17\r\n:MISSING_CHECK: START\r\n\r\n' + b'33\r\n9d41d8cd98f00b204e9800998ecf0abc 1380144470.00000\r\n\r\n' + b'15\r\n:MISSING_CHECK: END\r\n\r\n') def test_missing_check_far_end_unexpected(self): def yield_hashes(device, partition, policy, suffixes=None, **kwargs): @@ -922,10 +924,10 @@ class TestSender(BaseTest): exc = err self.assertEqual(str(exc), "Unexpected response: 'OH HAI'") self.assertEqual( - ''.join(connection.sent), - '17\r\n:MISSING_CHECK: START\r\n\r\n' - '33\r\n9d41d8cd98f00b204e9800998ecf0abc 1380144470.00000\r\n\r\n' - '15\r\n:MISSING_CHECK: END\r\n\r\n') + b''.join(connection.sent), + b'17\r\n:MISSING_CHECK: START\r\n\r\n' + b'33\r\n9d41d8cd98f00b204e9800998ecf0abc 1380144470.00000\r\n\r\n' + b'15\r\n:MISSING_CHECK: END\r\n\r\n') def test_missing_check_send_map(self): def yield_hashes(device, partition, policy, suffixes=None, **kwargs): @@ -956,10 +958,10 @@ class TestSender(BaseTest): available_map, send_map = self.sender.missing_check(connection, 
response) self.assertEqual( - ''.join(connection.sent), - '17\r\n:MISSING_CHECK: START\r\n\r\n' - '33\r\n9d41d8cd98f00b204e9800998ecf0abc 1380144470.00000\r\n\r\n' - '15\r\n:MISSING_CHECK: END\r\n\r\n') + b''.join(connection.sent), + b'17\r\n:MISSING_CHECK: START\r\n\r\n' + b'33\r\n9d41d8cd98f00b204e9800998ecf0abc 1380144470.00000\r\n\r\n' + b'15\r\n:MISSING_CHECK: END\r\n\r\n') self.assertEqual(send_map, {'0123abc': {'data': True, 'meta': True}}) self.assertEqual(available_map, dict([('9d41d8cd98f00b204e9800998ecf0abc', @@ -1016,9 +1018,9 @@ class TestSender(BaseTest): ':UPDATES: END\r\n')) self.sender.updates(connection, response, {}) self.assertEqual( - ''.join(connection.sent), - '11\r\n:UPDATES: START\r\n\r\n' - 'f\r\n:UPDATES: END\r\n\r\n') + b''.join(connection.sent), + b'11\r\n:UPDATES: START\r\n\r\n' + b'f\r\n:UPDATES: END\r\n\r\n') def test_updates_unexpected_response_lines1(self): connection = FakeConnection() @@ -1034,9 +1036,9 @@ class TestSender(BaseTest): exc = err self.assertEqual(str(exc), "Unexpected response: 'abc'") self.assertEqual( - ''.join(connection.sent), - '11\r\n:UPDATES: START\r\n\r\n' - 'f\r\n:UPDATES: END\r\n\r\n') + b''.join(connection.sent), + b'11\r\n:UPDATES: START\r\n\r\n' + b'f\r\n:UPDATES: END\r\n\r\n') def test_updates_unexpected_response_lines2(self): connection = FakeConnection() @@ -1052,9 +1054,9 @@ class TestSender(BaseTest): exc = err self.assertEqual(str(exc), "Unexpected response: 'abc'") self.assertEqual( - ''.join(connection.sent), - '11\r\n:UPDATES: START\r\n\r\n' - 'f\r\n:UPDATES: END\r\n\r\n') + b''.join(connection.sent), + b'11\r\n:UPDATES: START\r\n\r\n' + b'f\r\n:UPDATES: END\r\n\r\n') def test_updates_is_deleted(self): device = 'dev' @@ -1086,9 +1088,9 @@ class TestSender(BaseTest): # note that the delete line isn't actually sent since we mock # send_delete; send_delete is tested separately. self.assertEqual( - ''.join(connection.sent), - '11\r\n:UPDATES: START\r\n\r\n' - 'f\r\n:UPDATES: END\r\n\r\n') + b''.join(connection.sent), + b'11\r\n:UPDATES: START\r\n\r\n' + b'f\r\n:UPDATES: END\r\n\r\n') def test_update_send_delete(self): device = 'dev' @@ -1113,13 +1115,13 @@ class TestSender(BaseTest): ':UPDATES: END\r\n')) self.sender.updates(connection, response, send_map) self.assertEqual( - ''.join(connection.sent), - '11\r\n:UPDATES: START\r\n\r\n' - '30\r\n' - 'DELETE /a/c/o\r\n' - 'X-Timestamp: %s\r\n\r\n\r\n' - 'f\r\n:UPDATES: END\r\n\r\n' - % delete_timestamp + b''.join(connection.sent), + b'11\r\n:UPDATES: START\r\n\r\n' + b'30\r\n' + b'DELETE /a/c/o\r\n' + b'X-Timestamp: %s\r\n\r\n\r\n' + b'f\r\n:UPDATES: END\r\n\r\n' + % delete_timestamp.encode('ascii') ) def test_updates_put(self): @@ -1166,9 +1168,9 @@ class TestSender(BaseTest): # note that the put line isn't actually sent since we mock send_put; # send_put is tested separately. self.assertEqual( - ''.join(connection.sent), - '11\r\n:UPDATES: START\r\n\r\n' - 'f\r\n:UPDATES: END\r\n\r\n') + b''.join(connection.sent), + b'11\r\n:UPDATES: START\r\n\r\n' + b'f\r\n:UPDATES: END\r\n\r\n') def test_updates_post(self): ts_iter = make_timestamp_iter() @@ -1213,9 +1215,9 @@ class TestSender(BaseTest): # note that the post line isn't actually sent since we mock send_post; # send_post is tested separately. 
         self.assertEqual(
-            ''.join(connection.sent),
-            '11\r\n:UPDATES: START\r\n\r\n'
-            'f\r\n:UPDATES: END\r\n\r\n')
+            b''.join(connection.sent),
+            b'11\r\n:UPDATES: START\r\n\r\n'
+            b'f\r\n:UPDATES: END\r\n\r\n')
 
     def test_updates_put_and_post(self):
         ts_iter = make_timestamp_iter()
@@ -1265,9 +1267,9 @@ class TestSender(BaseTest):
         self.assertIsInstance(df, diskfile.DiskFile)
         self.assertEqual(expected, df.get_metadata())
         self.assertEqual(
-            ''.join(connection.sent),
-            '11\r\n:UPDATES: START\r\n\r\n'
-            'f\r\n:UPDATES: END\r\n\r\n')
+            b''.join(connection.sent),
+            b'11\r\n:UPDATES: START\r\n\r\n'
+            b'f\r\n:UPDATES: END\r\n\r\n')
 
     def test_updates_storage_policy_index(self):
         device = 'dev'
@@ -1328,9 +1330,9 @@ class TestSender(BaseTest):
             exc = err
         self.assertEqual(str(exc), 'Early disconnect')
         self.assertEqual(
-            ''.join(connection.sent),
-            '11\r\n:UPDATES: START\r\n\r\n'
-            'f\r\n:UPDATES: END\r\n\r\n')
+            b''.join(connection.sent),
+            b'11\r\n:UPDATES: START\r\n\r\n'
+            b'f\r\n:UPDATES: END\r\n\r\n')
 
     def test_updates_read_response_unexp_start(self):
         connection = FakeConnection()
@@ -1346,9 +1348,9 @@ class TestSender(BaseTest):
             exc = err
         self.assertEqual(str(exc), "Unexpected response: 'anything else'")
         self.assertEqual(
-            ''.join(connection.sent),
-            '11\r\n:UPDATES: START\r\n\r\n'
-            'f\r\n:UPDATES: END\r\n\r\n')
+            b''.join(connection.sent),
+            b'11\r\n:UPDATES: START\r\n\r\n'
+            b'f\r\n:UPDATES: END\r\n\r\n')
 
     def test_updates_read_response_timeout_end(self):
         connection = FakeConnection()
@@ -1360,7 +1362,7 @@ class TestSender(BaseTest):
 
         def delayed_readline(*args, **kwargs):
             rv = orig_readline(*args, **kwargs)
-            if rv == ':UPDATES: END\r\n':
+            if rv == b':UPDATES: END\r\n':
                 eventlet.sleep(1)
             return rv
 
@@ -1382,9 +1384,9 @@ class TestSender(BaseTest):
             exc = err
         self.assertEqual(str(exc), 'Early disconnect')
         self.assertEqual(
-            ''.join(connection.sent),
-            '11\r\n:UPDATES: START\r\n\r\n'
-            'f\r\n:UPDATES: END\r\n\r\n')
+            b''.join(connection.sent),
+            b'11\r\n:UPDATES: START\r\n\r\n'
+            b'f\r\n:UPDATES: END\r\n\r\n')
 
     def test_updates_read_response_unexp_end(self):
         connection = FakeConnection()
@@ -1400,9 +1402,9 @@ class TestSender(BaseTest):
             exc = err
         self.assertEqual(str(exc), "Unexpected response: 'anything else'")
         self.assertEqual(
-            ''.join(connection.sent),
-            '11\r\n:UPDATES: START\r\n\r\n'
-            'f\r\n:UPDATES: END\r\n\r\n')
+            b''.join(connection.sent),
+            b'11\r\n:UPDATES: START\r\n\r\n'
+            b'f\r\n:UPDATES: END\r\n\r\n')
 
     def test_send_delete_timeout(self):
         connection = FakeConnection()
@@ -1421,11 +1423,11 @@ class TestSender(BaseTest):
         self.sender.send_delete(connection, '/a/c/o',
                                 utils.Timestamp('1381679759.90941'))
         self.assertEqual(
-            ''.join(connection.sent),
-            '30\r\n'
-            'DELETE /a/c/o\r\n'
-            'X-Timestamp: 1381679759.90941\r\n'
-            '\r\n\r\n')
+            b''.join(connection.sent),
+            b'30\r\n'
+            b'DELETE /a/c/o\r\n'
+            b'X-Timestamp: 1381679759.90941\r\n'
+            b'\r\n\r\n')
 
     def test_send_put_initial_timeout(self):
         df = self._make_open_diskfile()
@@ -1465,19 +1467,20 @@ class TestSender(BaseTest):
     def _check_send_put(self, obj_name, meta_value):
         ts_iter = make_timestamp_iter()
         t1 = next(ts_iter)
-        body = 'test'
+        body = b'test'
         extra_metadata = {'Some-Other-Header': 'value',
                           u'Unicode-Meta-Name': meta_value}
         df = self._make_open_diskfile(obj=obj_name, body=body,
                                       timestamp=t1,
                                       extra_metadata=extra_metadata)
         expected = dict(df.get_metadata())
-        expected['body'] = body
+        expected['body'] = body if six.PY2 else body.decode('ascii')
         expected['chunk_size'] = len(body)
         expected['meta'] = meta_value
+        wire_meta = meta_value if six.PY2 else meta_value.encode('utf8')
         path = six.moves.urllib.parse.quote(expected['name'])
         expected['path'] = path
-        expected['length'] = format(145 + len(path) + len(meta_value), 'x')
+        expected['length'] = format(145 + len(path) + len(wire_meta), 'x')
         # .meta file metadata is not included in expected for data only PUT
         t2 = next(ts_iter)
         metadata = {'X-Timestamp': t2.internal, 'X-Object-Meta-Fruit': 'kiwi'}
@@ -1485,8 +1488,7 @@ class TestSender(BaseTest):
         df.open()
         connection = FakeConnection()
         self.sender.send_put(connection, path, df)
-        self.assertEqual(
-            ''.join(connection.sent),
+        expected = (
             '%(length)s\r\n'
             'PUT %(path)s\r\n'
             'Content-Length: %(Content-Length)s\r\n'
             'ETag: %(ETag)s\r\n'
             'Some-Other-Header: value\r\n'
             'Unicode-Meta-Name: %(meta)s\r\n'
             'X-Timestamp: %(X-Timestamp)s\r\n'
             '\r\n'
             '%(chunk_size)s\r\n'
             '%(body)s\r\n' % expected)
+        if not six.PY2:
+            expected = expected.encode('utf8')
+        self.assertEqual(b''.join(connection.sent), expected)
 
     def test_send_put(self):
         self._check_send_put('o', 'meta')
 
     def test_send_put_unicode(self):
-        self._check_send_put(
-            'o_with_caract\xc3\xa8res_like_in_french', 'm\xc3\xa8ta')
+        if six.PY2:
+            self._check_send_put(
+                'o_with_caract\xc3\xa8res_like_in_french', 'm\xc3\xa8ta')
+        else:
+            self._check_send_put(
+                'o_with_caract\u00e8res_like_in_french', 'm\u00e8ta')
 
     def _check_send_post(self, obj_name, meta_value):
         ts_iter = make_timestamp_iter()
@@ -1522,39 +1531,46 @@ class TestSender(BaseTest):
                           'X-Timestamp': ts_1.internal}
         df.write_metadata(newer_metadata)
         path = six.moves.urllib.parse.quote(df.read_metadata()['name'])
-        length = format(61 + len(path) + len(meta_value), 'x')
+        wire_meta = meta_value if six.PY2 else meta_value.encode('utf8')
+        length = format(61 + len(path) + len(wire_meta), 'x')
         connection = FakeConnection()
         with df.open():
             self.sender.send_post(connection, path, df)
         self.assertEqual(
-            ''.join(connection.sent),
-            '%s\r\n'
-            'POST %s\r\n'
-            'X-Object-Meta-Foo: %s\r\n'
-            'X-Timestamp: %s\r\n'
-            '\r\n'
-            '\r\n' % (length, path, meta_value, ts_1.internal))
+            b''.join(connection.sent),
+            b'%s\r\n'
+            b'POST %s\r\n'
+            b'X-Object-Meta-Foo: %s\r\n'
+            b'X-Timestamp: %s\r\n'
+            b'\r\n'
+            b'\r\n' % (length.encode('ascii'), path.encode('ascii'),
+                       wire_meta,
+                       ts_1.internal.encode('ascii')))
 
     def test_send_post(self):
         self._check_send_post('o', 'meta')
 
     def test_send_post_unicode(self):
-        self._check_send_post(
-            'o_with_caract\xc3\xa8res_like_in_french', 'm\xc3\xa8ta')
+        if six.PY2:
+            self._check_send_post(
+                'o_with_caract\xc3\xa8res_like_in_french', 'm\xc3\xa8ta')
+        else:
+            self._check_send_post(
+                'o_with_caract\u00e8res_like_in_french', 'm\u00e8ta')
 
     def test_disconnect_timeout(self):
         connection = FakeConnection()
         connection.send = lambda d: eventlet.sleep(1)
         self.sender.daemon.node_timeout = 0.01
         self.sender.disconnect(connection)
-        self.assertEqual(''.join(connection.sent), '')
+        self.assertEqual(b''.join(connection.sent), b'')
        self.assertTrue(connection.closed)
 
     def test_disconnect(self):
         connection = FakeConnection()
         self.sender.disconnect(connection)
-        self.assertEqual(''.join(connection.sent), '0\r\n\r\n')
+        self.assertEqual(b''.join(connection.sent), b'0\r\n\r\n')
         self.assertTrue(connection.closed)
 
 
@@ -1571,34 +1587,34 @@ class TestModuleMethods(unittest.TestCase):
         # equal data and meta timestamps -> legacy single timestamp string
         expected = '%s %s' % (object_hash, t_data.internal)
         self.assertEqual(
-            expected,
+            expected.encode('ascii'),
             ssync_sender.encode_missing(object_hash, t_data, ts_meta=t_data))
 
         # newer meta timestamp -> hex data delta encoded as extra message part
         expected = '%s %s m:%x' % (object_hash, t_data.internal, d_meta_data)
         self.assertEqual(
-            expected,
+            expected.encode('ascii'),
             ssync_sender.encode_missing(object_hash, t_data, ts_meta=t_meta))
 
         # newer meta timestamp -> hex data delta encoded as extra message part
         # content type timestamp equals data timestamp -> no delta
         expected = '%s %s m:%x' % (object_hash, t_data.internal, d_meta_data)
         self.assertEqual(
-            expected,
+            expected.encode('ascii'),
             ssync_sender.encode_missing(object_hash, t_data, t_meta, t_data))
 
         # content type timestamp newer data timestamp -> delta encoded
         expected = ('%s %s m:%x,t:%x'
                     % (object_hash, t_data.internal, d_meta_data, d_type_data))
         self.assertEqual(
-            expected,
+            expected.encode('ascii'),
             ssync_sender.encode_missing(object_hash, t_data, t_meta, t_type))
 
         # content type timestamp equal to meta timestamp -> delta encoded
         expected = ('%s %s m:%x,t:%x'
                     % (object_hash, t_data.internal, d_meta_data, d_type_data))
         self.assertEqual(
-            expected,
+            expected.encode('ascii'),
             ssync_sender.encode_missing(object_hash, t_data, t_meta, t_type))
 
         # test encode and decode functions invert
diff --git a/tox.ini b/tox.ini
index cd10920533..026ebf52bb 100644
--- a/tox.ini
+++ b/tox.ini
@@ -99,6 +99,9 @@ commands =
     test/unit/obj/test_expirer.py \
    test/unit/obj/test_replicator.py \
    test/unit/obj/test_server.py \
+    test/unit/obj/test_ssync.py \
+    test/unit/obj/test_ssync_receiver.py \
+    test/unit/obj/test_ssync_sender.py \
    test/unit/obj/test_updater.py \
    test/unit/proxy}

From dca658103a63d212bdf9195fcde6038557c13401 Mon Sep 17 00:00:00 2001
From: Clay Gerrard
Date: Thu, 6 Jun 2019 14:25:22 -0500
Subject: [PATCH 6/8] Fix swift with python <2.7.9

Closes-Bug: #1831932
Change-Id: I0d33864f4bffa401082548ee9a52f6eb50cb1f39
---
 swift/common/utils.py          |  3 ++-
 test/unit/common/test_utils.py | 16 ++++++++++++++++
 2 files changed, 18 insertions(+), 1 deletion(-)

diff --git a/swift/common/utils.py b/swift/common/utils.py
index 336f039adf..acd6d161ed 100644
--- a/swift/common/utils.py
+++ b/swift/common/utils.py
@@ -649,7 +649,8 @@ class StrAnonymizer(str):
 
     def __new__(cls, data, method, salt):
         method = method.lower()
-        if method not in hashlib.algorithms_guaranteed:
+        if method not in (hashlib.algorithms if six.PY2 else
+                          hashlib.algorithms_guaranteed):
             raise ValueError('Unsupported hashing method: %r' % method)
         s = str.__new__(cls, data or '')
         s.method = method
diff --git a/test/unit/common/test_utils.py b/test/unit/common/test_utils.py
index bc9b3cd664..9dbbadcf89 100644
--- a/test/unit/common/test_utils.py
+++ b/test/unit/common/test_utils.py
@@ -3565,6 +3565,22 @@ cluster_dfw1 = http://dfw1.host/v1/
         self.assertRaises(ValueError, utils.StrAnonymizer,
                           'Swift is great!', 'sha257', '')
 
+    def test_str_anonymizer_python_maddness(self):
+        with mock.patch('swift.common.utils.hashlib') as mocklib:
+            if six.PY2:
+                # python <2.7.9 doesn't have this algorithms_guaranteed, but
+                # our if block short-circuits before we explode
+                mocklib.algorithms = hashlib.algorithms
+                mocklib.algorithms_guaranteed.sideEffect = AttributeError()
+            else:
+                # python 3 doesn't have this algorithms but our if block
+                # short-circuits before we explode
+                mocklib.algorithms.sideEffect.sideEffect = AttributeError()
+                mocklib.algorithms_guaranteed = hashlib.algorithms_guaranteed
+            utils.StrAnonymizer('Swift is great!', 'sha1', '')
+            self.assertRaises(ValueError, utils.StrAnonymizer,
+                              'Swift is great!', 'sha257', '')
+
     def test_str_format_time(self):
         dt = utils.StrFormatTime(10000.123456789)
         self.assertEqual(str(dt), '10000.123456789')

From b7b92b97b12f2a5c0e1beed59b6ffd4791cec896 Mon Sep 17 00:00:00 2001
From: Tim Burke
Date: Mon, 10 Jun 2019 15:40:58 -0700
Subject: [PATCH 7/8] Bump up minimum cryptography version

...not because we strictly *need* newer cryptography, but rather because
distro packages have moved forward to the point where the 1.x series
won't compile from source and PyPI doesn't have wheels for them.

See changes like:

- https://github.com/pyca/cryptography/commit/6e7ea2e
- https://github.com/pyca/cryptography/commit/f88aea5

Change-Id: I1ff5b61873cf382c7a89873ed4ba6153f299262a
---
 lower-constraints.txt | 2 +-
 requirements.txt      | 2 +-
 2 files changed, 2 insertions(+), 2 deletions(-)

diff --git a/lower-constraints.txt b/lower-constraints.txt
index 7dc11b8200..f8d3049444 100644
--- a/lower-constraints.txt
+++ b/lower-constraints.txt
@@ -12,7 +12,7 @@ chardet==3.0.4
 cliff==2.11.0
 cmd2==0.8.1
 coverage==3.6
-cryptography==1.8.2
+cryptography==2.0.2
 debtcollector==1.19.0
 dnspython==1.14.0
 docutils==0.11
diff --git a/requirements.txt b/requirements.txt
index 3e59ee4c1d..609e540e63 100644
--- a/requirements.txt
+++ b/requirements.txt
@@ -12,5 +12,5 @@ requests>=2.14.2 # Apache-2.0
 six>=1.9.0
 xattr>=0.4;sys_platform!='win32' # MIT
 PyECLib>=1.3.1 # BSD
-cryptography!=2.0,>=1.8.2 # BSD/Apache-2.0
+cryptography>=2.0.2 # BSD/Apache-2.0
 ipaddress>=1.0.16;python_version<'3.3' # PSF

From aa2f1db1b71c1b2bf746b72515e3efd15598b6aa Mon Sep 17 00:00:00 2001
From: Tim Burke
Date: Tue, 11 Jun 2019 14:50:49 -0700
Subject: [PATCH 8/8] Ensure get_*_info keys are native strings

Change-Id: I29bbea48ae38cfabf449a9f4cca1f5f27769405a
---
 swift/proxy/controllers/base.py          | 16 ++++++++++++++--
 test/unit/proxy/controllers/test_base.py | 18 ++++++++++++++----
 2 files changed, 28 insertions(+), 6 deletions(-)

diff --git a/swift/proxy/controllers/base.py b/swift/proxy/controllers/base.py
index 06328def36..568c6ddb49 100644
--- a/swift/proxy/controllers/base.py
+++ b/swift/proxy/controllers/base.py
@@ -594,13 +594,25 @@ def _get_info_from_memcache(app, env, account, container=None):
         info = memcache.get(cache_key)
         if info and six.PY2:
             # Get back to native strings
+            new_info = {}
             for key in info:
+                new_key = key.encode("utf-8") if isinstance(
+                    key, six.text_type) else key
                 if isinstance(info[key], six.text_type):
-                    info[key] = info[key].encode("utf-8")
+                    new_info[new_key] = info[key].encode("utf-8")
                 elif isinstance(info[key], dict):
+                    new_info[new_key] = {}
                     for subkey, value in info[key].items():
+                        new_subkey = subkey.encode("utf-8") if isinstance(
+                            subkey, six.text_type) else subkey
                         if isinstance(value, six.text_type):
-                            info[key][subkey] = value.encode("utf-8")
+                            new_info[new_key][new_subkey] = \
+                                value.encode("utf-8")
+                        else:
+                            new_info[new_key][new_subkey] = value
+                else:
+                    new_info[new_key] = info[key]
+            info = new_info
         if info:
             env.setdefault('swift.infocache', {})[cache_key] = info
         return info
diff --git a/test/unit/proxy/controllers/test_base.py b/test/unit/proxy/controllers/test_base.py
index a1a54c6729..2298dc466a 100644
--- a/test/unit/proxy/controllers/test_base.py
+++ b/test/unit/proxy/controllers/test_base.py
@@ -168,7 +168,8 @@ class FakeCache(FakeMemcache):
         super(FakeCache, self).__init__()
         if pre_cached:
             self.store.update(pre_cached)
-        self.stub = stub
+        # Fake a json roundtrip
+        self.stub = json.loads(json.dumps(stub))
 
     def get(self, key):
         return self.stub or self.store.get(key)
@@ -370,18 +371,27 @@ class TestFuncs(unittest.TestCase):
     def test_get_container_info_cache(self):
         cache_stub = {
             'status': 404, 'bytes': 3333, 'object_count': 10,
-            'versions': u"\u1F4A9"}
+            'versions': u"\U0001F4A9",
+            'meta': {u'some-\N{SNOWMAN}': u'non-ascii meta \U0001F334'}}
         req = Request.blank("/v1/account/cont",
                             environ={'swift.cache': FakeCache(cache_stub)})
         resp = get_container_info(req.environ, FakeApp())
+        self.assertEqual([(k, type(k)) for k in resp],
+                         [(k, str) for k in resp])
         self.assertEqual(resp['storage_policy'], 0)
         self.assertEqual(resp['bytes'], 3333)
         self.assertEqual(resp['object_count'], 10)
         self.assertEqual(resp['status'], 404)
         if six.PY3:
-            self.assertEqual(resp['versions'], u'\u1f4a9')
+            self.assertEqual(resp['versions'], u'\U0001f4a9')
         else:
-            self.assertEqual(resp['versions'], "\xe1\xbd\x8a\x39")
+            self.assertEqual(resp['versions'], "\xf0\x9f\x92\xa9")
+        for subdict in resp.values():
+            if isinstance(subdict, dict):
+                self.assertEqual([(k, type(k), v, type(v))
+                                  for k, v in subdict.items()],
+                                 [(k, str, v, str)
+                                  for k, v in subdict.items()])
 
     def test_get_container_info_env(self):
         cache_key = get_cache_key("account", "cont")
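
Note on PATCH 8/8: the problem it works around is that memcache round-trips
cached info through JSON, and on py2 json.loads() hands back unicode keys and
values where the proxy expects native strings. The following standalone sketch
is illustrative only (the helper name and the sample dict are made up, not part
of the patch); it shows the same kind of normalization outside of Swift:

    import json

    import six


    def to_native_info(info):
        # On py3, json already returns native (unicode) str; nothing to do.
        if not six.PY2:
            return info
        new_info = {}
        for key, value in info.items():
            new_key = key.encode('utf-8') if isinstance(
                key, six.text_type) else key
            if isinstance(value, six.text_type):
                new_info[new_key] = value.encode('utf-8')
            elif isinstance(value, dict):
                # one level of nesting, like the 'meta' sub-dict in the
                # test above
                new_info[new_key] = dict(
                    (k.encode('utf-8') if isinstance(k, six.text_type) else k,
                     v.encode('utf-8') if isinstance(v, six.text_type) else v)
                    for k, v in value.items())
            else:
                # non-string values (ints, None) pass through untouched
                new_info[new_key] = value
        return new_info


    # memcache stores JSON, so cached info comes back like this:
    cached = json.loads(json.dumps(
        {'status': 404, 'meta': {'some-key': 'some-value'}}))
    info = to_native_info(cached)
    # keys and string values are now str on both py2 and py3

This mirrors the new_info loop the patch adds to _get_info_from_memcache(),
which is why the updated test asserts that every key (and every key/value in
nested dicts) comes back as the native str type.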