Fix bulk responses when using xml and Expect: 100-continue

When we fixed bulk response heartbeating in https://review.openstack.org/#/c/510715/,
code review raised the issue of moving the xml declaration down to after
the early-exit clauses. At the time, leaving it where it was didn't seem
to break anything, so it stayed put. However, that review insight turned
out to be correct.

The purpose of the earlier patch was to force eventlet to use chunked
transfer encoding on the response, so that eventlet would not buffer the
whole response; such buffering would defeat the purpose of the heartbeat
responses.
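
As an illustration of the mechanism (a hedged sketch, not code from this
patch; the app name and wiring are invented), a WSGI app under eventlet
that yields its body piecemeal and sends no Content-Length forces
eventlet to respond with Transfer-Encoding: chunked, and setting
eventlet.minimum_write_chunk_size to 0 (which the bulk middleware does)
lets each yielded heartbeat flush as its own chunk:

    import time

    def heartbeat_app(environ, start_response):
        # Ask eventlet not to coalesce small writes; each yielded piece
        # below then goes out as its own chunk.
        environ['eventlet.minimum_write_chunk_size'] = 0

        def body():
            # The xml declaration must be the very first body chunk; the
            # whitespace heartbeats that follow it are legal in xml.
            yield b'<?xml version="1.0" encoding="UTF-8"?>\n'
            for _ in range(3):  # stand-in for slow per-object work
                time.sleep(1)
                yield b' '      # heartbeat chunk keeps the client waiting
            yield b'<delete><response_status>200 OK</response_status></delete>\n'

        # No Content-Length header here, so eventlet's wsgi server falls
        # back to Transfer-Encoding: chunked and can stream each chunk.
        start_response('200 OK', [('Content-Type', 'application/xml')])
        return body()

    # To try it (assuming eventlet is installed):
    #     import eventlet, eventlet.wsgi
    #     eventlet.wsgi.server(eventlet.listen(('', 8080)), heartbeat_app)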

Moving the first line of the body lower (i.e. after the early-exit
checks) allows the headers of a chunked transfer-encoding response to be
fully processed and sent before any body bytes go out. Sending the xml
declaration early instead causes it to get intermingled with the
100-continue protocol exchange, thus breaking the chunked
transfer-encoding semantics.
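
In isolation, the fix below boils down to this staging pattern (a sketch
with hypothetical names, not the middleware code itself): hold the first
body chunk in a variable instead of yielding it eagerly, so that no body
byte hits the wire, and the 100 Continue interim response plus the real
headers can be sent cleanly, until every early-exit check has passed:

    def response_body(out_content_type, work_items, handle_item):
        # Stage the first chunk rather than yielding it immediately.
        if out_content_type and out_content_type.endswith('/xml'):
            to_yield = '<?xml version="1.0" encoding="UTF-8"?>\n'
        else:
            to_yield = ' '
        # Early-exit checks may still raise here; nothing has been
        # written yet, so the handshake and headers stay intact.
        if not out_content_type:
            raise ValueError('no acceptable content type')  # stand-in exit
        for item in work_items:
            yield to_yield  # the first heartbeat carries the declaration
            to_yield = ' '  # later heartbeats are plain whitespace
            handle_item(item)  # the slow per-item work

    # e.g. ''.join(response_body('application/xml', ['a', 'b'], print))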

Closes-Bug: #1819252

Change-Id: I072f4dab21cd7cdb81b9e41072eb504131411dc8
Authored by John Dickinson on 2019-03-15 15:18:36 -07:00; committed by Tim Burke
parent e5eb673ccb
commit adc568c97f
3 changed files with 59 additions and 11 deletions


@@ -380,6 +380,10 @@ class Bulk(object):
             query request.
         """
         last_yield = time()
+        if out_content_type and out_content_type.endswith('/xml'):
+            to_yield = '<?xml version="1.0" encoding="UTF-8"?>\n'
+        else:
+            to_yield = ' '
         separator = ''
         failed_files = []
         resp_dict = {'Response Status': HTTPOk().status,
@@ -390,8 +394,6 @@ class Bulk(object):
         try:
             if not out_content_type:
                 raise HTTPNotAcceptable(request=req)
-            if out_content_type.endswith('/xml'):
-                yield '<?xml version="1.0" encoding="UTF-8"?>\n'
             try:
                 vrs, account, _junk = req.split_path(2, 3, True)
@@ -452,9 +454,9 @@ class Bulk(object):
             for resp, obj_name, retry in pile.asyncstarmap(
                     do_delete, names_to_delete):
                 if last_yield + self.yield_frequency < time():
-                    separator = '\r\n\r\n'
                     last_yield = time()
-                    yield ' '
+                    yield to_yield
+                    to_yield, separator = ' ', '\r\n\r\n'
                 self._process_delete(resp, pile, obj_name,
                                      resp_dict, failed_files,
                                      failed_file_response, retry)
@@ -462,9 +464,9 @@ class Bulk(object):
             # Abort, but drain off the in-progress deletes
             for resp, obj_name, retry in pile:
                 if last_yield + self.yield_frequency < time():
-                    separator = '\r\n\r\n'
                     last_yield = time()
-                    yield ' '
+                    yield to_yield
+                    to_yield, separator = ' ', '\r\n\r\n'
                 # Don't pass in the pile, as we shouldn't retry
                 self._process_delete(
                     resp, None, obj_name, resp_dict,
@@ -508,14 +510,16 @@ class Bulk(object):
                      'Response Body': '', 'Number Files Created': 0}
         failed_files = []
         last_yield = time()
+        if out_content_type and out_content_type.endswith('/xml'):
+            to_yield = '<?xml version="1.0" encoding="UTF-8"?>\n'
+        else:
+            to_yield = ' '
         separator = ''
         containers_accessed = set()
         req.environ['eventlet.minimum_write_chunk_size'] = 0
         try:
             if not out_content_type:
                 raise HTTPNotAcceptable(request=req)
-            if out_content_type.endswith('/xml'):
-                yield '<?xml version="1.0" encoding="UTF-8"?>\n'
             if req.content_length is None and \
                     req.headers.get('transfer-encoding',
@@ -533,9 +537,9 @@ class Bulk(object):
             containers_created = 0
             while True:
                 if last_yield + self.yield_frequency < time():
-                    separator = '\r\n\r\n'
                     last_yield = time()
-                    yield ' '
+                    yield to_yield
+                    to_yield, separator = ' ', '\r\n\r\n'
                 tar_info = next(tar)
                 if tar_info is None or \
                         len(failed_files) >= self.max_failed_extractions:


@@ -1289,3 +1289,15 @@ def requires_policies(f):
         return f(self, *args, **kwargs)
     return wrapper
+
+
+def requires_bulk(f):
+    @functools.wraps(f)
+    def wrapper(*args, **kwargs):
+        if skip or not cluster_info:
+            raise SkipTest('Requires bulk middleware')
+        # Determine whether this cluster has bulk middleware; if not, skip test
+        if not cluster_info.get('bulk_upload', {}):
+            raise SkipTest('Requires bulk middleware')
+        return f(*args, **kwargs)
+    return wrapper


@@ -20,11 +20,12 @@ import json
 import unittest2
 from uuid import uuid4
 import time
+from xml.dom import minidom

 from six.moves import range

 from test.functional import check_response, retry, requires_acls, \
-    requires_policies, SkipTest
+    requires_policies, SkipTest, requires_bulk
 import test.functional as tf
@@ -1646,6 +1647,37 @@ class TestObject(unittest2.TestCase):
         self.assertEqual(resp.status, 200)
         self.assertEqual(body, resp.read())

+    @requires_bulk
+    def test_bulk_delete(self):
+        def bulk_delete(url, token, parsed, conn):
+            # try to bulk delete the object that was created during test setup
+            conn.request('DELETE', '%s/%s/%s?bulk-delete' % (
+                parsed.path, self.container, self.obj),
+                '%s/%s' % (self.container, self.obj),
+                {'X-Auth-Token': token,
+                 'Accept': 'application/xml',
+                 'Expect': '100-continue',
+                 'Content-Type': 'text/plain'})
+            return check_response(conn)
+        resp = retry(bulk_delete)
+        self.assertEqual(resp.status, 200)
+        body = resp.read()
+
+        tree = minidom.parseString(body)
+        self.assertEqual(tree.documentElement.tagName, 'delete')
+
+        errors = tree.getElementsByTagName('errors')
+        self.assertEqual(len(errors), 1)
+        errors = [c.data if c.nodeType == c.TEXT_NODE else c.childNodes[0].data
+                  for c in errors[0].childNodes
+                  if c.nodeType != c.TEXT_NODE or c.data.strip()]
+        self.assertEqual(errors, [])
+
+        final_status = tree.getElementsByTagName('response_status')
+        self.assertEqual(len(final_status), 1)
+        self.assertEqual(len(final_status[0].childNodes), 1)
+        self.assertEqual(final_status[0].childNodes[0].data, '200 OK')
+

 if __name__ == '__main__':
     unittest2.main()