Merge branch 'master' into ec
Conflicts:
	swift/container/server.py
	swift/proxy/controllers/base.py
	swift/proxy/controllers/obj.py
	test/unit/container/test_server.py
	test/unit/obj/test_server.py
	test/unit/proxy/controllers/test_account.py
	test/unit/proxy/controllers/test_container.py
	test/unit/proxy/test_server.py
	tox.ini

Change-Id: I3e3bd3e1186c36b2e4e75a611eaed15afc419712
commit 3895441afd
@@ -4,12 +4,7 @@ SRC_DIR=$(python -c "import os; print os.path.dirname(os.path.realpath('$0'))")
 
 cd ${SRC_DIR}/test/functional
 nosetests --exe $@
-func1=$?
+rvalue=$?
 cd -
 
-cd ${SRC_DIR}/test/functionalnosetests
-nosetests --exe $@
-func2=$?
-cd -
-
-exit $((func1 + func2))
+exit $rvalue
@@ -3,7 +3,7 @@
 SRC_DIR=$(python -c "import os; print os.path.dirname(os.path.realpath('$0'))")
 
 cd ${SRC_DIR}/test/probe
-nosetests --exe
+nosetests --exe $@
 rvalue=$?
 cd -
@@ -14,9 +14,11 @@
 # See the License for the specific language governing permissions and
 # limitations under the License.
 
+import sys
 from swift.common.utils import parse_options
 from swift.common.wsgi import run_wsgi
 
 if __name__ == '__main__':
     conf_file, options = parse_options()
-    run_wsgi(conf_file, 'account-server', default_port=6002, **options)
+    sys.exit(run_wsgi(conf_file,
+                      'account-server', default_port=6002, **options))
@@ -29,6 +29,28 @@ parser.add_option('-w', '--wsgi', action='store_true',
                   help="use wsgi/paste parser instead of readconf")
 
 
+def _context_name(context):
+    return ':'.join((context.object_type.name, context.name))
+
+
+def inspect_app_config(app_config):
+    conf = {}
+    context = app_config.context
+    section_name = _context_name(context)
+    conf[section_name] = context.config()
+    if context.object_type.name == 'pipeline':
+        filters = context.filter_contexts
+        pipeline = []
+        for filter_context in filters:
+            conf[_context_name(filter_context)] = filter_context.config()
+            pipeline.append(filter_context.entry_point_name)
+        app_context = context.app_context
+        conf[_context_name(app_context)] = app_context.config()
+        pipeline.append(app_context.entry_point_name)
+        conf[section_name]['pipeline'] = ' '.join(pipeline)
+    return conf
+
+
 def main():
     options, args = parser.parse_args()
     options = dict(vars(options))

@@ -45,10 +67,7 @@ def main():
         print '# %s' % conf_file
         if options['wsgi']:
             app_config = appconfig(conf_file)
-            context = app_config.context
-            conf = dict([(c.name, c.config()) for c in getattr(
-                context, 'filter_contexts', [])])
-            conf[context.name] = app_config
+            conf = inspect_app_config(app_config)
         else:
            conf = readconf(conf_file)
        flat_vars = {}
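For illustration, the shape of the dict the new inspect_app_config() returns for a short pipeline; the section contents below are invented, not taken from the patch:

    conf = {
        'pipeline:main': {'pipeline': 'healthcheck proxy-server'},
        'filter:healthcheck': {},
        'app:proxy-server': {'bind_port': '8080'},
    }

Each key comes from _context_name(), i.e. the paste object type joined with the section name.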
@@ -14,9 +14,11 @@
 # See the License for the specific language governing permissions and
 # limitations under the License.
 
+import sys
 from swift.common.utils import parse_options
 from swift.common.wsgi import run_wsgi
 
 if __name__ == '__main__':
     conf_file, options = parse_options()
-    run_wsgi(conf_file, 'container-server', default_port=6001, **options)
+    sys.exit(run_wsgi(conf_file,
+                      'container-server', default_port=6001, **options))
@@ -28,8 +28,9 @@ from eventlet import GreenPool, hubs, patcher, Timeout
 from eventlet.pools import Pool
 
 from swift.common import direct_client
-from swiftclient import ClientException, Connection, get_auth
+from swiftclient import Connection, get_auth
 from swift.common.ring import Ring
+from swift.common.exceptions import ClientException
 from swift.common.utils import compute_eta, get_time_units, config_true_value
@@ -14,6 +14,7 @@
 # See the License for the specific language governing permissions and
 # limitations under the License.
 
+import sys
 from swift.common.utils import parse_options
 from swift.common.wsgi import run_wsgi
 from swift.obj import server

@@ -21,5 +22,6 @@ from swift.obj import server
 
 if __name__ == '__main__':
     conf_file, options = parse_options()
-    run_wsgi(conf_file, 'object-server', default_port=6000,
-             global_conf_callback=server.global_conf_callback, **options)
+    sys.exit(run_wsgi(conf_file, 'object-server', default_port=6000,
+                      global_conf_callback=server.global_conf_callback,
+                      **options))
@@ -14,9 +14,10 @@
 # See the License for the specific language governing permissions and
 # limitations under the License.
 
+import sys
 from swift.common.utils import parse_options
 from swift.common.wsgi import run_wsgi
 
 if __name__ == '__main__':
     conf_file, options = parse_options()
-    run_wsgi(conf_file, 'proxy-server', default_port=8080, **options)
+    sys.exit(run_wsgi(conf_file, 'proxy-server', default_port=8080, **options))
@@ -242,20 +242,29 @@ class SwiftRecon(object):
         :param hosts: set of hosts to check. in the format of:
             set([('127.0.0.1', 6020), ('127.0.0.2', 6030)])
         """
-        stats = {}
+        unmounted = {}
+        errors = {}
         recon = Scout("unmounted", self.verbose, self.suppress_errors,
                       self.timeout)
         print "[%s] Getting unmounted drives from %s hosts..." % \
             (self._ptime(), len(hosts))
         for url, response, status in self.pool.imap(recon.scout, hosts):
             if status == 200:
-                stats[url] = []
+                unmounted[url] = []
+                errors[url] = []
                 for i in response:
-                    stats[url].append(i['device'])
-        for host in stats:
+                    if not isinstance(i['mounted'], bool):
+                        errors[url].append(i['device'])
+                    else:
+                        unmounted[url].append(i['device'])
+        for host in unmounted:
             node = urlparse(host).netloc
-            for entry in stats[host]:
+            for entry in unmounted[host]:
                 print "Not mounted: %s on %s" % (entry, node)
+        for host in errors:
+            node = urlparse(host).netloc
+            for entry in errors[host]:
+                print "Device errors: %s on %s" % (entry, node)
         print "=" * 79
 
     def expirer_check(self, hosts):

@@ -655,7 +664,10 @@ class SwiftRecon(object):
             if status == 200:
                 hostusage = []
                 for entry in response:
-                    if entry['mounted']:
+                    if not isinstance(entry['mounted'], bool):
+                        print "-> %s/%s: Error: %s" % (url, entry['device'],
+                                                       entry['mounted'])
+                    elif entry['mounted']:
                         used = float(entry['used']) / float(entry['size']) \
                             * 100.0
                         raw_total_used.append(entry['used'])

@@ -672,7 +684,7 @@ class SwiftRecon(object):
 
         for url in stats:
             if len(stats[url]) > 0:
-                #get per host hi/los for another day
+                # get per host hi/los for another day
                 low = min(stats[url])
                 high = max(stats[url])
                 highs.append(high)

@@ -685,7 +697,7 @@ class SwiftRecon(object):
         if len(lows) > 0:
             low = min(lows)
             high = max(highs)
-            #dist graph shamelessly stolen from https://github.com/gholt/tcod
+            # dist graph shamelessly stolen from https://github.com/gholt/tcod
             print "Distribution Graph:"
             mul = 69.0 / max(percents.values())
             for percent in sorted(percents):
@@ -63,12 +63,10 @@ def main():
         sys.exit(1)
     try:
         asyncs = get_async_count(device_dir, logger)
+        dump_recon_cache({'async_pending': asyncs}, cache_file, logger)
     except Exception:
         logger.exception(
             _('Exception during recon-cron while accessing devices'))
-
-    dump_recon_cache({'async_pending': asyncs}, cache_file, logger)
-
     try:
         os.rmdir(lock_dir)
     except Exception:
@@ -76,3 +76,5 @@ Other
 * `Glance <https://github.com/openstack/glance>`_ - Provides services for discovering, registering, and retrieving virtual machine images (for OpenStack Compute [Nova], for example).
 * `Better Staticweb <https://github.com/CloudVPS/better-staticweb>`_ - Makes swift containers accessible by default.
 * `Swiftsync <https://github.com/stackforge/swiftsync>`_ - A massive syncer between two swift clusters.
+* `Swiftbrowser <https://github.com/cschwede/django-swiftbrowser>`_ - Simple web app to access Openstack Swift.
+* `Swift-account-stats <https://github.com/enovance/swift-account-stats>`_ - Swift-account-stats is a tool to report statistics on Swift usage at tenant and global levels.
@@ -69,7 +69,7 @@
 # eventlet_debug = false
 
 [pipeline:main]
-pipeline = catch_errors healthcheck proxy-logging cache bulk slo ratelimit tempauth container-quotas account-quotas proxy-logging proxy-server
+pipeline = catch_errors gatekeeper healthcheck proxy-logging cache bulk slo ratelimit tempauth container-quotas account-quotas proxy-logging proxy-server
 
 [app:proxy-server]
 use = egg:swift#proxy

@@ -496,6 +496,14 @@ use = egg:swift#bulk
 # max_failed_deletes = 1000
 # yield_frequency = 60
 
+# Note: The following parameter is used during a bulk delete of objects and
+# their container. This would frequently fail because it is very likely
+# that not all replicated objects have been deleted by the time the
+# middleware gets a successful response. The number of retries can be
+# configured; the wait between each retry will be 1.5**retry seconds.
+
+# delete_container_retry_count = 0
+
 # Note: Put after auth in the pipeline.
 [filter:container-quotas]
 use = egg:swift#container_quotas

@@ -509,3 +517,12 @@ use = egg:swift#slo
 
 [filter:account-quotas]
 use = egg:swift#account_quotas
+
+[filter:gatekeeper]
+use = egg:swift#gatekeeper
+# You can override the default log routing for this filter here:
+# set log_name = gatekeeper
+# set log_facility = LOG_LOCAL0
+# set log_level = INFO
+# set log_headers = false
+# set log_address = /dev/log
@@ -86,6 +86,7 @@ paste.filter_factory =
     proxy_logging = swift.common.middleware.proxy_logging:filter_factory
     slo = swift.common.middleware.slo:filter_factory
     list_endpoints = swift.common.middleware.list_endpoints:filter_factory
+    gatekeeper = swift.common.middleware.gatekeeper:filter_factory
 
 [build_sphinx]
 all_files = 1
@@ -25,8 +25,9 @@ from eventlet import GreenPool, sleep, Timeout
 
 import swift.common.db
-from swift.account.server import DATADIR
+from swift.account.backend import AccountBroker
-from swift.common.direct_client import ClientException, \
-    direct_delete_container, direct_delete_object, direct_get_container
+from swift.common.direct_client import direct_delete_container, \
+    direct_delete_object, direct_get_container
+from swift.common.exceptions import ClientException
 from swift.common.ring import Ring
 from swift.common.utils import get_logger, whataremyips, ismount, \
     config_true_value
@@ -37,6 +37,7 @@ from swift.common.swob import HTTPAccepted, HTTPBadRequest, \
     HTTPMethodNotAllowed, HTTPNoContent, HTTPNotFound, \
     HTTPPreconditionFailed, HTTPConflict, Request, \
     HTTPInsufficientStorage, HTTPException
+from swift.common.request_helpers import is_sys_or_user_meta
 
 
 DATADIR = 'accounts'

@@ -152,7 +153,7 @@ class AccountController(object):
             metadata = {}
             metadata.update((key, (value, timestamp))
                             for key, value in req.headers.iteritems()
-                            if key.lower().startswith('x-account-meta-'))
+                            if is_sys_or_user_meta('account', key))
             if metadata:
                 broker.update_metadata(metadata)
             if created:

@@ -258,7 +259,7 @@ class AccountController(object):
         metadata = {}
         metadata.update((key, (value, timestamp))
                         for key, value in req.headers.iteritems()
-                        if key.lower().startswith('x-account-meta-'))
+                        if is_sys_or_user_meta('account', key))
         if metadata:
             broker.update_metadata(metadata)
         return HTTPNoContent(request=req)
@@ -26,13 +26,18 @@ from time import time
 from eventlet import sleep, Timeout
 
 from swift.common.bufferedhttp import http_connect
-from swiftclient import ClientException, json_loads
+from swift.common.exceptions import ClientException
 from swift.common.utils import normalize_timestamp, FileLikeIter
 from swift.common.http import HTTP_NO_CONTENT, HTTP_INSUFFICIENT_STORAGE, \
     is_success, is_server_error
 from swift.common.swob import HeaderKeyDict
+from swift.common.utils import quote
+
+try:
+    import simplejson as json
+except ImportError:
+    import json
 
 
 def _get_direct_account_container(path, stype, node, part,
                                   account, marker=None, limit=None,

@@ -74,7 +79,7 @@ def _get_direct_account_container(path, stype, node, part,
     if resp.status == HTTP_NO_CONTENT:
         resp.read()
         return resp_headers, []
-    return resp_headers, json_loads(resp.read())
+    return resp_headers, json.loads(resp.read())
 
 
 def gen_headers(hdrs_in=None, add_ts=False):
@@ -133,3 +133,57 @@ class ReplicationException(Exception):
 
 class ReplicationLockTimeout(LockTimeout):
     pass
+
+
+class ClientException(Exception):
+
+    def __init__(self, msg, http_scheme='', http_host='', http_port='',
+                 http_path='', http_query='', http_status=0, http_reason='',
+                 http_device='', http_response_content=''):
+        Exception.__init__(self, msg)
+        self.msg = msg
+        self.http_scheme = http_scheme
+        self.http_host = http_host
+        self.http_port = http_port
+        self.http_path = http_path
+        self.http_query = http_query
+        self.http_status = http_status
+        self.http_reason = http_reason
+        self.http_device = http_device
+        self.http_response_content = http_response_content
+
+    def __str__(self):
+        a = self.msg
+        b = ''
+        if self.http_scheme:
+            b += '%s://' % self.http_scheme
+        if self.http_host:
+            b += self.http_host
+        if self.http_port:
+            b += ':%s' % self.http_port
+        if self.http_path:
+            b += self.http_path
+        if self.http_query:
+            b += '?%s' % self.http_query
+        if self.http_status:
+            if b:
+                b = '%s %s' % (b, self.http_status)
+            else:
+                b = str(self.http_status)
+        if self.http_reason:
+            if b:
+                b = '%s %s' % (b, self.http_reason)
+            else:
+                b = '- %s' % self.http_reason
+        if self.http_device:
+            if b:
+                b = '%s: device %s' % (b, self.http_device)
+            else:
+                b = 'device %s' % self.http_device
+        if self.http_response_content:
+            if len(self.http_response_content) <= 60:
+                b += ' %s' % self.http_response_content
+            else:
+                b += ' [first 60 chars of response] %s' \
+                    % self.http_response_content[:60]
+        return b and '%s: %s' % (a, b) or a
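A minimal sketch of how the relocated ClientException renders itself; the host, path, and status below are illustrative only:

    from swift.common.exceptions import ClientException

    exc = ClientException('Container DELETE failed', http_scheme='http',
                          http_host='127.0.0.1', http_port=6001,
                          http_path='/sda1/p/a/c', http_status=409,
                          http_reason='Conflict')
    print exc
    # Container DELETE failed: http://127.0.0.1:6001/sda1/p/a/c 409 Conflict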
@@ -48,6 +48,7 @@ WARNING_WAIT = 3  # seconds to wait after message that may just be a warning
 
 MAX_DESCRIPTORS = 32768
 MAX_MEMORY = (1024 * 1024 * 1024) * 2  # 2 GB
+MAX_PROCS = 8192  # workers * disks * threads_per_disk, can get high
 
 
 def setup_env():

@@ -56,10 +57,22 @@ def setup_env():
     try:
         resource.setrlimit(resource.RLIMIT_NOFILE,
                            (MAX_DESCRIPTORS, MAX_DESCRIPTORS))
     except ValueError:
         print _("WARNING: Unable to modify file descriptor limit. "
                 "Running as non-root?")
+
     try:
         resource.setrlimit(resource.RLIMIT_DATA,
                            (MAX_MEMORY, MAX_MEMORY))
     except ValueError:
-        print _("WARNING: Unable to increase file descriptor limit. "
+        print _("WARNING: Unable to modify memory limit. "
                 "Running as non-root?")
+
+    try:
+        resource.setrlimit(resource.RLIMIT_NPROC,
+                           (MAX_PROCS, MAX_PROCS))
+    except ValueError:
+        print _("WARNING: Unable to modify max process limit. "
+                "Running as non-root?")
 
     # Set PYTHON_EGG_CACHE if it isn't already set
@@ -43,6 +43,13 @@ Remove the quota::
     swift -A http://127.0.0.1:8080/auth/v1.0 -U account:reseller -K secret \
         post -m quota-bytes:
 
+The same limitations apply for the account quotas as for the container quotas.
+
+For example, when uploading an object without a content-length header the proxy
+server doesn't know the final size of the currently uploaded object and the
+upload will be allowed if the current account size is within the quota.
+Due to eventual consistency, further uploads might be possible until the
+account size has been updated.
 """
@@ -17,6 +17,7 @@ import tarfile
 from urllib import quote, unquote
 from xml.sax import saxutils
 from time import time
+from eventlet import sleep
 import zlib
 from swift.common.swob import Request, HTTPBadGateway, \
     HTTPCreated, HTTPBadRequest, HTTPNotFound, HTTPUnauthorized, HTTPOk, \

@@ -24,7 +25,7 @@ from swift.common.swob import Request, HTTPBadGateway, \
     HTTPLengthRequired, HTTPException, HTTPServerError, wsgify
 from swift.common.utils import json, get_logger, register_swift_info
 from swift.common.constraints import check_utf8, MAX_FILE_SIZE
-from swift.common.http import HTTP_UNAUTHORIZED, HTTP_NOT_FOUND
+from swift.common.http import HTTP_UNAUTHORIZED, HTTP_NOT_FOUND, HTTP_CONFLICT
 from swift.common.constraints import MAX_OBJECT_NAME_LENGTH, \
     MAX_CONTAINER_NAME_LENGTH

@@ -186,7 +187,8 @@ class Bulk(object):
 
     def __init__(self, app, conf, max_containers_per_extraction=10000,
                  max_failed_extractions=1000, max_deletes_per_request=10000,
-                 max_failed_deletes=1000, yield_frequency=60):
+                 max_failed_deletes=1000, yield_frequency=60, retry_count=0,
+                 retry_interval=1.5):
         self.app = app
         self.logger = get_logger(conf, log_route='bulk')
         self.max_containers = max_containers_per_extraction

@@ -194,6 +196,8 @@ class Bulk(object):
         self.max_failed_deletes = max_failed_deletes
         self.max_deletes_per_request = max_deletes_per_request
         self.yield_frequency = yield_frequency
+        self.retry_count = retry_count
+        self.retry_interval = retry_interval
 
     def create_container(self, req, container_path):
         """

@@ -302,7 +306,7 @@ class Bulk(object):
 
         if objs_to_delete is None:
             objs_to_delete = self.get_objs_to_delete(req)
-        failed_file_response_type = HTTPBadRequest
+        failed_file_response = {'type': HTTPBadRequest}
         req.environ['eventlet.minimum_write_chunk_size'] = 0
         for obj_to_delete in objs_to_delete:
             if last_yield + self.yield_frequency < time():

@@ -331,26 +335,16 @@ class Bulk(object):
                 new_env['PATH_INFO'] = delete_path
                 del(new_env['wsgi.input'])
                 new_env['CONTENT_LENGTH'] = 0
                 new_env['REQUEST_METHOD'] = 'DELETE'
                 new_env['HTTP_USER_AGENT'] = \
                     '%s %s' % (req.environ.get('HTTP_USER_AGENT'), user_agent)
                 new_env['swift.source'] = swift_source
-                delete_obj_req = Request.blank(delete_path, new_env)
-                resp = delete_obj_req.get_response(self.app)
-                if resp.status_int // 100 == 2:
-                    resp_dict['Number Deleted'] += 1
-                elif resp.status_int == HTTP_NOT_FOUND:
-                    resp_dict['Number Not Found'] += 1
-                elif resp.status_int == HTTP_UNAUTHORIZED:
-                    failed_files.append([quote(obj_name),
-                                         HTTPUnauthorized().status])
-                else:
-                    if resp.status_int // 100 == 5:
-                        failed_file_response_type = HTTPBadGateway
-                    failed_files.append([quote(obj_name), resp.status])
+                self._process_delete(delete_path, obj_name, new_env, resp_dict,
+                                     failed_files, failed_file_response)
 
             if failed_files:
                 resp_dict['Response Status'] = \
-                    failed_file_response_type().status
+                    failed_file_response['type']().status
             elif not (resp_dict['Number Deleted'] or
                       resp_dict['Number Not Found']):
                 resp_dict['Response Status'] = HTTPBadRequest().status

@@ -509,6 +503,29 @@ class Bulk(object):
             yield separator + get_response_body(
                 out_content_type, resp_dict, failed_files)
 
+    def _process_delete(self, delete_path, obj_name, env, resp_dict,
+                        failed_files, failed_file_response, retry=0):
+        delete_obj_req = Request.blank(delete_path, env)
+        resp = delete_obj_req.get_response(self.app)
+        if resp.status_int // 100 == 2:
+            resp_dict['Number Deleted'] += 1
+        elif resp.status_int == HTTP_NOT_FOUND:
+            resp_dict['Number Not Found'] += 1
+        elif resp.status_int == HTTP_UNAUTHORIZED:
+            failed_files.append([quote(obj_name),
+                                 HTTPUnauthorized().status])
+        elif resp.status_int == HTTP_CONFLICT and \
+                self.retry_count > 0 and self.retry_count > retry:
+            retry += 1
+            sleep(self.retry_interval ** retry)
+            self._process_delete(delete_path, obj_name, env, resp_dict,
+                                 failed_files, failed_file_response,
+                                 retry)
+        else:
+            if resp.status_int // 100 == 5:
+                failed_file_response['type'] = HTTPBadGateway
+            failed_files.append([quote(obj_name), resp.status])
+
     @wsgify
     def __call__(self, req):
         extract_type = req.params.get('extract-archive')

@@ -547,6 +564,8 @@ def filter_factory(global_conf, **local_conf):
     max_deletes_per_request = int(conf.get('max_deletes_per_request', 10000))
     max_failed_deletes = int(conf.get('max_failed_deletes', 1000))
     yield_frequency = int(conf.get('yield_frequency', 60))
+    retry_count = int(conf.get('delete_container_retry_count', 0))
+    retry_interval = 1.5
 
     register_swift_info(
         'bulk_upload',

@@ -564,5 +583,7 @@ def filter_factory(global_conf, **local_conf):
             max_failed_extractions=max_failed_extractions,
             max_deletes_per_request=max_deletes_per_request,
             max_failed_deletes=max_failed_deletes,
-            yield_frequency=yield_frequency)
+            yield_frequency=yield_frequency,
+            retry_count=retry_count,
+            retry_interval=retry_interval)
     return bulk_filter
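A worked sketch of the retry pacing introduced above, assuming the default retry_interval of 1.5: the Nth retry sleeps 1.5**N seconds, so with delete_container_retry_count = 3 the worst case adds about seven seconds before _process_delete gives up:

    waits = [1.5 ** r for r in range(1, 4)]
    print waits       # [1.5, 2.25, 3.375]
    print sum(waits)  # 7.125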
@@ -0,0 +1,94 @@
+# Copyright (c) 2010-2012 OpenStack Foundation
+#
+# Licensed under the Apache License, Version 2.0 (the "License");
+# you may not use this file except in compliance with the License.
+# You may obtain a copy of the License at
+#
+#    http://www.apache.org/licenses/LICENSE-2.0
+#
+# Unless required by applicable law or agreed to in writing, software
+# distributed under the License is distributed on an "AS IS" BASIS,
+# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or
+# implied.
+# See the License for the specific language governing permissions and
+# limitations under the License.
+"""
+The ``gatekeeper`` middleware imposes restrictions on the headers that
+may be included with requests and responses. Request headers are filtered
+to remove headers that should never be generated by a client. Similarly,
+response headers are filtered to remove private headers that should
+never be passed to a client.
+
+The ``gatekeeper`` middleware must always be present in the proxy server
+wsgi pipeline. It should be configured close to the start of the pipeline
+specified in ``/etc/swift/proxy-server.conf``, immediately after catch_errors
+and before any other middleware. It is essential that it is configured ahead
+of all middlewares using system metadata in order that they function
+correctly.
+
+If ``gatekeeper`` middleware is not configured in the pipeline then it will be
+automatically inserted close to the start of the pipeline by the proxy server.
+"""
+
+
+from swift.common.swob import wsgify
+from swift.common.utils import get_logger
+from swift.common.request_helpers import remove_items, get_sys_meta_prefix
+import re
+
+"""
+A list of python regular expressions that will be used to
+match against inbound request headers. Matching headers will
+be removed from the request.
+"""
+# Exclude headers starting with a sysmeta prefix.
+# If adding to this list, note that these are regex patterns,
+# so use a trailing $ to constrain to an exact header match
+# rather than prefix match.
+inbound_exclusions = [get_sys_meta_prefix('account'),
+                      get_sys_meta_prefix('container'),
+                      get_sys_meta_prefix('object')]
+# 'x-object-sysmeta' is reserved in anticipation of future support
+# for system metadata being applied to objects
+
+
+"""
+A list of python regular expressions that will be used to
+match against outbound response headers. Matching headers will
+be removed from the response.
+"""
+outbound_exclusions = inbound_exclusions
+
+
+def make_exclusion_test(exclusions):
+    expr = '|'.join(exclusions)
+    test = re.compile(expr, re.IGNORECASE)
+    return test.match
+
+
+class GatekeeperMiddleware(object):
+    def __init__(self, app, conf):
+        self.app = app
+        self.logger = get_logger(conf, log_route='gatekeeper')
+        self.inbound_condition = make_exclusion_test(inbound_exclusions)
+        self.outbound_condition = make_exclusion_test(outbound_exclusions)
+
+    @wsgify
+    def __call__(self, req):
+        removed = remove_items(req.headers, self.inbound_condition)
+        if removed:
+            self.logger.debug('removed request headers: %s' % removed)
+        resp = req.get_response(self.app)
+        removed = remove_items(resp.headers, self.outbound_condition)
+        if removed:
+            self.logger.debug('removed response headers: %s' % removed)
+        return resp
+
+
+def filter_factory(global_conf, **local_conf):
+    conf = global_conf.copy()
+    conf.update(local_conf)
+
+    def gatekeeper_filter(app):
+        return GatekeeperMiddleware(app, conf)
+    return gatekeeper_filter
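A small sketch of the exclusion test the new middleware builds; the header names below are made up for illustration:

    from swift.common.middleware.gatekeeper import (
        make_exclusion_test, inbound_exclusions)

    is_excluded = make_exclusion_test(inbound_exclusions)
    print bool(is_excluded('X-Account-Sysmeta-Foo'))  # True, stripped
    print bool(is_excluded('X-Account-Meta-Color'))   # False, kept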
@@ -41,7 +41,7 @@ class ReconMiddleware(object):
 
     def __init__(self, app, conf, *args, **kwargs):
         self.app = app
-        self.devices = conf.get('devices', '/srv/node/')
+        self.devices = conf.get('devices', '/srv/node')
         swift_dir = conf.get('swift_dir', '/etc/swift')
         self.logger = get_logger(conf, log_route='recon')
         self.recon_cache_path = conf.get('recon_cache_path',

@@ -191,9 +191,12 @@ class ReconMiddleware(object):
         """list unmounted (failed?) devices"""
         mountlist = []
         for entry in os.listdir(self.devices):
-            mpoint = {'device': entry,
-                      'mounted': check_mount(self.devices, entry)}
-            if not mpoint['mounted']:
+            try:
+                mounted = check_mount(self.devices, entry)
+            except OSError as err:
+                mounted = str(err)
+            mpoint = {'device': entry, 'mounted': mounted}
+            if mpoint['mounted'] is not True:
                 mountlist.append(mpoint)
         return mountlist
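Shape of the new get_unmounted() payload (entries invented): an OSError from check_mount() now shows up as its string form under 'mounted' instead of aborting the request:

    mountlist = [
        {'device': 'sdb1', 'mounted': False},
        {'device': 'sdc1',
         'mounted': "[Errno 13] Permission denied: '/srv/node/sdc1'"},
    ]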
@@ -80,6 +80,15 @@ above example::
     https://swift-cluster.example.com/v1/AUTH_account/container/object?
     temp_url_sig=da39a3ee5e6b4b0d3255bfef95601890afd80709&
     temp_url_expires=1323479485&filename=My+Test+File.pdf
 
+If you do not want the object to be downloaded, you can cause
+"Content-Disposition: inline" to be set on the response by adding the "inline"
+parameter to the query string, like so::
+
+    https://swift-cluster.example.com/v1/AUTH_account/container/object?
+    temp_url_sig=da39a3ee5e6b4b0d3255bfef95601890afd80709&
+    temp_url_expires=1323479485&inline
+
 """

@@ -183,14 +192,14 @@ class TempURL(object):
     :param conf: The configuration dict for the middleware.
     """
 
-    def __init__(self, app, conf):
+    def __init__(self, app, conf, methods=('GET', 'HEAD', 'PUT')):
         #: The next WSGI application/filter in the paste.deploy pipeline.
         self.app = app
         #: The filter configuration dict.
         self.conf = conf
 
         #: The methods allowed with Temp URLs.
-        self.methods = conf.get('methods', 'GET HEAD PUT').split()
+        self.methods = methods
 
         headers = DEFAULT_INCOMING_REMOVE_HEADERS
         if 'incoming_remove_headers' in conf:

@@ -474,5 +483,8 @@ def filter_factory(global_conf, **local_conf):
     """Returns the WSGI filter for use with paste.deploy."""
     conf = global_conf.copy()
     conf.update(local_conf)
-    register_swift_info('tempurl')
-    return lambda app: TempURL(app, conf)
+
+    methods = conf.get('methods', 'GET HEAD PUT').split()
+    register_swift_info('tempurl', methods=methods)
+
+    return lambda app: TempURL(app, conf, methods=methods)
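A quick sketch of the relocated methods handling; the conf value below is an example, not from the patch:

    conf = {'methods': 'GET HEAD PUT POST'}
    methods = conf.get('methods', 'GET HEAD PUT').split()
    print methods  # ['GET', 'HEAD', 'PUT', 'POST']

register_swift_info('tempurl', methods=methods) then advertises exactly this whitelist to clients.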
@@ -87,3 +87,109 @@ def split_and_validate_path(request, minsegs=1, maxsegs=None,
     except ValueError as err:
         raise HTTPBadRequest(body=str(err), request=request,
                              content_type='text/plain')
+
+
+def is_user_meta(server_type, key):
+    """
+    Tests if a header key starts with and is longer than the user
+    metadata prefix for given server type.
+
+    :param server_type: type of backend server i.e. [account|container|object]
+    :param key: header key
+    :returns: True if the key satisfies the test, False otherwise
+    """
+    if len(key) <= 8 + len(server_type):
+        return False
+    return key.lower().startswith(get_user_meta_prefix(server_type))
+
+
+def is_sys_meta(server_type, key):
+    """
+    Tests if a header key starts with and is longer than the system
+    metadata prefix for given server type.
+
+    :param server_type: type of backend server i.e. [account|container|object]
+    :param key: header key
+    :returns: True if the key satisfies the test, False otherwise
+    """
+    if len(key) <= 11 + len(server_type):
+        return False
+    return key.lower().startswith(get_sys_meta_prefix(server_type))
+
+
+def is_sys_or_user_meta(server_type, key):
+    """
+    Tests if a header key starts with and is longer than the user or system
+    metadata prefix for given server type.
+
+    :param server_type: type of backend server i.e. [account|container|object]
+    :param key: header key
+    :returns: True if the key satisfies the test, False otherwise
+    """
+    return is_user_meta(server_type, key) or is_sys_meta(server_type, key)
+
+
+def strip_user_meta_prefix(server_type, key):
+    """
+    Removes the user metadata prefix for a given server type from the start
+    of a header key.
+
+    :param server_type: type of backend server i.e. [account|container|object]
+    :param key: header key
+    :returns: stripped header key
+    """
+    return key[len(get_user_meta_prefix(server_type)):]
+
+
+def strip_sys_meta_prefix(server_type, key):
+    """
+    Removes the system metadata prefix for a given server type from the start
+    of a header key.
+
+    :param server_type: type of backend server i.e. [account|container|object]
+    :param key: header key
+    :returns: stripped header key
+    """
+    return key[len(get_sys_meta_prefix(server_type)):]
+
+
+def get_user_meta_prefix(server_type):
+    """
+    Returns the prefix for user metadata headers for given server type.
+
+    This prefix defines the namespace for headers that will be persisted
+    by backend servers.
+
+    :param server_type: type of backend server i.e. [account|container|object]
+    :returns: prefix string for server type's user metadata headers
+    """
+    return 'x-%s-%s-' % (server_type.lower(), 'meta')
+
+
+def get_sys_meta_prefix(server_type):
+    """
+    Returns the prefix for system metadata headers for given server type.
+
+    This prefix defines the namespace for headers that will be persisted
+    by backend servers.
+
+    :param server_type: type of backend server i.e. [account|container|object]
+    :returns: prefix string for server type's system metadata headers
+    """
+    return 'x-%s-%s-' % (server_type.lower(), 'sysmeta')
+
+
+def remove_items(headers, condition):
+    """
+    Removes items from a dict whose keys satisfy
+    the given condition.
+
+    :param headers: a dict of headers
+    :param condition: a function that will be passed the header key as a
+                      single argument and should return True if the header
+                      is to be removed.
+    :returns: a dict, possibly empty, of headers that have been removed
+    """
+    removed = {}
+    keys = filter(condition, headers)
+    removed.update((key, headers.pop(key)) for key in keys)
+    return removed
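Quick illustration of the new helpers; the header names are examples only:

    from swift.common.request_helpers import (
        is_user_meta, is_sys_meta, get_sys_meta_prefix)

    print get_sys_meta_prefix('container')                   # x-container-sysmeta-
    print is_user_meta('account', 'X-Account-Meta-Color')    # True
    print is_sys_meta('account', 'X-Account-Sysmeta-Shard')  # True
    print is_user_meta('account', 'X-Account-Meta-')         # False: no name part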
@@ -760,12 +760,15 @@ class RingBuilder(object):
                               key=lambda x: x['sort_key'])
 
         tier2devs = defaultdict(list)
-        tier2sort_key = defaultdict(list)
+        tier2sort_key = defaultdict(tuple)
+        tier2dev_sort_key = defaultdict(list)
         max_tier_depth = 0
         for dev in available_devs:
-            for tier in tiers_for_dev(dev):
+            dev['tiers'] = tiers_for_dev(dev)
+            for tier in dev['tiers']:
                 tier2devs[tier].append(dev)  # <-- starts out sorted!
-                tier2sort_key[tier].append(dev['sort_key'])
+                tier2dev_sort_key[tier].append(dev['sort_key'])
+                tier2sort_key[tier] = dev['sort_key']
                 if len(tier) > max_tier_depth:
                     max_tier_depth = len(tier)

@@ -778,10 +781,10 @@ class RingBuilder(object):
             new_tiers_list = []
             for tier in tiers_list:
                 child_tiers = list(tier2children_sets[tier])
-                child_tiers.sort(key=lambda t: tier2sort_key[t][-1])
+                child_tiers.sort(key=tier2sort_key.__getitem__)
                 tier2children[tier] = child_tiers
                 tier2children_sort_key[tier] = map(
-                    lambda t: tier2sort_key[t][-1], child_tiers)
+                    tier2sort_key.__getitem__, child_tiers)
                 new_tiers_list.extend(child_tiers)
             tiers_list = new_tiers_list
             depth += 1

@@ -794,7 +797,7 @@ class RingBuilder(object):
             for replica in self._replicas_for_part(part):
                 if replica not in replace_replicas:
                     dev = self.devs[self._replica2part2dev[replica][part]]
-                    for tier in tiers_for_dev(dev):
+                    for tier in dev['tiers']:
                         other_replicas[tier] += 1
                         unique_tiers_by_tier_len[len(tier)].add(tier)

@@ -828,50 +831,45 @@ class RingBuilder(object):
                 # with the largest sort_key value). This lets us
                 # short-circuit the search while still ensuring we get the
                 # right tier.
-                candidate_tiers = sorted(
-                    tier2children[tier],
-                    key=lambda tier: tier2devs[tier][-1]['sort_key'],
-                    reverse=True)
                 candidates_with_replicas = \
                     unique_tiers_by_tier_len[len(tier) + 1]
-                if len(candidate_tiers) > len(candidates_with_replicas):
-                    # There exists at least one tier with 0 other
-                    # replicas, so avoid calling the min() below, which is
-                    # expensive if you've got thousands of drives.
-                    min_replica_count = 0
+                # Find a tier with the minimal replica count and the
+                # hungriest drive among all the tiers with the minimal
+                # replica count.
+                if len(tier2children[tier]) > \
+                        len(candidates_with_replicas):
+                    # There exists at least one tier with 0 other replicas
+                    tier = max((t for t in tier2children[tier]
+                                if other_replicas[t] == 0),
+                               key=tier2sort_key.__getitem__)
                 else:
-                    min_replica_count = min(other_replicas[t]
-                                            for t in candidate_tiers)
-                # Find the first tier with the minimal replica count.
-                # Since they're sorted, this will also have the hungriest
-                # drive among all the tiers with the minimal replica
-                # count.
-                for t in candidate_tiers:
-                    if other_replicas[t] == min_replica_count:
-                        tier = t
-                        break
+                    tier = max(tier2children[tier],
+                               key=lambda t: (-other_replicas[t],
                                              tier2sort_key[t]))
                 depth += 1
             dev = tier2devs[tier][-1]
             dev['parts_wanted'] -= 1
             dev['parts'] += 1
             old_sort_key = dev['sort_key']
             new_sort_key = dev['sort_key'] = self._sort_key_for(dev)
-            for tier in tiers_for_dev(dev):
+            for tier in dev['tiers']:
                 other_replicas[tier] += 1
                 unique_tiers_by_tier_len[len(tier)].add(tier)
 
-                index = bisect.bisect_left(tier2sort_key[tier],
+                index = bisect.bisect_left(tier2dev_sort_key[tier],
                                            old_sort_key)
                 tier2devs[tier].pop(index)
-                tier2sort_key[tier].pop(index)
+                tier2dev_sort_key[tier].pop(index)
 
-                new_index = bisect.bisect_left(tier2sort_key[tier],
+                new_index = bisect.bisect_left(tier2dev_sort_key[tier],
                                                new_sort_key)
                 tier2devs[tier].insert(new_index, dev)
-                tier2sort_key[tier].insert(new_index, new_sort_key)
+                tier2dev_sort_key[tier].insert(new_index, new_sort_key)
+
+                new_last_sort_key = tier2dev_sort_key[tier][-1]
+                tier2sort_key[tier] = new_last_sort_key
 
                 # Now jiggle tier2children values to keep them sorted
-                new_last_sort_key = tier2sort_key[tier][-1]
                 parent_tier = tier[0:-1]
                 index = bisect.bisect_left(
                     tier2children_sort_key[parent_tier],

@@ -891,19 +889,10 @@ class RingBuilder(object):
         # Just to save memory and keep from accidental reuse.
         for dev in self._iter_devs():
             del dev['sort_key']
+            dev.pop('tiers', None)  # May be absent for devices w/o weight
 
     def _sort_key_for(self, dev):
-        # The maximum value of self.parts is 2^32, which is 9 hex
-        # digits wide (0x100000000). Using a width of 16 here gives us
-        # plenty of breathing room; you'd need more than 2^28 replicas
-        # to overflow it.
-        # Since the sort key is a string and therefore an ascii sort applies,
-        # the maximum_parts_wanted + parts_wanted is used so negative
-        # parts_wanted end up sorted above positive parts_wanted.
-        return '%016x.%04x.%04x' % (
-            (self.parts * self.replicas) + dev['parts_wanted'],
-            random.randint(0, 0xFFFF),
-            dev['id'])
+        return (dev['parts_wanted'], random.randint(0, 0xFFFF), dev['id'])
 
     def _build_max_replicas_by_tier(self):
         """
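Why the tuple sort key can drop the old offset-and-zero-pad trick: Python tuples compare element-wise, so a negative parts_wanted sorts below a positive one with no string games. The values below are illustrative:

    keys = [(-2, 0x1a2b, 3), (5, 0x0c0d, 1), (0, 0xffff, 2)]
    print sorted(keys)
    # [(-2, 6699, 3), (0, 65535, 2), (5, 3085, 1)]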
@@ -85,20 +85,44 @@ FALLOCATE_RESERVE = 0
 # Used by hash_path to offer a bit more security when generating hashes for
 # paths. It simply appends this value to all paths; guessing the hash a path
 # will end up with would also require knowing this suffix.
-hash_conf = ConfigParser()
 HASH_PATH_SUFFIX = ''
 HASH_PATH_PREFIX = ''
-if hash_conf.read('/etc/swift/swift.conf'):
-    try:
-        HASH_PATH_SUFFIX = hash_conf.get('swift-hash',
-                                         'swift_hash_path_suffix')
-    except (NoSectionError, NoOptionError):
-        pass
-    try:
-        HASH_PATH_PREFIX = hash_conf.get('swift-hash',
-                                         'swift_hash_path_prefix')
-    except (NoSectionError, NoOptionError):
-        pass
+
+SWIFT_CONF_FILE = '/etc/swift/swift.conf'
+
+
+class InvalidHashPathConfigError(ValueError):
+
+    def __str__(self):
+        return "[swift-hash]: both swift_hash_path_suffix and " \
+            "swift_hash_path_prefix are missing from %s" % SWIFT_CONF_FILE
+
+
+def validate_hash_conf():
+    global HASH_PATH_SUFFIX
+    global HASH_PATH_PREFIX
+    if not HASH_PATH_SUFFIX and not HASH_PATH_PREFIX:
+        hash_conf = ConfigParser()
+        if hash_conf.read(SWIFT_CONF_FILE):
+            try:
+                HASH_PATH_SUFFIX = hash_conf.get('swift-hash',
+                                                 'swift_hash_path_suffix')
+            except (NoSectionError, NoOptionError):
+                pass
+            try:
+                HASH_PATH_PREFIX = hash_conf.get('swift-hash',
+                                                 'swift_hash_path_prefix')
+            except (NoSectionError, NoOptionError):
+                pass
+    if not HASH_PATH_SUFFIX and not HASH_PATH_PREFIX:
+        raise InvalidHashPathConfigError()
+
+
+try:
+    validate_hash_conf()
+except InvalidHashPathConfigError:
+    # could get monkey patched or lazy loaded
+    pass
 
 
 def get_hmac(request_method, path, expires, key):
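A sketch of what the lazy validation buys; the suffix value below is a stand-in such as a test fixture would set:

    import swift.common.utils as utils

    utils.HASH_PATH_SUFFIX = 'changeme'  # e.g. monkey patched by a test
    utils.validate_hash_conf()           # no-op once either value is set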
@@ -239,10 +263,10 @@ def noop_libc_function(*args):
 
 
 def validate_configuration():
-    if not HASH_PATH_SUFFIX and not HASH_PATH_PREFIX:
-        sys.exit("Error: [swift-hash]: both swift_hash_path_suffix "
-                 "and swift_hash_path_prefix are missing "
-                 "from /etc/swift/swift.conf")
+    try:
+        validate_hash_conf()
+    except InvalidHashPathConfigError as e:
+        sys.exit("Error: %s" % e)
 
 
 def load_libc_function(func_name, log_error=True):

@@ -1796,8 +1820,7 @@ def affinity_key_function(affinity_str):
     priority values are what comes after the equals sign.
 
     If affinity_str is empty or all whitespace, then the resulting function
-    will not alter the ordering of the nodes. However, if affinity_str
-    contains an invalid value, then None is returned.
+    will not alter the ordering of the nodes.
 
     :param affinity_str: affinity config value, e.g. "r1z2=3"
                          or "r1=1, r2z1=2, r2z2=2"

@@ -2233,8 +2256,8 @@ class ThreadPool(object):
                 try:
                     result = func(*args, **kwargs)
                     result_queue.put((ev, True, result))
-                except BaseException as err:
-                    result_queue.put((ev, False, err))
+                except BaseException:
+                    result_queue.put((ev, False, sys.exc_info()))
                 finally:
                     work_queue.task_done()
                     os.write(self.wpipe, self.BYTE)

@@ -2264,7 +2287,7 @@ class ThreadPool(object):
                 if success:
                     ev.send(result)
                 else:
-                    ev.send_exception(result)
+                    ev.send_exception(*result)
             finally:
                 queue.task_done()
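A sketch of why the ThreadPool now queues sys.exc_info() rather than the bare exception: the 3-tuple carries the traceback, so ev.send_exception(*result) re-raises with the worker's original stack. The names below are local to the sketch:

    import sys

    def fail():
        raise ValueError('boom')

    try:
        fail()
    except BaseException:
        exc_info = sys.exc_info()  # (type, value, traceback)

    print '%s: %s' % (exc_info[0].__name__, exc_info[1])  # ValueError: boom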
@@ -16,6 +16,7 @@
 """WSGI tools for use with swift."""
 
 import errno
+import inspect
 import os
 import signal
 import time

@@ -109,7 +110,6 @@ def wrap_conf_type(f):
 
 
 appconfig = wrap_conf_type(loadwsgi.appconfig)
-loadapp = wrap_conf_type(loadwsgi.loadapp)
 
 
 def monkey_patch_mimetools():

@@ -197,6 +197,126 @@ class RestrictedGreenPool(GreenPool):
             self.waitall()
 
 
+class PipelineWrapper(object):
+    """
+    This class provides a number of utility methods for
+    modifying the composition of a wsgi pipeline.
+    """
+
+    def __init__(self, context):
+        self.context = context
+
+    def __contains__(self, entry_point_name):
+        try:
+            self.index(entry_point_name)
+            return True
+        except ValueError:
+            return False
+
+    def startswith(self, entry_point_name):
+        """
+        Tests if the pipeline starts with the given entry point name.
+
+        :param entry_point_name: entry point of middleware or app (Swift only)
+
+        :returns: True if entry_point_name is first in pipeline, False
+                  otherwise
+        """
+        try:
+            first_ctx = self.context.filter_contexts[0]
+        except IndexError:
+            first_ctx = self.context.app_context
+        return first_ctx.entry_point_name == entry_point_name
+
+    def _format_for_display(self, ctx):
+        if ctx.entry_point_name:
+            return ctx.entry_point_name
+        elif inspect.isfunction(ctx.object):
+            # ctx.object is a reference to the actual filter_factory
+            # function, so we pretty-print that. It's not the nice short
+            # entry point, but it beats "<unknown>".
+            #
+            # These happen when, instead of something like
+            #
+            #    use = egg:swift#healthcheck
+            #
+            # you have something like this:
+            #
+            #    paste.filter_factory = \
+            #        swift.common.middleware.healthcheck:filter_factory
+            return "%s:%s" % (inspect.getmodule(ctx.object).__name__,
+                              ctx.object.__name__)
+        else:
+            # No idea what this is
+            return "<unknown context>"
+
+    def __str__(self):
+        parts = [self._format_for_display(ctx)
+                 for ctx in self.context.filter_contexts]
+        parts.append(self._format_for_display(self.context.app_context))
+        return " ".join(parts)
+
+    def create_filter(self, entry_point_name):
+        """
+        Creates a context for a filter that can subsequently be added
+        to a pipeline context.
+
+        :param entry_point_name: entry point of the middleware (Swift only)
+
+        :returns: a filter context
+        """
+        spec = 'egg:swift#' + entry_point_name
+        ctx = loadwsgi.loadcontext(loadwsgi.FILTER, spec,
+                                   global_conf=self.context.global_conf)
+        ctx.protocol = 'paste.filter_factory'
+        return ctx
+
+    def index(self, entry_point_name):
+        """
+        Returns the first index of the given entry point name in the pipeline.
+
+        Raises ValueError if the given module is not in the pipeline.
+        """
+        for i, ctx in enumerate(self.context.filter_contexts):
+            if ctx.entry_point_name == entry_point_name:
+                return i
+        raise ValueError("%s is not in pipeline" % (entry_point_name,))
+
+    def insert_filter(self, ctx, index=0):
+        """
+        Inserts a filter module into the pipeline context.
+
+        :param ctx: the context to be inserted
+        :param index: (optional) index at which filter should be
+                      inserted in the list of pipeline filters. Default
+                      is 0, which means the start of the pipeline.
+        """
+        self.context.filter_contexts.insert(index, ctx)
+
+
+def loadcontext(object_type, uri, name=None, relative_to=None,
+                global_conf=None):
+    add_conf_type = wrap_conf_type(lambda x: x)
+    return loadwsgi.loadcontext(object_type, add_conf_type(uri), name=name,
+                                relative_to=relative_to,
+                                global_conf=global_conf)
+
+
+def loadapp(conf_file, global_conf):
+    """
+    Loads a context from a config file, and if the context is a pipeline
+    then presents the app with the opportunity to modify the pipeline.
+    """
+    ctx = loadcontext(loadwsgi.APP, conf_file, global_conf=global_conf)
+    if ctx.object_type.name == 'pipeline':
+        # give app the opportunity to modify the pipeline context
+        app = ctx.app_context.create()
+        func = getattr(app, 'modify_wsgi_pipeline', None)
+        if func:
+            func(PipelineWrapper(ctx))
+    return ctx.create()
+
+
 def run_server(conf, logger, sock, global_conf=None):
     # Ensure TZ environment variable exists to avoid stat('/etc/localtime') on
     # some platforms. This locks in reported times to the timezone in which

@@ -241,6 +361,7 @@ def run_wsgi(conf_path, app_section, *args, **kwargs):
 
     :param conf_path: Path to paste.deploy style configuration file/directory
     :param app_section: App name from conf file to load config from
+    :returns: 0 if successful, nonzero otherwise
     """
     # Load configuration, Set logger and Load request processor
     try:

@@ -248,7 +369,7 @@ def run_wsgi(conf_path, app_section, *args, **kwargs):
             _initrp(conf_path, app_section, *args, **kwargs)
     except ConfigFileError as e:
         print e
-        return
+        return 1
 
     # bind to address and port
     sock = get_socket(conf, default_port=kwargs.get('default_port', 8080))

@@ -273,7 +394,7 @@ def run_wsgi(conf_path, app_section, *args, **kwargs):
     # Useful for profiling [no forks].
     if worker_count == 0:
         run_server(conf, logger, sock, global_conf=global_conf)
-        return
+        return 0
 
     def kill_children(*args):
         """Kills the entire process group."""

@@ -300,7 +421,7 @@ def run_wsgi(conf_path, app_section, *args, **kwargs):
             signal.signal(signal.SIGTERM, signal.SIG_DFL)
             run_server(conf, logger, sock)
             logger.notice('Child %d exiting normally' % os.getpid())
-            return
+            return 0
         else:
             logger.notice('Started child %s' % pid)
             children.append(pid)

@@ -318,6 +439,7 @@ def run_wsgi(conf_path, app_section, *args, **kwargs):
     greenio.shutdown_safe(sock)
     sock.close()
     logger.notice('Exited')
+    return 0
 
 
 class ConfigFileError(Exception):
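A hedged sketch of how an application can use the new loadapp() hook; the pipeline edit shown illustrates the intended use and is not code from this patch:

    class Proxy(object):
        def modify_wsgi_pipeline(self, pipe):
            # 'pipe' is the PipelineWrapper built in loadapp(); pull
            # gatekeeper to the front if the operator left it out.
            if 'gatekeeper' not in pipe:
                ctx = pipe.create_filter('gatekeeper')
                pipe.insert_filter(ctx, index=0)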
@@ -26,7 +26,7 @@ import swift.common.db
 from swift.container.backend import ContainerBroker
 from swift.common.db import DatabaseAlreadyExists
 from swift.common.request_helpers import get_param, get_listing_content_type, \
-    split_and_validate_path
+    split_and_validate_path, is_sys_or_user_meta
 from swift.common.utils import get_logger, hash_path, public, \
     normalize_timestamp, storage_directory, validate_sync_to, \
     config_true_value, json, timing_stats, replication, \

@@ -57,7 +57,7 @@ class ContainerController(object):
 
     def __init__(self, conf, logger=None, storage_policies=None):
         self.logger = logger or get_logger(conf, log_route='container-server')
-        self.root = conf.get('devices', '/srv/node/')
+        self.root = conf.get('devices', '/srv/node')
         self.mount_check = config_true_value(conf.get('mount_check', 'true'))
         self.node_timeout = int(conf.get('node_timeout', 3))
         self.conn_timeout = float(conf.get('conn_timeout', 0.5))

@@ -287,7 +287,7 @@ class ContainerController(object):
             (key, (value, timestamp))
             for key, value in req.headers.iteritems()
             if key.lower() in self.save_headers or
-            key.lower().startswith('x-container-meta-'))
+            is_sys_or_user_meta('container', key))
         if POLICY_INDEX in metadata:
             """ policy specified and pre-validated """
             requested_policy_index = metadata[POLICY_INDEX][0]

@@ -346,7 +346,7 @@ class ContainerController(object):
             (key, value)
             for key, (value, timestamp) in broker.metadata.iteritems()
             if value != '' and (key.lower() in self.save_headers or
-                                key.lower().startswith('x-container-meta-')))
+                                is_sys_or_user_meta('container', key)))
         headers['Content-Type'] = out_content_type
         return HTTPNoContent(request=req, headers=headers, charset='utf-8')

@@ -413,7 +413,7 @@ class ContainerController(object):
         }
         for key, (value, timestamp) in broker.metadata.iteritems():
             if value and (key.lower() in self.save_headers or
-                          key.lower().startswith('x-container-meta-')):
+                          is_sys_or_user_meta('container', key)):
                 resp_headers[key] = value
         ret = Response(request=req, headers=resp_headers,
                        content_type=out_content_type, charset='utf-8')

@@ -497,7 +497,7 @@ class ContainerController(object):
         metadata.update(
             (key, (value, timestamp)) for key, value in req.headers.iteritems()
             if key.lower() in self.save_headers or
-            key.lower().startswith('x-container-meta-'))
+            is_sys_or_user_meta('container', key))
         if metadata:
             """ make sure the policy is not being updated """
             if POLICY_INDEX in metadata and POLICY_INDEX in broker.metadata:
@@ -22,10 +22,10 @@ from eventlet import sleep, Timeout
 
 import swift.common.db
 from swift.container import server as container_server
-from swiftclient import ClientException, delete_object, put_object, \
-    quote
+from swiftclient import delete_object, put_object, quote
 from swift.container.backend import ContainerBroker
 from swift.common.direct_client import direct_get_object
+from swift.common.exceptions import ClientException
 from swift.common.ring import Ring
 from swift.common.utils import audit_location_generator, get_logger, \
     hash_path, config_true_value, validate_sync_to, whataremyips, FileLikeIter
@@ -70,7 +70,7 @@ METADATA_KEY = 'user.swift.metadata'
 DATAFILE_SYSTEM_META = set('content-length content-type deleted etag'.split())
 
 # XXX: fixme, this needs a bit of discussion wrt support of
-# multiple policyh types and the impact on replicator, auditor, etc
+# multiple policy types and the impact on replicator, auditor, etc.
 # for now this works
 DATADIR_REPL = storage_policy.get_by_index(0).data_dir
 ASYNCDIR = 'async_pending'

@@ -135,6 +135,53 @@ def quarantine_renamer(device_path, corrupted_file_path):
     return to_dir
 
 
+def get_ondisk_files(files, datadir):
+    """
+    Given a simple list of files names, determine the files to use.
+
+    :params files: simple set of files as a python list
+    :params datadir: directory name files are from for convenience
+    :returns: a tuple of data, meta and ts (tombstone) files, in one of
+              two states:
+
+    * ts_file is not None, data_file is None, meta_file is None
+
+        object is considered deleted
+
+    * data_file is not None, ts_file is None
+
+        object exists, and optionally has fast-POST metadata
+    """
+    files.sort(reverse=True)
+    data_file = meta_file = ts_file = None
+    for afile in files:
+        assert ts_file is None, "On-disk file search loop" \
+            " continuing after tombstone, %s, encountered" % ts_file
+        assert data_file is None, "On-disk file search loop" \
+            " continuing after data file, %s, encountered" % data_file
+        if afile.endswith('.ts'):
+            meta_file = None
+            ts_file = join(datadir, afile)
+            break
+        if afile.endswith('.meta') and not meta_file:
+            meta_file = join(datadir, afile)
+            # NOTE: this does not exit this loop, since a fast-POST
+            # operation just updates metadata, writing one or more
+            # .meta files, the data file will have an older timestamp,
+            # so we keep looking.
+            continue
+        if afile.endswith('.data'):
+            data_file = join(datadir, afile)
+            break
+    assert ((data_file is None and meta_file is None and ts_file is None)
+            or (ts_file is not None and data_file is None
+                and meta_file is None)
+            or (data_file is not None and ts_file is None)), \
+        "On-disk file search algorithm contract is broken: data_file:" \
+        " %s, meta_file: %s, ts_file: %s" % (data_file, meta_file, ts_file)
+    return data_file, meta_file, ts_file
+
+
 def hash_cleanup_listdir(hsh_path, reclaim_age=ONE_WEEK):
     """
     List contents of a hash directory and clean up any old files.

@@ -153,18 +200,13 @@ def hash_cleanup_listdir(hsh_path, reclaim_age=ONE_WEEK):
         files.remove(files[0])
     elif files:
-        files.sort(reverse=True)
-        meta = data = tomb = None
+        data_file, meta_file, ts_file = get_ondisk_files(files, '')
+        newest_file = data_file or ts_file
         for filename in list(files):
-            if not meta and filename.endswith('.meta'):
-                meta = filename
-            if not data and filename.endswith('.data'):
-                data = filename
-            if not tomb and filename.endswith('.ts'):
-                tomb = filename
-            if (filename < tomb or  # any file older than tomb
-                    filename < data or  # any file older than data
-                    (filename.endswith('.meta') and
-                     filename < meta)):  # old meta
+            if ((filename < newest_file)
+                    or (meta_file
+                        and filename.endswith('.meta')
+                        and filename < meta_file)):
                 os.unlink(join(hsh_path, filename))
                 files.remove(filename)
     return files
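How the new get_ondisk_files() classifies a hash-dir listing; the timestamps below are invented:

    # ['0000000002.00000.meta', '0000000001.00000.data']
    #   -> data_file and meta_file set, ts_file None:
    #      the object exists and carries fast-POST metadata
    # ['0000000003.00000.ts']
    #   -> only ts_file set: the object is deleted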
@@ -381,7 +423,7 @@ class DiskFileManager(object):
     """
     def __init__(self, conf, logger):
         self.logger = logger
-        self.devices = conf.get('devices', '/srv/node/')
+        self.devices = conf.get('devices', '/srv/node')
         self.disk_chunk_size = int(conf.get('disk_chunk_size', 65536))
         self.keep_cache_size = int(conf.get('keep_cache_size', 5242880))
         self.bytes_per_sync = int(conf.get('mb_per_sync', 512)) * 1024 * 1024

@@ -685,8 +727,6 @@ class DiskFileWriter(object):
         :param metadata: dictionary of metadata to be associated with the
                          object
         """
-        if not self._tmppath:
-            raise ValueError("tmppath is unusable.")
         timestamp = normalize_timestamp(metadata['X-Timestamp'])
         metadata['name'] = self._name
         target_path = join(self._datadir, timestamp + self._extension)

@@ -817,10 +857,7 @@ class DiskFileReader(object):
                     yield chunk
             finally:
                 self._suppress_file_closing = False
-                try:
-                    self.close()
-                except DiskFileQuarantined:
-                    pass
+                self.close()
 
     def _drop_cache(self, fd, offset, length):
         """Method for no-oping buffer cache drop method."""

@@ -830,6 +867,8 @@ class DiskFileReader(object):
     def _quarantine(self, msg):
         self._quarantined_dir = self._threadpool.run_in_thread(
             quarantine_renamer, self._device_path, self._data_file)
+        self._logger.warn("Quarantined object %s: %s" % (
+            self._data_file, msg))
         self._logger.increment('quarantines')
         self._quarantine_hook(msg)

@@ -1028,6 +1067,8 @@ class DiskFile(object):
         """
         self._quarantined_dir = self._threadpool.run_in_thread(
             quarantine_renamer, self._device_path, data_file)
+        self._logger.warn("Quarantined object %s: %s" % (
+            data_file, msg))
         self._logger.increment('quarantines')
         return DiskFileQuarantined(msg)

@@ -1052,9 +1093,8 @@ class DiskFile(object):
 
             object exists, and optionally has fast-POST metadata
         """
-        data_file = meta_file = ts_file = None
         try:
-            files = sorted(os.listdir(self._datadir), reverse=True)
+            files = os.listdir(self._datadir)
         except OSError as err:
             if err.errno == errno.ENOTDIR:
                 # If there's a file here instead of a directory, quarantine

@@ -1069,33 +1109,10 @@ class DiskFile(object):
                 raise DiskFileError(
                     "Error listing directory %s: %s" % (self._datadir, err))
+            # The data directory does not exist, so the object cannot exist.
+            fileset = (None, None, None)
+        else:
-            for afile in files:
-                assert ts_file is None, "On-disk file search loop" \
-                    " continuing after tombstone, %s, encountered" % ts_file
-                assert data_file is None, "On-disk file search loop" \
-                    " continuing after data file, %s, encountered" % data_file
-                if afile.endswith('.ts'):
-                    meta_file = None
-                    ts_file = join(self._datadir, afile)
-                    break
-                if afile.endswith('.meta') and not meta_file:
-                    meta_file = join(self._datadir, afile)
-                    # NOTE: this does not exit this loop, since a fast-POST
-                    # operation just updates metadata, writing one or more
-                    # .meta files, the data file will have an older timestamp,
-                    # so we keep looking.
-                    continue
-                if afile.endswith('.data'):
-                    data_file = join(self._datadir, afile)
-                    break
-        assert ((data_file is None and meta_file is None and ts_file is None)
-                or (ts_file is not None and data_file is None
|
||||
and meta_file is None)
|
||||
or (data_file is not None and ts_file is None)), \
|
||||
"On-disk file search algorithm contract is broken: data_file:" \
|
||||
" %s, meta_file: %s, ts_file: %s" % (data_file, meta_file, ts_file)
|
||||
return data_file, meta_file, ts_file
|
||||
fileset = get_ondisk_files(files, self._datadir)
|
||||
return fileset
|
||||
|
||||
def _construct_exception_from_ts_file(self, ts_file):
|
||||
"""
|
||||
|
|
|
@@ -21,6 +21,7 @@ import multiprocessing
 import time
 import traceback
 import socket
+import math
 from datetime import datetime
 from swift import gettext_ as _
 from hashlib import md5

@@ -37,7 +38,7 @@ from swift.common.exceptions import ConnectionTimeout, DiskFileQuarantined, \
     DiskFileDeviceUnavailable, DiskFileExpired
 from swift.obj import ssync_receiver
 from swift.common.http import is_success
-from swift.common.request_helpers import split_and_validate_path
+from swift.common.request_helpers import split_and_validate_path, is_user_meta
 from swift.common.swob import HTTPAccepted, HTTPBadRequest, HTTPCreated, \
     HTTPInternalServerError, HTTPNoContent, HTTPNotFound, HTTPNotModified, \
     HTTPPreconditionFailed, HTTPRequestTimeout, HTTPUnprocessableEntity, \

@@ -338,7 +339,7 @@ class ObjectController(object):
             return HTTPConflict(request=request)
         metadata = {'X-Timestamp': request.headers['x-timestamp']}
         metadata.update(val for val in request.headers.iteritems()
-                        if val[0].startswith('X-Object-Meta-'))
+                        if is_user_meta('object', val[0]))
         for header_key in self.allowed_headers:
             if header_key in request.headers:
                 header_caps = header_key.title()

@@ -351,12 +352,8 @@ class ObjectController(object):
         if orig_delete_at:
             self.delete_at_update('DELETE', orig_delete_at, account,
                                   container, obj, request, device)
-        try:
-            disk_file.write_metadata(metadata)
-        except (DiskFileNotExist, DiskFileQuarantined):
-            return HTTPNotFound(request=request)
-        else:
-            return HTTPAccepted(request=request)
+        disk_file.write_metadata(metadata)
+        return HTTPAccepted(request=request)

     @public
     @timing_stats()

@@ -426,8 +423,7 @@ class ObjectController(object):
                     'Content-Length': str(upload_size),
                 }
                 metadata.update(val for val in request.headers.iteritems()
-                                if val[0].lower().startswith('x-object-meta-')
-                                and len(val[0]) > 14)
+                                if is_user_meta('object', val[0]))
                 for header_key in (
                         request.headers.get('X-Backend-Replication-Headers') or
                         self.allowed_headers):

@@ -446,16 +442,14 @@ class ObjectController(object):
             self.delete_at_update(
                 'DELETE', orig_delete_at, account, container, obj,
                 request, device)
-        if not orig_timestamp or \
-                orig_timestamp < request.headers['x-timestamp']:
-            self.container_update(
-                'PUT', account, container, obj, request,
-                HeaderKeyDict({
-                    'x-size': metadata['Content-Length'],
-                    'x-content-type': metadata['Content-Type'],
-                    'x-timestamp': metadata['X-Timestamp'],
-                    'x-etag': metadata['ETag']}),
-                device)
+        self.container_update(
+            'PUT', account, container, obj, request,
+            HeaderKeyDict({
+                'x-size': metadata['Content-Length'],
+                'x-content-type': metadata['Content-Type'],
+                'x-timestamp': metadata['X-Timestamp'],
+                'x-etag': metadata['ETag']}),
+            device)
         return HTTPCreated(request=request, etag=etag)

     @public

@@ -499,7 +493,7 @@ class ObjectController(object):
             except (OverflowError, ValueError):
                 # catches timestamps before the epoch
                 return HTTPPreconditionFailed(request=request)
-            if if_modified_since and file_x_ts_utc < if_modified_since:
+            if if_modified_since and file_x_ts_utc <= if_modified_since:
                 return HTTPNotModified(request=request)
         keep_cache = (self.keep_cache_private or
                       ('X-Auth-Token' not in request.headers and

@@ -510,11 +504,11 @@ class ObjectController(object):
         response.headers['Content-Type'] = metadata.get(
             'Content-Type', 'application/octet-stream')
         for key, value in metadata.iteritems():
-            if key.lower().startswith('x-object-meta-') or \
+            if is_user_meta('object', key) or \
                     key.lower() in self.allowed_headers:
                 response.headers[key] = value
         response.etag = metadata['ETag']
-        response.last_modified = file_x_ts_flt
+        response.last_modified = math.ceil(file_x_ts_flt)
         response.content_length = obj_size
         try:
             response.content_encoding = metadata[
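
The switch from the raw float to math.ceil here, in the HEAD handler below,
and in the proxy copy path is deliberate: HTTP Last-Modified dates have
whole-second resolution, so truncating a sub-second X-Timestamp down would
advertise a time earlier than the object was actually written, and a client
echoing that date back in If-Unmodified-Since could get a spurious 412.
(It also pairs with the <= change above, which lets an If-Modified-Since
equal to Last-Modified yield 304.) A toy illustration with an invented
timestamp:

    import math
    from email.utils import formatdate

    x_timestamp = 1389297138.249  # invented sub-second timestamp
    print formatdate(x_timestamp, usegmt=True)             # ...:18 GMT (too early)
    print formatdate(math.ceil(x_timestamp), usegmt=True)  # ...:19 GMT (safe)
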
@@ -551,12 +545,12 @@ class ObjectController(object):
         response.headers['Content-Type'] = metadata.get(
             'Content-Type', 'application/octet-stream')
         for key, value in metadata.iteritems():
-            if key.lower().startswith('x-object-meta-') or \
+            if is_user_meta('object', key) or \
                     key.lower() in self.allowed_headers:
                 response.headers[key] = value
         response.etag = metadata['ETag']
         ts = metadata['X-Timestamp']
-        response.last_modified = float(ts)
+        response.last_modified = math.ceil(float(ts))
         # Needed for container sync feature
         response.headers['X-Timestamp'] = ts
         response.content_length = int(metadata['Content-Length'])

@@ -601,13 +595,21 @@ class ObjectController(object):
             response_class = HTTPNoContent
         else:
             response_class = HTTPConflict
-        if 'x-if-delete-at' in request.headers and \
-                int(request.headers['x-if-delete-at']) != \
-                int(orig_metadata.get('X-Delete-At') or 0):
-            return HTTPPreconditionFailed(
-                request=request,
-                body='X-If-Delete-At and X-Delete-At do not match')
         orig_delete_at = int(orig_metadata.get('X-Delete-At') or 0)
+        try:
+            req_if_delete_at_val = request.headers['x-if-delete-at']
+            req_if_delete_at = int(req_if_delete_at_val)
+        except KeyError:
+            pass
+        except ValueError:
+            return HTTPBadRequest(
+                request=request,
+                body='Bad X-If-Delete-At header value')
+        else:
+            if orig_delete_at != req_if_delete_at:
+                return HTTPPreconditionFailed(
+                    request=request,
+                    body='X-If-Delete-At and X-Delete-At do not match')
         if orig_delete_at:
             self.delete_at_update('DELETE', orig_delete_at, account,
                                   container, obj, request, device)

@@ -642,7 +644,8 @@ class ObjectController(object):
     @public
     @replication
     @timing_stats(sample_rate=0.1)
-    def REPLICATION(self, request):
+    def REPLICATION(self, request, obj_dir):
+        # TODO(torgomatic): do something with obj_dir
         return Response(app_iter=ssync_receiver.Receiver(self, request)())

     def __call__(self, env, start_response):

@@ -65,7 +65,10 @@ class Receiver(object):
         self.device = None
         self.partition = None
         self.fp = None
-        self.disconnect = False
+        # We default to dropping the connection in case there is any exception
+        # raised during processing because otherwise the sender could send for
+        # quite some time before realizing it was all in vain.
+        self.disconnect = True

     def __call__(self):
         """

@@ -100,6 +103,9 @@ class Receiver(object):
                 yield data
             for data in self.updates():
                 yield data
+            # We didn't raise an exception, so end the request
+            # normally.
+            self.disconnect = False
         finally:
             if self.app.replication_semaphore:
                 self.app.replication_semaphore.release()

@@ -288,10 +294,6 @@ class Receiver(object):
             raise Exception('Looking for :UPDATES: START got %r' % line[:1024])
         successes = 0
         failures = 0
-        # We default to dropping the connection in case there is any exception
-        # raised during processing because otherwise the sender could send for
-        # quite some time before realizing it was all in vain.
-        self.disconnect = True
         while True:
             with exceptions.MessageTimeout(
                     self.app.client_timeout, 'updates line'):

@@ -376,8 +378,6 @@ class Receiver(object):
             raise swob.HTTPInternalServerError(
                 'ERROR: With :UPDATES: %d failures to %d successes' %
                 (failures, successes))
-        # We didn't raise an exception, so end the request normally.
-        self.disconnect = False
         yield ':UPDATES: START\r\n'
         yield ':UPDATES: END\r\n'
         for data in self._ensure_flush():

@@ -48,7 +48,11 @@ from swift.common.http import is_informational, is_success, is_redirection, \
     HTTP_INSUFFICIENT_STORAGE, HTTP_UNAUTHORIZED
 from swift.common.swob import Request, Response, HeaderKeyDict, Range, \
     HTTPException, HTTPRequestedRangeNotSatisfiable
+from swift.common.request_helpers import strip_sys_meta_prefix, \
+    strip_user_meta_prefix, is_user_meta, is_sys_meta, is_sys_or_user_meta
 import swift.common.storage_policy as storage_policy
+
+
 POLICY_INDEX = storage_policy.POLICY_INDEX
 POLICY = storage_policy.POLICY

@@ -109,11 +113,32 @@ def get_container_memcache_key(account, container):
     return cache_key


+def _prep_headers_to_info(headers, server_type):
+    """
+    Helper method that iterates once over a dict of headers,
+    converting all keys to lower case and separating
+    into subsets containing user metadata, system metadata
+    and other headers.
+    """
+    meta = {}
+    sysmeta = {}
+    other = {}
+    for key, val in dict(headers).iteritems():
+        lkey = key.lower()
+        if is_user_meta(server_type, lkey):
+            meta[strip_user_meta_prefix(server_type, lkey)] = val
+        elif is_sys_meta(server_type, lkey):
+            sysmeta[strip_sys_meta_prefix(server_type, lkey)] = val
+        else:
+            other[lkey] = val
+    return other, meta, sysmeta
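
A quick sketch of the split this helper performs (header names invented):

    other, meta, sysmeta = _prep_headers_to_info(
        {'X-Container-Meta-Color': 'blue',
         'X-Container-Sysmeta-Owner': 'svc1',
         'X-Container-Object-Count': '7'}, 'container')
    # other   == {'x-container-object-count': '7'}
    # meta    == {'color': 'blue'}
    # sysmeta == {'owner': 'svc1'}
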
+
+
 def headers_to_account_info(headers, status_int=HTTP_OK):
     """
     Construct a cacheable dict of account info based on response headers.
     """
-    headers = dict((k.lower(), v) for k, v in dict(headers).iteritems())
+    headers, meta, sysmeta = _prep_headers_to_info(headers, 'account')
     return {
         'status': status_int,
         # 'container_count' anomaly:

@@ -123,9 +148,8 @@ def headers_to_account_info(headers, status_int=HTTP_OK):
         'container_count': headers.get('x-account-container-count'),
         'total_object_count': headers.get('x-account-object-count'),
         'bytes': headers.get('x-account-bytes-used'),
-        'meta': dict((key[15:], value)
-                     for key, value in headers.iteritems()
-                     if key.startswith('x-account-meta-'))
+        'meta': meta,
+        'sysmeta': sysmeta
     }


@@ -133,7 +157,7 @@ def headers_to_container_info(headers, status_int=HTTP_OK):
     """
     Construct a cacheable dict of container info based on response headers.
     """
-    headers = dict((k.lower(), v) for k, v in dict(headers).iteritems())
+    headers, meta, sysmeta = _prep_headers_to_info(headers, 'container')
     # assure we have a policy 0 set when the policy header doesn't exist
     if headers.get(POLICY_INDEX.lower()) is None:
         headers[POLICY_INDEX.lower()] = '0'

@@ -147,16 +171,12 @@ def headers_to_container_info(headers, status_int=HTTP_OK):
         'versions': headers.get('x-versions-location'),
         'storage_policy': headers.get(POLICY_INDEX.lower()),
         'cors': {
-            'allow_origin': headers.get(
-                'x-container-meta-access-control-allow-origin'),
-            'expose_headers': headers.get(
-                'x-container-meta-access-control-expose-headers'),
-            'max_age': headers.get(
-                'x-container-meta-access-control-max-age')
+            'allow_origin': meta.get('access-control-allow-origin'),
+            'expose_headers': meta.get('access-control-expose-headers'),
+            'max_age': meta.get('access-control-max-age')
         },
-        'meta': dict((key[17:], value)
-                     for key, value in headers.iteritems()
-                     if key.startswith('x-container-meta-'))
+        'meta': meta,
+        'sysmeta': sysmeta
     }


@@ -164,14 +184,12 @@ def headers_to_object_info(headers, status_int=HTTP_OK):
     """
     Construct a cacheable dict of object info based on response headers.
     """
-    headers = dict((k.lower(), v) for k, v in dict(headers).iteritems())
+    headers, meta, sysmeta = _prep_headers_to_info(headers, 'object')
     info = {'status': status_int,
             'length': headers.get('content-length'),
             'type': headers.get('content-type'),
             'etag': headers.get('etag'),
-            'meta': dict((key[14:], value)
-                         for key, value in headers.iteritems()
-                         if key.startswith('x-object-meta-'))
+            'meta': meta
             }
     return info

@@ -427,6 +445,9 @@ def _get_info_cache(app, env, account, container=None):
     if memcache:
         info = memcache.get(cache_key)
         if info:
+            for key in info:
+                if isinstance(info[key], unicode):
+                    info[key] = info[key].encode("utf-8")
             env[env_key] = info
         return info
     return None

@@ -635,7 +656,7 @@ class GetOrHeadHandler(object):
                     bytes_read_from_source += len(chunk)
                 except ChunkReadTimeout:
                     exc_type, exc_value, exc_traceback = exc_info()
-                    if self.newest:
+                    if self.newest or self.server_type != 'Object':
                         raise exc_type, exc_value, exc_traceback
                     try:
                         self.fast_forward(bytes_read_from_source)

@@ -858,11 +879,10 @@ class Controller(object):
                            if k.lower().startswith(x_remove) or
                            k.lower() in self._x_remove_headers())

-        x_meta = 'x-%s-meta-' % st
         dst_headers.update((k.lower(), v)
                            for k, v in src_headers.iteritems()
                            if k.lower() in self.pass_through_headers or
-                           k.lower().startswith(x_meta))
+                           is_sys_or_user_meta(st, k))

     def generate_request_headers(self, orig_req=None, additional=None,
                                  transfer=False):

@@ -28,6 +28,7 @@ import itertools
 import mimetypes
 import re
 import time
+import math
 from datetime import datetime
 from swift import gettext_ as _
 from urllib import unquote, quote

@@ -59,6 +60,7 @@ from swift.common.swob import HTTPAccepted, HTTPBadRequest, HTTPNotFound, \
     HTTPServerError, HTTPServiceUnavailable, Request, Response, \
     HTTPClientDisconnect, HTTPNotImplemented, HTTPException
 from swift.common.storage_policy import POLICY_INDEX
+from swift.common.request_helpers import is_user_meta


 def segment_listing_iter(listing):

@@ -78,7 +80,7 @@ def copy_headers_into(from_r, to_r):
     """
     pass_headers = ['x-delete-at']
     for k, v in from_r.headers.items():
-        if k.lower().startswith('x-object-meta-') or k.lower() in pass_headers:
+        if is_user_meta('object', k) or k.lower() in pass_headers:
             to_r.headers[k] = v


@@ -1181,7 +1183,7 @@ class ObjectController(Controller):
             resp.headers['X-Copied-From-Last-Modified'] = \
                 source_resp.headers['last-modified']
             copy_headers_into(req, resp)
-        resp.last_modified = float(req.headers['X-Timestamp'])
+        resp.last_modified = math.ceil(float(req.headers['X-Timestamp']))
         return resp

     @public

@@ -24,6 +24,7 @@ import itertools
 from eventlet import Timeout

 from swift import __canonical_version__ as swift_version
+from swift.common import constraints
 from swift.common.ring import Ring
 from swift.common.utils import cache_from_env, get_logger, \
     get_remote_client, split_path, config_true_value, generate_trans_id, \

@@ -38,6 +39,32 @@ from swift.common.swob import HTTPBadRequest, HTTPForbidden, \
 import swift.common.storage_policy


+# List of entry points for mandatory middlewares.
+#
+# Fields:
+#
+# "name" (required) is the entry point name from setup.py.
+#
+# "after" (optional) is a list of middlewares that this middleware should come
+# after. Default is for the middleware to go at the start of the pipeline. Any
+# middlewares in the "after" list that are not present in the pipeline will be
+# ignored, so you can safely name optional middlewares to come after. For
+# example, 'after: ["catch_errors", "bulk"]' would install this middleware
+# after catch_errors and bulk if both were present, but if bulk were absent,
+# would just install it after catch_errors.
+#
+# "after_fn" (optional) is a function that takes a PipelineWrapper object as
+# its single argument and returns a list of middlewares that this middleware
+# should come after. This list overrides any defined by the "after" field.
+required_filters = [
+    {'name': 'catch_errors'},
+    {'name': 'gatekeeper',
+     'after_fn': lambda pipe: (['catch_errors']
+                               if pipe.startswith("catch_errors")
+                               else [])}
+]


 class Application(object):
     """WSGI application for the proxy server."""

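
The gatekeeper lambda above only ever returns one of two lists; a toy
stand-in for PipelineWrapper.startswith (which checks the first entry point
in the pipeline) shows the rule:

    class FakePipe(object):  # invented stand-in, not the real PipelineWrapper
        def __init__(self, names):
            self.names = names

        def startswith(self, name):
            return self.names[0] == name

    gk_after_fn = required_filters[1]['after_fn']
    print gk_after_fn(FakePipe(['catch_errors', 'proxy-server']))   # ['catch_errors']
    print gk_after_fn(FakePipe(['proxy-logging', 'proxy-server']))  # []
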
@@ -126,7 +153,7 @@ class Application(object):
             raise ValueError(
                 'Invalid request_node_count value: %r' % ''.join(value))
         try:
-            read_affinity = conf.get('read_affinity', '')
+            self._read_affinity = read_affinity = conf.get('read_affinity', '')
             self.read_affinity_sort_key = affinity_key_function(read_affinity)
         except ValueError as err:
             # make the message a little more useful

@@ -176,7 +203,26 @@ class Application(object):
         self.disallowed_sections = list_from_csv(
             conf.get('disallowed_sections'))
         self.admin_key = conf.get('admin_key', None)
-        register_swift_info(version=swift_version)
+        register_swift_info(
+            version=swift_version,
+            max_file_size=constraints.MAX_FILE_SIZE,
+            max_meta_name_length=constraints.MAX_META_NAME_LENGTH,
+            max_meta_value_length=constraints.MAX_META_VALUE_LENGTH,
+            max_meta_count=constraints.MAX_META_COUNT,
+            account_listing_limit=constraints.ACCOUNT_LISTING_LIMIT,
+            container_listing_limit=constraints.CONTAINER_LISTING_LIMIT,
+            max_account_name_length=constraints.MAX_ACCOUNT_NAME_LENGTH,
+            max_container_name_length=constraints.MAX_CONTAINER_NAME_LENGTH,
+            max_object_name_length=constraints.MAX_OBJECT_NAME_LENGTH)
+
+    def check_config(self):
+        """
+        Check the configuration for possible errors
+        """
+        if self._read_affinity and self.sorting_method != 'affinity':
+            self.logger.warn("sorting_method is set to '%s', not 'affinity'; "
+                             "read_affinity setting will have no effect." %
+                             self.sorting_method)

     def get_object_ring(self, policy_idx):
         """
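
With the expanded registration above, the published constraints become
visible through the same utils helper the middleware tests use; the values
depend on the local swift.conf, so the one hinted below is only the stock
default:

    from swift.common.utils import get_swift_info

    info = get_swift_info()
    print sorted(info['swift'].keys())
    # ['account_listing_limit', 'container_listing_limit', ...]
    print info['swift']['max_file_size']  # 5368709120 with stock constraints
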
@@ -493,9 +539,45 @@ class Application(object):
                 {'type': typ, 'ip': node['ip'], 'port': node['port'],
                  'device': node['device'], 'info': additional_info})

+    def modify_wsgi_pipeline(self, pipe):
+        """
+        Called during WSGI pipeline creation. Modifies the WSGI pipeline
+        context to ensure that mandatory middleware is present in the pipeline.
+
+        :param pipe: A PipelineWrapper object
+        """
+        pipeline_was_modified = False
+        for filter_spec in reversed(required_filters):
+            filter_name = filter_spec['name']
+            if filter_name not in pipe:
+                if 'after_fn' in filter_spec:
+                    afters = filter_spec['after_fn'](pipe)
+                else:
+                    afters = filter_spec.get('after', [])
+                insert_at = 0
+                for after in afters:
+                    try:
+                        insert_at = max(insert_at, pipe.index(after) + 1)
+                    except ValueError:  # not in pipeline; ignore it
+                        pass
+                self.logger.info(
+                    'Adding required filter %s to pipeline at position %d' %
+                    (filter_name, insert_at))
+                ctx = pipe.create_filter(filter_name)
+                pipe.insert_filter(ctx, index=insert_at)
+                pipeline_was_modified = True
+
+        if pipeline_was_modified:
+            self.logger.info("Pipeline was modified. New pipeline is \"%s\".",
+                             pipe)
+        else:
+            self.logger.debug("Pipeline is \"%s\"", pipe)
+

 def app_factory(global_conf, **local_conf):
     """paste.deploy app factory for creating WSGI proxy apps."""
     conf = global_conf.copy()
     conf.update(local_conf)
-    return Application(conf)
+    app = Application(conf)
+    app.check_config()
+    return app

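
The reversed() walk in modify_wsgi_pipeline is what lets catch_errors land
ahead of gatekeeper even though gatekeeper is examined first. Simulating the
insertion logic with plain lists (pipeline names invented; the real code
operates on a PipelineWrapper) makes the ordering visible:

    specs = [{'name': 'catch_errors', 'after': []},
             {'name': 'gatekeeper',
              'after_fn': lambda p: (['catch_errors']
                                     if p and p[0] == 'catch_errors' else [])}]
    pipeline = ['proxy-logging', 'cache', 'proxy-server']
    for spec in reversed(specs):
        if spec['name'] not in pipeline:
            afters = (spec['after_fn'](pipeline) if 'after_fn' in spec
                      else spec.get('after', []))
            insert_at = 0
            for after in afters:
                if after in pipeline:
                    insert_at = max(insert_at, pipeline.index(after) + 1)
            pipeline.insert(insert_at, spec['name'])
    print pipeline
    # ['catch_errors', 'gatekeeper', 'proxy-logging', 'cache', 'proxy-server']
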
@@ -65,7 +65,7 @@ class ResponseError(Exception):


 def listing_empty(method):
-    for i in xrange(0, 6):
+    for i in xrange(6):
         if len(method()) == 0:
             return True

@@ -723,7 +723,8 @@ class File(Base):
         else:
             raise RuntimeError

-    def write(self, data='', hdrs={}, parms={}, callback=None, cfg={}):
+    def write(self, data='', hdrs={}, parms={}, callback=None, cfg={},
+              return_resp=False):
         block_size = 2 ** 20

         if isinstance(data, file):

@@ -767,6 +768,9 @@ class File(Base):
             pass
         self.md5 = self.compute_md5sum(data)

+        if return_resp:
+            return self.conn.response
+
         return True

     def write_random(self, size=None, hdrs={}, parms={}, cfg={}):

@@ -776,3 +780,12 @@ class File(Base):
                                  self.conn.make_path(self.path))
         self.md5 = self.compute_md5sum(StringIO.StringIO(data))
         return data
+
+    def write_random_return_resp(self, size=None, hdrs={}, parms={}, cfg={}):
+        data = self.random_data(size)
+        resp = self.write(data, hdrs=hdrs, parms=parms, cfg=cfg,
+                          return_resp=True)
+        if not resp:
+            raise ResponseError(self.conn.response)
+        self.md5 = self.compute_md5sum(StringIO.StringIO(data))
+        return resp

@@ -581,6 +581,28 @@ class TestObject(unittest.TestCase):
         self.assertEquals(resp.getheader('Content-Type'),
                           'text/html; charset=UTF-8')

+    def test_delete_if_delete_at_bad(self):
+        if skip:
+            raise SkipTest
+
+        def put(url, token, parsed, conn):
+            conn.request('PUT',
+                         '%s/%s/hi-delete-bad' % (parsed.path, self.container),
+                         'there', {'X-Auth-Token': token})
+            return check_response(conn)
+        resp = retry(put)
+        resp.read()
+        self.assertEquals(resp.status, 201)
+
+        def delete(url, token, parsed, conn):
+            conn.request('DELETE', '%s/%s/hi' % (parsed.path, self.container),
+                         '', {'X-Auth-Token': token,
+                              'X-If-Delete-At': 'bad'})
+            return check_response(conn)
+        resp = retry(delete)
+        resp.read()
+        self.assertEquals(resp.status, 400)
+
     def test_null_name(self):
         if skip:
             raise SkipTest

@@ -1672,8 +1672,14 @@ class TestFileComparisonEnv:
             file_item.write_random(cls.file_size)
             cls.files.append(file_item)

-        cls.time_old = time.asctime(time.localtime(time.time() - 86400))
-        cls.time_new = time.asctime(time.localtime(time.time() + 86400))
+        cls.time_old_f1 = time.strftime("%a, %d %b %Y %H:%M:%S GMT",
+                                        time.gmtime(time.time() - 86400))
+        cls.time_old_f2 = time.strftime("%A, %d-%b-%y %H:%M:%S GMT",
+                                        time.gmtime(time.time() - 86400))
+        cls.time_old_f3 = time.strftime("%a %b %d %H:%M:%S %Y",
+                                        time.gmtime(time.time() - 86400))
+        cls.time_new = time.strftime("%a, %d %b %Y %H:%M:%S GMT",
+                                     time.gmtime(time.time() + 86400))


 class TestFileComparison(Base):
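
The three old-date variants set up above are exactly the three HTTP-date
forms RFC 2616 obliges servers to accept, and the conditional-request tests
below exercise one of each; the former asctime-of-localtime value was not a
valid HTTP date at all. The three shapes, at an arbitrary instant:

    import time
    t = time.gmtime(0)
    print time.strftime("%a, %d %b %Y %H:%M:%S GMT", t)  # RFC 1123: Thu, 01 Jan 1970 00:00:00 GMT
    print time.strftime("%A, %d-%b-%y %H:%M:%S GMT", t)  # RFC 850:  Thursday, 01-Jan-70 00:00:00 GMT
    print time.strftime("%a %b %d %H:%M:%S %Y", t)       # asctime:  Thu Jan 01 00:00:00 1970
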
@@ -1700,7 +1706,7 @@ class TestFileComparison(Base):

     def testIfModifiedSince(self):
         for file_item in self.env.files:
-            hdrs = {'If-Modified-Since': self.env.time_old}
+            hdrs = {'If-Modified-Since': self.env.time_old_f1}
             self.assert_(file_item.read(hdrs=hdrs))

             hdrs = {'If-Modified-Since': self.env.time_new}

@@ -1712,7 +1718,7 @@ class TestFileComparison(Base):
             hdrs = {'If-Unmodified-Since': self.env.time_new}
             self.assert_(file_item.read(hdrs=hdrs))

-            hdrs = {'If-Unmodified-Since': self.env.time_old}
+            hdrs = {'If-Unmodified-Since': self.env.time_old_f2}
             self.assertRaises(ResponseError, file_item.read, hdrs=hdrs)
             self.assert_status(412)

@@ -1728,10 +1734,32 @@ class TestFileComparison(Base):
             self.assert_status(412)

             hdrs = {'If-Match': file_item.md5,
-                    'If-Unmodified-Since': self.env.time_old}
+                    'If-Unmodified-Since': self.env.time_old_f3}
             self.assertRaises(ResponseError, file_item.read, hdrs=hdrs)
             self.assert_status(412)

+    def testLastModified(self):
+        file_name = Utils.create_name()
+        content_type = Utils.create_name()
+
+        file = self.env.container.file(file_name)
+        file.content_type = content_type
+        resp = file.write_random_return_resp(self.env.file_size)
+        put_last_modified = resp.getheader('last-modified')
+
+        file = self.env.container.file(file_name)
+        info = file.info()
+        self.assert_('last_modified' in info)
+        last_modified = info['last_modified']
+        self.assertEqual(put_last_modified, info['last_modified'])
+
+        hdrs = {'If-Modified-Since': last_modified}
+        self.assertRaises(ResponseError, file.read, hdrs=hdrs)
+        self.assert_status(304)
+
+        hdrs = {'If-Unmodified-Since': last_modified}
+        self.assert_(file.read(hdrs=hdrs))
+

 class TestFileComparisonUTF8(Base2, TestFileComparison):
     set_up = False

@@ -25,6 +25,7 @@ from sqlite3 import connect
 from swiftclient import client

 from swift.common import direct_client
+from swift.common.exceptions import ClientException
 from swift.common.utils import hash_path, readconf
 from test.probe.common import get_to_final_state, kill_nonprimary_server, \
     kill_server, kill_servers, reset_environment, start_server

@@ -107,7 +108,7 @@ class TestContainerFailures(TestCase):
         try:
             direct_client.direct_get_container(cnode, cpart, self.account,
                                                container1)
-        except client.ClientException as err:
+        except ClientException as err:
             exc = err
         self.assertEquals(exc.http_status, 404)
         headers, containers = client.get_account(self.url, self.token)

@@ -25,6 +25,7 @@ from uuid import uuid4
 from swiftclient import client

 from swift.common import direct_client
+from swift.common.exceptions import ClientException
 from test.probe.common import kill_server, kill_servers, reset_environment,\
     start_server
 from swift.common.utils import readconf

@@ -129,7 +130,7 @@ class TestEmptyDevice(TestCase):
         try:
             direct_client.direct_get_object(onode, opart, self.account,
                                             container, obj)
-        except direct_client.ClientException as err:
+        except ClientException as err:
             exc = err
         self.assertEquals(exc.http_status, 404)
         self.assertFalse(os.path.exists(obj_dir))

@@ -158,7 +159,7 @@ class TestEmptyDevice(TestCase):
         try:
             direct_client.direct_get_object(another_onode, opart, self.account,
                                             container, obj)
-        except direct_client.ClientException as err:
+        except ClientException as err:
             exc = err
         self.assertEquals(exc.http_status, 404)

|
|||
from swiftclient import client
|
||||
|
||||
from swift.common import direct_client
|
||||
from swift.common.exceptions import ClientException
|
||||
from swift.common.utils import hash_path, readconf
|
||||
from swift.obj.diskfile import write_metadata, read_metadata
|
||||
from test.probe.common import kill_servers, reset_environment
|
||||
|
@ -93,7 +94,7 @@ class TestObjectFailures(TestCase):
|
|||
direct_client.direct_get_object(onode, opart, self.account,
|
||||
container, obj)
|
||||
raise Exception("Did not quarantine object")
|
||||
except client.ClientException as err:
|
||||
except ClientException as err:
|
||||
self.assertEquals(err.http_status, 404)
|
||||
|
||||
def run_quarantine_range_etag(self):
|
||||
|
@ -116,7 +117,7 @@ class TestObjectFailures(TestCase):
|
|||
direct_client.direct_get_object(onode, opart, self.account,
|
||||
container, obj)
|
||||
raise Exception("Did not quarantine object")
|
||||
except client.ClientException as err:
|
||||
except ClientException as err:
|
||||
self.assertEquals(err.http_status, 404)
|
||||
|
||||
def run_quarantine_zero_byte_get(self):
|
||||
|
@ -133,7 +134,7 @@ class TestObjectFailures(TestCase):
|
|||
container, obj, conn_timeout=1,
|
||||
response_timeout=1)
|
||||
raise Exception("Did not quarantine object")
|
||||
except client.ClientException as err:
|
||||
except ClientException as err:
|
||||
self.assertEquals(err.http_status, 404)
|
||||
|
||||
def run_quarantine_zero_byte_head(self):
|
||||
|
@ -150,7 +151,7 @@ class TestObjectFailures(TestCase):
|
|||
container, obj, conn_timeout=1,
|
||||
response_timeout=1)
|
||||
raise Exception("Did not quarantine object")
|
||||
except client.ClientException as err:
|
||||
except ClientException as err:
|
||||
self.assertEquals(err.http_status, 404)
|
||||
|
||||
def run_quarantine_zero_byte_post(self):
|
||||
|
@ -170,7 +171,7 @@ class TestObjectFailures(TestCase):
|
|||
conn_timeout=1,
|
||||
response_timeout=1)
|
||||
raise Exception("Did not quarantine object")
|
||||
except client.ClientException as err:
|
||||
except ClientException as err:
|
||||
self.assertEquals(err.http_status, 404)
|
||||
|
||||
def test_runner(self):
|
||||
|
|
|
@@ -21,6 +21,7 @@ from uuid import uuid4
 from swiftclient import client

 from swift.common import direct_client
+from swift.common.exceptions import ClientException
 from test.probe.common import kill_server, kill_servers, reset_environment, \
     start_server


@@ -110,7 +111,7 @@ class TestObjectHandoff(TestCase):
         try:
             direct_client.direct_get_object(onode, opart, self.account,
                                             container, obj)
-        except direct_client.ClientException as err:
+        except ClientException as err:
             exc = err
         self.assertEquals(exc.http_status, 404)
         # Run the extra server last so it'll remove its extra partition

@@ -142,7 +143,7 @@ class TestObjectHandoff(TestCase):
         try:
             direct_client.direct_get_object(another_onode, opart, self.account,
                                             container, obj)
-        except direct_client.ClientException as err:
+        except ClientException as err:
             exc = err
         self.assertEquals(exc.http_status, 404)

@@ -151,7 +152,7 @@ class TestObjectHandoff(TestCase):
         exc = None
         try:
             client.head_object(self.url, self.token, container, obj)
-        except direct_client.ClientException as err:
+        except client.ClientException as err:
             exc = err
         self.assertEquals(exc.http_status, 404)
         objs = [o['name'] for o in

@@ -189,7 +190,7 @@ class TestObjectHandoff(TestCase):
         try:
             direct_client.direct_get_object(another_onode, opart, self.account,
                                             container, obj)
-        except direct_client.ClientException as err:
+        except ClientException as err:
             exc = err
         self.assertEquals(exc.http_status, 404)

@@ -25,8 +25,8 @@ from contextlib import nested

 from swift.account import reaper
 from swift.account.server import DATADIR
+from swift.common.exceptions import ClientException
 from swift.common.utils import normalize_timestamp
-from swift.common.direct_client import ClientException


 class FakeLogger(object):

@@ -26,6 +26,7 @@ import xml.dom.minidom
 from swift.common.swob import Request
 from swift.account.server import AccountController, ACCOUNT_LISTING_LIMIT
 from swift.common.utils import normalize_timestamp, replication, public
+from swift.common.request_helpers import get_sys_meta_prefix


 class TestAccountController(unittest.TestCase):
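
get_sys_meta_prefix simply builds the reserved system-metadata prefix for a
server type, which is how the tests below construct headers like
X-Account-Sysmeta-Test:

    from swift.common.request_helpers import get_sys_meta_prefix

    print get_sys_meta_prefix('account')    # x-account-sysmeta-
    print ('%stest' % get_sys_meta_prefix('account')).title()
    # X-Account-Sysmeta-Test
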
@@ -371,6 +372,67 @@ class TestAccountController(unittest.TestCase):
         self.assertEqual(resp.status_int, 204)
         self.assert_('x-account-meta-test' not in resp.headers)

+    def test_PUT_GET_sys_metadata(self):
+        prefix = get_sys_meta_prefix('account')
+        hdr = '%stest' % prefix
+        hdr2 = '%stest2' % prefix
+        # Set metadata header
+        req = Request.blank(
+            '/sda1/p/a', environ={'REQUEST_METHOD': 'PUT'},
+            headers={'X-Timestamp': normalize_timestamp(1),
+                     hdr.title(): 'Value'})
+        resp = req.get_response(self.controller)
+        self.assertEqual(resp.status_int, 201)
+        req = Request.blank('/sda1/p/a', environ={'REQUEST_METHOD': 'GET'})
+        resp = req.get_response(self.controller)
+        self.assertEqual(resp.status_int, 204)
+        self.assertEqual(resp.headers.get(hdr), 'Value')
+        # Set another metadata header, ensuring old one doesn't disappear
+        req = Request.blank(
+            '/sda1/p/a', environ={'REQUEST_METHOD': 'POST'},
+            headers={'X-Timestamp': normalize_timestamp(1),
+                     hdr2.title(): 'Value2'})
+        resp = req.get_response(self.controller)
+        self.assertEqual(resp.status_int, 204)
+        req = Request.blank('/sda1/p/a', environ={'REQUEST_METHOD': 'GET'})
+        resp = req.get_response(self.controller)
+        self.assertEqual(resp.status_int, 204)
+        self.assertEqual(resp.headers.get(hdr), 'Value')
+        self.assertEqual(resp.headers.get(hdr2), 'Value2')
+        # Update metadata header
+        req = Request.blank(
+            '/sda1/p/a', environ={'REQUEST_METHOD': 'PUT'},
+            headers={'X-Timestamp': normalize_timestamp(3),
+                     hdr.title(): 'New Value'})
+        resp = req.get_response(self.controller)
+        self.assertEqual(resp.status_int, 202)
+        req = Request.blank('/sda1/p/a', environ={'REQUEST_METHOD': 'GET'})
+        resp = req.get_response(self.controller)
+        self.assertEqual(resp.status_int, 204)
+        self.assertEqual(resp.headers.get(hdr), 'New Value')
+        # Send old update to metadata header
+        req = Request.blank(
+            '/sda1/p/a', environ={'REQUEST_METHOD': 'PUT'},
+            headers={'X-Timestamp': normalize_timestamp(2),
+                     hdr.title(): 'Old Value'})
+        resp = req.get_response(self.controller)
+        self.assertEqual(resp.status_int, 202)
+        req = Request.blank('/sda1/p/a', environ={'REQUEST_METHOD': 'GET'})
+        resp = req.get_response(self.controller)
+        self.assertEqual(resp.status_int, 204)
+        self.assertEqual(resp.headers.get(hdr), 'New Value')
+        # Remove metadata header (by setting it to empty)
+        req = Request.blank(
+            '/sda1/p/a', environ={'REQUEST_METHOD': 'PUT'},
+            headers={'X-Timestamp': normalize_timestamp(4),
+                     hdr.title(): ''})
+        resp = req.get_response(self.controller)
+        self.assertEqual(resp.status_int, 202)
+        req = Request.blank('/sda1/p/a', environ={'REQUEST_METHOD': 'GET'})
+        resp = req.get_response(self.controller)
+        self.assertEqual(resp.status_int, 204)
+        self.assert_(hdr not in resp.headers)
+
     def test_PUT_invalid_partition(self):
         req = Request.blank('/sda1/./a', environ={'REQUEST_METHOD': 'PUT',
                                                   'HTTP_X_TIMESTAMP': '1'})

@@ -435,6 +497,59 @@ class TestAccountController(unittest.TestCase):
         self.assertEqual(resp.status_int, 204)
         self.assert_('x-account-meta-test' not in resp.headers)

+    def test_POST_HEAD_sys_metadata(self):
+        prefix = get_sys_meta_prefix('account')
+        hdr = '%stest' % prefix
+        req = Request.blank(
+            '/sda1/p/a', environ={'REQUEST_METHOD': 'PUT'},
+            headers={'X-Timestamp': normalize_timestamp(1)})
+        resp = req.get_response(self.controller)
+        self.assertEqual(resp.status_int, 201)
+        # Set metadata header
+        req = Request.blank(
+            '/sda1/p/a', environ={'REQUEST_METHOD': 'POST'},
+            headers={'X-Timestamp': normalize_timestamp(1),
+                     hdr.title(): 'Value'})
+        resp = req.get_response(self.controller)
+        self.assertEqual(resp.status_int, 204)
+        req = Request.blank('/sda1/p/a', environ={'REQUEST_METHOD': 'HEAD'})
+        resp = req.get_response(self.controller)
+        self.assertEqual(resp.status_int, 204)
+        self.assertEqual(resp.headers.get(hdr), 'Value')
+        # Update metadata header
+        req = Request.blank(
+            '/sda1/p/a', environ={'REQUEST_METHOD': 'POST'},
+            headers={'X-Timestamp': normalize_timestamp(3),
+                     hdr.title(): 'New Value'})
+        resp = req.get_response(self.controller)
+        self.assertEqual(resp.status_int, 204)
+        req = Request.blank('/sda1/p/a', environ={'REQUEST_METHOD': 'HEAD'})
+        resp = req.get_response(self.controller)
+        self.assertEqual(resp.status_int, 204)
+        self.assertEqual(resp.headers.get(hdr), 'New Value')
+        # Send old update to metadata header
+        req = Request.blank(
+            '/sda1/p/a', environ={'REQUEST_METHOD': 'POST'},
+            headers={'X-Timestamp': normalize_timestamp(2),
+                     hdr.title(): 'Old Value'})
+        resp = req.get_response(self.controller)
+        self.assertEqual(resp.status_int, 204)
+        req = Request.blank('/sda1/p/a', environ={'REQUEST_METHOD': 'HEAD'})
+        resp = req.get_response(self.controller)
+        self.assertEqual(resp.status_int, 204)
+        self.assertEqual(resp.headers.get(hdr), 'New Value')
+        # Remove metadata header (by setting it to empty)
+        req = Request.blank(
+            '/sda1/p/a', environ={'REQUEST_METHOD': 'POST'},
+            headers={'X-Timestamp': normalize_timestamp(4),
+                     hdr.title(): ''})
+        resp = req.get_response(self.controller)
+        self.assertEqual(resp.status_int, 204)
+        req = Request.blank('/sda1/p/a', environ={'REQUEST_METHOD': 'HEAD'})
+        resp = req.get_response(self.controller)
+        self.assertEqual(resp.status_int, 204)
+        self.assert_(hdr not in resp.headers)
+
     def test_POST_invalid_partition(self):
         req = Request.blank('/sda1/./a', environ={'REQUEST_METHOD': 'POST',
                                                   'HTTP_X_TIMESTAMP': '1'})

@@ -19,10 +19,12 @@ import os
 import tarfile
 import urllib
 import zlib
+import mock
 from shutil import rmtree
 from tempfile import mkdtemp
 from StringIO import StringIO
-from mock import patch
+from eventlet import sleep
+from mock import patch, call
 from swift.common import utils
 from swift.common.middleware import bulk
 from swift.common.swob import Request, Response, HTTPException

@@ -35,6 +37,8 @@ class FakeApp(object):
         self.calls = 0
         self.delete_paths = []
         self.max_pathlen = 100
+        self.del_cont_total_calls = 2
+        self.del_cont_cur_call = 0

     def __call__(self, env, start_response):
         self.calls += 1

@@ -64,7 +68,8 @@ class FakeApp(object):
             if len(env['PATH_INFO']) > 100:
                 return Response(status='400 Bad Request')(env, start_response)
             return Response(status='201 Created')(env, start_response)
-        if env['PATH_INFO'].startswith('/delete_works/'):
+        if (env['PATH_INFO'].startswith('/delete_works/')
+                and env['REQUEST_METHOD'] == 'DELETE'):
             self.delete_paths.append(env['PATH_INFO'])
             if len(env['PATH_INFO']) > self.max_pathlen:
                 return Response(status='400 Bad Request')(env, start_response)

@@ -78,6 +83,12 @@ class FakeApp(object):
             return Response(status='409 Conflict')(env, start_response)
         if env['PATH_INFO'].startswith('/broke/'):
             return Response(status='500 Internal Error')(env, start_response)
+        if env['PATH_INFO'].startswith('/delete_cont_success_after_attempts/'):
+            if self.del_cont_cur_call < self.del_cont_total_calls:
+                self.del_cont_cur_call += 1
+                return Response(status='409 Conflict')(env, start_response)
+            else:
+                return Response(status='204 No Content')(env, start_response)


 def build_dir_tree(start_path, tree_obj):

@@ -695,11 +706,51 @@ class TestDelete(unittest.TestCase):
         req = Request.blank('/delete_cont_fail/AUTH_Acc', body='c\n',
                             headers={'Accept': 'application/json'})
         req.method = 'POST'
-        resp_body = self.handle_delete_and_iter(req)
-        resp_data = json.loads(resp_body)
-        self.assertEquals(resp_data['Number Deleted'], 0)
-        self.assertEquals(resp_data['Errors'], [['c', '409 Conflict']])
-        self.assertEquals(resp_data['Response Status'], '400 Bad Request')
+        with patch('swift.common.middleware.bulk.sleep',
+                   new=mock.MagicMock(wraps=sleep,
+                                      return_value=None)) as mock_sleep:
+            resp_body = self.handle_delete_and_iter(req)
+            resp_data = json.loads(resp_body)
+            self.assertEquals(resp_data['Number Deleted'], 0)
+            self.assertEquals(resp_data['Errors'], [['c', '409 Conflict']])
+            self.assertEquals(resp_data['Response Status'], '400 Bad Request')
+            self.assertEquals([], mock_sleep.call_args_list)
+
+    def test_bulk_delete_container_delete_retry_and_fails(self):
+        self.bulk.retry_count = 3
+        req = Request.blank('/delete_cont_fail/AUTH_Acc', body='c\n',
+                            headers={'Accept': 'application/json'})
+        req.method = 'POST'
+        with patch('swift.common.middleware.bulk.sleep',
+                   new=mock.MagicMock(wraps=sleep,
+                                      return_value=None)) as mock_sleep:
+            resp_body = self.handle_delete_and_iter(req)
+            resp_data = json.loads(resp_body)
+            self.assertEquals(resp_data['Number Deleted'], 0)
+            self.assertEquals(resp_data['Errors'], [['c', '409 Conflict']])
+            self.assertEquals(resp_data['Response Status'], '400 Bad Request')
+            self.assertEquals([call(self.bulk.retry_interval),
+                               call(self.bulk.retry_interval ** 2),
+                               call(self.bulk.retry_interval ** 3)],
+                              mock_sleep.call_args_list)
+
+    def test_bulk_delete_container_delete_retry_and_success(self):
+        self.bulk.retry_count = 3
+        self.app.del_container_total = 2
+        req = Request.blank('/delete_cont_success_after_attempts/AUTH_Acc',
+                            body='c\n', headers={'Accept': 'application/json'})
+        req.method = 'DELETE'
+        with patch('swift.common.middleware.bulk.sleep',
+                   new=mock.MagicMock(wraps=sleep,
+                                      return_value=None)) as mock_sleep:
+            resp_body = self.handle_delete_and_iter(req)
+            resp_data = json.loads(resp_body)
+            self.assertEquals(resp_data['Number Deleted'], 1)
+            self.assertEquals(resp_data['Errors'], [])
+            self.assertEquals(resp_data['Response Status'], '200 OK')
+            self.assertEquals([call(self.bulk.retry_interval),
+                               call(self.bulk.retry_interval ** 2)],
+                              mock_sleep.call_args_list)

     def test_bulk_delete_bad_file_too_long(self):
         req = Request.blank('/delete_works/AUTH_Acc',
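
The call_args_list assertions above pin down the retry policy: a container
DELETE that keeps answering 409 is retried up to retry_count times with an
exponentially growing sleep of retry_interval ** attempt seconds, and the
loop exits early on the first success. A sketch of the schedule, assuming
invented conf values retry_count = 3 and retry_interval = 1.5:

    retry_interval, retry_count = 1.5, 3
    print [retry_interval ** attempt for attempt in range(1, retry_count + 1)]
    # [1.5, 2.25, 3.375] -- the success case above stops after two sleeps
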
@@ -0,0 +1,115 @@
+# Copyright (c) 2010-2012 OpenStack Foundation
+#
+# Licensed under the Apache License, Version 2.0 (the "License");
+# you may not use this file except in compliance with the License.
+# You may obtain a copy of the License at
+#
+#    http://www.apache.org/licenses/LICENSE-2.0
+#
+# Unless required by applicable law or agreed to in writing, software
+# distributed under the License is distributed on an "AS IS" BASIS,
+# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or
+# implied.
+# See the License for the specific language governing permissions and
+# limitations under the License.
+
+import unittest
+
+from swift.common.swob import Request, Response
+from swift.common.middleware import gatekeeper
+
+
+class FakeApp(object):
+    def __init__(self, headers={}):
+        self.headers = headers
+        self.req = None
+
+    def __call__(self, env, start_response):
+        self.req = Request(env)
+        return Response(request=self.req, body='FAKE APP',
+                        headers=self.headers)(env, start_response)
+
+
+class TestGatekeeper(unittest.TestCase):
+    methods = ['PUT', 'POST', 'GET', 'DELETE', 'HEAD', 'COPY', 'OPTIONS']
+
+    allowed_headers = {'xx-account-sysmeta-foo': 'value',
+                       'xx-container-sysmeta-foo': 'value',
+                       'xx-object-sysmeta-foo': 'value',
+                       'x-account-meta-foo': 'value',
+                       'x-container-meta-foo': 'value',
+                       'x-object-meta-foo': 'value',
+                       'x-timestamp-foo': 'value'}
+
+    sysmeta_headers = {'x-account-sysmeta-': 'value',
+                       'x-container-sysmeta-': 'value',
+                       'x-object-sysmeta-': 'value',
+                       'x-account-sysmeta-foo': 'value',
+                       'x-container-sysmeta-foo': 'value',
+                       'x-object-sysmeta-foo': 'value',
+                       'X-Account-Sysmeta-BAR': 'value',
+                       'X-Container-Sysmeta-BAR': 'value',
+                       'X-Object-Sysmeta-BAR': 'value'}
+
+    forbidden_headers_out = dict(sysmeta_headers)
+    forbidden_headers_in = dict(sysmeta_headers)
+
+    def _assertHeadersEqual(self, expected, actual):
+        for key in expected:
+            self.assertTrue(key.lower() in actual,
+                            '%s missing from %s' % (key, actual))
+
+    def _assertHeadersAbsent(self, unexpected, actual):
+        for key in unexpected:
+            self.assertTrue(key.lower() not in actual,
+                            '%s is in %s' % (key, actual))
+
+    def get_app(self, app, global_conf, **local_conf):
+        factory = gatekeeper.filter_factory(global_conf, **local_conf)
+        return factory(app)
+
+    def test_ok_header(self):
+        req = Request.blank('/v/a/c', environ={'REQUEST_METHOD': 'PUT'},
+                            headers=self.allowed_headers)
+        fake_app = FakeApp()
+        app = self.get_app(fake_app, {})
+        resp = req.get_response(app)
+        self.assertEquals('200 OK', resp.status)
+        self.assertEquals(resp.body, 'FAKE APP')
+        self._assertHeadersEqual(self.allowed_headers, fake_app.req.headers)
+
+    def _test_reserved_header_removed_inbound(self, method):
+        headers = dict(self.forbidden_headers_in)
+        headers.update(self.allowed_headers)
+        req = Request.blank('/v/a/c', environ={'REQUEST_METHOD': method},
+                            headers=headers)
+        fake_app = FakeApp()
+        app = self.get_app(fake_app, {})
+        resp = req.get_response(app)
+        self.assertEquals('200 OK', resp.status)
+        self._assertHeadersEqual(self.allowed_headers, fake_app.req.headers)
+        self._assertHeadersAbsent(self.forbidden_headers_in,
+                                  fake_app.req.headers)
+
+    def test_reserved_header_removed_inbound(self):
+        for method in self.methods:
+            self._test_reserved_header_removed_inbound(method)
+
+    def _test_reserved_header_removed_outbound(self, method):
+        headers = dict(self.forbidden_headers_out)
+        headers.update(self.allowed_headers)
+        req = Request.blank('/v/a/c', environ={'REQUEST_METHOD': method})
+        fake_app = FakeApp(headers=headers)
+        app = self.get_app(fake_app, {})
+        resp = req.get_response(app)
+        self.assertEquals('200 OK', resp.status)
+        self._assertHeadersEqual(self.allowed_headers, resp.headers)
+        self._assertHeadersAbsent(self.forbidden_headers_out, resp.headers)
+
+    def test_reserved_header_removed_outbound(self):
+        for method in self.methods:
+            self._test_reserved_header_removed_outbound(method)
+
+
+if __name__ == '__main__':
+    unittest.main()
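
Taken together these tests pin down the gatekeeper contract: headers under
the reserved x-{account,container,object}-sysmeta- prefixes are stripped
from inbound requests and outbound responses for every method and regardless
of case, while plain user metadata and near-miss names such as
xx-account-sysmeta-foo pass through. System metadata can therefore only be
set or read by middleware sitting behind gatekeeper in the pipeline. A quick
manual check in the same vein (reusing the FakeApp above; path and values
invented):

    fake_app = FakeApp()
    app = gatekeeper.filter_factory({})(fake_app)
    req = Request.blank('/v/a/c', headers={'X-Object-Sysmeta-Secret': 'x',
                                           'X-Object-Meta-Color': 'blue'})
    req.get_response(app)
    print 'x-object-sysmeta-secret' in fake_app.req.headers  # False
    print 'x-object-meta-color' in fake_app.req.headers      # True
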
@@ -158,7 +158,7 @@ class TestRateLimit(unittest.TestCase):
     def _run(self, callable_func, num, rate, check_time=True):
         global time_ticker
         begin = time.time()
-        for x in range(0, num):
+        for x in range(num):
             callable_func()
         end = time.time()
         total_time = float(num) / rate - 1.0 / rate  # 1st request not limited

@@ -101,7 +101,10 @@ class MockOS(object):

     def fake_ismount(self, *args, **kwargs):
         self.ismount_calls.append((args, kwargs))
-        return self.ismount_output
+        if isinstance(self.ismount_output, Exception):
+            raise self.ismount_output
+        else:
+            return self.ismount_output

     def fake_statvfs(self, *args, **kwargs):
         self.statvfs_calls.append((args, kwargs))

@@ -560,13 +563,30 @@ class TestReconSuccess(TestCase):
                               "quarantined": 0}})

     def test_get_unmounted(self):
         unmounted_resp = [{'device': 'fakeone', 'mounted': False},
                           {'device': 'faketwo', 'mounted': False}]
         self.mockos.ls_output = ['fakeone', 'faketwo']
         self.mockos.ismount_output = False
         rv = self.app.get_unmounted()
-        self.assertEquals(self.mockos.listdir_calls, [(('/srv/node/',), {})])
+        self.assertEquals(self.mockos.listdir_calls, [(('/srv/node',), {})])
+        self.assertEquals(rv, unmounted_resp)
+
+    def test_get_unmounted_everything_normal(self):
+        unmounted_resp = []
+        self.mockos.ls_output = ['fakeone', 'faketwo']
+        self.mockos.ismount_output = True
+        rv = self.app.get_unmounted()
+        self.assertEquals(self.mockos.listdir_calls, [(('/srv/node',), {})])
+        self.assertEquals(rv, unmounted_resp)
+
+    def test_get_unmounted_checkmount_fail(self):
+        unmounted_resp = [{'device': 'fakeone', 'mounted': 'brokendrive'}]
+        self.mockos.ls_output = ['fakeone']
+        self.mockos.ismount_output = OSError('brokendrive')
+        rv = self.app.get_unmounted()
+        self.assertEquals(self.mockos.listdir_calls, [(('/srv/node',), {})])
+        self.assertEquals(self.mockos.ismount_calls,
+                          [(('/srv/node/fakeone',), {})])
         self.assertEquals(rv, unmounted_resp)

     def test_no_get_unmounted(self):

@@ -578,7 +598,7 @@ class TestReconSuccess(TestCase):
         self.mockos.ls_output = []
         self.mockos.ismount_output = False
         rv = self.app.get_unmounted()
-        self.assertEquals(self.mockos.listdir_calls, [(('/srv/node/',), {})])
+        self.assertEquals(self.mockos.listdir_calls, [(('/srv/node',), {})])
         self.assertEquals(rv, unmounted_resp)

     def test_get_diskusage(self):

@@ -601,11 +621,11 @@ class TestReconSuccess(TestCase):

     def test_get_diskusage_checkmount_fail(self):
         du_resp = [{'device': 'canhazdrive1', 'avail': '',
-                    'mounted': False, 'used': '', 'size': ''}]
+                    'mounted': 'brokendrive', 'used': '', 'size': ''}]
         self.mockos.ls_output = ['canhazdrive1']
-        self.mockos.ismount_output = False
+        self.mockos.ismount_output = OSError('brokendrive')
         rv = self.app.get_diskusage()
-        self.assertEquals(self.mockos.listdir_calls, [(('/srv/node/',), {})])
+        self.assertEquals(self.mockos.listdir_calls, [(('/srv/node',), {})])
         self.assertEquals(self.mockos.ismount_calls,
                           [(('/srv/node/canhazdrive1',), {})])
         self.assertEquals(rv, du_resp)

@@ -20,7 +20,7 @@ from time import time

 from swift.common.middleware import tempauth, tempurl
 from swift.common.swob import Request, Response, HeaderKeyDict
-from swift.common.utils import split_path
+from swift.common import utils


 class FakeApp(object):

@@ -59,7 +59,7 @@ class TestTempURL(unittest.TestCase):
         if environ is None:
             environ = {}

-        _junk, account, _junk, _junk = split_path(path, 2, 4)
+        _junk, account, _junk, _junk = utils.split_path(path, 2, 4)
         self._fake_cache_environ(environ, account, keys)
         req = Request.blank(path, environ=environ, **kwargs)
         return req

@@ -869,5 +869,25 @@ class TestTempURL(unittest.TestCase):
         self.assertTrue(isinstance(str_value, str))


+class TestSwiftInfo(unittest.TestCase):
+    def setUp(self):
+        utils._swift_info = {}
+        utils._swift_admin_info = {}
+
+    def test_registered_defaults(self):
+        tempurl.filter_factory({})
+        swift_info = utils.get_swift_info()
+        self.assertTrue('tempurl' in swift_info)
+        self.assertEqual(set(swift_info['tempurl']['methods']),
+                         set(('GET', 'HEAD', 'PUT')))
+
+    def test_non_default_methods(self):
+        tempurl.filter_factory({'methods': 'GET HEAD PUT POST DELETE'})
+        swift_info = utils.get_swift_info()
+        self.assertTrue('tempurl' in swift_info)
+        self.assertEqual(set(swift_info['tempurl']['methods']),
+                         set(('GET', 'HEAD', 'PUT', 'POST', 'DELETE')))
+
+
 if __name__ == '__main__':
     unittest.main()

@@ -150,13 +150,16 @@ class TestRing(unittest.TestCase):
         # test invalid endcap
         _orig_hash_path_suffix = utils.HASH_PATH_SUFFIX
         _orig_hash_path_prefix = utils.HASH_PATH_PREFIX
+        _orig_swift_conf_file = utils.SWIFT_CONF_FILE
         try:
             utils.HASH_PATH_SUFFIX = ''
             utils.HASH_PATH_PREFIX = ''
+            utils.SWIFT_CONF_FILE = ''
             self.assertRaises(SystemExit, ring.Ring, self.testdir, 'whatever')
         finally:
             utils.HASH_PATH_SUFFIX = _orig_hash_path_suffix
             utils.HASH_PATH_PREFIX = _orig_hash_path_prefix
+            utils.SWIFT_CONF_FILE = _orig_swift_conf_file

     def test_has_changed(self):
         self.assertEquals(self.ring.has_changed(), False)

@@ -20,6 +20,7 @@ import StringIO
from hashlib import md5

from swift.common import direct_client
from swift.common.exceptions import ClientException
from swiftclient import json_loads


@@ -292,9 +293,8 @@ class TestDirectClient(unittest.TestCase):
        was_http_connector = direct_client.http_connect
        direct_client.http_connect = mock_http_connect(500)

        self.assertRaises(direct_client.ClientException,
                          direct_client.direct_put_object, node, part, account,
                          container, name, contents)
        self.assertRaises(ClientException, direct_client.direct_put_object,
                          node, part, account, container, name, contents)

        direct_client.http_connect = was_http_connector
@@ -32,6 +32,19 @@ class TestExceptions(unittest.TestCase):
        finally:
            exc.cancel()

    def test_client_exception(self):
        strerror = 'test: HTTP://random:888/randompath?foo=1 666 reason: ' \
                   'device /sdb1 content'
        exc = exceptions.ClientException('test', http_scheme='HTTP',
                                         http_host='random',
                                         http_port=888,
                                         http_path='/randompath',
                                         http_query='foo=1',
                                         http_status=666,
                                         http_reason='reason',
                                         http_device='/sdb1',
                                         http_response_content='content')
        self.assertEqual(str(exc), strerror)

if __name__ == '__main__':
    unittest.main()
@@ -105,6 +105,8 @@ class TestManagerModule(unittest.TestCase):
                                          manager.MAX_DESCRIPTORS)),
            (resource.RLIMIT_DATA, (manager.MAX_MEMORY,
                                    manager.MAX_MEMORY)),
            (resource.RLIMIT_NPROC, (manager.MAX_PROCS,
                                     manager.MAX_PROCS)),
        ]
        self.assertEquals(manager.resource.called_with_args, expected)
        self.assertTrue(
@@ -0,0 +1,70 @@
# Copyright (c) 2010-2012 OpenStack Foundation
#
# Licensed under the Apache License, Version 2.0 (the "License");
# you may not use this file except in compliance with the License.
# You may obtain a copy of the License at
#
#    http://www.apache.org/licenses/LICENSE-2.0
#
# Unless required by applicable law or agreed to in writing, software
# distributed under the License is distributed on an "AS IS" BASIS,
# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or
# implied.
# See the License for the specific language governing permissions and
# limitations under the License.

"""Tests for swift.common.request_helpers"""

import unittest
from swift.common.request_helpers import is_sys_meta, is_user_meta, \
    is_sys_or_user_meta, strip_sys_meta_prefix, strip_user_meta_prefix, \
    remove_items

server_types = ['account', 'container', 'object']


class TestRequestHelpers(unittest.TestCase):
    def test_is_user_meta(self):
        m_type = 'meta'
        for st in server_types:
            self.assertTrue(is_user_meta(st, 'x-%s-%s-foo' % (st, m_type)))
            self.assertFalse(is_user_meta(st, 'x-%s-%s-' % (st, m_type)))
            self.assertFalse(is_user_meta(st, 'x-%s-%sfoo' % (st, m_type)))

    def test_is_sys_meta(self):
        m_type = 'sysmeta'
        for st in server_types:
            self.assertTrue(is_sys_meta(st, 'x-%s-%s-foo' % (st, m_type)))
            self.assertFalse(is_sys_meta(st, 'x-%s-%s-' % (st, m_type)))
            self.assertFalse(is_sys_meta(st, 'x-%s-%sfoo' % (st, m_type)))

    def test_is_sys_or_user_meta(self):
        m_types = ['sysmeta', 'meta']
        for mt in m_types:
            for st in server_types:
                self.assertTrue(is_sys_or_user_meta(st, 'x-%s-%s-foo'
                                                    % (st, mt)))
                self.assertFalse(is_sys_or_user_meta(st, 'x-%s-%s-'
                                                     % (st, mt)))
                self.assertFalse(is_sys_or_user_meta(st, 'x-%s-%sfoo'
                                                     % (st, mt)))

    def test_strip_sys_meta_prefix(self):
        mt = 'sysmeta'
        for st in server_types:
            self.assertEquals(strip_sys_meta_prefix(st, 'x-%s-%s-a'
                                                    % (st, mt)), 'a')

    def test_strip_user_meta_prefix(self):
        mt = 'meta'
        for st in server_types:
            self.assertEquals(strip_user_meta_prefix(st, 'x-%s-%s-a'
                                                     % (st, mt)), 'a')

    def test_remove_items(self):
        src = {'a': 'b',
               'c': 'd'}
        test = lambda x: x == 'a'
        rem = remove_items(src, test)
        self.assertEquals(src, {'c': 'd'})
        self.assertEquals(rem, {'a': 'b'})
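
Note: judging from the assertions above, these helpers match headers of the
form x-<server type>-meta-<key> (or -sysmeta-), rejecting a bare prefix or a
missing dash. A hedged sketch of that contract with simplified stand-ins,
not the real swift.common.request_helpers bodies:

def _prefix(server_type, meta_type):
    return 'x-%s-%s-' % (server_type.lower(), meta_type)

def is_user_meta(server_type, key):
    # true only when something follows the full x-<type>-meta- prefix
    prefix = _prefix(server_type, 'meta')
    key = key.lower()
    return key.startswith(prefix) and len(key) > len(prefix)

def strip_user_meta_prefix(server_type, key):
    return key[len(_prefix(server_type, 'meta')):]

def remove_items(d, test):
    # pop matching keys out of d and return them as a new dict
    removed = dict((k, v) for k, v in d.items() if test(k))
    for k in removed:
        del d[k]
    return removed
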
@@ -33,6 +33,7 @@ from textwrap import dedent
import tempfile
import threading
import time
import traceback
import unittest
import fcntl
import shutil
@@ -2578,13 +2579,8 @@ class TestThreadpool(unittest.TestCase):
        result = tp.force_run_in_thread(self._capture_args, 1, 2, bert='ernie')
        self.assertEquals(result, {'args': (1, 2),
                                   'kwargs': {'bert': 'ernie'}})

        caught = False
        try:
            tp.force_run_in_thread(self._raise_valueerror)
        except ValueError:
            caught = True
        self.assertTrue(caught)
        self.assertRaises(ValueError, tp.force_run_in_thread,
                          self._raise_valueerror)

    def test_run_in_thread_without_threads(self):
        # with zero threads, run_in_thread doesn't actually do so
@@ -2597,13 +2593,8 @@ class TestThreadpool(unittest.TestCase):
        result = tp.run_in_thread(self._capture_args, 1, 2, bert='ernie')
        self.assertEquals(result, {'args': (1, 2),
                                   'kwargs': {'bert': 'ernie'}})

        caught = False
        try:
            tp.run_in_thread(self._raise_valueerror)
        except ValueError:
            caught = True
        self.assertTrue(caught)
        self.assertRaises(ValueError, tp.run_in_thread,
                          self._raise_valueerror)

    def test_force_run_in_thread_without_threads(self):
        # with zero threads, force_run_in_thread uses eventlet.tpool
@@ -2616,12 +2607,36 @@ class TestThreadpool(unittest.TestCase):
        result = tp.force_run_in_thread(self._capture_args, 1, 2, bert='ernie')
        self.assertEquals(result, {'args': (1, 2),
                                   'kwargs': {'bert': 'ernie'}})
        caught = False
        self.assertRaises(ValueError, tp.force_run_in_thread,
                          self._raise_valueerror)

    def test_preserving_stack_trace_from_thread(self):
        def gamma():
            return 1 / 0  # ZeroDivisionError

        def beta():
            return gamma()

        def alpha():
            return beta()

        tp = utils.ThreadPool(1)
        try:
            tp.force_run_in_thread(self._raise_valueerror)
        except ValueError:
            caught = True
        self.assertTrue(caught)
            tp.run_in_thread(alpha)
        except ZeroDivisionError:
            # NB: format is (filename, line number, function name, text)
            tb_func = [elem[2] for elem
                       in traceback.extract_tb(sys.exc_traceback)]
        else:
            self.fail("Expected ZeroDivisionError")

        self.assertEqual(tb_func[-1], "gamma")
        self.assertEqual(tb_func[-2], "beta")
        self.assertEqual(tb_func[-3], "alpha")
        # omit the middle; what's important is that the start and end are
        # included, not the exact names of helper methods
        self.assertEqual(tb_func[1], "run_in_thread")
        self.assertEqual(tb_func[0], "test_preserving_stack_trace_from_thread")


class TestAuditLocationGenerator(unittest.TestCase):
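
Note: test_preserving_stack_trace_from_thread asserts that the
alpha/beta/gamma frames survive the hop through the pool. In Python 2 that
is usually done by shipping sys.exc_info() back to the caller and re-raising
with the three-argument raise; a minimal sketch of the trick, not
ThreadPool's actual internals:

import sys
import threading
import Queue

def run_in_thread(func, *args, **kwargs):
    results = Queue.Queue()

    def worker():
        try:
            results.put((True, func(*args, **kwargs)))
        except Exception:
            # capture (type, value, traceback) inside the worker thread
            results.put((False, sys.exc_info()))

    t = threading.Thread(target=worker)
    t.start()
    t.join()
    ok, payload = results.get()
    if ok:
        return payload
    exc_type, exc_value, exc_tb = payload
    # the three-argument raise re-raises with the original traceback intact
    raise exc_type, exc_value, exc_tb
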
@@ -35,6 +35,7 @@ from eventlet import listen
import mock

import swift.common.middleware.catch_errors
import swift.common.middleware.gatekeeper
import swift.proxy.server

from swift.common.swob import Request
@@ -44,7 +45,7 @@ from swift.common.storage_policy import StoragePolicy, \

from test.unit import temptree

from mock import patch
from paste.deploy import loadwsgi


def _fake_rings(tmpdir):
@@ -135,14 +136,11 @@ class TestWSGI(unittest.TestCase):
        swift_dir = TEMPDIR

        [pipeline:main]
        pipeline = catch_errors proxy-server
        pipeline = proxy-server

        [app:proxy-server]
        use = egg:swift#proxy
        conn_timeout = 0.2

        [filter:catch_errors]
        use = egg:swift#catch_errors
        """
        contents = dedent(config)
        with temptree(['proxy-server.conf']) as t:
@@ -152,9 +150,12 @@ class TestWSGI(unittest.TestCase):
            _fake_rings(t)
            app, conf, logger, log_name = wsgi.init_request_processor(
                conf_file, 'proxy-server')
        # verify pipeline is catch_errors -> proxy-servery
        # verify pipeline is catch_errors -> proxy-server
        expected = swift.common.middleware.catch_errors.CatchErrorMiddleware
        self.assert_(isinstance(app, expected))
        app = app.app
        expected = swift.common.middleware.gatekeeper.GatekeeperMiddleware
        self.assert_(isinstance(app, expected))
        self.assert_(isinstance(app.app, swift.proxy.server.Application))
        # config settings applied to app instance
        self.assertEquals(0.2, app.app.conn_timeout)
@@ -188,14 +189,15 @@ class TestWSGI(unittest.TestCase):
        }
        # strip indent from test config contents
        config_dir = dict((f, dedent(c)) for (f, c) in config_dir.items())
        with temptree(*zip(*config_dir.items())) as conf_root:
            conf_dir = os.path.join(conf_root, 'proxy-server.conf.d')
            with open(os.path.join(conf_dir, 'swift.conf'), 'w') as f:
                f.write('[DEFAULT]\nswift_dir = %s' % conf_root)
            _fake_rings(conf_root)
            app, conf, logger, log_name = wsgi.init_request_processor(
                conf_dir, 'proxy-server')
        # verify pipeline is catch_errors -> proxy-servery
        with mock.patch('swift.proxy.server.Application.modify_wsgi_pipeline'):
            with temptree(*zip(*config_dir.items())) as conf_root:
                conf_dir = os.path.join(conf_root, 'proxy-server.conf.d')
                with open(os.path.join(conf_dir, 'swift.conf'), 'w') as f:
                    f.write('[DEFAULT]\nswift_dir = %s' % conf_root)
                _fake_rings(conf_root)
                app, conf, logger, log_name = wsgi.init_request_processor(
                    conf_dir, 'proxy-server')
        # verify pipeline is catch_errors -> proxy-server
        expected = swift.common.middleware.catch_errors.CatchErrorMiddleware
        self.assert_(isinstance(app, expected))
        self.assert_(isinstance(app.app, swift.proxy.server.Application))
@@ -342,12 +344,14 @@ class TestWSGI(unittest.TestCase):
            with open(conf_file, 'w') as f:
                f.write(contents.replace('TEMPDIR', t))
            _fake_rings(t)
            with patch('swift.common.wsgi.wsgi') as _wsgi:
                with patch('swift.common.wsgi.eventlet') as _eventlet:
                    conf = wsgi.appconfig(conf_file)
                    logger = logging.getLogger('test')
                    sock = listen(('localhost', 0))
                    wsgi.run_server(conf, logger, sock)
            with mock.patch('swift.proxy.server.Application.'
                            'modify_wsgi_pipeline'):
                with mock.patch('swift.common.wsgi.wsgi') as _wsgi:
                    with mock.patch('swift.common.wsgi.eventlet') as _eventlet:
                        conf = wsgi.appconfig(conf_file)
                        logger = logging.getLogger('test')
                        sock = listen(('localhost', 0))
                        wsgi.run_server(conf, logger, sock)
        self.assertEquals('HTTP/1.0',
                          _wsgi.HttpProtocol.default_request_version)
        self.assertEquals(30, _wsgi.WRITE_TIMEOUT)
@@ -388,14 +392,16 @@ class TestWSGI(unittest.TestCase):
            with open(os.path.join(conf_dir, 'swift.conf'), 'w') as f:
                f.write('[DEFAULT]\nswift_dir = %s' % conf_root)
            _fake_rings(conf_root)
            with patch('swift.common.wsgi.wsgi') as _wsgi:
                with patch('swift.common.wsgi.eventlet') as _eventlet:
                    with patch.dict('os.environ', {'TZ': ''}):
                        conf = wsgi.appconfig(conf_dir)
                        logger = logging.getLogger('test')
                        sock = listen(('localhost', 0))
                        wsgi.run_server(conf, logger, sock)
                        self.assert_(os.environ['TZ'] is not '')
            with mock.patch('swift.proxy.server.Application.'
                            'modify_wsgi_pipeline'):
                with mock.patch('swift.common.wsgi.wsgi') as _wsgi:
                    with mock.patch('swift.common.wsgi.eventlet') as _eventlet:
                        with mock.patch.dict('os.environ', {'TZ': ''}):
                            conf = wsgi.appconfig(conf_dir)
                            logger = logging.getLogger('test')
                            sock = listen(('localhost', 0))
                            wsgi.run_server(conf, logger, sock)
                            self.assert_(os.environ['TZ'] is not '')

        self.assertEquals('HTTP/1.0',
                          _wsgi.HttpProtocol.default_request_version)
@@ -547,6 +553,53 @@ class TestWSGI(unittest.TestCase):
        self.assertEqual(calls['_global_conf_callback'], 1)
        self.assertEqual(calls['_loadapp'], 1)

    def test_run_server_success(self):
        calls = defaultdict(lambda: 0)

        def _initrp(conf_file, app_section, *args, **kwargs):
            calls['_initrp'] += 1
            return (
                {'__file__': 'test', 'workers': 0},
                'logger',
                'log_name')

        def _loadapp(uri, name=None, **kwargs):
            calls['_loadapp'] += 1

        with nested(
                mock.patch.object(wsgi, '_initrp', _initrp),
                mock.patch.object(wsgi, 'get_socket'),
                mock.patch.object(wsgi, 'drop_privileges'),
                mock.patch.object(wsgi, 'loadapp', _loadapp),
                mock.patch.object(wsgi, 'capture_stdio'),
                mock.patch.object(wsgi, 'run_server')):
            rc = wsgi.run_wsgi('conf_file', 'app_section')
        self.assertEqual(calls['_initrp'], 1)
        self.assertEqual(calls['_loadapp'], 1)
        self.assertEqual(rc, 0)

    def test_run_server_failure1(self):
        calls = defaultdict(lambda: 0)

        def _initrp(conf_file, app_section, *args, **kwargs):
            calls['_initrp'] += 1
            raise wsgi.ConfigFileError('test exception')

        def _loadapp(uri, name=None, **kwargs):
            calls['_loadapp'] += 1

        with nested(
                mock.patch.object(wsgi, '_initrp', _initrp),
                mock.patch.object(wsgi, 'get_socket'),
                mock.patch.object(wsgi, 'drop_privileges'),
                mock.patch.object(wsgi, 'loadapp', _loadapp),
                mock.patch.object(wsgi, 'capture_stdio'),
                mock.patch.object(wsgi, 'run_server')):
            rc = wsgi.run_wsgi('conf_file', 'app_section')
        self.assertEqual(calls['_initrp'], 1)
        self.assertEqual(calls['_loadapp'], 0)
        self.assertEqual(rc, 1)

    def test_pre_auth_req_with_empty_env_no_path(self):
        r = wsgi.make_pre_authed_request(
            {}, 'GET')
@@ -629,5 +682,324 @@ class TestWSGIContext(unittest.TestCase):
        self.assertRaises(StopIteration, iterator.next)


class TestPipelineWrapper(unittest.TestCase):

    def setUp(self):
        config = """
        [DEFAULT]
        swift_dir = TEMPDIR

        [pipeline:main]
        pipeline = healthcheck catch_errors tempurl proxy-server

        [app:proxy-server]
        use = egg:swift#proxy
        conn_timeout = 0.2

        [filter:catch_errors]
        use = egg:swift#catch_errors

        [filter:healthcheck]
        use = egg:swift#healthcheck

        [filter:tempurl]
        paste.filter_factory = swift.common.middleware.tempurl:filter_factory
        """

        contents = dedent(config)
        with temptree(['proxy-server.conf']) as t:
            conf_file = os.path.join(t, 'proxy-server.conf')
            with open(conf_file, 'w') as f:
                f.write(contents.replace('TEMPDIR', t))
            ctx = wsgi.loadcontext(loadwsgi.APP, conf_file, global_conf={})
        self.pipe = wsgi.PipelineWrapper(ctx)

    def _entry_point_names(self):
        # Helper method to return a list of the entry point names for the
        # filters in the pipeline.
        return [c.entry_point_name for c in self.pipe.context.filter_contexts]

    def test_startswith(self):
        self.assertTrue(self.pipe.startswith("healthcheck"))
        self.assertFalse(self.pipe.startswith("tempurl"))

    def test_startswith_no_filters(self):
        config = """
        [DEFAULT]
        swift_dir = TEMPDIR

        [pipeline:main]
        pipeline = proxy-server

        [app:proxy-server]
        use = egg:swift#proxy
        conn_timeout = 0.2
        """
        contents = dedent(config)
        with temptree(['proxy-server.conf']) as t:
            conf_file = os.path.join(t, 'proxy-server.conf')
            with open(conf_file, 'w') as f:
                f.write(contents.replace('TEMPDIR', t))
            ctx = wsgi.loadcontext(loadwsgi.APP, conf_file, global_conf={})
        pipe = wsgi.PipelineWrapper(ctx)
        self.assertTrue(pipe.startswith('proxy'))

    def test_insert_filter(self):
        original_modules = ['healthcheck', 'catch_errors', None]
        self.assertEqual(self._entry_point_names(), original_modules)

        self.pipe.insert_filter(self.pipe.create_filter('catch_errors'))
        expected_modules = ['catch_errors', 'healthcheck',
                            'catch_errors', None]
        self.assertEqual(self._entry_point_names(), expected_modules)

    def test_str(self):
        self.assertEqual(
            str(self.pipe),
            "healthcheck catch_errors " +
            "swift.common.middleware.tempurl:filter_factory proxy")

    def test_str_unknown_filter(self):
        self.pipe.context.filter_contexts[0].entry_point_name = None
        self.pipe.context.filter_contexts[0].object = 'mysterious'
        self.assertEqual(
            str(self.pipe),
            "<unknown context> catch_errors " +
            "swift.common.middleware.tempurl:filter_factory proxy")


class TestPipelineModification(unittest.TestCase):
    def pipeline_modules(self, app):
        # This is rather brittle; it'll break if a middleware stores its app
        # anywhere other than an attribute named "app", but it works for now.
        pipe = []
        for _ in xrange(1000):
            pipe.append(app.__class__.__module__)
            if not hasattr(app, 'app'):
                break
            app = app.app
        return pipe

    def test_load_app(self):
        config = """
        [DEFAULT]
        swift_dir = TEMPDIR

        [pipeline:main]
        pipeline = healthcheck proxy-server

        [app:proxy-server]
        use = egg:swift#proxy
        conn_timeout = 0.2

        [filter:catch_errors]
        use = egg:swift#catch_errors

        [filter:healthcheck]
        use = egg:swift#healthcheck
        """

        def modify_func(app, pipe):
            new = pipe.create_filter('catch_errors')
            pipe.insert_filter(new)

        contents = dedent(config)
        with temptree(['proxy-server.conf']) as t:
            conf_file = os.path.join(t, 'proxy-server.conf')
            with open(conf_file, 'w') as f:
                f.write(contents.replace('TEMPDIR', t))
            _fake_rings(t)
            with mock.patch(
                    'swift.proxy.server.Application.modify_wsgi_pipeline',
                    modify_func):
                app = wsgi.loadapp(conf_file, global_conf={})
            exp = swift.common.middleware.catch_errors.CatchErrorMiddleware
            self.assertTrue(isinstance(app, exp), app)
            exp = swift.common.middleware.healthcheck.HealthCheckMiddleware
            self.assertTrue(isinstance(app.app, exp), app.app)
            exp = swift.proxy.server.Application
            self.assertTrue(isinstance(app.app.app, exp), app.app.app)

    def test_proxy_unmodified_wsgi_pipeline(self):
        # Make sure things are sane even when we modify nothing
        config = """
        [DEFAULT]
        swift_dir = TEMPDIR

        [pipeline:main]
        pipeline = catch_errors gatekeeper proxy-server

        [app:proxy-server]
        use = egg:swift#proxy
        conn_timeout = 0.2

        [filter:catch_errors]
        use = egg:swift#catch_errors

        [filter:gatekeeper]
        use = egg:swift#gatekeeper
        """

        contents = dedent(config)
        with temptree(['proxy-server.conf']) as t:
            conf_file = os.path.join(t, 'proxy-server.conf')
            with open(conf_file, 'w') as f:
                f.write(contents.replace('TEMPDIR', t))
            _fake_rings(t)
            app = wsgi.loadapp(conf_file, global_conf={})

        self.assertEqual(self.pipeline_modules(app),
                         ['swift.common.middleware.catch_errors',
                          'swift.common.middleware.gatekeeper',
                          'swift.proxy.server'])

    def test_proxy_modify_wsgi_pipeline(self):
        config = """
        [DEFAULT]
        swift_dir = TEMPDIR

        [pipeline:main]
        pipeline = healthcheck proxy-server

        [app:proxy-server]
        use = egg:swift#proxy
        conn_timeout = 0.2

        [filter:healthcheck]
        use = egg:swift#healthcheck
        """

        contents = dedent(config)
        with temptree(['proxy-server.conf']) as t:
            conf_file = os.path.join(t, 'proxy-server.conf')
            with open(conf_file, 'w') as f:
                f.write(contents.replace('TEMPDIR', t))
            _fake_rings(t)
            app = wsgi.loadapp(conf_file, global_conf={})

        self.assertEqual(self.pipeline_modules(app),
                         ['swift.common.middleware.catch_errors',
                          'swift.common.middleware.gatekeeper',
                          'swift.common.middleware.healthcheck',
                          'swift.proxy.server'])

    def test_proxy_modify_wsgi_pipeline_ordering(self):
        config = """
        [DEFAULT]
        swift_dir = TEMPDIR

        [pipeline:main]
        pipeline = healthcheck proxy-logging bulk tempurl proxy-server

        [app:proxy-server]
        use = egg:swift#proxy
        conn_timeout = 0.2

        [filter:healthcheck]
        use = egg:swift#healthcheck

        [filter:proxy-logging]
        use = egg:swift#proxy_logging

        [filter:bulk]
        use = egg:swift#bulk

        [filter:tempurl]
        use = egg:swift#tempurl
        """

        new_req_filters = [
            # not in pipeline, no afters
            {'name': 'catch_errors'},
            # already in pipeline
            {'name': 'proxy_logging',
             'after': ['catch_errors']},
            # not in pipeline, comes after more than one thing
            {'name': 'container_quotas',
             'after': ['catch_errors', 'bulk']}]

        contents = dedent(config)
        with temptree(['proxy-server.conf']) as t:
            conf_file = os.path.join(t, 'proxy-server.conf')
            with open(conf_file, 'w') as f:
                f.write(contents.replace('TEMPDIR', t))
            _fake_rings(t)
            with mock.patch.object(swift.proxy.server, 'required_filters',
                                   new_req_filters):
                app = wsgi.loadapp(conf_file, global_conf={})

        self.assertEqual(self.pipeline_modules(app), [
            'swift.common.middleware.catch_errors',
            'swift.common.middleware.healthcheck',
            'swift.common.middleware.proxy_logging',
            'swift.common.middleware.bulk',
            'swift.common.middleware.container_quotas',
            'swift.common.middleware.tempurl',
            'swift.proxy.server'])

    def _proxy_modify_wsgi_pipeline(self, pipe):
        config = """
        [DEFAULT]
        swift_dir = TEMPDIR

        [pipeline:main]
        pipeline = %s

        [app:proxy-server]
        use = egg:swift#proxy
        conn_timeout = 0.2

        [filter:healthcheck]
        use = egg:swift#healthcheck

        [filter:catch_errors]
        use = egg:swift#catch_errors

        [filter:gatekeeper]
        use = egg:swift#gatekeeper
        """
        config = config % (pipe,)
        contents = dedent(config)
        with temptree(['proxy-server.conf']) as t:
            conf_file = os.path.join(t, 'proxy-server.conf')
            with open(conf_file, 'w') as f:
                f.write(contents.replace('TEMPDIR', t))
            _fake_rings(t)
            app = wsgi.loadapp(conf_file, global_conf={})
        return app

    def test_gatekeeper_insertion_catch_errors_configured_at_start(self):
        # catch_errors is configured at start, gatekeeper is not configured,
        # so gatekeeper should be inserted just after catch_errors
        pipe = 'catch_errors healthcheck proxy-server'
        app = self._proxy_modify_wsgi_pipeline(pipe)
        self.assertEqual(self.pipeline_modules(app), [
            'swift.common.middleware.catch_errors',
            'swift.common.middleware.gatekeeper',
            'swift.common.middleware.healthcheck',
            'swift.proxy.server'])

    def test_gatekeeper_insertion_catch_errors_configured_not_at_start(self):
        # catch_errors is configured, gatekeeper is not configured, so
        # gatekeeper should be inserted at start of pipeline
        pipe = 'healthcheck catch_errors proxy-server'
        app = self._proxy_modify_wsgi_pipeline(pipe)
        self.assertEqual(self.pipeline_modules(app), [
            'swift.common.middleware.gatekeeper',
            'swift.common.middleware.healthcheck',
            'swift.common.middleware.catch_errors',
            'swift.proxy.server'])

    def test_catch_errors_gatekeeper_configured_not_at_start(self):
        # catch_errors is configured, gatekeeper is configured, so
        # no change should be made to pipeline
        pipe = 'healthcheck catch_errors gatekeeper proxy-server'
        app = self._proxy_modify_wsgi_pipeline(pipe)
        self.assertEqual(self.pipeline_modules(app), [
            'swift.common.middleware.healthcheck',
            'swift.common.middleware.catch_errors',
            'swift.common.middleware.gatekeeper',
            'swift.proxy.server'])

if __name__ == '__main__':
    unittest.main()
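
Note: the ordering and gatekeeper tests above all exercise one placement
rule: a required filter that is missing from the configured pipeline is
spliced in after the last of its 'after' anchors actually present, or at
the head of the pipeline when none are. A simplified sketch of that rule
over plain name lists (not swift.proxy.server.modify_wsgi_pipeline itself;
gatekeeper's anchors appear to be computed conditionally, which is why it
lands at the front when catch_errors is not first):

def place_filter(pipeline, name, after=()):
    # leave the operator's pipeline alone if the filter is configured
    if name in pipeline:
        return pipeline
    indexes = [pipeline.index(a) for a in after if a in pipeline]
    pos = max(indexes) + 1 if indexes else 0
    return pipeline[:pos] + [name] + pipeline[pos:]

pipe = ['healthcheck', 'proxy_logging', 'bulk', 'tempurl', 'proxy-server']
pipe = place_filter(pipe, 'catch_errors')
pipe = place_filter(pipe, 'container_quotas', after=['catch_errors', 'bulk'])
# pipe is now: catch_errors healthcheck proxy_logging bulk
#              container_quotas tempurl proxy-server
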
@@ -33,6 +33,7 @@ from swift.common.utils import normalize_timestamp, mkdirs, public, replication
from test.unit import fake_http_connect
from swift.common.storage_policy import StoragePolicy, \
    StoragePolicyCollection, POLICY_INDEX
from swift.common.request_helpers import get_sys_meta_prefix


@contextmanager
@@ -531,6 +532,64 @@ class TestContainerController(unittest.TestCase):
        self.assertEquals(resp.status_int, 204)
        self.assert_('x-container-meta-test' not in resp.headers)

    def test_PUT_GET_sys_metadata(self):
        prefix = get_sys_meta_prefix('container')
        key = '%sTest' % prefix
        key2 = '%sTest2' % prefix
        # Set metadata header
        req = Request.blank('/sda1/p/a/c', environ={'REQUEST_METHOD': 'PUT'},
                            headers={'X-Timestamp': normalize_timestamp(1),
                                     key: 'Value'})
        resp = self.controller.PUT(req)
        self.assertEquals(resp.status_int, 201)
        req = Request.blank('/sda1/p/a/c')
        resp = self.controller.GET(req)
        self.assertEquals(resp.status_int, 204)
        self.assertEquals(resp.headers.get(key.lower()), 'Value')
        # Set another metadata header, ensuring old one doesn't disappear
        req = Request.blank('/sda1/p/a/c', environ={'REQUEST_METHOD': 'POST'},
                            headers={'X-Timestamp': normalize_timestamp(1),
                                     key2: 'Value2'})
        resp = self.controller.POST(req)
        self.assertEquals(resp.status_int, 204)
        req = Request.blank('/sda1/p/a/c')
        resp = self.controller.GET(req)
        self.assertEquals(resp.status_int, 204)
        self.assertEquals(resp.headers.get(key.lower()), 'Value')
        self.assertEquals(resp.headers.get(key2.lower()), 'Value2')
        # Update metadata header
        req = Request.blank('/sda1/p/a/c', environ={'REQUEST_METHOD': 'PUT'},
                            headers={'X-Timestamp': normalize_timestamp(3),
                                     key: 'New Value'})
        resp = self.controller.PUT(req)
        self.assertEquals(resp.status_int, 202)
        req = Request.blank('/sda1/p/a/c')
        resp = self.controller.GET(req)
        self.assertEquals(resp.status_int, 204)
        self.assertEquals(resp.headers.get(key.lower()),
                          'New Value')
        # Send old update to metadata header
        req = Request.blank('/sda1/p/a/c', environ={'REQUEST_METHOD': 'PUT'},
                            headers={'X-Timestamp': normalize_timestamp(2),
                                     key: 'Old Value'})
        resp = self.controller.PUT(req)
        self.assertEquals(resp.status_int, 202)
        req = Request.blank('/sda1/p/a/c')
        resp = self.controller.GET(req)
        self.assertEquals(resp.status_int, 204)
        self.assertEquals(resp.headers.get(key.lower()),
                          'New Value')
        # Remove metadata header (by setting it to empty)
        req = Request.blank('/sda1/p/a/c', environ={'REQUEST_METHOD': 'PUT'},
                            headers={'X-Timestamp': normalize_timestamp(4),
                                     key: ''})
        resp = self.controller.PUT(req)
        self.assertEquals(resp.status_int, 202)
        req = Request.blank('/sda1/p/a/c')
        resp = self.controller.GET(req)
        self.assertEquals(resp.status_int, 204)
        self.assert_(key.lower() not in resp.headers)

    def test_PUT_invalid_partition(self):
        req = Request.blank('/sda1/./a/c', environ={'REQUEST_METHOD': 'PUT',
                            'HTTP_X_TIMESTAMP': '1'})
@@ -608,6 +667,56 @@ class TestContainerController(unittest.TestCase):
        self.assertEquals(resp.status_int, 204)
        self.assert_('x-container-meta-test' not in resp.headers)

    def test_POST_HEAD_sys_metadata(self):
        prefix = get_sys_meta_prefix('container')
        key = '%sTest' % prefix
        req = Request.blank('/sda1/p/a/c', environ={'REQUEST_METHOD': 'PUT'},
                            headers={'X-Timestamp': normalize_timestamp(1)})
        resp = self.controller.PUT(req)
        self.assertEquals(resp.status_int, 201)
        # Set metadata header
        req = Request.blank('/sda1/p/a/c', environ={'REQUEST_METHOD': 'POST'},
                            headers={'X-Timestamp': normalize_timestamp(1),
                                     key: 'Value'})
        resp = self.controller.POST(req)
        self.assertEquals(resp.status_int, 204)
        req = Request.blank('/sda1/p/a/c', environ={'REQUEST_METHOD': 'HEAD'})
        resp = self.controller.HEAD(req)
        self.assertEquals(resp.status_int, 204)
        self.assertEquals(resp.headers.get(key.lower()), 'Value')
        # Update metadata header
        req = Request.blank('/sda1/p/a/c', environ={'REQUEST_METHOD': 'POST'},
                            headers={'X-Timestamp': normalize_timestamp(3),
                                     key: 'New Value'})
        resp = self.controller.POST(req)
        self.assertEquals(resp.status_int, 204)
        req = Request.blank('/sda1/p/a/c', environ={'REQUEST_METHOD': 'HEAD'})
        resp = self.controller.HEAD(req)
        self.assertEquals(resp.status_int, 204)
        self.assertEquals(resp.headers.get(key.lower()),
                          'New Value')
        # Send old update to metadata header
        req = Request.blank('/sda1/p/a/c', environ={'REQUEST_METHOD': 'POST'},
                            headers={'X-Timestamp': normalize_timestamp(2),
                                     key: 'Old Value'})
        resp = self.controller.POST(req)
        self.assertEquals(resp.status_int, 204)
        req = Request.blank('/sda1/p/a/c', environ={'REQUEST_METHOD': 'HEAD'})
        resp = self.controller.HEAD(req)
        self.assertEquals(resp.status_int, 204)
        self.assertEquals(resp.headers.get(key.lower()),
                          'New Value')
        # Remove metadata header (by setting it to empty)
        req = Request.blank('/sda1/p/a/c', environ={'REQUEST_METHOD': 'POST'},
                            headers={'X-Timestamp': normalize_timestamp(4),
                                     key: ''})
        resp = self.controller.POST(req)
        self.assertEquals(resp.status_int, 204)
        req = Request.blank('/sda1/p/a/c', environ={'REQUEST_METHOD': 'HEAD'})
        resp = self.controller.HEAD(req)
        self.assertEquals(resp.status_int, 204)
        self.assert_(key.lower() not in resp.headers)

    def test_POST_invalid_partition(self):
        req = Request.blank('/sda1/./a/c', environ={'REQUEST_METHOD': 'POST',
                            'HTTP_X_TIMESTAMP': '1'})
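
Note: both sys-metadata tests above encode the same last-writer-wins rule:
an incoming header only takes effect when its X-Timestamp beats the stored
one, and an empty value acts as a delete. A toy model of that merge
(hypothetical helper, not the container broker's real code):

def merge_meta(stored, incoming):
    # each value is a (value, timestamp) pair
    merged = dict(stored)
    for key, (value, ts) in incoming.items():
        if key not in merged or ts > merged[key][1]:
            merged[key] = (value, ts)
    # an empty value deletes the item once it wins
    return dict((k, v) for k, v in merged.items() if v[0])

meta = merge_meta({}, {'x-container-sysmeta-test': ('Value', 3)})
meta = merge_meta(meta, {'x-container-sysmeta-test': ('Old Value', 2)})
assert meta['x-container-sysmeta-test'][0] == 'Value'
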
@@ -22,7 +22,7 @@ import mock
from test.unit import FakeLogger
from swift.container import sync
from swift.common import utils
from swiftclient import ClientException
from swift.common.exceptions import ClientException


utils.HASH_PATH_SUFFIX = 'endcap'
@@ -1,5 +1,19 @@
#!/usr/bin/env python
#-*- coding:utf-8 -*-
# Copyright (c) 2013 OpenStack Foundation
#
# Licensed under the Apache License, Version 2.0 (the "License");
# you may not use this file except in compliance with the License.
# You may obtain a copy of the License at
#
#    http://www.apache.org/licenses/LICENSE-2.0
#
# Unless required by applicable law or agreed to in writing, software
# distributed under the License is distributed on an "AS IS" BASIS,
# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or
# implied.
# See the License for the specific language governing permissions and
# limitations under the License.

import os
import unittest
@@ -40,7 +40,8 @@ from swift.common.utils import hash_path, mkdirs, normalize_timestamp
from swift.common import ring
from swift.common.exceptions import DiskFileNotExist, DiskFileQuarantined, \
    DiskFileDeviceUnavailable, DiskFileDeleted, DiskFileNotOpen, \
    DiskFileError, ReplicationLockTimeout
    DiskFileError, ReplicationLockTimeout, PathNotDir, DiskFileCollision, \
    DiskFileExpired, SwiftException, DiskFileNoSpace


def _create_test_ring(path):
@@ -97,6 +98,31 @@ class TestDiskFileModuleMethods(unittest.TestCase):
    def tearDown(self):
        rmtree(self.testdir, ignore_errors=1)

    def test_quarantine_renamer(self):
        # we use this for convenience, not really about a diskfile layout
        df = self.df_mgr.get_diskfile('sda', '0', 'a', 'c', 'o')
        mkdirs(df._datadir)
        exp_dir = os.path.join(self.devices, 'quarantined', 'objects',
                               os.path.basename(df._datadir))
        qbit = os.path.join(df._datadir, 'qbit')
        with open(qbit, 'w') as f:
            f.write('abc')
        to_dir = diskfile.quarantine_renamer(self.devices, qbit)
        self.assertEqual(to_dir, exp_dir)
        self.assertRaises(OSError, diskfile.quarantine_renamer, self.devices,
                          qbit)

    def test_hash_suffix_enoent(self):
        self.assertRaises(PathNotDir, diskfile.hash_suffix,
                          os.path.join(self.testdir, "doesnotexist"), 101)

    def test_hash_suffix_oserror(self):
        mocked_os_listdir = mock.Mock(
            side_effect=OSError(errno.EACCES, os.strerror(errno.EACCES)))
        with mock.patch("os.listdir", mocked_os_listdir):
            self.assertRaises(OSError, diskfile.hash_suffix,
                              os.path.join(self.testdir, "doesnotexist"), 101)

    def test_hash_suffix_hash_dir_is_file_quarantine(self):
        df = self.df_mgr.get_diskfile('sda', '0', 'a', 'c', 'o')
        mkdirs(os.path.dirname(df._datadir))
@@ -136,6 +162,35 @@ class TestDiskFileModuleMethods(unittest.TestCase):
        diskfile.hash_suffix(whole_path_from, 99)
        self.assertEquals(len(os.listdir(self.parts['0'])), 0)

    def test_hash_suffix_oserror_on_hcl(self):
        df = self.df_mgr.get_diskfile('sda', '0', 'a', 'c', 'o')
        mkdirs(df._datadir)
        f = open(
            os.path.join(df._datadir,
                         normalize_timestamp(time() - 100) + '.ts'),
            'wb')
        f.write('1234567890')
        f.close()
        ohash = hash_path('a', 'c', 'o')
        data_dir = ohash[-3:]
        whole_path_from = os.path.join(self.objects, '0', data_dir)
        state = [0]
        orig_os_listdir = os.listdir

        def mock_os_listdir(*args, **kwargs):
            # We want the first call to os.listdir() to succeed, which is the
            # one directly from hash_suffix() itself, but then we want to fail
            # the next call to os.listdir() which is from
            # hash_cleanup_listdir()
            if state[0] == 1:
                raise OSError(errno.EACCES, os.strerror(errno.EACCES))
            state[0] = 1
            return orig_os_listdir(*args, **kwargs)

        with mock.patch('os.listdir', mock_os_listdir):
            self.assertRaises(OSError, diskfile.hash_suffix, whole_path_from,
                              101)

    def test_hash_suffix_multi_file_one(self):
        df = self.df_mgr.get_diskfile('sda', '0', 'a', 'c', 'o')
        mkdirs(df._datadir)
@@ -211,6 +266,24 @@ class TestDiskFileModuleMethods(unittest.TestCase):
        diskfile.invalidate_hash(whole_path_from)
        assertFileData(hashes_file, check_pickle_data)

    def test_invalidate_hash_bad_pickle(self):
        df = self.df_mgr.get_diskfile('sda', '0', 'a', 'c', 'o')
        mkdirs(df._datadir)
        ohash = hash_path('a', 'c', 'o')
        data_dir = ohash[-3:]
        whole_path_from = os.path.join(self.objects, '0', data_dir)
        hashes_file = os.path.join(self.objects, '0',
                                   diskfile.HASH_FILE)
        for data_hash in [{data_dir: None}, {data_dir: 'abcdefg'}]:
            with open(hashes_file, 'wb') as fp:
                fp.write('bad hash data')
            try:
                diskfile.invalidate_hash(whole_path_from)
            except Exception as err:
                self.fail("Unexpected exception raised: %s" % err)
            else:
                pass

    def test_get_hashes(self):
        df = self.df_mgr.get_diskfile('sda', '0', 'a', 'c', 'o')
        mkdirs(df._datadir)
@@ -261,6 +334,39 @@ class TestDiskFileModuleMethods(unittest.TestCase):
                                         part, recalculate=['a83'])
        self.assertEquals(i[0], 2)

    def test_get_hashes_unmodified_norecalc(self):
        df = self.df_mgr.get_diskfile('sda', '0', 'a', 'c', 'o')
        mkdirs(df._datadir)
        with open(
                os.path.join(df._datadir,
                             normalize_timestamp(time()) + '.ts'),
                'wb') as f:
            f.write('1234567890')
        part = os.path.join(self.objects, '0')
        hashed, hashes_0 = diskfile.get_hashes(part)
        self.assertEqual(hashed, 1)
        self.assertTrue('a83' in hashes_0)
        hashed, hashes_1 = diskfile.get_hashes(part)
        self.assertEqual(hashed, 0)
        self.assertTrue('a83' in hashes_0)
        self.assertEqual(hashes_1, hashes_0)

    def test_get_hashes_hash_suffix_error(self):
        df = self.df_mgr.get_diskfile('sda', '0', 'a', 'c', 'o')
        mkdirs(df._datadir)
        with open(
                os.path.join(df._datadir,
                             normalize_timestamp(time()) + '.ts'),
                'wb') as f:
            f.write('1234567890')
        part = os.path.join(self.objects, '0')
        mocked_hash_suffix = mock.MagicMock(
            side_effect=OSError(errno.EACCES, os.strerror(errno.EACCES)))
        with mock.patch('swift.obj.diskfile.hash_suffix', mocked_hash_suffix):
            hashed, hashes = diskfile.get_hashes(part)
            self.assertEqual(hashed, 0)
            self.assertEqual(hashes, {'a83': None})

    def test_get_hashes_unmodified_and_zero_bytes(self):
        df = self.df_mgr.get_diskfile('sda', '0', 'a', 'c', 'o')
        mkdirs(df._datadir)
@@ -303,8 +409,8 @@ class TestDiskFileModuleMethods(unittest.TestCase):
                                         part, recalculate=['a83'])
        self.assertEquals(i[0], 3)

    def test_hash_cleanup_listdir(self):
        file_list = []
    def check_hash_cleanup_listdir(self, input_files, output_files):
        file_list = list(input_files)

        def mock_listdir(path):
            return list(file_list)
@@ -313,35 +419,96 @@ class TestDiskFileModuleMethods(unittest.TestCase):
            file_list.remove(os.path.basename(path))

        with unit_mock({'os.listdir': mock_listdir, 'os.unlink': mock_unlink}):
            # purge .data if there's a newer .ts
            file1 = normalize_timestamp(time()) + '.data'
            file2 = normalize_timestamp(time() + 1) + '.ts'
            file_list = [file1, file2]
            self.assertEquals(diskfile.hash_cleanup_listdir('/whatever'),
                              [file2])
                              output_files)

            # purge .ts if there's a newer .data
            file1 = normalize_timestamp(time()) + '.ts'
            file2 = normalize_timestamp(time() + 1) + '.data'
            file_list = [file1, file2]
            self.assertEquals(diskfile.hash_cleanup_listdir('/whatever'),
                              [file2])
    def test_hash_cleanup_listdir_purge_data_newer_ts(self):
        # purge .data if there's a newer .ts
        file1 = normalize_timestamp(time()) + '.data'
        file2 = normalize_timestamp(time() + 1) + '.ts'
        file_list = [file1, file2]
        self.check_hash_cleanup_listdir(file_list, [file2])

            # keep .meta and .data if meta newer than data
            file1 = normalize_timestamp(time()) + '.ts'
            file2 = normalize_timestamp(time() + 1) + '.data'
            file3 = normalize_timestamp(time() + 2) + '.meta'
            file_list = [file1, file2, file3]
            self.assertEquals(diskfile.hash_cleanup_listdir('/whatever'),
                              [file3, file2])
    def test_hash_cleanup_listdir_purge_ts_newer_data(self):
        # purge .ts if there's a newer .data
        file1 = normalize_timestamp(time()) + '.ts'
        file2 = normalize_timestamp(time() + 1) + '.data'
        file_list = [file1, file2]
        self.check_hash_cleanup_listdir(file_list, [file2])

            # keep only latest of multiple .ts files
            file1 = normalize_timestamp(time()) + '.ts'
            file2 = normalize_timestamp(time() + 1) + '.ts'
            file3 = normalize_timestamp(time() + 2) + '.ts'
            file_list = [file1, file2, file3]
            self.assertEquals(diskfile.hash_cleanup_listdir('/whatever'),
                              [file3])
    def test_hash_cleanup_listdir_keep_meta_data_purge_ts(self):
        # keep .meta and .data if meta newer than data and purge .ts
        file1 = normalize_timestamp(time()) + '.ts'
        file2 = normalize_timestamp(time() + 1) + '.data'
        file3 = normalize_timestamp(time() + 2) + '.meta'
        file_list = [file1, file2, file3]
        self.check_hash_cleanup_listdir(file_list, [file3, file2])

    def test_hash_cleanup_listdir_keep_one_ts(self):
        # keep only latest of multiple .ts files
        file1 = normalize_timestamp(time()) + '.ts'
        file2 = normalize_timestamp(time() + 1) + '.ts'
        file3 = normalize_timestamp(time() + 2) + '.ts'
        file_list = [file1, file2, file3]
        self.check_hash_cleanup_listdir(file_list, [file3])

    def test_hash_cleanup_listdir_keep_one_data(self):
        # keep only latest of multiple .data files
        file1 = normalize_timestamp(time()) + '.data'
        file2 = normalize_timestamp(time() + 1) + '.data'
        file3 = normalize_timestamp(time() + 2) + '.data'
        file_list = [file1, file2, file3]
        self.check_hash_cleanup_listdir(file_list, [file3])

    def test_hash_cleanup_listdir_keep_one_meta(self):
        # keep only latest of multiple .meta files
        file1 = normalize_timestamp(time()) + '.data'
        file2 = normalize_timestamp(time() + 1) + '.meta'
        file3 = normalize_timestamp(time() + 2) + '.meta'
        file_list = [file1, file2, file3]
        self.check_hash_cleanup_listdir(file_list, [file3, file1])

    def test_hash_cleanup_listdir_ignore_orphaned_ts(self):
        # A more recent orphaned .meta file will prevent old .ts files
        # from being cleaned up otherwise
        file1 = normalize_timestamp(time()) + '.ts'
        file2 = normalize_timestamp(time() + 1) + '.ts'
        file3 = normalize_timestamp(time() + 2) + '.meta'
        file_list = [file1, file2, file3]
        self.check_hash_cleanup_listdir(file_list, [file3, file2])

    def test_hash_cleanup_listdir_purge_old_data_only(self):
        # Oldest .data will be purge, .meta and .ts won't be touched
        file1 = normalize_timestamp(time()) + '.data'
        file2 = normalize_timestamp(time() + 1) + '.ts'
        file3 = normalize_timestamp(time() + 2) + '.meta'
        file_list = [file1, file2, file3]
        self.check_hash_cleanup_listdir(file_list, [file3, file2])

    def test_hash_cleanup_listdir_purge_old_ts(self):
        # A single old .ts file will be removed
        file1 = normalize_timestamp(time() - (diskfile.ONE_WEEK + 1)) + '.ts'
        file_list = [file1]
        self.check_hash_cleanup_listdir(file_list, [])

    def test_hash_cleanup_listdir_meta_keeps_old_ts(self):
        # An orphaned .meta will not clean up a very old .ts
        file1 = normalize_timestamp(time() - (diskfile.ONE_WEEK + 1)) + '.ts'
        file2 = normalize_timestamp(time() + 2) + '.meta'
        file_list = [file1, file2]
        self.check_hash_cleanup_listdir(file_list, [file2, file1])

    def test_hash_cleanup_listdir_keep_single_old_data(self):
        # A single old .data file will not be removed
        file1 = normalize_timestamp(time() - (diskfile.ONE_WEEK + 1)) + '.data'
        file_list = [file1]
        self.check_hash_cleanup_listdir(file_list, [file1])

    def test_hash_cleanup_listdir_keep_single_old_meta(self):
        # A single old .meta file will not be removed
        file1 = normalize_timestamp(time() - (diskfile.ONE_WEEK + 1)) + '.meta'
        file_list = [file1]
        self.check_hash_cleanup_listdir(file_list, [file1])

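Note: taken together, the check_hash_cleanup_listdir cases pin down a
compact retention policy: keep the newest .meta (if any) plus the newest of
.data/.ts, and reclaim a lone tombstone once it is older than ONE_WEEK. A
behavioral sketch of those rules as a pure function; this is what the tests
imply, not diskfile.hash_cleanup_listdir's actual body:

import time

ONE_WEEK = 7 * 24 * 60 * 60

def surviving_files(file_list, reclaim_age=ONE_WEEK):
    # file names begin with a normalized timestamp, so lexical order is
    # chronological order
    if not file_list:
        return []
    files = sorted(file_list, reverse=True)
    if len(files) == 1 and files[0].endswith('.ts'):
        # a lone tombstone is reclaimed once it is old enough
        age = time.time() - float(files[0].rsplit('.', 1)[0])
        return [] if age > reclaim_age else files
    newest_meta = next((f for f in files if f.endswith('.meta')), None)
    newest_obj = next((f for f in files
                       if f.endswith('.data') or f.endswith('.ts')), None)
    return sorted((f for f in (newest_meta, newest_obj) if f), reverse=True)
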
class TestObjectAuditLocationGenerator(unittest.TestCase):
@@ -355,6 +522,10 @@ class TestObjectAuditLocationGenerator(unittest.TestCase):
        with open(path, 'w'):
            pass

    def test_audit_location_class(self):
        al = diskfile.AuditLocation('abc', '123', '_-_')
        self.assertEqual(str(al), 'abc')

    def test_finding_of_hashdirs(self):
        with temptree([]) as tmpdir:
            # the good
@@ -435,6 +606,16 @@ class TestObjectAuditLocationGenerator(unittest.TestCase):
                                  "ec2871fe724411f91787462f97d30df3"),
                     "sdp", "2607")])

            # Do it again, this time with a logger.
            ml = mock.MagicMock()
            locations = [
                (loc.path, loc.device, loc.partition)
                for loc in diskfile.object_audit_location_generator(
                    devices=tmpdir, mount_check=True, logger=ml)]
            ml.debug.assert_called_once_with(
                'Skipping %s as it is not mounted',
                'sdq')

    def test_only_catch_expected_errors(self):
        # Crazy exceptions should still escape object_audit_location_generator
        # so that errors get logged and a human can see what's going wrong;
@@ -470,7 +651,9 @@ class TestObjectAuditLocationGenerator(unittest.TestCase):
class TestDiskFileManager(unittest.TestCase):

    def setUp(self):
        self.testdir = os.path.join(mkdtemp(), 'tmp_test_obj_server_DiskFile')
        self.tmpdir = mkdtemp()
        self.testdir = os.path.join(
            self.tmpdir, 'tmp_test_obj_server_DiskFile')
        mkdirs(os.path.join(self.testdir, 'sda1', 'tmp'))
        mkdirs(os.path.join(self.testdir, 'sda2', 'tmp'))
        self._orig_tpool_exc = tpool.execute
@@ -479,6 +662,47 @@ class TestDiskFileManager(unittest.TestCase):
                         keep_cache_size=2 * 1024)
        self.df_mgr = diskfile.DiskFileManager(self.conf, FakeLogger())

    def tearDown(self):
        rmtree(self.tmpdir, ignore_errors=1)

    def test_construct_dev_path(self):
        res_path = self.df_mgr.construct_dev_path('abc')
        self.assertEqual(os.path.join(self.df_mgr.devices, 'abc'), res_path)

    def test_pickle_async_update(self):
        self.df_mgr.logger.increment = mock.MagicMock()
        ts = normalize_timestamp(10000.0)
        with mock.patch('swift.obj.diskfile.write_pickle') as wp:
            self.df_mgr.pickle_async_update('sda1', 'a', 'c', 'o',
                                            dict(a=1, b=2), ts)
            dp = self.df_mgr.construct_dev_path('sda1')
            ohash = diskfile.hash_path('a', 'c', 'o')
            wp.assert_called_with({'a': 1, 'b': 2},
                                  os.path.join(dp, diskfile.ASYNCDIR,
                                               ohash[-3:], ohash + '-' + ts),
                                  os.path.join(dp, 'tmp'))
        self.df_mgr.logger.increment.assert_called_with('async_pendings')

    def test_object_audit_location_generator(self):
        locations = list(self.df_mgr.object_audit_location_generator())
        self.assertEqual(locations, [])

    def test_get_hashes_bad_dev(self):
        self.df_mgr.mount_check = True
        with mock.patch('swift.obj.diskfile.check_mount',
                        mock.MagicMock(side_effect=[False])):
            self.assertRaises(DiskFileDeviceUnavailable,
                              self.df_mgr.get_hashes, 'sdb1', '0', '123',
                              'objects')

    def test_get_hashes_w_nothing(self):
        hashes = self.df_mgr.get_hashes('sda1', '0', '123', 'objects')
        self.assertEqual(hashes, {})
        # get_hashes creates the partition path, so call again for code
        # path coverage, ensuring the result is unchanged
        hashes = self.df_mgr.get_hashes('sda1', '0', '123', 'objects')
        self.assertEqual(hashes, {})

    def test_replication_lock_on(self):
        # Double check settings
        self.df_mgr.replication_one_per_device = True
@@ -538,17 +762,19 @@ class TestDiskFile(unittest.TestCase):

    def setUp(self):
        """Set up for testing swift.obj.diskfile"""
        self.testdir = os.path.join(mkdtemp(), 'tmp_test_obj_server_DiskFile')
        self.tmpdir = mkdtemp()
        self.testdir = os.path.join(
            self.tmpdir, 'tmp_test_obj_server_DiskFile')
        mkdirs(os.path.join(self.testdir, 'sda1', 'tmp'))
        self._orig_tpool_exc = tpool.execute
        tpool.execute = lambda f, *args, **kwargs: f(*args, **kwargs)
        self.conf = dict(devices=self.testdir, mount_check='false',
                         keep_cache_size=2 * 1024)
                         keep_cache_size=2 * 1024, mb_per_sync=1)
        self.df_mgr = diskfile.DiskFileManager(self.conf, FakeLogger())

    def tearDown(self):
        """Tear down for testing swift.obj.diskfile"""
        rmtree(os.path.dirname(self.testdir))
        rmtree(self.tmpdir, ignore_errors=1)
        tpool.execute = self._orig_tpool_exc

    def _create_ondisk_file(self, df, data, timestamp, metadata=None,
@@ -576,20 +802,42 @@ class TestDiskFile(unittest.TestCase):
                     pickle.dumps(metadata, diskfile.PICKLE_PROTOCOL))

    def _create_test_file(self, data, timestamp=None, metadata=None,
                          account='a', container='c', object='o'):
                          account='a', container='c', obj='o'):
        if metadata is None:
            metadata = {}
        metadata.setdefault('name', '/%s/%s/%s' % (account, container, object))
        df = self.df_mgr.get_diskfile('sda', '0', account, container, object)
        metadata.setdefault('name', '/%s/%s/%s' % (account, container, obj))
        df = self.df_mgr.get_diskfile('sda', '0', account, container, obj)
        self._create_ondisk_file(df, data, timestamp, metadata)
        df = self.df_mgr.get_diskfile('sda', '0', account, container, object)
        df = self.df_mgr.get_diskfile('sda', '0', account, container, obj)
        df.open()
        return df

    def test_open_not_exist(self):
        df = self.df_mgr.get_diskfile('sda1', '0', 'a', 'c', 'o')
        self.assertRaises(DiskFileNotExist, df.open)

    def test_open_expired(self):
        self.assertRaises(DiskFileExpired,
                          self._create_test_file,
                          '1234567890', metadata={'X-Delete-At': '0'})

    def test_open_not_expired(self):
        try:
            self._create_test_file(
                '1234567890', metadata={'X-Delete-At': str(2 * int(time()))})
        except SwiftException as err:
            self.fail("Unexpected swift exception raised: %r" % err)

    def test_get_metadata(self):
        df = self._create_test_file('1234567890', timestamp=42)
        md = df.get_metadata()
        self.assertEquals(md['X-Timestamp'], normalize_timestamp(42))
        self.assertEqual(md['X-Timestamp'], normalize_timestamp(42))

    def test_read_metadata(self):
        self._create_test_file('1234567890', timestamp=42)
        df = self.df_mgr.get_diskfile('sda', '0', 'a', 'c', 'o')
        md = df.read_metadata()
        self.assertEqual(md['X-Timestamp'], normalize_timestamp(42))

    def test_get_metadata_not_opened(self):
        df = self.df_mgr.get_diskfile('sda1', '0', 'a', 'c', 'o')
@@ -625,6 +873,23 @@ class TestDiskFile(unittest.TestCase):
        # new fast-post updateable keys are added
        self.assertEquals('Value2', df._metadata['X-Object-Meta-Key2'])

    def test_disk_file_reader_iter(self):
        df = self._create_test_file('1234567890')
        quarantine_msgs = []
        reader = df.reader(_quarantine_hook=quarantine_msgs.append)
        self.assertEqual(''.join(reader), '1234567890')
        self.assertEqual(quarantine_msgs, [])

    def test_disk_file_reader_iter_w_quarantine(self):
        df = self._create_test_file('1234567890')

        def raise_dfq(m):
            raise DiskFileQuarantined(m)

        reader = df.reader(_quarantine_hook=raise_dfq)
        reader._obj_size += 1
        self.assertRaises(DiskFileQuarantined, ''.join, reader)

    def test_disk_file_app_iter_corners(self):
        df = self._create_test_file('1234567890')
        quarantine_msgs = []
@@ -637,13 +902,21 @@ class TestDiskFile(unittest.TestCase):
        reader = df.reader()
        self.assertEqual(''.join(reader.app_iter_range(5, None)), '67890')

    def test_disk_file_app_iter_range_w_none(self):
        df = self._create_test_file('1234567890')
        quarantine_msgs = []
        reader = df.reader(_quarantine_hook=quarantine_msgs.append)
        self.assertEqual(''.join(reader.app_iter_range(None, None)),
                         '1234567890')
        self.assertEqual(quarantine_msgs, [])

    def test_disk_file_app_iter_partial_closes(self):
        df = self._create_test_file('1234567890')
        quarantine_msgs = []
        reader = df.reader(_quarantine_hook=quarantine_msgs.append)
        it = reader.app_iter_range(0, 5)
        self.assertEquals(quarantine_msgs, [])
        self.assertEqual(''.join(it), '12345')
        self.assertEqual(quarantine_msgs, [])
        self.assertTrue(reader._fp is None)

    def test_disk_file_app_iter_ranges(self):
@@ -653,11 +926,37 @@ class TestDiskFile(unittest.TestCase):
        it = reader.app_iter_ranges([(0, 10), (10, 20), (20, 30)],
                                    'plain/text',
                                    '\r\n--someheader\r\n', 30)
        self.assertEquals(quarantine_msgs, [])
        value = ''.join(it)
        self.assert_('0123456789' in value)
        self.assert_('1123456789' in value)
        self.assert_('2123456789' in value)
        self.assertTrue('0123456789' in value)
        self.assertTrue('1123456789' in value)
        self.assertTrue('2123456789' in value)
        self.assertEqual(quarantine_msgs, [])

    def test_disk_file_app_iter_ranges_w_quarantine(self):
        df = self._create_test_file('012345678911234567892123456789')
        quarantine_msgs = []
        reader = df.reader(_quarantine_hook=quarantine_msgs.append)
        reader._obj_size += 1
        it = reader.app_iter_ranges([(0, 30)],
                                    'plain/text',
                                    '\r\n--someheader\r\n', 30)
        value = ''.join(it)
        self.assertTrue('0123456789' in value)
        self.assertTrue('1123456789' in value)
        self.assertTrue('2123456789' in value)
        self.assertEqual(quarantine_msgs,
                         ["Bytes read: 30, does not match metadata: 31"])

    def test_disk_file_app_iter_ranges_w_no_etag_quarantine(self):
        df = self._create_test_file('012345678911234567892123456789')
        quarantine_msgs = []
        reader = df.reader(_quarantine_hook=quarantine_msgs.append)
        it = reader.app_iter_ranges([(0, 10)],
                                    'plain/text',
                                    '\r\n--someheader\r\n', 30)
        value = ''.join(it)
        self.assertTrue('0123456789' in value)
        self.assertEqual(quarantine_msgs, [])

    def test_disk_file_app_iter_ranges_edges(self):
        df = self._create_test_file('012345678911234567892123456789')
@@ -666,9 +965,9 @@ class TestDiskFile(unittest.TestCase):
        it = reader.app_iter_ranges([(3, 10), (0, 2)], 'application/whatever',
                                    '\r\n--someheader\r\n', 30)
        value = ''.join(it)
        self.assertEquals(quarantine_msgs, [])
        self.assert_('3456789' in value)
        self.assert_('01' in value)
        self.assertTrue('3456789' in value)
        self.assertTrue('01' in value)
        self.assertEqual(quarantine_msgs, [])

    def test_disk_file_large_app_iter_ranges(self):
        # This test case is to make sure that the disk file app_iter_ranges
@@ -712,8 +1011,8 @@ class TestDiskFile(unittest.TestCase):
        reader = df.reader()
        it = reader.app_iter_ranges(None, 'app/something',
                                    '\r\n--someheader\r\n', 150)
        self.assertEquals(quarantine_msgs, [])
        self.assertEqual(''.join(it), '')
        self.assertEqual(quarantine_msgs, [])

    def test_disk_file_mkstemp_creates_dir(self):
        tmpdir = os.path.join(self.testdir, 'sda1', 'tmp')
@@ -723,8 +1022,8 @@ class TestDiskFile(unittest.TestCase):
        self.assert_(os.path.exists(tmpdir))

    def _get_open_disk_file(self, invalid_type=None, obj_name='o', fsize=1024,
                            csize=8, mark_deleted=False, ts=None,
                            mount_check=False, extra_metadata=None):
                            csize=8, mark_deleted=False, prealloc=False,
                            ts=None, mount_check=False, extra_metadata=None):
        '''returns a DiskFile'''
        df = self.df_mgr.get_diskfile('sda1', '0', 'a', 'c', obj_name)
        data = '0' * fsize
@@ -733,7 +1032,11 @@ class TestDiskFile(unittest.TestCase):
            timestamp = ts
        else:
            timestamp = normalize_timestamp(time())
        with df.create() as writer:
        if prealloc:
            prealloc_size = fsize
        else:
            prealloc_size = None
        with df.create(size=prealloc_size) as writer:
            upload_size = writer.write(data)
            etag.update(data)
            etag = etag.hexdigest()
@@ -759,6 +1062,9 @@ class TestDiskFile(unittest.TestCase):
            elif invalid_type == 'Missing-Content-Length':
                del metadata['Content-Length']
                diskfile.write_metadata(writer._fd, metadata)
            elif invalid_type == 'Bad-X-Delete-At':
                metadata['X-Delete-At'] = 'bad integer'
                diskfile.write_metadata(writer._fd, metadata)

        if mark_deleted:
            df.delete(timestamp)
@@ -778,6 +1084,14 @@ class TestDiskFile(unittest.TestCase):
            meta_xattr = xattr.getxattr(data_files[0], "user.swift.metadata")
            xattr.setxattr(data_files[0], "user.swift.metadata",
                           meta_xattr[:-1])
        elif invalid_type == 'Missing-Name':
            md = diskfile.read_metadata(data_files[0])
            del md['name']
            diskfile.write_metadata(data_files[0], md)
        elif invalid_type == 'Bad-Name':
            md = diskfile.read_metadata(data_files[0])
            md['name'] = md['name'] + 'garbage'
            diskfile.write_metadata(data_files[0], md)

        self.conf['disk_chunk_size'] = csize
        self.conf['mount_check'] = mount_check
@@ -840,7 +1154,9 @@ class TestDiskFile(unittest.TestCase):

        def verify(*args, **kwargs):
            open_exc = invalid_type in ('Content-Length', 'Bad-Content-Length',
                                        'Corrupt-Xattrs', 'Truncated-Xattrs')
                                        'Corrupt-Xattrs', 'Truncated-Xattrs',
                                        'Missing-Name', 'Bad-X-Delete-At')
            open_collision = invalid_type == 'Bad-Name'
            reader = None
            quarantine_msgs = []
            try:
@@ -849,7 +1165,12 @@ class TestDiskFile(unittest.TestCase):
            except DiskFileQuarantined as err:
                if not open_exc:
                    self.fail(
                        "Unexpected DiskFileQuarantine raised: :%r" % err)
                        "Unexpected DiskFileQuarantine raised: %r" % err)
                return
            except DiskFileCollision as err:
                if not open_collision:
                    self.fail(
                        "Unexpected DiskFileCollision raised: %r" % err)
                return
            else:
                if open_exc:
@@ -874,7 +1195,9 @@ class TestDiskFile(unittest.TestCase):
        def verify_air(params, start=0, adjustment=0):
            """verify (a)pp (i)ter (r)ange"""
            open_exc = invalid_type in ('Content-Length', 'Bad-Content-Length',
                                        'Corrupt-Xattrs', 'Truncated-Xattrs')
                                        'Corrupt-Xattrs', 'Truncated-Xattrs',
|
||||
'Missing-Name', 'Bad-X-Delete-At')
|
||||
open_collision = invalid_type == 'Bad-Name'
|
||||
reader = None
|
||||
try:
|
||||
df = self._get_open_disk_file(**params)
|
||||
|
@ -882,7 +1205,12 @@ class TestDiskFile(unittest.TestCase):
|
|||
except DiskFileQuarantined as err:
|
||||
if not open_exc:
|
||||
self.fail(
|
||||
"Unexpected DiskFileQuarantine raised: :%r" % err)
|
||||
"Unexpected DiskFileQuarantine raised: %r" % err)
|
||||
return
|
||||
except DiskFileCollision as err:
|
||||
if not open_collision:
|
||||
self.fail(
|
||||
"Unexpected DiskFileCollision raised: %r" % err)
|
||||
return
|
||||
else:
|
||||
if open_exc:
|
||||
|
@ -914,6 +1242,15 @@ class TestDiskFile(unittest.TestCase):
|
|||
def test_quarantine_invalid_etag(self):
|
||||
self.run_quarantine_invalids('ETag')
|
||||
|
||||
def test_quarantine_invalid_missing_name(self):
|
||||
self.run_quarantine_invalids('Missing-Name')
|
||||
|
||||
def test_quarantine_invalid_bad_name(self):
|
||||
self.run_quarantine_invalids('Bad-Name')
|
||||
|
||||
def test_quarantine_invalid_bad_x_delete_at(self):
|
||||
self.run_quarantine_invalids('Bad-X-Delete-At')
|
||||
|
||||
def test_quarantine_invalid_content_length(self):
|
||||
self.run_quarantine_invalids('Content-Length')
|
||||
|
||||
|
@ -949,20 +1286,16 @@ class TestDiskFile(unittest.TestCase):
|
|||
self.fail("Expected DiskFileNotExist exception")
|
||||
|
||||
def test_quarantine_missing_content_length(self):
|
||||
try:
|
||||
self._get_open_disk_file(
|
||||
invalid_type='Missing-Content-Length')
|
||||
except DiskFileQuarantined:
|
||||
pass
|
||||
self.assertRaises(
|
||||
DiskFileQuarantined,
|
||||
self._get_open_disk_file,
|
||||
invalid_type='Missing-Content-Length')
|
||||
|
||||
def test_quarantine_bad_content_length(self):
|
||||
try:
|
||||
self._get_open_disk_file(
|
||||
invalid_type='Bad-Content-Length')
|
||||
except DiskFileQuarantined:
|
||||
pass
|
||||
else:
|
||||
self.fail("Expected DiskFileQuarantined exception")
|
||||
self.assertRaises(
|
||||
DiskFileQuarantined,
|
||||
self._get_open_disk_file,
|
||||
invalid_type='Bad-Content-Length')
|
||||
|
||||
def test_quarantine_fstat_oserror(self):
|
||||
invocations = [0]
|
||||
|
@ -977,34 +1310,59 @@ class TestDiskFile(unittest.TestCase):
|
|||
return orig_os_fstat(fd)
|
||||
|
||||
with mock.patch('os.fstat', bad_fstat):
|
||||
try:
|
||||
self._get_open_disk_file()
|
||||
except DiskFileQuarantined:
|
||||
pass
|
||||
else:
|
||||
self.fail("Expected DiskFileQuarantined exception")
|
||||
self.assertRaises(
|
||||
DiskFileQuarantined,
|
||||
self._get_open_disk_file)
|
||||
|
||||
def test_quarantine_hashdir_not_a_directory(self):
|
||||
df = self._create_test_file('1234567890', account="abc",
|
||||
container='123', object='xyz')
|
||||
container='123', obj='xyz')
|
||||
hashdir = df._datadir
|
||||
rmtree(hashdir)
|
||||
with open(hashdir, 'w'):
|
||||
pass
|
||||
|
||||
df = self.df_mgr.get_diskfile('sda', '0', 'abc', '123', 'xyz')
|
||||
try:
|
||||
df.open()
|
||||
except DiskFileQuarantined:
|
||||
pass
|
||||
else:
|
||||
self.fail("Expected DiskFileQuarantined, didn't get it")
|
||||
self.assertRaises(DiskFileQuarantined, df.open)
|
||||
|
||||
# make sure the right thing got quarantined; the suffix dir should not
|
||||
# have moved, as that could have many objects in it
|
||||
self.assertFalse(os.path.exists(hashdir))
|
||||
self.assertTrue(os.path.exists(os.path.dirname(hashdir)))
|
||||
|
||||
def test_create_prealloc(self):
|
||||
df = self.df_mgr.get_diskfile('sda', '0', 'abc', '123', 'xyz')
|
||||
with mock.patch("swift.obj.diskfile.fallocate") as fa:
|
||||
with df.create(size=200) as writer:
|
||||
used_fd = writer._fd
|
||||
fa.assert_called_with(used_fd, 200)
|
||||
|
||||
def test_create_prealloc_oserror(self):
|
||||
df = self.df_mgr.get_diskfile('sda', '0', 'abc', '123', 'xyz')
|
||||
with mock.patch("swift.obj.diskfile.fallocate",
|
||||
mock.MagicMock(side_effect=OSError(
|
||||
errno.EACCES, os.strerror(errno.EACCES)))):
|
||||
try:
|
||||
with df.create(size=200):
|
||||
pass
|
||||
except DiskFileNoSpace:
|
||||
pass
|
||||
else:
|
||||
self.fail("Expected exception DiskFileNoSpace")
|
||||
|
||||
def test_create_close_oserror(self):
|
||||
df = self.df_mgr.get_diskfile('sda', '0', 'abc', '123', 'xyz')
|
||||
with mock.patch("swift.obj.diskfile.os.close",
|
||||
mock.MagicMock(side_effect=OSError(
|
||||
errno.EACCES, os.strerror(errno.EACCES)))):
|
||||
try:
|
||||
with df.create(size=200):
|
||||
pass
|
||||
except Exception as err:
|
||||
self.fail("Unexpected exception raised: %r" % err)
|
||||
else:
|
||||
pass
|
||||
|
||||
def test_write_metadata(self):
|
||||
df = self._create_test_file('1234567890')
|
||||
timestamp = normalize_timestamp(time())
|
||||
|
@ -1057,7 +1415,7 @@ class TestDiskFile(unittest.TestCase):
|
|||
def test_from_audit_location(self):
|
||||
hashdir = self._create_test_file(
|
||||
'blah blah',
|
||||
account='three', container='blind', object='mice')._datadir
|
||||
account='three', container='blind', obj='mice')._datadir
|
||||
df = self.df_mgr.get_diskfile_from_audit_location(
|
||||
diskfile.AuditLocation(hashdir, 'sda1', '0'))
|
||||
df.open()
|
||||
|
@ -1066,7 +1424,7 @@ class TestDiskFile(unittest.TestCase):
|
|||
def test_from_audit_location_with_mismatched_hash(self):
|
||||
hashdir = self._create_test_file(
|
||||
'blah blah',
|
||||
account='this', container='is', object='right')._datadir
|
||||
account='this', container='is', obj='right')._datadir
|
||||
|
||||
datafile = os.path.join(hashdir, os.listdir(hashdir)[0])
|
||||
meta = diskfile.read_metadata(datafile)
|
||||
|
@ -1097,12 +1455,10 @@ class TestDiskFile(unittest.TestCase):
|
|||
return False
|
||||
|
||||
with mock.patch("swift.common.constraints.check_mount", _mock_cm):
|
||||
try:
|
||||
self._get_open_disk_file(mount_check=True)
|
||||
except DiskFileDeviceUnavailable:
|
||||
pass
|
||||
else:
|
||||
self.fail("Expected DiskFileDeviceUnavailable exception")
|
||||
self.assertRaises(
|
||||
DiskFileDeviceUnavailable,
|
||||
self._get_open_disk_file,
|
||||
mount_check=True)
|
||||
|
||||
def test_ondisk_search_loop_ts_meta_data(self):
|
||||
df = self.df_mgr.get_diskfile('sda1', '0', 'a', 'c', 'o')
|
||||
|
@ -1577,7 +1933,7 @@ class TestDiskFile(unittest.TestCase):
|
|||
with df.open():
|
||||
self.assertEqual(df.timestamp, '1383181759.12345')
|
||||
|
||||
def test_error_in_hashdir_cleanup_listdir(self):
|
||||
def test_error_in_hash_cleanup_listdir(self):
|
||||
|
||||
def mock_hcl(*args, **kwargs):
|
||||
raise OSError()
|
||||
|
|
|
@ -157,7 +157,7 @@ class TestObjectExpirer(TestCase):
        x.swift = InternalClient(containers)

        deleted_objects = {}
        for i in xrange(0, 3):
        for i in xrange(3):
            x.process = i
            x.run_once()
            self.assertNotEqual(deleted_objects, x.deleted_objects)

@ -21,6 +21,7 @@ import operator
import os
import mock
import unittest
import math
from shutil import rmtree
from StringIO import StringIO
from time import gmtime, strftime, time

@ -41,6 +42,7 @@ from swift.common.utils import hash_path, mkdirs, normalize_timestamp, \
from swift.common import constraints
from swift.common.swob import Request, HeaderKeyDict
from swift.common.storage_policy import POLICY_INDEX
from swift.common.exceptions import DiskFileDeviceUnavailable


def mock_time(*args, **kwargs):

@ -63,59 +65,58 @@ class TestObjectController(unittest.TestCase):
        self.object_controller.bytes_per_sync = 1
        self._orig_tpool_exc = tpool.execute
        tpool.execute = lambda f, *args, **kwargs: f(*args, **kwargs)
        self.df_mgr = diskfile.DiskFileManager(conf, FakeLogger())
        self.df_mgr = diskfile.DiskFileManager(conf,
                                               self.object_controller.logger)

    def tearDown(self):
        """Tear down for testing swift.object.server.ObjectController"""
        rmtree(os.path.dirname(self.testdir))
        tpool.execute = self._orig_tpool_exc

    def test_REQUEST_SPECIAL_CHARS(self):
        obj = 'special昆%20/%'
        path = '/sda1/p/a/c/%s' % obj
    def check_all_api_methods(self, obj_name='o', alt_res=None):
        path = '/sda1/p/a/c/%s' % obj_name
        body = 'SPECIAL_STRING'

        # create one
        timestamp = normalize_timestamp(time())
        req = Request.blank(path, environ={'REQUEST_METHOD': 'PUT'},
                            headers={'X-Timestamp': timestamp,
                                     'Content-Type': 'application/x-test'})
        req.body = body
        resp = req.get_response(self.object_controller)
        self.assertEquals(resp.status_int, 201)
        op_table = {
            "PUT": (body, alt_res or 201, ''),  # create one
            "GET": ('', alt_res or 200, body),  # check it
            "POST": ('', alt_res or 202, ''),   # update it
            "HEAD": ('', alt_res or 200, ''),   # head it
            "DELETE": ('', alt_res or 204, '')  # delete it
        }

        # check it
        timestamp = normalize_timestamp(time())
        req = Request.blank(path, environ={'REQUEST_METHOD': 'GET'},
                            headers={'X-Timestamp': timestamp,
                                     'Content-Type': 'application/x-test'})
        resp = req.get_response(self.object_controller)
        self.assertEquals(resp.status_int, 200)
        self.assertEquals(resp.body, body)
        for method in ["PUT", "GET", "POST", "HEAD", "DELETE"]:
            in_body, res, out_body = op_table[method]
            timestamp = normalize_timestamp(time())
            req = Request.blank(
                path, environ={'REQUEST_METHOD': method},
                headers={'X-Timestamp': timestamp,
                         'Content-Type': 'application/x-test'})
            req.body = in_body
            resp = req.get_response(self.object_controller)
            self.assertEqual(resp.status_int, res)
            if out_body and (200 <= res < 300):
                self.assertEqual(resp.body, out_body)

        # update it
        timestamp = normalize_timestamp(time())
        req = Request.blank(path, environ={'REQUEST_METHOD': 'POST'},
                            headers={'X-Timestamp': timestamp,
                                     'Content-Type': 'application/x-test'})
        resp = req.get_response(self.object_controller)
        self.assertEquals(resp.status_int, 202)
    def test_REQUEST_SPECIAL_CHARS(self):
        obj = 'special昆%20/%'
        self.check_all_api_methods(obj)

        # head it
        timestamp = normalize_timestamp(time())
        req = Request.blank(path, environ={'REQUEST_METHOD': 'HEAD'},
                            headers={'X-Timestamp': timestamp,
                                     'Content-Type': 'application/x-test'})
        resp = req.get_response(self.object_controller)
        self.assertEquals(resp.status_int, 200)
    def test_device_unavailable(self):
        def raise_disk_unavail(*args, **kwargs):
            raise DiskFileDeviceUnavailable()

        #delete it
        timestamp = normalize_timestamp(time())
        req = Request.blank(path, environ={'REQUEST_METHOD': 'DELETE'},
                            headers={'X-Timestamp': timestamp,
                                     'Content-Type': 'application/x-test'})
        resp = req.get_response(self.object_controller)
        self.assertEquals(resp.status_int, 204)
        self.object_controller.get_diskfile = raise_disk_unavail
        self.check_all_api_methods(alt_res=507)

    def test_allowed_headers(self):
        dah = ['content-disposition', 'content-encoding', 'x-delete-at',
               'x-object-manifest', 'x-static-large-object']
        conf = {'devices': self.testdir, 'mount_check': 'false',
                'allowed_headers': ','.join(['content-type'] + dah)}
        self.object_controller = object_server.ObjectController(
            conf, logger=debug_logger())
        self.assertEqual(self.object_controller.allowed_headers, set(dah))

    def test_POST_update_meta(self):
        # Test swift.obj.server.ObjectController.POST

@ -287,6 +288,25 @@ class TestObjectController(unittest.TestCase):
        resp = req.get_response(self.object_controller)
        self.assertEquals(resp.status_int, 400)

    def test_POST_no_timestamp(self):
        req = Request.blank('/sda1/p/a/c/o',
                            environ={'REQUEST_METHOD': 'POST'},
                            headers={'X-Object-Meta-1': 'One',
                                     'X-Object-Meta-2': 'Two',
                                     'Content-Type': 'text/plain'})
        resp = req.get_response(self.object_controller)
        self.assertEqual(resp.status_int, 400)

    def test_POST_bad_timestamp(self):
        req = Request.blank('/sda1/p/a/c/o',
                            environ={'REQUEST_METHOD': 'POST'},
                            headers={'X-Timestamp': 'bad',
                                     'X-Object-Meta-1': 'One',
                                     'X-Object-Meta-2': 'Two',
                                     'Content-Type': 'text/plain'})
        resp = req.get_response(self.object_controller)
        self.assertEqual(resp.status_int, 400)

    def test_POST_container_connection(self):

        def mock_http_connect(response, with_exc=False):

@ -361,7 +381,6 @@ class TestObjectController(unittest.TestCase):
            object_server.http_connect = old_http_connect

    def test_POST_quarantine_zbyte(self):
        # Test swift.obj.server.ObjectController.GET
        timestamp = normalize_timestamp(time())
        req = Request.blank('/sda1/p/a/c/o', environ={'REQUEST_METHOD': 'PUT'},
                            headers={'X-Timestamp': timestamp,

@ -369,19 +388,20 @@ class TestObjectController(unittest.TestCase):
        req.body = 'VERIFY'
        resp = req.get_response(self.object_controller)
        self.assertEquals(resp.status_int, 201)

        objfile = self.df_mgr.get_diskfile('sda1', 'p', 'a', 'c', 'o')
        objfile.open()

        file_name = os.path.basename(objfile._data_file)
        with open(objfile._data_file) as fp:
            metadata = diskfile.read_metadata(fp)
        os.unlink(objfile._data_file)
        with open(objfile._data_file, 'w') as fp:
            diskfile.write_metadata(fp, metadata)

        self.assertEquals(os.listdir(objfile._datadir)[0], file_name)

        req = Request.blank(
            '/sda1/p/a/c/o',
            environ={'REQUEST_METHOD': 'POST'},
            headers={'X-Timestamp': normalize_timestamp(time())})
        resp = req.get_response(self.object_controller)
        self.assertEquals(resp.status_int, 404)

@ -442,6 +462,16 @@ class TestObjectController(unittest.TestCase):
        resp = req.get_response(self.object_controller)
        self.assertEquals(resp.status_int, 201)

    def test_PUT_bad_transfer_encoding(self):
        req = Request.blank(
            '/sda1/p/a/c/o', environ={'REQUEST_METHOD': 'PUT'},
            headers={'X-Timestamp': normalize_timestamp(time()),
                     'Content-Type': 'application/octet-stream'})
        req.body = 'VERIFY'
        req.headers['Transfer-Encoding'] = 'bad'
        resp = req.get_response(self.object_controller)
        self.assertEqual(resp.status_int, 400)

    def test_PUT_common(self):
        timestamp = normalize_timestamp(time())
        req = Request.blank(

@ -500,6 +530,41 @@ class TestObjectController(unittest.TestCase):
                           'name': '/a/c/o',
                           'Content-Encoding': 'gzip'})

    def test_PUT_overwrite_w_delete_at(self):
        req = Request.blank(
            '/sda1/p/a/c/o', environ={'REQUEST_METHOD': 'PUT'},
            headers={'X-Timestamp': normalize_timestamp(time()),
                     'X-Delete-At': 9999999999,
                     'Content-Length': '6',
                     'Content-Type': 'application/octet-stream'})
        req.body = 'VERIFY'
        resp = req.get_response(self.object_controller)
        self.assertEqual(resp.status_int, 201)
        sleep(.00001)
        timestamp = normalize_timestamp(time())
        req = Request.blank(
            '/sda1/p/a/c/o', environ={'REQUEST_METHOD': 'PUT'},
            headers={'X-Timestamp': timestamp,
                     'Content-Type': 'text/plain',
                     'Content-Encoding': 'gzip'})
        req.body = 'VERIFY TWO'
        resp = req.get_response(self.object_controller)
        self.assertEqual(resp.status_int, 201)
        objfile = os.path.join(
            self.testdir, 'sda1',
            storage_directory(diskfile.DATADIR_REPL, 'p',
                              hash_path('a', 'c', 'o')),
            timestamp + '.data')
        self.assertTrue(os.path.isfile(objfile))
        self.assertEqual(open(objfile).read(), 'VERIFY TWO')
        self.assertEqual(diskfile.read_metadata(objfile),
                         {'X-Timestamp': timestamp,
                          'Content-Length': '10',
                          'ETag': 'b381a4c5dab1eaa1eb9711fa647cd039',
                          'Content-Type': 'text/plain',
                          'name': '/a/c/o',
                          'Content-Encoding': 'gzip'})

    def test_PUT_old_timestamp(self):
        ts = time()
        req = Request.blank(

@ -676,7 +741,8 @@ class TestObjectController(unittest.TestCase):
        self.assertEquals(resp.headers['content-type'], 'application/x-test')
        self.assertEquals(
            resp.headers['last-modified'],
            strftime('%a, %d %b %Y %H:%M:%S GMT', gmtime(float(timestamp))))
            strftime('%a, %d %b %Y %H:%M:%S GMT',
                     gmtime(math.ceil(float(timestamp)))))
        self.assertEquals(resp.headers['etag'],
                          '"0b4c12d7e0a73840c1c4f148fda3b037"')
        self.assertEquals(resp.headers['x-object-meta-1'], 'One')

@ -778,7 +844,8 @@ class TestObjectController(unittest.TestCase):
        self.assertEquals(resp.headers['content-type'], 'application/x-test')
        self.assertEquals(
            resp.headers['last-modified'],
            strftime('%a, %d %b %Y %H:%M:%S GMT', gmtime(float(timestamp))))
            strftime('%a, %d %b %Y %H:%M:%S GMT',
                     gmtime(math.ceil(float(timestamp)))))
        self.assertEquals(resp.headers['etag'],
                          '"0b4c12d7e0a73840c1c4f148fda3b037"')
        self.assertEquals(resp.headers['x-object-meta-1'], 'One')

@ -980,6 +1047,37 @@ class TestObjectController(unittest.TestCase):
        resp = req.get_response(self.object_controller)
        self.assertEquals(resp.status_int, 304)

        req = Request.blank('/sda1/p/a/c/o',
                            environ={'REQUEST_METHOD': 'HEAD'})
        resp = req.get_response(self.object_controller)
        since = resp.headers['Last-Modified']
        self.assertEquals(since, strftime('%a, %d %b %Y %H:%M:%S GMT',
                                          gmtime(math.ceil(float(timestamp)))))

        req = Request.blank('/sda1/p/a/c/o', environ={'REQUEST_METHOD': 'GET'},
                            headers={'If-Modified-Since': since})
        resp = req.get_response(self.object_controller)
        self.assertEquals(resp.status_int, 304)

        timestamp = normalize_timestamp(int(time()))
        req = Request.blank('/sda1/p/a/c/o2',
                            environ={'REQUEST_METHOD': 'PUT'},
                            headers={
                                'X-Timestamp': timestamp,
                                'Content-Type': 'application/octet-stream',
                                'Content-Length': '4'})
        req.body = 'test'
        resp = req.get_response(self.object_controller)
        self.assertEquals(resp.status_int, 201)

        since = strftime('%a, %d %b %Y %H:%M:%S GMT',
                         gmtime(float(timestamp)))
        req = Request.blank('/sda1/p/a/c/o2',
                            environ={'REQUEST_METHOD': 'GET'},
                            headers={'If-Modified-Since': since})
        resp = req.get_response(self.object_controller)
        self.assertEquals(resp.status_int, 304)

    def test_GET_if_unmodified_since(self):
        timestamp = normalize_timestamp(time())
        req = Request.blank('/sda1/p/a/c/o', environ={'REQUEST_METHOD': 'PUT'},

@ -1016,6 +1114,18 @@ class TestObjectController(unittest.TestCase):
        resp = req.get_response(self.object_controller)
        self.assertEquals(resp.status_int, 200)

        req = Request.blank('/sda1/p/a/c/o',
                            environ={'REQUEST_METHOD': 'HEAD'})
        resp = req.get_response(self.object_controller)
        since = resp.headers['Last-Modified']
        self.assertEquals(since, strftime('%a, %d %b %Y %H:%M:%S GMT',
                                          gmtime(math.ceil(float(timestamp)))))

        req = Request.blank('/sda1/p/a/c/o', environ={'REQUEST_METHOD': 'GET'},
                            headers={'If-Unmodified-Since': since})
        resp = req.get_response(self.object_controller)
        self.assertEquals(resp.status_int, 200)

    def test_GET_quarantine(self):
        # Test swift.obj.server.ObjectController.GET
        timestamp = normalize_timestamp(time())

@ -2009,6 +2119,8 @@ class TestObjectController(unittest.TestCase):
            utils.HASH_PATH_PREFIX = _prefix

    def test_async_update_does_not_save_on_2xx(self):
        _prefix = utils.HASH_PATH_PREFIX
        utils.HASH_PATH_PREFIX = ''

        def fake_http_connect(status):

@ -2038,6 +2150,36 @@ class TestObjectController(unittest.TestCase):
                        '06fbf0b514e5199dfc4e00f42eb5ea83-0000000001.00000')))
        finally:
            object_server.http_connect = orig_http_connect
            utils.HASH_PATH_PREFIX = _prefix

    def test_async_update_saves_on_timeout(self):
        _prefix = utils.HASH_PATH_PREFIX
        utils.HASH_PATH_PREFIX = ''

        def fake_http_connect():

            class FakeConn(object):

                def getresponse(self):
                    return sleep(1)

            return lambda *args: FakeConn()

        orig_http_connect = object_server.http_connect
        try:
            for status in (200, 299):
                object_server.http_connect = fake_http_connect()
                self.object_controller.node_timeout = 0.001
                self.object_controller.async_update(
                    'PUT', 'a', 'c', 'o', '127.0.0.1:1234', 1, 'sdc1',
                    {'x-timestamp': '1', 'x-out': str(status)}, 'sda1')
                self.assertTrue(
                    os.path.exists(os.path.join(
                        self.testdir, 'sda1', 'async_pending', 'a83',
                        '06fbf0b514e5199dfc4e00f42eb5ea83-0000000001.00000')))
        finally:
            object_server.http_connect = orig_http_connect
            utils.HASH_PATH_PREFIX = _prefix

    def test_container_update_no_async_update(self):
        given_args = []

@ -2089,6 +2231,28 @@ class TestObjectController(unittest.TestCase):
             'referer': 'PUT http://localhost/v1/a/c/o'},
            'sda1'])

    def test_container_update_bad_args(self):
        given_args = []

        def fake_async_update(*args):
            given_args.extend(args)

        self.object_controller.async_update = fake_async_update
        req = Request.blank(
            '/v1/a/c/o',
            environ={'REQUEST_METHOD': 'PUT'},
            headers={'X-Timestamp': 1,
                     'X-Trans-Id': '123',
                     'X-Container-Host': 'chost,badhost',
                     'X-Container-Partition': 'cpartition',
                     'X-Container-Device': 'cdevice'})
        self.object_controller.container_update(
            'PUT', 'a', 'c', 'o', req, {
                'x-size': '0', 'x-etag': 'd41d8cd98f00b204e9800998ecf8427e',
                'x-content-type': 'text/plain', 'x-timestamp': '1'},
            'sda1')
        self.assertEqual(given_args, [])

    def test_delete_at_update_on_put(self):
        # Test how delete_at_update works when issued a delete for old
        # expiration info after a new put with no new expiration info.

@ -2723,6 +2887,13 @@ class TestObjectController(unittest.TestCase):
        resp = req.get_response(self.object_controller)
        self.assertEquals(resp.status_int, 204)

        req = Request.blank(
            '/sda1/p/a/c/o', environ={'REQUEST_METHOD': 'DELETE'},
            headers={'X-Timestamp': normalize_timestamp(test_time - 92),
                     'X-If-Delete-At': 'abc'})
        resp = req.get_response(self.object_controller)
        self.assertEquals(resp.status_int, 400)

    def test_DELETE_calls_delete_at(self):
        given_args = []

@ -2844,6 +3015,29 @@ class TestObjectController(unittest.TestCase):
            tpool.execute = was_tpool_exe
            diskfile.get_hashes = was_get_hashes

    def test_REPLICATE_insufficient_storage(self):
        conf = {'devices': self.testdir, 'mount_check': 'true'}
        self.object_controller = object_server.ObjectController(
            conf, logger=debug_logger())
        self.object_controller.bytes_per_sync = 1

        def fake_check_mount(*args, **kwargs):
            return False

        with mock.patch("swift.obj.diskfile.check_mount", fake_check_mount):
            req = Request.blank('/sda1/p/suff',
                                environ={'REQUEST_METHOD': 'REPLICATE'},
                                headers={})
            resp = req.get_response(self.object_controller)
        self.assertEqual(resp.status_int, 507)

    def test_REPLICATION_can_be_called(self):
        req = Request.blank('/sda1/p/other/suff',
                            environ={'REQUEST_METHOD': 'REPLICATION'},
                            headers={})
        resp = req.get_response(self.object_controller)
        self.assertEqual(resp.status_int, 200)

    def test_PUT_with_full_drive(self):

        class IgnoredBody():

@ -2905,6 +3099,14 @@ class TestObjectController(unittest.TestCase):
        self.assertEqual(global_conf, {'replication_semaphore': ['test1']})
        mocked_Semaphore.assert_called_once_with(123)

    def test_handling_of_replication_semaphore_config(self):
        conf = {'devices': self.testdir, 'mount_check': 'false'}
        objsrv = object_server.ObjectController(conf)
        self.assertTrue(objsrv.replication_semaphore is None)
        conf['replication_semaphore'] = ['sema']
        objsrv = object_server.ObjectController(conf)
        self.assertEqual(objsrv.replication_semaphore, 'sema')

    def test_serv_reserv(self):
        # Test replication_server flag was set from configuration file.
        conf = {'devices': self.testdir, 'mount_check': 'false'}

@ -2936,7 +3138,7 @@ class TestObjectController(unittest.TestCase):
        inbuf = StringIO()
        errbuf = StringIO()
        outbuf = StringIO()
        self.object_controller = object_server.ObjectController(
        self.object_controller = object_server.app_factory(
            {'devices': self.testdir, 'mount_check': 'false',
             'replication_server': 'false'})

@ -2947,7 +3149,7 @@ class TestObjectController(unittest.TestCase):
        method = 'PUT'
        env = {'REQUEST_METHOD': method,
               'SCRIPT_NAME': '',
               'PATH_INFO': '/sda1/p/a/c',
               'PATH_INFO': '/sda1/p/a/c/o',
               'SERVER_NAME': '127.0.0.1',
               'SERVER_PORT': '8080',
               'SERVER_PROTOCOL': 'HTTP/1.0',

@ -2976,7 +3178,7 @@ class TestObjectController(unittest.TestCase):
        outbuf = StringIO()
        self.object_controller = object_server.ObjectController(
            {'devices': self.testdir, 'mount_check': 'false',
             'replication_server': 'false'})
             'replication_server': 'false'}, logger=FakeLogger())

        def start_response(*args):
            # Sends args to outbuf

@ -2986,7 +3188,7 @@ class TestObjectController(unittest.TestCase):

        env = {'REQUEST_METHOD': method,
               'SCRIPT_NAME': '',
               'PATH_INFO': '/sda1/p/a/c',
               'PATH_INFO': '/sda1/p/a/c/o',
               'SERVER_NAME': '127.0.0.1',
               'SERVER_PORT': '8080',
               'SERVER_PROTOCOL': 'HTTP/1.0',

@ -3005,8 +3207,150 @@ class TestObjectController(unittest.TestCase):
        with mock.patch.object(self.object_controller, method,
                               new=mock_method):
            mock_method.replication = True
            with mock.patch('time.gmtime',
                            mock.MagicMock(side_effect=[gmtime(10001.0)])):
                with mock.patch('time.time',
                                mock.MagicMock(side_effect=[10000.0,
                                                            10001.0])):
                    response = self.object_controller.__call__(
                        env, start_response)
                    self.assertEqual(response, answer)
                    self.assertEqual(
                        self.object_controller.logger.log_dict['info'],
                        [(('None - - [01/Jan/1970:02:46:41 +0000] "PUT'
                           ' /sda1/p/a/c/o" 405 - "-" "-" "-" 1.0000',),
                          {})])

    def test_not_utf8_and_not_logging_requests(self):
        inbuf = StringIO()
        errbuf = StringIO()
        outbuf = StringIO()
        self.object_controller = object_server.ObjectController(
            {'devices': self.testdir, 'mount_check': 'false',
             'replication_server': 'false', 'log_requests': 'false'},
            logger=FakeLogger())

        def start_response(*args):
            # Sends args to outbuf
            outbuf.writelines(args)

        method = 'PUT'

        env = {'REQUEST_METHOD': method,
               'SCRIPT_NAME': '',
               'PATH_INFO': '/sda1/p/a/c/\x00%20/%',
               'SERVER_NAME': '127.0.0.1',
               'SERVER_PORT': '8080',
               'SERVER_PROTOCOL': 'HTTP/1.0',
               'CONTENT_LENGTH': '0',
               'wsgi.version': (1, 0),
               'wsgi.url_scheme': 'http',
               'wsgi.input': inbuf,
               'wsgi.errors': errbuf,
               'wsgi.multithread': False,
               'wsgi.multiprocess': False,
               'wsgi.run_once': False}

        answer = ['Invalid UTF8 or contains NULL']
        mock_method = public(lambda x: mock.MagicMock())
        with mock.patch.object(self.object_controller, method,
                               new=mock_method):
            response = self.object_controller.__call__(env, start_response)
            self.assertEqual(response, answer)
            self.assertEqual(self.object_controller.logger.log_dict['info'],
                             [])

    def test__call__returns_500(self):
        inbuf = StringIO()
        errbuf = StringIO()
        outbuf = StringIO()
        self.object_controller = object_server.ObjectController(
            {'devices': self.testdir, 'mount_check': 'false',
             'replication_server': 'false', 'log_requests': 'false'},
            logger=FakeLogger())

        def start_response(*args):
            # Sends args to outbuf
            outbuf.writelines(args)

        method = 'PUT'

        env = {'REQUEST_METHOD': method,
               'SCRIPT_NAME': '',
               'PATH_INFO': '/sda1/p/a/c/o',
               'SERVER_NAME': '127.0.0.1',
               'SERVER_PORT': '8080',
               'SERVER_PROTOCOL': 'HTTP/1.0',
               'CONTENT_LENGTH': '0',
               'wsgi.version': (1, 0),
               'wsgi.url_scheme': 'http',
               'wsgi.input': inbuf,
               'wsgi.errors': errbuf,
               'wsgi.multithread': False,
               'wsgi.multiprocess': False,
               'wsgi.run_once': False}

        @public
        def mock_put_method(*args, **kwargs):
            raise Exception()

        with mock.patch.object(self.object_controller, method,
                               new=mock_put_method):
            response = self.object_controller.__call__(env, start_response)
            self.assertTrue(response[0].startswith(
                'Traceback (most recent call last):'))
            self.assertEqual(
                self.object_controller.logger.log_dict['exception'],
                [(('ERROR __call__ error with %(method)s %(path)s ',
                   {'method': 'PUT', 'path': '/sda1/p/a/c/o'}),
                  {},
                  '')])
            self.assertEqual(self.object_controller.logger.log_dict['INFO'],
                             [])

    def test_PUT_slow(self):
        inbuf = StringIO()
        errbuf = StringIO()
        outbuf = StringIO()
        self.object_controller = object_server.ObjectController(
            {'devices': self.testdir, 'mount_check': 'false',
             'replication_server': 'false', 'log_requests': 'false',
             'slow': '10'},
            logger=FakeLogger())

        def start_response(*args):
            # Sends args to outbuf
            outbuf.writelines(args)

        method = 'PUT'

        env = {'REQUEST_METHOD': method,
               'SCRIPT_NAME': '',
               'PATH_INFO': '/sda1/p/a/c/o',
               'SERVER_NAME': '127.0.0.1',
               'SERVER_PORT': '8080',
               'SERVER_PROTOCOL': 'HTTP/1.0',
               'CONTENT_LENGTH': '0',
               'wsgi.version': (1, 0),
               'wsgi.url_scheme': 'http',
               'wsgi.input': inbuf,
               'wsgi.errors': errbuf,
               'wsgi.multithread': False,
               'wsgi.multiprocess': False,
               'wsgi.run_once': False}

        mock_method = public(lambda x: mock.MagicMock())
        with mock.patch.object(self.object_controller, method,
                               new=mock_method):
            with mock.patch('time.time',
                            mock.MagicMock(side_effect=[10000.0,
                                                        10001.0])):
                with mock.patch('swift.obj.server.sleep',
                                mock.MagicMock()) as ms:
                    self.object_controller.__call__(env, start_response)
                    ms.assert_called_with(9)
                    self.assertEqual(
                        self.object_controller.logger.log_dict['info'], [])

    def test_dynamic_datadir(self):
        timestamp = normalize_timestamp(time())

@ -49,7 +49,8 @@ class TestReceiver(unittest.TestCase):
        conf = {
            'devices': self.testdir,
            'mount_check': 'false',
            'replication_one_per_device': 'false'}
            'replication_one_per_device': 'false',
            'log_requests': 'false'}
        self.controller = server.ObjectController(conf)
        self.controller.bytes_per_sync = 1

@ -98,7 +99,7 @@ class TestReceiver(unittest.TestCase):
            mocked_replication_semaphore.acquire.return_value = False
            req = swob.Request.blank(
                '/device/partition', environ={'REQUEST_METHOD': 'REPLICATION'})
            resp = self.controller.REPLICATION(req)
            resp = req.get_response(self.controller)
            self.assertEqual(
                self.body_lines(resp.body),
                [":ERROR: 503 '<html><h1>Service Unavailable</h1><p>The "

@ -118,7 +119,7 @@ class TestReceiver(unittest.TestCase):
                body=':MISSING_CHECK: START\r\n'
                     ':MISSING_CHECK: END\r\n'
                     ':UPDATES: START\r\n:UPDATES: END\r\n')
            resp = self.controller.REPLICATION(req)
            resp = req.get_response(self.controller)
            self.assertEqual(
                self.body_lines(resp.body),
                [':MISSING_CHECK: START', ':MISSING_CHECK: END',

@ -140,7 +141,7 @@ class TestReceiver(unittest.TestCase):
                body=':MISSING_CHECK: START\r\n'
                     ':MISSING_CHECK: END\r\n'
                     ':UPDATES: START\r\n:UPDATES: END\r\n')
            resp = self.controller.REPLICATION(req)
            resp = req.get_response(self.controller)
            self.assertEqual(
                self.body_lines(resp.body),
                [":ERROR: 0 '0.01 seconds: /somewhere/sda1'"])

@ -154,7 +155,7 @@ class TestReceiver(unittest.TestCase):
                mocked_replication_semaphore:
            req = swob.Request.blank(
                '/device', environ={'REQUEST_METHOD': 'REPLICATION'})
            resp = self.controller.REPLICATION(req)
            resp = req.get_response(self.controller)
            self.assertEqual(
                self.body_lines(resp.body),
                [":ERROR: 0 'Invalid path: /device'"])

@ -167,7 +168,7 @@ class TestReceiver(unittest.TestCase):
                mocked_replication_semaphore:
            req = swob.Request.blank(
                '/device/', environ={'REQUEST_METHOD': 'REPLICATION'})
            resp = self.controller.REPLICATION(req)
            resp = req.get_response(self.controller)
            self.assertEqual(
                self.body_lines(resp.body),
                [":ERROR: 0 'Invalid path: /device/'"])

@ -180,7 +181,7 @@ class TestReceiver(unittest.TestCase):
                mocked_replication_semaphore:
            req = swob.Request.blank(
                '/device/partition', environ={'REQUEST_METHOD': 'REPLICATION'})
            resp = self.controller.REPLICATION(req)
            resp = req.get_response(self.controller)
            self.assertEqual(
                self.body_lines(resp.body),
                [':ERROR: 0 "Looking for :MISSING_CHECK: START got \'\'"'])

@ -194,7 +195,7 @@ class TestReceiver(unittest.TestCase):
            req = swob.Request.blank(
                '/device/partition/junk',
                environ={'REQUEST_METHOD': 'REPLICATION'})
            resp = self.controller.REPLICATION(req)
            resp = req.get_response(self.controller)
            self.assertEqual(
                self.body_lines(resp.body),
                [":ERROR: 0 'Invalid path: /device/partition/junk'"])

@ -215,7 +216,7 @@ class TestReceiver(unittest.TestCase):
                mocked_check_mount):
            req = swob.Request.blank(
                '/device/partition', environ={'REQUEST_METHOD': 'REPLICATION'})
            resp = self.controller.REPLICATION(req)
            resp = req.get_response(self.controller)
            self.assertEqual(
                self.body_lines(resp.body),
                [':ERROR: 0 "Looking for :MISSING_CHECK: START got \'\'"'])

@ -234,7 +235,7 @@ class TestReceiver(unittest.TestCase):
                mocked_check_mount):
            req = swob.Request.blank(
                '/device/partition', environ={'REQUEST_METHOD': 'REPLICATION'})
            resp = self.controller.REPLICATION(req)
            resp = req.get_response(self.controller)
            self.assertEqual(
                self.body_lines(resp.body),
                [":ERROR: 507 '<html><h1>Insufficient Storage</h1><p>There "

@ -248,7 +249,7 @@ class TestReceiver(unittest.TestCase):
            mocked_check_mount.return_value = True
            req = swob.Request.blank(
                '/device/partition', environ={'REQUEST_METHOD': 'REPLICATION'})
            resp = self.controller.REPLICATION(req)
            resp = req.get_response(self.controller)
            self.assertEqual(
                self.body_lines(resp.body),
                [':ERROR: 0 "Looking for :MISSING_CHECK: START got \'\'"'])

@ -279,7 +280,7 @@ class TestReceiver(unittest.TestCase):
        req.remote_addr = '1.2.3.4'
        mock_wsgi_input = _Wrapper(req.body)
        req.environ['wsgi.input'] = mock_wsgi_input
        resp = self.controller.REPLICATION(req)
        resp = req.get_response(self.controller)
        self.assertEqual(
            self.body_lines(resp.body),
            [':MISSING_CHECK: START', ':MISSING_CHECK: END',

@ -316,7 +317,7 @@ class TestReceiver(unittest.TestCase):
            side_effect=Exception("can't stringify this"))
        mock_wsgi_input = _Wrapper(req.body)
        req.environ['wsgi.input'] = mock_wsgi_input
        resp = self.controller.REPLICATION(req)
        resp = req.get_response(self.controller)
        self.assertEqual(
            self.body_lines(resp.body),
            [':MISSING_CHECK: START', ':MISSING_CHECK: END'])

@ -359,12 +360,12 @@ class TestReceiver(unittest.TestCase):
            req.remote_addr = '2.3.4.5'
            mock_wsgi_input = _Wrapper(req.body)
            req.environ['wsgi.input'] = mock_wsgi_input
            resp = self.controller.REPLICATION(req)
            resp = req.get_response(self.controller)
            self.assertEqual(
                self.body_lines(resp.body),
                [":ERROR: 408 '0.01 seconds: missing_check line'"])
            self.assertEqual(resp.status_int, 200)
            self.assertFalse(mock_shutdown_safe.called)
            self.assertTrue(mock_shutdown_safe.called)
            self.controller.logger.error.assert_called_once_with(
                '2.3.4.5/sda1/1 TIMEOUT in replication.Receiver: '
                '0.01 seconds: missing_check line')

@ -401,12 +402,12 @@ class TestReceiver(unittest.TestCase):
            req.remote_addr = '3.4.5.6'
            mock_wsgi_input = _Wrapper(req.body)
            req.environ['wsgi.input'] = mock_wsgi_input
            resp = self.controller.REPLICATION(req)
            resp = req.get_response(self.controller)
            self.assertEqual(
                self.body_lines(resp.body),
                [":ERROR: 0 'test exception'"])
            self.assertEqual(resp.status_int, 200)
            self.assertFalse(mock_shutdown_safe.called)
            self.assertTrue(mock_shutdown_safe.called)
            self.controller.logger.exception.assert_called_once_with(
                '3.4.5.6/sda1/1 EXCEPTION in replication.Receiver')

@ -419,7 +420,7 @@ class TestReceiver(unittest.TestCase):
            body=':MISSING_CHECK: START\r\n'
                 ':MISSING_CHECK: END\r\n'
                 ':UPDATES: START\r\n:UPDATES: END\r\n')
        resp = self.controller.REPLICATION(req)
        resp = req.get_response(self.controller)
        self.assertEqual(
            self.body_lines(resp.body),
            [':MISSING_CHECK: START', ':MISSING_CHECK: END',

@ -439,7 +440,7 @@ class TestReceiver(unittest.TestCase):
                 self.hash2 + ' ' + self.ts2 + '\r\n'
                 ':MISSING_CHECK: END\r\n'
                 ':UPDATES: START\r\n:UPDATES: END\r\n')
        resp = self.controller.REPLICATION(req)
        resp = req.get_response(self.controller)
        self.assertEqual(
            self.body_lines(resp.body),
            [':MISSING_CHECK: START',

@ -471,7 +472,7 @@ class TestReceiver(unittest.TestCase):
                 self.hash2 + ' ' + self.ts2 + '\r\n'
                 ':MISSING_CHECK: END\r\n'
                 ':UPDATES: START\r\n:UPDATES: END\r\n')
        resp = self.controller.REPLICATION(req)
        resp = req.get_response(self.controller)
        self.assertEqual(
            self.body_lines(resp.body),
            [':MISSING_CHECK: START',

@ -504,7 +505,7 @@ class TestReceiver(unittest.TestCase):
                 self.hash2 + ' ' + self.ts2 + '\r\n'
                 ':MISSING_CHECK: END\r\n'
                 ':UPDATES: START\r\n:UPDATES: END\r\n')
        resp = self.controller.REPLICATION(req)
        resp = req.get_response(self.controller)
        self.assertEqual(
            self.body_lines(resp.body),
            [':MISSING_CHECK: START',

@ -537,7 +538,7 @@ class TestReceiver(unittest.TestCase):
                 self.hash2 + ' ' + self.ts2 + '\r\n'
                 ':MISSING_CHECK: END\r\n'
                 ':UPDATES: START\r\n:UPDATES: END\r\n')
        resp = self.controller.REPLICATION(req)
        resp = req.get_response(self.controller)
        self.assertEqual(
            self.body_lines(resp.body),
            [':MISSING_CHECK: START',

@ -583,7 +584,7 @@ class TestReceiver(unittest.TestCase):
            req.remote_addr = '2.3.4.5'
            mock_wsgi_input = _Wrapper(req.body)
            req.environ['wsgi.input'] = mock_wsgi_input
            resp = self.controller.REPLICATION(req)
            resp = req.get_response(self.controller)
            self.assertEqual(
                self.body_lines(resp.body),
                [':MISSING_CHECK: START', ':MISSING_CHECK: END',

@ -630,7 +631,7 @@ class TestReceiver(unittest.TestCase):
            req.remote_addr = '3.4.5.6'
            mock_wsgi_input = _Wrapper(req.body)
            req.environ['wsgi.input'] = mock_wsgi_input
            resp = self.controller.REPLICATION(req)
            resp = req.get_response(self.controller)
            self.assertEqual(
                self.body_lines(resp.body),
                [':MISSING_CHECK: START', ':MISSING_CHECK: END',

@ -672,7 +673,7 @@ class TestReceiver(unittest.TestCase):
                 ':UPDATES: END\r\n')
        mock_wsgi_input = _Wrapper(req.body)
        req.environ['wsgi.input'] = mock_wsgi_input
        resp = self.controller.REPLICATION(req)
        resp = req.get_response(self.controller)
        self.assertEqual(
            self.body_lines(resp.body),
            [':MISSING_CHECK: START', ':MISSING_CHECK: END',

@ -689,7 +690,7 @@ class TestReceiver(unittest.TestCase):
            body=':MISSING_CHECK: START\r\n:MISSING_CHECK: END\r\n'
                 ':UPDATES: START\r\n'
                 'bad_subrequest_line\r\n')
        resp = self.controller.REPLICATION(req)
        resp = req.get_response(self.controller)
        self.assertEqual(
            self.body_lines(resp.body),
            [':MISSING_CHECK: START', ':MISSING_CHECK: END',

@ -711,7 +712,7 @@ class TestReceiver(unittest.TestCase):
                 'X-Timestamp: 1364456113.76334\r\n'
                 '\r\n'
                 'bad_subrequest_line2')
        resp = self.controller.REPLICATION(req)
        resp = req.get_response(self.controller)
        self.assertEqual(
            self.body_lines(resp.body),
            [':MISSING_CHECK: START', ':MISSING_CHECK: END',

@ -728,7 +729,7 @@ class TestReceiver(unittest.TestCase):
            body=':MISSING_CHECK: START\r\n:MISSING_CHECK: END\r\n'
                 ':UPDATES: START\r\n'
                 'DELETE /a/c/o\r\n')
        resp = self.controller.REPLICATION(req)
        resp = req.get_response(self.controller)
        self.assertEqual(
            self.body_lines(resp.body),
            [':MISSING_CHECK: START', ':MISSING_CHECK: END',

@ -746,7 +747,7 @@ class TestReceiver(unittest.TestCase):
                 ':UPDATES: START\r\n'
                 'DELETE /a/c/o\r\n'
                 'Bad-Header Test\r\n')
        resp = self.controller.REPLICATION(req)
        resp = req.get_response(self.controller)
        self.assertEqual(
            self.body_lines(resp.body),
            [':MISSING_CHECK: START', ':MISSING_CHECK: END',

@ -764,7 +765,7 @@ class TestReceiver(unittest.TestCase):
                 'DELETE /a/c/o\r\n'
                 'Good-Header: Test\r\n'
                 'Bad-Header Test\r\n')
        resp = self.controller.REPLICATION(req)
        resp = req.get_response(self.controller)
        self.assertEqual(
            self.body_lines(resp.body),
            [':MISSING_CHECK: START', ':MISSING_CHECK: END',

@ -782,7 +783,7 @@ class TestReceiver(unittest.TestCase):
                 ':UPDATES: START\r\n'
                 'PUT /a/c/o\r\n'
                 'Content-Length: a\r\n\r\n')
        resp = self.controller.REPLICATION(req)
        resp = req.get_response(self.controller)
        self.assertEqual(
            self.body_lines(resp.body),
            [':MISSING_CHECK: START', ':MISSING_CHECK: END',

@ -800,7 +801,7 @@ class TestReceiver(unittest.TestCase):
                 ':UPDATES: START\r\n'
                 'DELETE /a/c/o\r\n'
                 'Content-Length: 1\r\n\r\n')
        resp = self.controller.REPLICATION(req)
        resp = req.get_response(self.controller)
        self.assertEqual(
            self.body_lines(resp.body),
            [':MISSING_CHECK: START', ':MISSING_CHECK: END',

@ -817,7 +818,7 @@ class TestReceiver(unittest.TestCase):
            body=':MISSING_CHECK: START\r\n:MISSING_CHECK: END\r\n'
                 ':UPDATES: START\r\n'
                 'PUT /a/c/o\r\n\r\n')
        resp = self.controller.REPLICATION(req)
        resp = req.get_response(self.controller)
        self.assertEqual(
            self.body_lines(resp.body),
            [':MISSING_CHECK: START', ':MISSING_CHECK: END',

@ -835,7 +836,7 @@ class TestReceiver(unittest.TestCase):
                 ':UPDATES: START\r\n'
                 'PUT /a/c/o\r\n'
                 'Content-Length: 1\r\n\r\n')
        resp = self.controller.REPLICATION(req)
        resp = req.get_response(self.controller)
        self.assertEqual(
            self.body_lines(resp.body),
            [':MISSING_CHECK: START', ':MISSING_CHECK: END',

@ -866,7 +867,7 @@ class TestReceiver(unittest.TestCase):
                 'DELETE /a/c/o\r\n\r\n'
                 'DELETE /a/c/o\r\n\r\n'
                 'DELETE /a/c/o\r\n\r\n')
        resp = self.controller.REPLICATION(req)
        resp = req.get_response(self.controller)
        self.assertEqual(
            self.body_lines(resp.body),
            [':MISSING_CHECK: START', ':MISSING_CHECK: END',

@ -892,7 +893,7 @@ class TestReceiver(unittest.TestCase):
                 'DELETE /a/c/o\r\n\r\n'
                 'DELETE /a/c/o\r\n\r\n'
                 ':UPDATES: END\r\n')
        resp = self.controller.REPLICATION(req)
        resp = req.get_response(self.controller)
        self.assertEqual(
            self.body_lines(resp.body),
            [':MISSING_CHECK: START', ':MISSING_CHECK: END',

@ -920,7 +921,7 @@ class TestReceiver(unittest.TestCase):
                 'DELETE /a/c/o\r\n\r\n'
                 'DELETE /a/c/o\r\n\r\n'
                 ':UPDATES: END\r\n')
        resp = self.controller.REPLICATION(req)
        resp = req.get_response(self.controller)
        self.assertEqual(
            self.body_lines(resp.body),
            [':MISSING_CHECK: START', ':MISSING_CHECK: END',

@ -947,7 +948,7 @@ class TestReceiver(unittest.TestCase):
                 'DELETE /a/c/o\r\n\r\n'
                 'DELETE /a/c/o\r\n\r\n'
                 ':UPDATES: END\r\n')
        resp = self.controller.REPLICATION(req)
        resp = req.get_response(self.controller)
        self.assertEqual(
            self.body_lines(resp.body),
            [':MISSING_CHECK: START', ':MISSING_CHECK: END',

@ -981,7 +982,7 @@ class TestReceiver(unittest.TestCase):
                 'Specialty-Header: value\r\n'
                 '\r\n'
                 '1')
        resp = self.controller.REPLICATION(req)
        resp = req.get_response(self.controller)
        self.assertEqual(
            self.body_lines(resp.body),
            [':MISSING_CHECK: START', ':MISSING_CHECK: END',

@ -1023,7 +1024,7 @@ class TestReceiver(unittest.TestCase):
                 'DELETE /a/c/o\r\n'
                 'X-Timestamp: 1364456113.76334\r\n'
                 '\r\n')
        resp = self.controller.REPLICATION(req)
        resp = req.get_response(self.controller)
        self.assertEqual(
            self.body_lines(resp.body),
            [':MISSING_CHECK: START', ':MISSING_CHECK: END',

@ -1057,7 +1058,7 @@ class TestReceiver(unittest.TestCase):
                 'BONK /a/c/o\r\n'
                 'X-Timestamp: 1364456113.76334\r\n'
                 '\r\n')
        resp = self.controller.REPLICATION(req)
        resp = req.get_response(self.controller)
        self.assertEqual(
            self.body_lines(resp.body),
            [':MISSING_CHECK: START', ':MISSING_CHECK: END',

@ -1117,7 +1118,7 @@ class TestReceiver(unittest.TestCase):
                 'DELETE /a/c/o6\r\n'
                 'X-Timestamp: 1364456113.00006\r\n'
                 '\r\n')
        resp = self.controller.REPLICATION(req)
        resp = req.get_response(self.controller)
        self.assertEqual(
            self.body_lines(resp.body),
            [':MISSING_CHECK: START', ':MISSING_CHECK: END',

@ -1235,7 +1236,7 @@ class TestReceiver(unittest.TestCase):
                 '\r\n'
                 '1')
        req.environ['wsgi.input'] = _IgnoreReadlineHint(req.body)
        resp = self.controller.REPLICATION(req)
        resp = req.get_response(self.controller)
        self.assertEqual(
            self.body_lines(resp.body),
            [':MISSING_CHECK: START', ':MISSING_CHECK: END',

@ -15,6 +15,7 @@

import hashlib
import os
import shutil
import StringIO
import tempfile
import time

@ -92,11 +93,14 @@ class FakeConnection(object):
class TestSender(unittest.TestCase):

    def setUp(self):
        self.testdir = os.path.join(
            tempfile.mkdtemp(), 'tmp_test_ssync_sender')
        self.tmpdir = tempfile.mkdtemp()
        self.testdir = os.path.join(self.tmpdir, 'tmp_test_ssync_sender')
        self.replicator = FakeReplicator(self.testdir)
        self.sender = ssync_sender.Sender(self.replicator, None, None, None)

    def tearDown(self):
        shutil.rmtree(self.tmpdir, ignore_errors=1)

    def _make_open_diskfile(self, device='dev', partition='9',
                            account='a', container='c', obj='o', body='test',
                            extra_metadata=None):

@ -22,6 +22,7 @@ from swift.proxy.controllers.base import headers_to_account_info
from swift.common.constraints import MAX_ACCOUNT_NAME_LENGTH as MAX_ANAME_LEN
from test.unit import fake_http_connect, FakeRing, FakeMemcache
from swift.common.storage_policy import StoragePolicy, StoragePolicyCollection
from swift.common.request_helpers import get_sys_meta_prefix


class TestAccountController(unittest.TestCase):

@ -98,6 +99,62 @@ class TestAccountController(unittest.TestCase):
        resp = controller.POST(req)
        self.assertEquals(400, resp.status_int)

    def _make_callback_func(self, context):
        def callback(ipaddr, port, device, partition, method, path,
                     headers=None, query_string=None, ssl=False):
            context['method'] = method
            context['path'] = path
            context['headers'] = headers or {}
        return callback

    def test_sys_meta_headers_PUT(self):
        # check that headers in sys meta namespace make it through
        # the proxy controller
        sys_meta_key = '%stest' % get_sys_meta_prefix('account')
        sys_meta_key = sys_meta_key.title()
        user_meta_key = 'X-Account-Meta-Test'
        # allow PUTs to account...
        self.app.allow_account_management = True
        controller = proxy_server.AccountController(self.app, 'a')
        context = {}
        callback = self._make_callback_func(context)
        hdrs_in = {sys_meta_key: 'foo',
                   user_meta_key: 'bar',
                   'x-timestamp': '1.0'}
        req = Request.blank('/v1/a', headers=hdrs_in)
        with mock.patch('swift.proxy.controllers.base.http_connect',
                        fake_http_connect(200, 200, give_connect=callback)):
            controller.PUT(req)
        self.assertEqual(context['method'], 'PUT')
        self.assertTrue(sys_meta_key in context['headers'])
        self.assertEqual(context['headers'][sys_meta_key], 'foo')
        self.assertTrue(user_meta_key in context['headers'])
        self.assertEqual(context['headers'][user_meta_key], 'bar')
        self.assertNotEqual(context['headers']['x-timestamp'], '1.0')

    def test_sys_meta_headers_POST(self):
        # check that headers in sys meta namespace make it through
        # the proxy controller
        sys_meta_key = '%stest' % get_sys_meta_prefix('account')
        sys_meta_key = sys_meta_key.title()
        user_meta_key = 'X-Account-Meta-Test'
        controller = proxy_server.AccountController(self.app, 'a')
        context = {}
        callback = self._make_callback_func(context)
        hdrs_in = {sys_meta_key: 'foo',
                   user_meta_key: 'bar',
                   'x-timestamp': '1.0'}
        req = Request.blank('/v1/a', headers=hdrs_in)
        with mock.patch('swift.proxy.controllers.base.http_connect',
                        fake_http_connect(200, 200, give_connect=callback)):
            controller.POST(req)
        self.assertEqual(context['method'], 'POST')
        self.assertTrue(sys_meta_key in context['headers'])
        self.assertEqual(context['headers'][sys_meta_key], 'foo')
        self.assertTrue(user_meta_key in context['headers'])
        self.assertEqual(context['headers'][user_meta_key], 'bar')
        self.assertNotEqual(context['headers']['x-timestamp'], '1.0')


if __name__ == '__main__':
    unittest.main()

test/unit/proxy/controllers/test_base.py:

@@ -20,11 +20,12 @@ from swift.proxy.controllers.base import headers_to_container_info, \
    get_container_memcache_key, get_account_info, get_account_memcache_key, \
    get_object_env_key, _get_cache_key, get_info, get_object_info, \
    Controller, GetOrHeadHandler
from swift.common.swob import Request, HTTPException
from swift.common.swob import Request, HTTPException, HeaderKeyDict
from swift.common.utils import split_path
from swift.common.storage_policy import StoragePolicy, StoragePolicyCollection
from test.unit import fake_http_connect, FakeRing, FakeMemcache
from swift.proxy import server as proxy_server
from swift.common.request_helpers import get_sys_meta_prefix


FakeResponse_status_int = 201

@@ -254,7 +255,9 @@ class TestFuncs(unittest.TestCase):
    def test_get_container_info_cache(self):
        cached = {'status': 404,
                  'bytes': 3333,
                  'object_count': 10}
                  'object_count': 10,
                  # simplejson sometimes hands back strings, sometimes unicodes
                  'versions': u"\u1F4A9"}
        req = Request.blank("/v1/account/cont",
                            environ={'swift.cache': FakeCache(cached)})
        with patch('swift.proxy.controllers.base.'

@@ -263,6 +266,7 @@ class TestFuncs(unittest.TestCase):
        self.assertEquals(resp['bytes'], 3333)
        self.assertEquals(resp['object_count'], 10)
        self.assertEquals(resp['status'], 404)
        self.assertEquals(resp['versions'], "\xe1\xbd\x8a\x39")

    def test_get_container_info_env(self):
        cache_key = get_container_memcache_key("account", "cont")

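The odd-looking expected value in the versions assertion is just the UTF-8 encoding of the cached unicode; a quick worked check in plain Python 2:

    # In a unicode literal, \u consumes exactly four hex digits, so
    # u"\u1F4A9" is U+1F4A followed by the literal character '9'.
    encoded = u"\u1F4A9".encode('utf-8')
    assert encoded == '\xe1\xbd\x8a\x39'  # '\x39' is just '9'
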
@@ -365,6 +369,15 @@ class TestFuncs(unittest.TestCase):
        self.assertEquals(resp['meta']['whatevs'], 14)
        self.assertEquals(resp['meta']['somethingelse'], 0)

    def test_headers_to_container_info_sys_meta(self):
        prefix = get_sys_meta_prefix('container')
        headers = {'%sWhatevs' % prefix: 14,
                   '%ssomethingelse' % prefix: 0}
        resp = headers_to_container_info(headers.items(), 200)
        self.assertEquals(len(resp['sysmeta']), 2)
        self.assertEquals(resp['sysmeta']['whatevs'], 14)
        self.assertEquals(resp['sysmeta']['somethingelse'], 0)

    def test_headers_to_container_info_values(self):
        headers = {
            'x-container-read': 'readvalue',

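These assertions pin down the shape of the info dict: system metadata lands under a dedicated 'sysmeta' key with the namespace prefix stripped and the remaining key lowercased. A rough sketch of that extraction, written here only to illustrate the behavior the test expects, not the actual helper in swift/proxy/controllers/base.py:

    def extract_sysmeta(header_items, server_type):
        # Collect 'x-<type>-sysmeta-<key>' headers into {key: value},
        # stripping the prefix and lowercasing the remainder of the name.
        prefix = 'x-%s-sysmeta-' % server_type
        sysmeta = {}
        for name, value in header_items:
            if name.lower().startswith(prefix):
                sysmeta[name.lower()[len(prefix):]] = value
        return sysmeta

    # extract_sysmeta([('X-Container-Sysmeta-Whatevs', 14)], 'container')
    # -> {'whatevs': 14}
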
@@ -396,6 +409,15 @@ class TestFuncs(unittest.TestCase):
        self.assertEquals(resp['meta']['whatevs'], 14)
        self.assertEquals(resp['meta']['somethingelse'], 0)

    def test_headers_to_account_info_sys_meta(self):
        prefix = get_sys_meta_prefix('account')
        headers = {'%sWhatevs' % prefix: 14,
                   '%ssomethingelse' % prefix: 0}
        resp = headers_to_account_info(headers.items(), 200)
        self.assertEquals(len(resp['sysmeta']), 2)
        self.assertEquals(resp['sysmeta']['whatevs'], 14)
        self.assertEquals(resp['sysmeta']['somethingelse'], 0)

    def test_headers_to_account_info_values(self):
        headers = {
            'x-account-object-count': '10',

@@ -473,3 +495,43 @@ class TestFuncs(unittest.TestCase):
                                      {'Range': 'bytes=-100'})
        handler.fast_forward(20)
        self.assertEquals(handler.backend_headers['Range'], 'bytes=-80')

    def test_transfer_headers_with_sysmeta(self):
        base = Controller(self.app)
        good_hdrs = {'x-base-sysmeta-foo': 'ok',
                     'X-Base-sysmeta-Bar': 'also ok'}
        bad_hdrs = {'x-base-sysmeta-': 'too short'}
        hdrs = dict(good_hdrs)
        hdrs.update(bad_hdrs)
        dst_hdrs = HeaderKeyDict()
        base.transfer_headers(hdrs, dst_hdrs)
        self.assertEqual(HeaderKeyDict(good_hdrs), dst_hdrs)

    def test_generate_request_headers(self):
        base = Controller(self.app)
        src_headers = {'x-remove-base-meta-owner': 'x',
                       'x-base-meta-size': '151M',
                       'new-owner': 'Kun'}
        req = Request.blank('/v1/a/c/o', headers=src_headers)
        dst_headers = base.generate_request_headers(req, transfer=True)
        expected_headers = {'x-base-meta-owner': '',
                            'x-base-meta-size': '151M'}
        for k, v in expected_headers.iteritems():
            self.assertTrue(k in dst_headers)
            self.assertEqual(v, dst_headers[k])
        self.assertFalse('new-owner' in dst_headers)

    def test_generate_request_headers_with_sysmeta(self):
        base = Controller(self.app)
        good_hdrs = {'x-base-sysmeta-foo': 'ok',
                     'X-Base-sysmeta-Bar': 'also ok'}
        bad_hdrs = {'x-base-sysmeta-': 'too short'}
        hdrs = dict(good_hdrs)
        hdrs.update(bad_hdrs)
        req = Request.blank('/v1/a/c/o', headers=hdrs)
        dst_headers = base.generate_request_headers(req, transfer=True)
        for k, v in good_hdrs.iteritems():
            self.assertTrue(k.lower() in dst_headers)
            self.assertEqual(v, dst_headers[k.lower()])
        for k, v in bad_hdrs.iteritems():
            self.assertFalse(k.lower() in dst_headers)

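Two behaviors worth noting from these tests: an 'x-remove-<type>-meta-<key>' header is translated into the corresponding metadata header with an empty value (an empty value is how metadata gets deleted on the backend), and a sysmeta header is only transferred if something actually follows the prefix ('x-base-sysmeta-' alone is rejected). A hedged sketch of the remove- translation, with an illustrative helper name rather than the proxy's real one:

    def translate_remove_headers(src, server_type='base'):
        # 'x-remove-base-meta-owner: x' -> 'x-base-meta-owner: ""', which
        # tells the backend to overwrite (i.e. delete) that metadata item.
        remove_prefix = 'x-remove-%s-meta-' % server_type
        out = {}
        for name, value in src.items():
            if name.lower().startswith(remove_prefix):
                out['x-%s-meta-%s'
                    % (server_type, name.lower()[len(remove_prefix):])] = ''
            else:
                out[name] = value
        return out
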
test/unit/proxy/controllers/test_container.py:

@@ -21,6 +21,7 @@ from swift.proxy import server as proxy_server
from swift.proxy.controllers.base import headers_to_container_info
from test.unit import fake_http_connect, FakeRing, FakeMemcache
from swift.common.storage_policy import StoragePolicy, StoragePolicyCollection
from swift.common.request_helpers import get_sys_meta_prefix


class TestContainerController(unittest.TestCase):

@@ -65,6 +66,61 @@ class TestContainerController(unittest.TestCase):
        for key in owner_headers:
            self.assertTrue(key in resp.headers)

    def _make_callback_func(self, context):
        def callback(ipaddr, port, device, partition, method, path,
                     headers=None, query_string=None, ssl=False):
            context['method'] = method
            context['path'] = path
            context['headers'] = headers or {}
        return callback

    def test_sys_meta_headers_PUT(self):
        # check that headers in sys meta namespace make it through
        # the container controller
        sys_meta_key = '%stest' % get_sys_meta_prefix('container')
        sys_meta_key = sys_meta_key.title()
        user_meta_key = 'X-Container-Meta-Test'
        controller = proxy_server.ContainerController(self.app, 'a', 'c')

        context = {}
        callback = self._make_callback_func(context)
        hdrs_in = {sys_meta_key: 'foo',
                   user_meta_key: 'bar',
                   'x-timestamp': '1.0'}
        req = Request.blank('/v1/a/c', headers=hdrs_in)
        with mock.patch('swift.proxy.controllers.base.http_connect',
                        fake_http_connect(200, 200, give_connect=callback)):
            controller.PUT(req)
        self.assertEqual(context['method'], 'PUT')
        self.assertTrue(sys_meta_key in context['headers'])
        self.assertEqual(context['headers'][sys_meta_key], 'foo')
        self.assertTrue(user_meta_key in context['headers'])
        self.assertEqual(context['headers'][user_meta_key], 'bar')
        self.assertNotEqual(context['headers']['x-timestamp'], '1.0')

    def test_sys_meta_headers_POST(self):
        # check that headers in sys meta namespace make it through
        # the container controller
        sys_meta_key = '%stest' % get_sys_meta_prefix('container')
        sys_meta_key = sys_meta_key.title()
        user_meta_key = 'X-Container-Meta-Test'
        controller = proxy_server.ContainerController(self.app, 'a', 'c')
        context = {}
        callback = self._make_callback_func(context)
        hdrs_in = {sys_meta_key: 'foo',
                   user_meta_key: 'bar',
                   'x-timestamp': '1.0'}
        req = Request.blank('/v1/a/c', headers=hdrs_in)
        with mock.patch('swift.proxy.controllers.base.http_connect',
                        fake_http_connect(200, 200, give_connect=callback)):
            controller.POST(req)
        self.assertEqual(context['method'], 'POST')
        self.assertTrue(sys_meta_key in context['headers'])
        self.assertEqual(context['headers'][sys_meta_key], 'foo')
        self.assertTrue(user_meta_key in context['headers'])
        self.assertEqual(context['headers'][user_meta_key], 'bar')
        self.assertNotEqual(context['headers']['x-timestamp'], '1.0')


if __name__ == '__main__':
    unittest.main()

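Both controller suites capture outbound backend requests the same way: fake_http_connect (the helper from test.unit) accepts a give_connect callback and invokes it with the parameters of every connection the proxy opens, so no real server is needed. The pattern in isolation, using a list instead of a dict so every backend request is kept:

    captured = []

    def give_connect(ipaddr, port, device, partition, method, path,
                     headers=None, query_string=None, ssl=False):
        # Record each backend request the proxy makes during the test.
        captured.append((method, path, dict(headers or {})))

    # with mock.patch('swift.proxy.controllers.base.http_connect',
    #                 fake_http_connect(200, 200, give_connect=give_connect)):
    #     controller.PUT(req)
    # captured then holds one entry per backend node contacted.
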
test/unit/proxy/test_server.py:

@@ -45,7 +45,8 @@ from swift.common.middleware import proxy_logging
from swift.common.exceptions import ChunkReadTimeout, SegmentError
from swift.common.constraints import MAX_META_NAME_LENGTH, \
    MAX_META_VALUE_LENGTH, MAX_META_COUNT, MAX_META_OVERALL_SIZE, \
    MAX_FILE_SIZE, MAX_ACCOUNT_NAME_LENGTH, MAX_CONTAINER_NAME_LENGTH
    MAX_FILE_SIZE, MAX_ACCOUNT_NAME_LENGTH, MAX_CONTAINER_NAME_LENGTH, \
    ACCOUNT_LISTING_LIMIT, CONTAINER_LISTING_LIMIT, MAX_OBJECT_NAME_LENGTH
from swift.common import utils
from swift.common.utils import mkdirs, normalize_timestamp, NullLogger
from swift.common.wsgi import monkey_patch_mimetools

@@ -399,7 +400,8 @@ class TestController(unittest.TestCase):
                          'container_count': '12345',
                          'total_object_count': None,
                          'bytes': None,
                          'meta': {}}
                          'meta': {},
                          'sysmeta': {}}
        self.assertEquals(container_info,
                          self.memcache.get(cache_key))

@@ -425,7 +427,8 @@ class TestController(unittest.TestCase):
                        'container_count': None,  # internally keep None
                        'total_object_count': None,
                        'bytes': None,
                        'meta': {}}
                        'meta': {},
                        'sysmeta': {}}
        self.assertEquals(account_info,
                          self.memcache.get(cache_key))

@@ -1250,6 +1253,56 @@ class TestObjectController(unittest.TestCase):
        finally:
            swift.proxy.controllers.obj.MAX_FILE_SIZE = MAX_FILE_SIZE

    def test_PUT_last_modified(self):
        prolis = _test_sockets[0]
        sock = connect_tcp(('localhost', prolis.getsockname()[1]))
        fd = sock.makefile()
        fd.write('PUT /v1/a/c/o.last_modified HTTP/1.1\r\n'
                 'Host: localhost\r\nConnection: close\r\n'
                 'X-Storage-Token: t\r\nContent-Length: 0\r\n\r\n')
        fd.flush()
        headers = readuntil2crlfs(fd)
        exp = 'HTTP/1.1 201'
        lm_hdr = 'Last-Modified: '
        self.assertEqual(headers[:len(exp)], exp)

        last_modified_put = [line for line in headers.split('\r\n')
                             if lm_hdr in line][0][len(lm_hdr):]
        sock = connect_tcp(('localhost', prolis.getsockname()[1]))
        fd = sock.makefile()
        fd.write('HEAD /v1/a/c/o.last_modified HTTP/1.1\r\n'
                 'Host: localhost\r\nConnection: close\r\n'
                 'X-Storage-Token: t\r\n\r\n')
        fd.flush()
        headers = readuntil2crlfs(fd)
        exp = 'HTTP/1.1 200'
        self.assertEqual(headers[:len(exp)], exp)
        last_modified_head = [line for line in headers.split('\r\n')
                              if lm_hdr in line][0][len(lm_hdr):]
        self.assertEqual(last_modified_put, last_modified_head)

        sock = connect_tcp(('localhost', prolis.getsockname()[1]))
        fd = sock.makefile()
        fd.write('GET /v1/a/c/o.last_modified HTTP/1.1\r\n'
                 'Host: localhost\r\nConnection: close\r\n'
                 'If-Modified-Since: %s\r\n'
                 'X-Storage-Token: t\r\n\r\n' % last_modified_put)
        fd.flush()
        headers = readuntil2crlfs(fd)
        exp = 'HTTP/1.1 304'
        self.assertEqual(headers[:len(exp)], exp)

        sock = connect_tcp(('localhost', prolis.getsockname()[1]))
        fd = sock.makefile()
        fd.write('GET /v1/a/c/o.last_modified HTTP/1.1\r\n'
                 'Host: localhost\r\nConnection: close\r\n'
                 'If-Unmodified-Since: %s\r\n'
                 'X-Storage-Token: t\r\n\r\n' % last_modified_put)
        fd.flush()
        headers = readuntil2crlfs(fd)
        exp = 'HTTP/1.1 200'
        self.assertEqual(headers[:len(exp)], exp)

    def test_expirer_DELETE_on_versioned_object(self):
        test_errors = []

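Although the test drives the proxy over a raw socket, what it verifies is standard HTTP validation: a GET whose If-Modified-Since equals the object's Last-Modified must come back 304, while If-Unmodified-Since with the same date must still succeed with 200. The same check with the Python 2 standard library; host, port, path, and token are placeholders:

    import httplib

    def conditional_get(host, port, path, token, last_modified):
        conn = httplib.HTTPConnection(host, port)
        conn.request('GET', path, headers={
            'X-Storage-Token': token,
            'If-Modified-Since': last_modified})
        # 304 expected when the object is unchanged since last_modified.
        return conn.getresponse().status
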
@@ -6255,6 +6308,19 @@ class TestContainerController(unittest.TestCase):
                      'X-Account-Device': 'sdc'}
        ])

    def test_node_read_timeout_retry_to_container(self):
        with save_globals():
            req = Request.blank('/v1/a/c', environ={'REQUEST_METHOD': 'GET'})
            self.app.node_timeout = 0.1
            set_http_connect(200, 200, 200, body='abcdef', slow=[2])
            resp = req.get_response(self.app)
            got_exc = False
            try:
                resp.body
            except ChunkReadTimeout:
                got_exc = True
            self.assert_(got_exc)


class TestAccountController(unittest.TestCase):

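Note the shape of the failure here: the proxy has already committed to a 200 status line before the backend stalls (slow=[2] appears to mark one of the mocked backend connections as slow), so the node timeout cannot change the status; it surfaces as a ChunkReadTimeout while the body is being read. Code that streams Swift responses therefore has to tolerate a mid-stream abort; a small sketch under that assumption:

    from swift.common.exceptions import ChunkReadTimeout

    def drain_body(body_iter):
        # Consume a streamed response body, tolerating a backend stall.
        chunks = []
        try:
            for chunk in body_iter:
                chunks.append(chunk)
        except ChunkReadTimeout:
            pass  # truncated body; the status line was already sent
        return ''.join(chunks)
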
@@ -7200,7 +7266,7 @@ class TestProxyObjectPerformance(unittest.TestCase):
        self.obj_len = obj_len

    def test_GET_debug_large_file(self):
        for i in range(0, 10):
        for i in range(10):
            start = time.time()

            prolis = _test_sockets[0]

@@ -7229,6 +7295,32 @@ class TestProxyObjectPerformance(unittest.TestCase):
            print "Run %02d took %07.03f" % (i, end - start)


class TestSwiftInfo(unittest.TestCase):
    def setUp(self):
        utils._swift_info = {}
        utils._swift_admin_info = {}

    def test_registered_defaults(self):
        proxy_server.Application({}, FakeMemcache(),
                                 account_ring=FakeRing(),
                                 container_ring=FakeRing())

        si = utils.get_swift_info()['swift']
        self.assertTrue('version' in si)
        self.assertEqual(si['max_file_size'], MAX_FILE_SIZE)
        self.assertEqual(si['max_meta_name_length'], MAX_META_NAME_LENGTH)
        self.assertEqual(si['max_meta_value_length'], MAX_META_VALUE_LENGTH)
        self.assertEqual(si['max_meta_count'], MAX_META_COUNT)
        self.assertEqual(si['account_listing_limit'], ACCOUNT_LISTING_LIMIT)
        self.assertEqual(si['container_listing_limit'],
                         CONTAINER_LISTING_LIMIT)
        self.assertEqual(si['max_account_name_length'],
                         MAX_ACCOUNT_NAME_LENGTH)
        self.assertEqual(si['max_container_name_length'],
                         MAX_CONTAINER_NAME_LENGTH)
        self.assertEqual(si['max_object_name_length'], MAX_OBJECT_NAME_LENGTH)


if __name__ == '__main__':
    setup()
    try:

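The point of this test is that merely instantiating the proxy Application registers the cluster's constraints in a process-wide registry, which is what backs the proxy's /info endpoint. Reading it back is a plain dict lookup via swift.common.utils.get_swift_info, exactly as the test does:

    from swift.common import utils

    # Valid once a proxy Application has been constructed in this process:
    si = utils.get_swift_info()['swift']
    print si.get('version')       # the running Swift version string
    print si['max_file_size']     # e.g. 5368709122 under default constraints
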
tox.ini:

@@ -1,8 +1,11 @@
[tox]
envlist = py26,py27,pep8
minversion = 1.6
skipsdist = True

[testenv]
install_command = pip install -U {opts} {packages}
usedevelop = True
install_command = pip install --allow-external netifaces --allow-insecure netifaces -U {opts} {packages}
setenv = VIRTUAL_ENV={envdir}
         NOSE_WITH_OPENSTACK=1
         NOSE_OPENSTACK_COLOR=1

@@ -15,7 +18,7 @@ setenv = VIRTUAL_ENV={envdir}
deps =
    -r{toxinidir}/requirements.txt
    -r{toxinidir}/test-requirements.txt
commands = nosetests test/unit {posargs}
commands = nosetests {posargs:test/unit}

[testenv:cover]
setenv = VIRTUAL_ENV={envdir}

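One practical effect of the commands change: {posargs:test/unit} makes test/unit the default target while letting positional arguments replace it entirely, so a bare `tox -e py27` still runs the whole unit suite and `tox -e py27 -- test/unit/proxy/test_server.py` narrows the run to a single module; with the old form, `nosetests test/unit {posargs}`, extra arguments were appended after the hard-coded path rather than substituted for it.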