Merge master to feature/ec

Change-Id: If1983b0450a85f79f5bcd5ca6acd859d68de73e4
paul luse 2014-09-18 15:01:59 -07:00
commit 18901494e9
76 changed files with 4761 additions and 1663 deletions

View File

@ -55,7 +55,7 @@ Christian Schwede <info@cschwede.de> <christian.schwede@enovance.com>
Constantine Peresypkin <constantine.peresypk@rackspace.com> <constantine@litestack.com>
Madhuri Kumari <madhuri.rai07@gmail.com> madhuri <madhuri@madhuri-VirtualBox.(none)>
Morgan Fainberg <morgan.fainberg@gmail.com> <m@metacloud.com>
Zhang Hare <zhuadl@cn.ibm.com> <zhuadl@cn.ibm.com>
Hua Zhang <zhuadl@cn.ibm.com> <zhuadl@cn.ibm.com>
Yummy Bian <yummy.bian@gmail.com> <yummy.bian@gmail.com>
Alistair Coles <alistair.coles@hp.com> <alistair.coles@hp.com>
Tong Li <litong01@us.ibm.com> <litong01@us.ibm.com>

View File

@ -61,7 +61,6 @@ Doug Hellmann (doug.hellmann@dreamhost.com)
Dan Hersam (dan.hersam@hp.com)
Derek Higgins (derekh@redhat.com)
Florian Hines (syn@ronin.io)
Zhang Hare (zhuadl@cn.ibm.com)
Alex Holden (alex@alexjonasholden.com)
Edward Hope-Morley (opentastic@gmail.com)
Kun Huang (gareth@unitedstack.com)
@ -179,6 +178,7 @@ Lin Yang (lin.a.yang@intel.com)
Yee (mail.zhang.yee@gmail.com)
Guang Yee (guang.yee@hp.com)
Pete Zaitcev (zaitcev@kotori.zaitcev.us)
Hua Zhang (zhuadl@cn.ibm.com)
Jian Zhang (jian.zhang@intel.com)
Ning Zhang (ning@zmanda.com)
Yuan Zhou (yuan.zhou@intel.com)

View File

@ -20,5 +20,4 @@ from swift.common.wsgi import run_wsgi
if __name__ == '__main__':
conf_file, options = parse_options()
sys.exit(run_wsgi(conf_file,
'account-server', default_port=6002, **options))
sys.exit(run_wsgi(conf_file, 'account-server', **options))

View File

@ -20,5 +20,4 @@ from swift.common.wsgi import run_wsgi
if __name__ == '__main__':
conf_file, options = parse_options()
sys.exit(run_wsgi(conf_file,
'container-server', default_port=6001, **options))
sys.exit(run_wsgi(conf_file, 'container-server', **options))

View File

@ -22,6 +22,6 @@ from swift.obj import server
if __name__ == '__main__':
conf_file, options = parse_options()
sys.exit(run_wsgi(conf_file, 'object-server', default_port=6000,
sys.exit(run_wsgi(conf_file, 'object-server',
global_conf_callback=server.global_conf_callback,
**options))

View File

@ -20,4 +20,4 @@ from swift.common.wsgi import run_wsgi
if __name__ == '__main__':
conf_file, options = parse_options()
sys.exit(run_wsgi(conf_file, 'proxy-server', default_port=8080, **options))
sys.exit(run_wsgi(conf_file, 'proxy-server', **options))

View File

@ -2,6 +2,7 @@
devices = /srv/1/node
mount_check = false
disable_fallocate = true
bind_ip = 127.0.0.1
bind_port = 6012
workers = 1
user = <your-user-name>

View File

@ -2,6 +2,7 @@
devices = /srv/2/node
mount_check = false
disable_fallocate = true
bind_ip = 127.0.0.1
bind_port = 6022
workers = 1
user = <your-user-name>

View File

@ -2,6 +2,7 @@
devices = /srv/3/node
mount_check = false
disable_fallocate = true
bind_ip = 127.0.0.1
bind_port = 6032
workers = 1
user = <your-user-name>

View File

@ -2,6 +2,7 @@
devices = /srv/4/node
mount_check = false
disable_fallocate = true
bind_ip = 127.0.0.1
bind_port = 6042
workers = 1
user = <your-user-name>

View File

@ -2,6 +2,7 @@
devices = /srv/1/node
mount_check = false
disable_fallocate = true
bind_ip = 127.0.0.1
bind_port = 6011
workers = 1
user = <your-user-name>

View File

@ -2,6 +2,7 @@
devices = /srv/2/node
mount_check = false
disable_fallocate = true
bind_ip = 127.0.0.1
bind_port = 6021
workers = 1
user = <your-user-name>

View File

@ -2,6 +2,7 @@
devices = /srv/3/node
mount_check = false
disable_fallocate = true
bind_ip = 127.0.0.1
bind_port = 6031
workers = 1
user = <your-user-name>

View File

@ -2,6 +2,7 @@
devices = /srv/4/node
mount_check = false
disable_fallocate = true
bind_ip = 127.0.0.1
bind_port = 6041
workers = 1
user = <your-user-name>

View File

@ -2,6 +2,7 @@
devices = /srv/1/node
mount_check = false
disable_fallocate = true
bind_ip = 127.0.0.1
bind_port = 6010
workers = 1
user = <your-user-name>

View File

@ -2,6 +2,7 @@
devices = /srv/2/node
mount_check = false
disable_fallocate = true
bind_ip = 127.0.0.1
bind_port = 6020
workers = 1
user = <your-user-name>

View File

@ -2,6 +2,7 @@
devices = /srv/3/node
mount_check = false
disable_fallocate = true
bind_ip = 127.0.0.1
bind_port = 6030
workers = 1
user = <your-user-name>

View File

@ -2,6 +2,7 @@
devices = /srv/4/node
mount_check = false
disable_fallocate = true
bind_ip = 127.0.0.1
bind_port = 6040
workers = 1
user = <your-user-name>

View File

@ -1,4 +1,5 @@
[DEFAULT]
bind_ip = 127.0.0.1
bind_port = 8080
workers = 1
user = <your-user-name>

View File

@ -78,6 +78,12 @@ Custom Logger Hooks
* `swift-sentry <https://github.com/pandemicsyn/swift-sentry>`_ - Sentry exception reporting for Swift
Storage Backends (DiskFile API implementations)
-----------------------------------------------
* `SwiftOnFile <https://github.com/swiftonfile/swiftonfile>`_ - Enables objects created using Swift API to be accessed as files on a POSIX filesystem and vice versa.
* `swift-ceph-backend <https://github.com/stackforge/swift-ceph-backend>`_ - Ceph RADOS object server implementation for Swift.
* `kinetic-swift <https://github.com/swiftstack/kinetic-swift>`_ - Seagate Kinetic Drive as backend for Swift
Other
-----

View File

@ -27,7 +27,7 @@ sys.path.extend([os.path.abspath('../swift'), os.path.abspath('..'),
# Add any Sphinx extension module names here, as strings. They can be
# extensions coming with Sphinx (named 'sphinx.ext.*') or your custom ones.
extensions = ['sphinx.ext.autodoc', 'sphinx.ext.intersphinx',
extensions = ['sphinx.ext.autodoc',
'sphinx.ext.todo', 'sphinx.ext.coverage', 'sphinx.ext.pngmath',
'sphinx.ext.ifconfig', 'oslosphinx']
todo_include_todos = True
@ -214,8 +214,3 @@ latex_documents = [
# If false, no module index is generated.
#latex_use_modindex = True
# Example configuration for intersphinx: refer to the Python standard library.
intersphinx_mapping = {'python': ('http://docs.python.org/', None),
'nova': ('http://nova.openstack.org', None),
'glance': ('http://glance.openstack.org', None)}

View File

@ -99,6 +99,7 @@ CS :ref:`container-sync`
TA :ref:`common_tempauth`
DLO :ref:`dynamic-large-objects`
LE :ref:`list_endpoints`
KS :ref:`keystoneauth`
======================= =============================

View File

@ -123,6 +123,8 @@ Healthcheck
:members:
:show-inheritance:
.. _keystoneauth:
KeystoneAuth
============

View File

@ -1,6 +1,6 @@
[DEFAULT]
# bind_ip = 0.0.0.0
# bind_port = 6002
bind_port = 6002
# bind_timeout = 30
# backlog = 4096
# user = swift

View File

@ -1,6 +1,6 @@
[DEFAULT]
# bind_ip = 0.0.0.0
# bind_port = 6001
bind_port = 6001
# bind_timeout = 30
# backlog = 4096
# user = swift

View File

@ -1,6 +1,6 @@
[DEFAULT]
# bind_ip = 0.0.0.0
# bind_port = 6000
bind_port = 6000
# bind_timeout = 30
# backlog = 4096
# user = swift

View File

@ -1,6 +1,6 @@
[DEFAULT]
# bind_ip = 0.0.0.0
# bind_port = 80
bind_port = 8080
# bind_timeout = 30
# backlog = 4096
# swift_dir = /etc/swift
@ -285,6 +285,16 @@ user_test_tester3 = testing3
# operator_roles = admin, swiftoperator
# The reseller admin role has the ability to create and delete accounts
# reseller_admin_role = ResellerAdmin
# For backwards compatibility, keystoneauth will match names in cross-tenant
# access control lists (ACLs) when both the requesting user and the tenant
# are in the default domain, i.e. the domain to which existing tenants are
# migrated. The default_domain_id value configured here should be the same as
# the value used during migration of tenants to keystone domains.
# default_domain_id = default
# For a new installation, or an installation in which keystone projects may
# move between domains, you should disable backwards compatible name matching
# in ACLs by setting allow_names_in_acls to false:
# allow_names_in_acls = true
[filter:healthcheck]
use = egg:swift#healthcheck

File diff suppressed because it is too large.

View File

@ -1,3 +1,7 @@
# The order of packages is significant, because pip processes them in the order
# of appearance. Changing the order has an impact on the overall integration
# process, which may cause wedges in the gate later.
dnspython>=1.9.4
eventlet>=0.9.15
greenlet>=0.3.1

View File

@ -106,18 +106,18 @@ tag_date = 0
tag_svn_revision = 0
[compile_catalog]
directory = locale
directory = swift/locale
domain = swift
[update_catalog]
domain = swift
output_dir = locale
input_file = locale/swift.pot
input_file = swift/locale/swift.pot
[extract_messages]
keywords = _ l_ lazy_gettext
mapping_file = babel.cfg
output_file = locale/swift.pot
output_file = swift/locale/swift.pot
[nosetests]
exe = 1

View File

@ -16,17 +16,14 @@
Pluggable Back-end for Account Server
"""
import os
from uuid import uuid4
import time
import cPickle as pickle
import errno
import sqlite3
from swift.common.utils import Timestamp, lock_parent_directory
from swift.common.db import DatabaseBroker, DatabaseConnectionError, \
PENDING_CAP, PICKLE_PROTOCOL, utf8encode
from swift.common.utils import Timestamp
from swift.common.db import DatabaseBroker, utf8encode
DATADIR = 'accounts'
@ -235,6 +232,12 @@ class AccountBroker(DatabaseBroker):
'SELECT container_count from account_stat').fetchone()
return (row[0] == 0)
def make_tuple_for_pickle(self, record):
return (record['name'], record['put_timestamp'],
record['delete_timestamp'], record['object_count'],
record['bytes_used'], record['deleted'],
record['storage_policy_index'])
def put_container(self, name, put_timestamp, delete_timestamp,
object_count, bytes_used, storage_policy_index):
"""
@ -258,31 +261,7 @@ class AccountBroker(DatabaseBroker):
'bytes_used': bytes_used,
'deleted': deleted,
'storage_policy_index': storage_policy_index}
if self.db_file == ':memory:':
self.merge_items([record])
return
if not os.path.exists(self.db_file):
raise DatabaseConnectionError(self.db_file, "DB doesn't exist")
pending_size = 0
try:
pending_size = os.path.getsize(self.pending_file)
except OSError as err:
if err.errno != errno.ENOENT:
raise
if pending_size > PENDING_CAP:
self._commit_puts([record])
else:
with lock_parent_directory(self.pending_file,
self.pending_timeout):
with open(self.pending_file, 'a+b') as fp:
# Colons aren't used in base64 encoding; so they are our
# delimiter
fp.write(':')
fp.write(pickle.dumps(
(name, put_timestamp, delete_timestamp, object_count,
bytes_used, deleted, storage_policy_index),
protocol=PICKLE_PROTOCOL).encode('base64'))
fp.flush()
self.put_record(record)
def _is_deleted_info(self, status, container_count, delete_timestamp,
put_timestamp):

View File

@ -175,22 +175,23 @@ class SwiftRecon(object):
block = f.read(4096)
return md5sum.hexdigest()
def get_devices(self, zone_filter, swift_dir, ring_name):
def get_devices(self, region_filter, zone_filter, swift_dir, ring_name):
"""
Get a list of hosts in the ring
:param region_filter: Only list regions matching given filter
:param zone_filter: Only list zones matching given filter
:param swift_dir: Directory of swift config, usually /etc/swift
:param ring_name: Name of the ring, such as 'object'
:returns: a set of tuples containing the ip and port of hosts
"""
ring_data = Ring(swift_dir, ring_name=ring_name)
devs = [d for d in ring_data.devs if d]
if region_filter is not None:
devs = [d for d in devs if d['region'] == region_filter]
if zone_filter is not None:
ips = set((n['ip'], n['port']) for n in ring_data.devs
if n and n['zone'] == zone_filter)
else:
ips = set((n['ip'], n['port']) for n in ring_data.devs if n)
return ips
devs = [d for d in devs if d['zone'] == zone_filter]
return set((d['ip'], d['port']) for d in devs)
def get_ringmd5(self, hosts, swift_dir):
"""
@ -875,6 +876,8 @@ class SwiftRecon(object):
args.add_option('--all', action="store_true",
help="Perform all checks. Equal to -arudlq --md5 "
"--sockstat")
args.add_option('--region', type="int",
help="Only query servers in specified region")
args.add_option('--zone', '-z', type="int",
help="Only query servers in specified zone")
args.add_option('--timeout', '-t', type="int", metavar="SECONDS",
@ -903,10 +906,8 @@ class SwiftRecon(object):
self.suppress_errors = options.suppress
self.timeout = options.timeout
if options.zone is not None:
hosts = self.get_devices(options.zone, swift_dir, self.server_type)
else:
hosts = self.get_devices(None, swift_dir, self.server_type)
hosts = self.get_devices(options.region, options.zone,
swift_dir, self.server_type)
print("--> Starting reconnaissance on %s hosts" % len(hosts))
print("=" * 79)

View File

@ -15,12 +15,14 @@
import os
import urllib
import time
from urllib import unquote
from ConfigParser import ConfigParser, NoSectionError, NoOptionError
from swift.common import utils, exceptions
from swift.common.swob import HTTPBadRequest, HTTPLengthRequired, \
HTTPRequestEntityTooLarge, HTTPPreconditionFailed
HTTPRequestEntityTooLarge, HTTPPreconditionFailed, HTTPNotImplemented, \
HTTPException
MAX_FILE_SIZE = 5368709122
MAX_META_NAME_LENGTH = 128
@ -154,24 +156,45 @@ def check_object_creation(req, object_name):
a chunked request
:returns HTTPBadRequest: missing or bad content-type header, or
bad metadata
:returns HTTPNotImplemented: unsupported transfer-encoding header value
"""
if req.content_length and req.content_length > MAX_FILE_SIZE:
try:
ml = req.message_length()
except ValueError as e:
return HTTPBadRequest(request=req, content_type='text/plain',
body=str(e))
except AttributeError as e:
return HTTPNotImplemented(request=req, content_type='text/plain',
body=str(e))
if ml is not None and ml > MAX_FILE_SIZE:
return HTTPRequestEntityTooLarge(body='Your request is too large.',
request=req,
content_type='text/plain')
if req.content_length is None and \
req.headers.get('transfer-encoding') != 'chunked':
return HTTPLengthRequired(request=req)
return HTTPLengthRequired(body='Missing Content-Length header.',
request=req,
content_type='text/plain')
if 'X-Copy-From' in req.headers and req.content_length:
return HTTPBadRequest(body='Copy requests require a zero byte body',
request=req, content_type='text/plain')
if len(object_name) > MAX_OBJECT_NAME_LENGTH:
return HTTPBadRequest(body='Object name length of %d longer than %d' %
(len(object_name), MAX_OBJECT_NAME_LENGTH),
request=req, content_type='text/plain')
if 'Content-Type' not in req.headers:
return HTTPBadRequest(request=req, content_type='text/plain',
body='No content type')
try:
req = check_delete_headers(req)
except HTTPException as e:
return HTTPBadRequest(request=req, body=e.body,
content_type='text/plain')
if not check_utf8(req.headers['Content-Type']):
return HTTPBadRequest(request=req, body='Invalid Content-Type',
content_type='text/plain')
@ -225,6 +248,46 @@ def valid_timestamp(request):
content_type='text/plain')
def check_delete_headers(request):
"""
Validate that 'x-delete' headers have correct values. The
values should be positive integers and correspond to a time
in the future.
:param request: the swob request object
:returns: HTTPBadRequest in case of invalid values
or None if values are ok
"""
if 'x-delete-after' in request.headers:
try:
x_delete_after = int(request.headers['x-delete-after'])
except ValueError:
raise HTTPBadRequest(request=request,
content_type='text/plain',
body='Non-integer X-Delete-After')
actual_del_time = time.time() + x_delete_after
if actual_del_time < time.time():
raise HTTPBadRequest(request=request,
content_type='text/plain',
body='X-Delete-After in past')
request.headers['x-delete-at'] = utils.normalize_delete_at_timestamp(
actual_del_time)
if 'x-delete-at' in request.headers:
try:
x_delete_at = int(utils.normalize_delete_at_timestamp(
int(request.headers['x-delete-at'])))
except ValueError:
raise HTTPBadRequest(request=request, content_type='text/plain',
body='Non-integer X-Delete-At')
if x_delete_at < time.time():
raise HTTPBadRequest(request=request, content_type='text/plain',
body='X-Delete-At in past')
return request
def check_utf8(string):
"""
Validate if a string is valid UTF-8 str or unicode and that it
@ -248,6 +311,31 @@ def check_utf8(string):
return False
def check_path_header(req, name, length, error_msg):
"""
Validate that the value of a path-like header is
well formatted. We assume the caller ensures that the
specific header is present in req.headers.
:param req: HTTP request object
:param name: header name
:param length: length of path segment check
:param error_msg: error message for client
:returns: A tuple with path parts according to length
:raise: HTTPPreconditionFailed if header value
is not well formatted.
"""
src_header = unquote(req.headers.get(name))
if not src_header.startswith('/'):
src_header = '/' + src_header
try:
return utils.split_path(src_header, length, length, True)
except ValueError:
raise HTTPPreconditionFailed(
request=req,
body=error_msg)
def check_copy_from_header(req):
"""
Validate that the value from x-copy-from header is
@ -259,13 +347,42 @@ def check_copy_from_header(req):
:raise: HTTPPreconditionFailed if x-copy-from value
is not well formatted.
"""
src_header = unquote(req.headers.get('X-Copy-From'))
if not src_header.startswith('/'):
src_header = '/' + src_header
try:
return utils.split_path(src_header, 2, 2, True)
except ValueError:
return check_path_header(req, 'X-Copy-From', 2,
'X-Copy-From header must be of the form '
'<container name>/<object name>')
def check_destination_header(req):
"""
Validate that the value from destination header is
well formatted. We assume the caller ensures that
destination header is present in req.headers.
:param req: HTTP request object
:returns: A tuple with container name and object name
:raise: HTTPPreconditionFailed if destination value
is not well formatted.
"""
return check_path_header(req, 'Destination', 2,
'Destination header must be of the form '
'<container name>/<object name>')
def check_account_format(req, account):
"""
Validate that the header contains a valid account name.
We assume the caller ensures that
the account header is present in req.headers.
:param req: HTTP request object
:returns: A properly encoded account name
:raise: HTTPPreconditionFailed if account header
is not well formatted.
"""
if isinstance(account, unicode):
account = account.encode('utf-8')
if '/' in account:
raise HTTPPreconditionFailed(
request=req,
body='X-Copy-From header must be of the form'
'<container name>/<object name>')
body='Account name cannot contain slashes')
return account
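
With check_path_header() factored out, check_copy_from_header() and the new check_destination_header() share the same parsing. A short usage sketch, assuming this commit's swift tree is importable (the sample path and header values are illustrative):

    from swift.common.swob import Request
    from swift.common.constraints import (check_copy_from_header,
                                          check_destination_header)

    req = Request.blank('/v1/AUTH_test/c/o',
                        headers={'X-Copy-From': 'src_cont/src_obj',
                                 'Destination': 'dst_cont/dst_obj'})
    # Both helpers tolerate a missing leading '/' and split the value into
    # container and object; malformed values raise HTTPPreconditionFailed.
    container, obj = check_copy_from_header(req)
    print('%s/%s' % (container, obj))        # src_cont/src_obj
    container, obj = check_destination_header(req)
    print('%s/%s' % (container, obj))        # dst_cont/dst_obj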

View File

@ -23,6 +23,7 @@ from uuid import uuid4
import sys
import time
import errno
import cPickle as pickle
from swift import gettext_ as _
from tempfile import mkstemp
@ -550,10 +551,36 @@ class DatabaseBroker(object):
curs.row_factory = dict_factory
return curs.fetchone()
def put_record(self, record):
if self.db_file == ':memory:':
self.merge_items([record])
return
if not os.path.exists(self.db_file):
raise DatabaseConnectionError(self.db_file, "DB doesn't exist")
with lock_parent_directory(self.pending_file, self.pending_timeout):
pending_size = 0
try:
pending_size = os.path.getsize(self.pending_file)
except OSError as err:
if err.errno != errno.ENOENT:
raise
if pending_size > PENDING_CAP:
self._commit_puts([record])
else:
with open(self.pending_file, 'a+b') as fp:
# Colons aren't used in base64 encoding; so they are our
# delimiter
fp.write(':')
fp.write(pickle.dumps(
self.make_tuple_for_pickle(record),
protocol=PICKLE_PROTOCOL).encode('base64'))
fp.flush()
def _commit_puts(self, item_list=None):
"""
Scan for .pending files and commit the found records by feeding them
to merge_items().
to merge_items(). Assume that lock_parent_directory has already been
called.
:param item_list: A list of items to commit in addition to .pending
"""
@ -561,36 +588,39 @@ class DatabaseBroker(object):
return
if item_list is None:
item_list = []
with lock_parent_directory(self.pending_file, self.pending_timeout):
self._preallocate()
if not os.path.getsize(self.pending_file):
if item_list:
self.merge_items(item_list)
return
with open(self.pending_file, 'r+b') as fp:
for entry in fp.read().split(':'):
if entry:
try:
self._commit_puts_load(item_list, entry)
except Exception:
self.logger.exception(
_('Invalid pending entry %(file)s: %(entry)s'),
{'file': self.pending_file, 'entry': entry})
if item_list:
self.merge_items(item_list)
try:
os.ftruncate(fp.fileno(), 0)
except OSError as err:
if err.errno != errno.ENOENT:
raise
self._preallocate()
if not os.path.getsize(self.pending_file):
if item_list:
self.merge_items(item_list)
return
with open(self.pending_file, 'r+b') as fp:
for entry in fp.read().split(':'):
if entry:
try:
self._commit_puts_load(item_list, entry)
except Exception:
self.logger.exception(
_('Invalid pending entry %(file)s: %(entry)s'),
{'file': self.pending_file, 'entry': entry})
if item_list:
self.merge_items(item_list)
try:
os.ftruncate(fp.fileno(), 0)
except OSError as err:
if err.errno != errno.ENOENT:
raise
def _commit_puts_stale_ok(self):
"""
Catch failures of _commit_puts() if broker is intended for
reading of stats, and thus does not care for pending updates.
"""
if self.db_file == ':memory:' or not os.path.exists(self.pending_file):
return
try:
self._commit_puts()
with lock_parent_directory(self.pending_file,
self.pending_timeout):
self._commit_puts()
except LockTimeout:
if not self.stale_reads_ok:
raise
@ -603,6 +633,13 @@ class DatabaseBroker(object):
"""
raise NotImplementedError
def make_tuple_for_pickle(self, record):
"""
Turn this db record dict into the format this service uses for
pending pickles.
"""
raise NotImplementedError
def merge_syncs(self, sync_points, incoming=True):
"""
Merge a list of sync points with the incoming sync table.
@ -731,7 +768,10 @@ class DatabaseBroker(object):
:param age_timestamp: max created_at timestamp of object rows to delete
:param sync_timestamp: max update_at timestamp of sync rows to delete
"""
self._commit_puts()
if self.db_file != ':memory:' and os.path.exists(self.pending_file):
with lock_parent_directory(self.pending_file,
self.pending_timeout):
self._commit_puts()
with self.get() as conn:
conn.execute('''
DELETE FROM %s WHERE deleted = 1 AND %s < ?
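
put_record() now owns the pending-file append that the account and container brokers used to duplicate; a subclass only has to describe its pending-pickle layout via make_tuple_for_pickle(). A Python 2 sketch of that layout contract with a toy broker (ToyBroker and encode_pending_entry are illustrative names; the real append happens inside DatabaseBroker.put_record under lock_parent_directory):

    import cPickle as pickle

    PICKLE_PROTOCOL = 2  # same value swift.common.db uses

    class ToyBroker(object):
        def make_tuple_for_pickle(self, record):
            # Each service defines its own tuple layout for .pending entries.
            return (record['name'], record['put_timestamp'])

        def encode_pending_entry(self, record):
            # Colons aren't used in base64 encoding, so ':' delimits entries.
            return ':' + pickle.dumps(
                self.make_tuple_for_pickle(record),
                protocol=PICKLE_PROTOCOL).encode('base64')

    entry = ToyBroker().encode_pending_entry(
        {'name': 'c1', 'put_timestamp': '1400000000.00000'})
    print(entry.startswith(':'))  # True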

View File

@ -433,7 +433,7 @@ class MemcacheRing(object):
"""
Sets multiple key/value pairs in memcache.
:param mapping: dictonary of keys and values to be set in memcache
:param mapping: dictionary of keys and values to be set in memcache
:param servery_key: key to use in determining which server in the ring
is used
:param serialize: if True, value is serialized with JSON before sending

View File

@ -278,11 +278,13 @@ class _CappedFileLikeObject(object):
self.fp = fp
self.max_file_size = max_file_size
self.amount_read = 0
self.file_size_exceeded = False
def read(self, size=None):
ret = self.fp.read(size)
self.amount_read += len(ret)
if self.amount_read > self.max_file_size:
self.file_size_exceeded = True
raise EOFError('max_file_size exceeded')
return ret
@ -290,6 +292,7 @@ class _CappedFileLikeObject(object):
ret = self.fp.readline()
self.amount_read += len(ret)
if self.amount_read > self.max_file_size:
self.file_size_exceeded = True
raise EOFError('max_file_size exceeded')
return ret
@ -334,7 +337,7 @@ class FormPost(object):
status, headers, body = self._translate_form(
env, attrs['boundary'])
start_response(status, headers)
return body
return [body]
except (FormInvalid, EOFError) as err:
body = 'FormPost: %s' % err
start_response(
@ -492,7 +495,12 @@ class FormPost(object):
substatus = [None]
subheaders = [None]
wsgi_input = subenv['wsgi.input']
def _start_response(status, headers, exc_info=None):
if wsgi_input.file_size_exceeded:
raise EOFError("max_file_size exceeded")
substatus[0] = status
subheaders[0] = headers
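
Two behavioural points in this hunk: the error body is now returned as a one-element list (a WSGI application must return an iterable of strings, not a bare string), and the capped input wrapper records that it ran past max_file_size so the subrequest's start_response can bail out instead of reporting success. A Python 2 toy version of the capped reader (CappedReader is an illustrative stand-in for _CappedFileLikeObject):

    from StringIO import StringIO

    class CappedReader(object):
        def __init__(self, fp, max_file_size):
            self.fp = fp
            self.max_file_size = max_file_size
            self.amount_read = 0
            self.file_size_exceeded = False

        def read(self, size=None):
            ret = self.fp.read() if size is None else self.fp.read(size)
            self.amount_read += len(ret)
            if self.amount_read > self.max_file_size:
                self.file_size_exceeded = True
                raise EOFError('max_file_size exceeded')
            return ret

    reader = CappedReader(StringIO('x' * 10), max_file_size=4)
    try:
        reader.read()
    except EOFError:
        pass
    print(reader.file_size_exceeded)  # True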

View File

@ -13,11 +13,20 @@
# under the License.
from swift.common import utils as swift_utils
from swift.common.http import is_success
from swift.common.middleware import acl as swift_acl
from swift.common.request_helpers import get_sys_meta_prefix
from swift.common.swob import HTTPNotFound, HTTPForbidden, HTTPUnauthorized
from swift.common.utils import register_swift_info
from swift.proxy.controllers.base import get_account_info
import functools
PROJECT_DOMAIN_ID_HEADER = 'x-account-project-domain-id'
PROJECT_DOMAIN_ID_SYSMETA_HEADER = \
get_sys_meta_prefix('account') + 'project-domain-id'
# a string that is unique w.r.t valid ids
UNKNOWN_ID = '_unknown'
class KeystoneAuth(object):
"""Swift middleware to Keystone authorization system.
@ -69,6 +78,37 @@ class KeystoneAuth(object):
reseller_prefix = NEWAUTH
The keystoneauth middleware supports cross-tenant access control using
the syntax <tenant>:<user> in container Access Control Lists (ACLs). For
a request to be granted by an ACL, <tenant> must match the UUID of the
tenant to which the request token is scoped and <user> must match the
UUID of the user authenticated by the request token.
Note that names must no longer be used in cross-tenant ACLs because with
the introduction of domains in keystone names are no longer globally
unique. For backwards compatibility, ACLs using names will be granted by
keystoneauth when it can be established that both the grantee and the
tenant being accessed are either not yet in a domain (e.g. the request
token has been obtained via the keystone v2 API) or are both in the
default domain to which legacy accounts would have been migrated. The id
of the default domain is specified by the config option
``default_domain_id``:
default_domain_id = default
The backwards compatible behavior can be disabled by setting the config
option ``allow_names_in_acls`` to false::
allow_names_in_acls = false
To enable this backwards compatibility, keystoneauth will attempt to
determine the domain id of a tenant when any new account is created,
and persist this as account metadata. If an account is created for a tenant
using a token with reselleradmin role that is not scoped on that tenant,
keystoneauth is unable to determine the domain id of the tenant;
keystoneauth will assume that the tenant may not be in the default domain
and therefore not match names in ACLs for that account.
:param app: The next WSGI app in the pipeline
:param conf: The dict of configuration values
"""
@ -87,6 +127,9 @@ class KeystoneAuth(object):
self.is_admin = swift_utils.config_true_value(config_is_admin)
config_overrides = conf.get('allow_overrides', 't').lower()
self.allow_overrides = swift_utils.config_true_value(config_overrides)
self.default_domain_id = conf.get('default_domain_id', 'default')
self.allow_names_in_acls = swift_utils.config_true_value(
conf.get('allow_names_in_acls', 'true'))
def __call__(self, environ, start_response):
identity = self._keystone_identity(environ)
@ -116,7 +159,18 @@ class KeystoneAuth(object):
environ['swift.clean_acl'] = swift_acl.clean_acl
return self.app(environ, start_response)
def keystone_start_response(status, response_headers, exc_info=None):
project_domain_id = None
for key, val in response_headers:
if key.lower() == PROJECT_DOMAIN_ID_SYSMETA_HEADER:
project_domain_id = val
break
if project_domain_id:
response_headers.append((PROJECT_DOMAIN_ID_HEADER,
project_domain_id))
return start_response(status, response_headers, exc_info)
return self.app(environ, keystone_start_response)
def _keystone_identity(self, environ):
"""Extract the identity from the Keystone auth component."""
@ -147,6 +201,21 @@ class KeystoneAuth(object):
'tenant': (environ.get('HTTP_X_TENANT_ID'),
environ.get('HTTP_X_TENANT_NAME')),
'roles': roles}
token_info = environ.get('keystone.token_info', {})
auth_version = 0
user_domain = project_domain = (None, None)
if 'access' in token_info:
# ignore any domain id headers that authtoken may have set
auth_version = 2
elif 'token' in token_info:
auth_version = 3
user_domain = (environ.get('HTTP_X_USER_DOMAIN_ID'),
environ.get('HTTP_X_USER_DOMAIN_NAME'))
project_domain = (environ.get('HTTP_X_PROJECT_DOMAIN_ID'),
environ.get('HTTP_X_PROJECT_DOMAIN_NAME'))
identity['user_domain'] = user_domain
identity['project_domain'] = project_domain
identity['auth_version'] = auth_version
return identity
def _get_account_for_tenant(self, tenant_id):
@ -156,8 +225,80 @@ class KeystoneAuth(object):
"""Check reseller prefix."""
return account == self._get_account_for_tenant(tenant_id)
def _get_project_domain_id(self, environ):
info = get_account_info(environ, self.app, 'KS')
domain_id = info.get('sysmeta', {}).get('project-domain-id')
exists = is_success(info.get('status', 0))
return exists, domain_id
def _set_project_domain_id(self, req, path_parts, env_identity):
'''
Try to determine the project domain id and save it as
account metadata. Do this for a PUT or POST to the
account, and also for a container PUT in case that
causes the account to be auto-created.
'''
if PROJECT_DOMAIN_ID_SYSMETA_HEADER in req.headers:
return
version, account, container, obj = path_parts
method = req.method
if (obj or (container and method != 'PUT')
or method not in ['PUT', 'POST']):
return
tenant_id, tenant_name = env_identity['tenant']
exists, sysmeta_id = self._get_project_domain_id(req.environ)
req_has_id, req_id, new_id = False, None, None
if self._reseller_check(account, tenant_id):
# domain id can be inferred from request (may be None)
req_has_id = True
req_id = env_identity['project_domain'][0]
if not exists:
# new account so set a domain id
new_id = req_id if req_has_id else UNKNOWN_ID
elif sysmeta_id is None and req_id == self.default_domain_id:
# legacy account, update if default domain id in req
new_id = req_id
elif sysmeta_id == UNKNOWN_ID and req_has_id:
# unknown domain, update if req confirms domain
new_id = req_id or ''
elif req_has_id and sysmeta_id != req_id:
self.logger.warn("Inconsistent project domain id: " +
"%s in token vs %s in account metadata."
% (req_id, sysmeta_id))
if new_id is not None:
req.headers[PROJECT_DOMAIN_ID_SYSMETA_HEADER] = new_id
def _is_name_allowed_in_acl(self, req, path_parts, identity):
if not self.allow_names_in_acls:
return False
user_domain_id = identity['user_domain'][0]
if user_domain_id and user_domain_id != self.default_domain_id:
return False
proj_domain_id = identity['project_domain'][0]
if proj_domain_id and proj_domain_id != self.default_domain_id:
return False
# request user and scoped project are both in default domain
tenant_id, tenant_name = identity['tenant']
version, account, container, obj = path_parts
if self._reseller_check(account, tenant_id):
# account == scoped project, so account is also in default domain
allow = True
else:
# retrieve account project domain id from account sysmeta
exists, acc_domain_id = self._get_project_domain_id(req.environ)
allow = exists and acc_domain_id in [self.default_domain_id, None]
if allow:
self.logger.debug("Names allowed in acls.")
return allow
def _authorize_cross_tenant(self, user_id, user_name,
tenant_id, tenant_name, roles):
tenant_id, tenant_name, roles,
allow_names=True):
"""Check cross-tenant ACLs.
Match tenant:user, tenant and user could be its id, name or '*'
@ -167,14 +308,21 @@ class KeystoneAuth(object):
:param tenant_id: The tenant ID from the identity token.
:param tenant_name: The tenant name from the identity token.
:param roles: The given container ACL.
:param allow_names: If True then attempt to match tenant and user names
as well as id's.
:returns: matched string if tenant(name/id/*):user(name/id/*) matches
the given ACL.
None otherwise.
"""
for tenant in [tenant_id, tenant_name, '*']:
for user in [user_id, user_name, '*']:
tenant_match = [tenant_id, '*']
user_match = [user_id, '*']
if allow_names:
tenant_match = tenant_match + [tenant_name]
user_match = user_match + [user_name]
for tenant in tenant_match:
for user in user_match:
s = '%s:%s' % (tenant, user)
if s in roles:
return s
@ -195,6 +343,8 @@ class KeystoneAuth(object):
except ValueError:
return HTTPNotFound(request=req)
self._set_project_domain_id(req, part, env_identity)
user_roles = [r.lower() for r in env_identity.get('roles', [])]
# Give unconditional access to a user with the reseller_admin
@ -214,9 +364,12 @@ class KeystoneAuth(object):
return self.denied_response(req)
# cross-tenant authorization
matched_acl = self._authorize_cross_tenant(user_id, user_name,
tenant_id, tenant_name,
roles)
matched_acl = None
if roles:
allow_names = self._is_name_allowed_in_acl(req, part, env_identity)
matched_acl = self._authorize_cross_tenant(user_id, user_name,
tenant_id, tenant_name,
roles, allow_names)
if matched_acl is not None:
log_msg = 'user %s allowed in ACL authorizing.'
self.logger.debug(log_msg, matched_acl)
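
A standalone re-statement of the cross-tenant ACL matching described above: IDs and '*' always participate, names only when allow_names is true (the sample IDs, names and ACL entries are illustrative):

    def authorize_cross_tenant(user_id, user_name, tenant_id, tenant_name,
                               roles, allow_names=True):
        tenant_match = [tenant_id, '*'] + ([tenant_name] if allow_names else [])
        user_match = [user_id, '*'] + ([user_name] if allow_names else [])
        for tenant in tenant_match:
            for user in user_match:
                s = '%s:%s' % (tenant, user)
                if s in roles:
                    return s
        return None

    acl = ['tenant_uuid:user_uuid', 'acme:bob']
    # ID-based entries always match.
    print(authorize_cross_tenant('user_uuid', 'alice', 'tenant_uuid', 'acme',
                                 acl))                   # tenant_uuid:user_uuid
    # Name-based entries only match when names are allowed.
    print(authorize_cross_tenant('u2', 'bob', 't2', 'acme', acl,
                                 allow_names=False))     # None
    print(authorize_cross_tenant('u2', 'bob', 't2', 'acme', acl))  # acme:bob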

View File

@ -464,22 +464,19 @@ class TempURL(object):
:param env: The WSGI environment for the request.
"""
for h in env.keys():
remove = h in self.incoming_remove_headers
if not remove:
if h in self.incoming_allow_headers:
continue
for p in self.incoming_allow_headers_startswith:
if h.startswith(p):
break
else:
if h in self.incoming_remove_headers:
del env[h]
continue
for p in self.incoming_remove_headers_startswith:
if h.startswith(p):
remove = True
del env[h]
break
if remove:
if h in self.incoming_allow_headers:
remove = False
if remove:
for p in self.incoming_allow_headers_startswith:
if h.startswith(p):
remove = False
break
if remove:
del env[h]
def _clean_outgoing_headers(self, headers):
"""
@ -495,22 +492,19 @@ class TempURL(object):
"""
headers = HeaderKeyDict(headers)
for h in headers.keys():
remove = h in self.outgoing_remove_headers
if not remove:
if h in self.outgoing_allow_headers:
continue
for p in self.outgoing_allow_headers_startswith:
if h.startswith(p):
break
else:
if h in self.outgoing_remove_headers:
del headers[h]
continue
for p in self.outgoing_remove_headers_startswith:
if h.startswith(p):
remove = True
del headers[h]
break
if remove:
if h in self.outgoing_allow_headers:
remove = False
if remove:
for p in self.outgoing_allow_headers_startswith:
if h.startswith(p):
remove = False
break
if remove:
del headers[h]
return headers.items()
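
The incoming and outgoing header-scrubbing loops were restructured so the precedence reads top to bottom: exact allow, then allow prefix, then exact remove, then remove prefix. A compact sketch of that decision order (the prefix values below are illustrative, not the middleware's configured defaults):

    def should_remove(name, remove_exact, remove_prefixes,
                      allow_exact, allow_prefixes):
        # Allow checks short-circuit, exactly as in the rewritten loops.
        if name in allow_exact:
            return False
        if any(name.startswith(p) for p in allow_prefixes):
            return False
        if name in remove_exact:
            return True
        return any(name.startswith(p) for p in remove_prefixes)

    print(should_remove('x-object-meta-secret',
                        set(), ['x-object-meta-'],
                        set(), ['x-object-meta-public-']))      # True
    print(should_remove('x-object-meta-public-tag',
                        set(), ['x-object-meta-'],
                        set(), ['x-object-meta-public-']))      # False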

View File

@ -384,10 +384,7 @@ class HTMLViewer(object):
elif output_format == 'ods':
data = stats.to_ods(nfl_esc, limit)
else:
profile_tmp_all = tempfile.mktemp('.profile', 'all')
stats.dump_stats(profile_tmp_all)
data = open(profile_tmp_all).read()
os.remove(profile_tmp_all)
data = stats.print_stats()
return data, [('content-type', self.format_dict[output_format])]
except ODFLIBNotInstalled as ex:
raise ex
@ -427,10 +424,11 @@ class HTMLViewer(object):
plt.xlabel(names[metric_selected])
plt.title('Profile Statistics (by %s)' % names[metric_selected])
#plt.gcf().tight_layout(pad=1.2)
profile_img = tempfile.mktemp('.png', 'plot')
plt.savefig(profile_img, dpi=300)
data = open(profile_img).read()
os.remove(profile_img)
profile_img = tempfile.TemporaryFile()
plt.savefig(profile_img, format='png', dpi=300)
profile_img.seek(0)
data = profile_img.read()
os.close(profile_img)
return data, [('content-type', 'image/jpg')]
except Exception as ex:
raise ProfileException(_('plotting results failed due to %s') % ex)

View File

@ -222,10 +222,11 @@ class Stats2(pstats.Stats):
table.addElement(tr_header)
spreadsheet.spreadsheet.addElement(table)
tmp_ods = tempfile.mktemp('.ods', 'stats')
spreadsheet.save(tmp_ods, False)
data = open(tmp_ods).read()
os.remove(tmp_ods)
tmp_ods = tempfile.TemporaryFile()
spreadsheet.write(tmp_ods)
tmp_ods.seek(0)
data = tmp_ods.read()
os.close(tmp_ods)
return data

View File

@ -931,7 +931,8 @@ class RingBuilder(object):
del dev['sort_key']
del dev['tiers']
def _sort_key_for(self, dev):
@staticmethod
def _sort_key_for(dev):
return (dev['parts_wanted'], random.randint(0, 0xFFFF), dev['id'])
def _build_max_replicas_by_tier(self):
@ -941,39 +942,46 @@ class RingBuilder(object):
There will always be a () entry as the root of the structure, whose
replica_count will equal the ring's replica_count.
Then there will be (dev_id,) entries for each device, indicating the
maximum number of replicas the device might have for any given
partition. Anything greater than 1 indicates a partition at serious
risk, as the data on that partition will not be stored distinctly at
the ring's replica_count.
Then there will be (region,) entries for each region, indicating the
maximum number of replicas the region might have for any given
partition.
Next there will be (dev_id, ip_port) entries for each device,
indicating the maximum number of replicas the device shares with other
devices on the same ip_port for any given partition. Anything greater
than 1 indicates a partition at elevated risk, as if that ip_port were
to fail multiple replicas of that partition would be unreachable.
Next there will be (region, zone) entries for each zone, indicating
the maximum number of replicas in a given region and zone. Anything
greater than 1 indicates a partition at slightly elevated risk, as if
that zone were to fail multiple replicas of that partition would be
unreachable.
Last there will be (dev_id, ip_port, zone) entries for each device,
indicating the maximum number of replicas the device shares with other
devices within the same zone for any given partition. Anything greater
than 1 indicates a partition at slightly elevated risk, as if that zone
were to fail multiple replicas of that partition would be unreachable.
Next there will be (region, zone, ip_port) entries for each node,
indicating the maximum number of replicas stored on a node in a given
region and zone. Anything greater than 1 indicates a partition at
elevated risk, as if that ip_port were to fail multiple replicas of
that partition would be unreachable.
Last there will be (region, zone, ip_port, device) entries for each
device, indicating the maximum number of replicas the device shares
with other devices on the same node for any given partition.
Anything greater than 1 indicates a partition at serious risk, as the
data on that partition will not be stored distinctly at the ring's
replica_count.
Example return dict for the common SAIO setup::
{(): 3,
(1,): 1.0,
(1, '127.0.0.1:6010'): 1.0,
(1, '127.0.0.1:6010', 0): 1.0,
(2,): 1.0,
(2, '127.0.0.1:6020'): 1.0,
(2, '127.0.0.1:6020', 1): 1.0,
(3,): 1.0,
(3, '127.0.0.1:6030'): 1.0,
(3, '127.0.0.1:6030', 2): 1.0,
(4,): 1.0,
(4, '127.0.0.1:6040'): 1.0,
(4, '127.0.0.1:6040', 3): 1.0}
{(): 3.0,
(1,): 3.0,
(1, 1): 1.0,
(1, 1, '127.0.0.1:6010'): 1.0,
(1, 1, '127.0.0.1:6010', 0): 1.0,
(1, 2): 1.0,
(1, 2, '127.0.0.1:6020'): 1.0,
(1, 2, '127.0.0.1:6020', 1): 1.0,
(1, 3): 1.0,
(1, 3, '127.0.0.1:6030'): 1.0,
(1, 3, '127.0.0.1:6030', 2): 1.0,
(1, 4): 1.0,
(1, 4, '127.0.0.1:6040'): 1.0,
(1, 4, '127.0.0.1:6040', 3): 1.0}
"""
# Used by walk_tree to know what entries to create for each recursive
# call.

View File

@ -696,16 +696,25 @@ def normalize_timestamp(timestamp):
return Timestamp(timestamp).normal
EPOCH = datetime.datetime(1970, 1, 1)
def last_modified_date_to_timestamp(last_modified_date_str):
"""
Convert a last modified date (like you'd get from a container listing,
e.g. 2014-02-28T23:22:36.698390) to a float.
"""
return Timestamp(
datetime.datetime.strptime(
last_modified_date_str, '%Y-%m-%dT%H:%M:%S.%f'
).strftime('%s.%f')
)
start = datetime.datetime.strptime(last_modified_date_str,
'%Y-%m-%dT%H:%M:%S.%f')
delta = start - EPOCH
# TODO(sam): after we no longer support py2.6, this expression can
# simplify to Timestamp(delta.total_seconds()).
#
# This calculation is based on Python 2.7's Modules/datetimemodule.c,
# function delta_to_microseconds(), but written in Python.
return Timestamp(delta.days * 86400 +
delta.seconds +
delta.microseconds / 1000000.0)
def normalize_delete_at_timestamp(timestamp):
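
A quick check of the new arithmetic, assuming this commit's swift tree is importable; the listing date is the one used in the docstring above:

    from swift.common.utils import last_modified_date_to_timestamp

    ts = last_modified_date_to_timestamp('2014-02-28T23:22:36.698390')
    print(ts.normal)   # 1393629756.69839 (epoch seconds)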

View File

@ -139,17 +139,19 @@ def monkey_patch_mimetools():
mimetools.Message.parsetype = parsetype
def get_socket(conf, default_port=8080):
def get_socket(conf):
"""Bind socket to bind ip:port in conf
:param conf: Configuration dict to read settings from
:param default_port: port to use if not specified in conf
:returns : a socket object as returned from socket.listen or
ssl.wrap_socket if conf specifies cert_file
"""
bind_addr = (conf.get('bind_ip', '0.0.0.0'),
int(conf.get('bind_port', default_port)))
try:
bind_port = int(conf['bind_port'])
except (ValueError, KeyError, TypeError):
raise ConfigFilePortError()
bind_addr = (conf.get('bind_ip', '0.0.0.0'), bind_port)
address_family = [addr[0] for addr in socket.getaddrinfo(
bind_addr[0], bind_addr[1], socket.AF_UNSPEC, socket.SOCK_STREAM)
if addr[0] in (socket.AF_INET, socket.AF_INET6)][0]
@ -421,7 +423,14 @@ def run_wsgi(conf_path, app_section, *args, **kwargs):
return 1
# bind to address and port
sock = get_socket(conf, default_port=kwargs.get('default_port', 8080))
try:
sock = get_socket(conf)
except ConfigFilePortError:
msg = 'bind_port wasn\'t properly set in the config file. ' \
'It must be explicitly set to a valid port number.'
logger.error(msg)
print(msg)
return 1
# remaining tasks should not require elevated privileges
drop_privileges(conf.get('user', 'swift'))
@ -495,6 +504,10 @@ class ConfigFileError(Exception):
pass
class ConfigFilePortError(ConfigFileError):
pass
def _initrp(conf_path, app_section, *args, **kwargs):
try:
conf = appconfig(conf_path, name=app_section)
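
Because get_socket() no longer receives a default_port, a config whose [DEFAULT] section omits bind_port (or sets it to something non-numeric) now fails fast with ConfigFilePortError instead of silently binding to a guessed port. A minimal sketch of the new validation, using a bare dict in place of a parsed config:

    from swift.common.wsgi import get_socket, ConfigFilePortError

    try:
        # bind_port intentionally missing, as in an old-style config that
        # relied on the removed default_port fallback.
        get_socket({'bind_ip': '127.0.0.1'})
    except ConfigFilePortError:
        print('bind_port must be set explicitly in the config file')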

View File

@ -20,13 +20,11 @@ import os
from uuid import uuid4
import time
import cPickle as pickle
import errno
import sqlite3
from swift.common.utils import Timestamp, lock_parent_directory
from swift.common.db import DatabaseBroker, DatabaseConnectionError, \
PENDING_CAP, PICKLE_PROTOCOL, utf8encode
from swift.common.utils import Timestamp
from swift.common.db import DatabaseBroker, utf8encode
SQLITE_ARG_LIMIT = 999
@ -320,6 +318,11 @@ class ContainerBroker(DatabaseBroker):
self.put_object(name, timestamp, 0, 'application/deleted', 'noetag',
deleted=1, storage_policy_index=storage_policy_index)
def make_tuple_for_pickle(self, record):
return (record['name'], record['created_at'], record['size'],
record['content_type'], record['etag'], record['deleted'],
record['storage_policy_index'])
def put_object(self, name, timestamp, size, content_type, etag, deleted=0,
storage_policy_index=0):
"""
@ -338,31 +341,7 @@ class ContainerBroker(DatabaseBroker):
'content_type': content_type, 'etag': etag,
'deleted': deleted,
'storage_policy_index': storage_policy_index}
if self.db_file == ':memory:':
self.merge_items([record])
return
if not os.path.exists(self.db_file):
raise DatabaseConnectionError(self.db_file, "DB doesn't exist")
pending_size = 0
try:
pending_size = os.path.getsize(self.pending_file)
except OSError as err:
if err.errno != errno.ENOENT:
raise
if pending_size > PENDING_CAP:
self._commit_puts([record])
else:
with lock_parent_directory(self.pending_file,
self.pending_timeout):
with open(self.pending_file, 'a+b') as fp:
# Colons aren't used in base64 encoding; so they are our
# delimiter
fp.write(':')
fp.write(pickle.dumps(
(name, timestamp, size, content_type, etag, deleted,
storage_policy_index),
protocol=PICKLE_PROTOCOL).encode('base64'))
fp.flush()
self.put_record(record)
def _is_deleted_info(self, object_count, put_timestamp, delete_timestamp,
**kwargs):

View File

@ -63,15 +63,18 @@ class ContainerReplicator(db_replicator.Replicator):
broker.set_storage_policy_index(
remote_info['storage_policy_index'],
timestamp=status_changed_at.internal)
broker.merge_timestamps(*(remote_info[key] for key in (
'created_at', 'put_timestamp', 'delete_timestamp')))
sync_timestamps = ('created_at', 'put_timestamp',
'delete_timestamp')
if any(info[key] != remote_info[key] for key in sync_timestamps):
broker.merge_timestamps(*(remote_info[key] for key in
sync_timestamps))
rv = parent._handle_sync_response(
node, response, info, broker, http)
return rv
def find_local_handoff_for_part(self, part):
"""
Look through devices in the ring for the first handoff devie that was
Look through devices in the ring for the first handoff device that was
identified during job creation as available on this node.
:returns: a node entry from the ring
@ -179,10 +182,10 @@ class ContainerReplicator(db_replicator.Replicator):
def _post_replicate_hook(self, broker, info, responses):
if info['account'] == MISPLACED_OBJECTS_ACCOUNT:
return
if not broker.has_multiple_policies():
point = broker.get_reconciler_sync()
if not broker.has_multiple_policies() and info['max_row'] != point:
broker.update_reconciler_sync(info['max_row'])
return
point = broker.get_reconciler_sync()
max_sync = self.dump_to_reconciler(broker, point)
success = responses.count(True) >= \
replication_quorum_size(len(responses))

swift/locale/swift.pot — new file, 1155 lines added

File diff suppressed because it is too large.

View File

@ -25,12 +25,14 @@ from eventlet.greenpool import GreenPool
from swift.common.daemon import Daemon
from swift.common.internal_client import InternalClient, UnexpectedResponse
from swift.common.utils import get_logger, dump_recon_cache
from swift.common.utils import get_logger, dump_recon_cache, split_path
from swift.common.http import HTTP_NOT_FOUND, HTTP_CONFLICT, \
HTTP_PRECONDITION_FAILED
from swift.container.reconciler import direct_delete_container_entry
MAX_OBJECTS_TO_CACHE = 100000
class ObjectExpirer(Daemon):
"""
@ -85,6 +87,66 @@ class ObjectExpirer(Daemon):
(elapsed, self.report_objects))
self.report_last_time = time()
def iter_cont_objs_to_expire(self):
"""
Yields (container, obj) tuples to be deleted
"""
obj_cache = {}
cnt = 0
all_containers = set()
for c in self.swift.iter_containers(self.expiring_objects_account):
container = c['name']
timestamp = int(container)
if timestamp > int(time()):
break
all_containers.add(container)
for o in self.swift.iter_objects(self.expiring_objects_account,
container):
obj = o['name'].encode('utf8')
timestamp, actual_obj = obj.split('-', 1)
timestamp = int(timestamp)
if timestamp > int(time()):
break
try:
cust_account, cust_cont, cust_obj = \
split_path('/' + actual_obj, 3, 3, True)
cache_key = '%s/%s' % (cust_account, cust_cont)
except ValueError:
cache_key = None
if self.processes > 0:
obj_process = int(
hashlib.md5('%s/%s' % (str(container), obj)).
hexdigest(), 16)
if obj_process % self.processes != self.process:
continue
if cache_key not in obj_cache:
obj_cache[cache_key] = []
obj_cache[cache_key].append((container, obj))
cnt += 1
if cnt > MAX_OBJECTS_TO_CACHE:
while obj_cache:
for key in obj_cache.keys():
if obj_cache[key]:
yield obj_cache[key].pop()
cnt -= 1
else:
del obj_cache[key]
while obj_cache:
for key in obj_cache.keys():
if obj_cache[key]:
yield obj_cache[key].pop()
else:
del obj_cache[key]
for container in all_containers:
yield (container, None)
def run_once(self, *args, **kwargs):
"""
Executes a single pass, looking for objects to expire.
@ -96,9 +158,9 @@ class ObjectExpirer(Daemon):
These will override the values from the config file if
provided.
"""
processes, process = self.get_process_values(kwargs)
self.get_process_values(kwargs)
pool = GreenPool(self.concurrency)
containers_to_delete = []
containers_to_delete = set([])
self.report_first_time = self.report_last_time = time()
self.report_objects = 0
try:
@ -107,28 +169,21 @@ class ObjectExpirer(Daemon):
self.swift.get_account_info(self.expiring_objects_account)
self.logger.info(_('Pass beginning; %s possible containers; %s '
'possible objects') % (containers, objects))
for c in self.swift.iter_containers(self.expiring_objects_account):
container = c['name']
timestamp = int(container)
for container, obj in self.iter_cont_objs_to_expire():
containers_to_delete.add(container)
if not obj:
continue
timestamp, actual_obj = obj.split('-', 1)
timestamp = int(timestamp)
if timestamp > int(time()):
break
containers_to_delete.append(container)
for o in self.swift.iter_objects(self.expiring_objects_account,
container):
obj = o['name'].encode('utf8')
if processes > 0:
obj_process = int(
hashlib.md5('%s/%s' % (str(container), obj)).
hexdigest(), 16)
if obj_process % processes != process:
continue
timestamp, actual_obj = obj.split('-', 1)
timestamp = int(timestamp)
if timestamp > int(time()):
break
pool.spawn_n(
self.delete_object, actual_obj, timestamp,
container, obj)
pool.spawn_n(
self.delete_object, actual_obj, timestamp,
container, obj)
pool.waitall()
for container in containers_to_delete:
try:
@ -167,38 +222,32 @@ class ObjectExpirer(Daemon):
def get_process_values(self, kwargs):
"""
Gets the processes, process from the kwargs if those values exist.
Otherwise, return processes, process set in the config file.
Sets self.processes and self.process from the kwargs if those
values exist, otherwise, leaves those values as they were set in
the config file.
:param kwargs: Keyword args passed into the run_forever(), run_once()
methods. They have values specified on the command
line when the daemon is run.
"""
if kwargs.get('processes') is not None:
processes = int(kwargs['processes'])
else:
processes = self.processes
self.processes = int(kwargs['processes'])
if kwargs.get('process') is not None:
process = int(kwargs['process'])
else:
process = self.process
self.process = int(kwargs['process'])
if process < 0:
if self.process < 0:
raise ValueError(
'process must be an integer greater than or equal to 0')
if processes < 0:
if self.processes < 0:
raise ValueError(
'processes must be an integer greater than or equal to 0')
if processes and process >= processes:
if self.processes and self.process >= self.processes:
raise ValueError(
'process must be less than or equal to processes')
return processes, process
def delete_object(self, actual_obj, timestamp, container, obj):
start_time = time()
try:
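
iter_cont_objs_to_expire() keeps the daemon's existing sharding: each expirer process hashes "container/object" and only handles names whose md5 falls in its slot. A standalone Python 2 sketch of that partitioning (the names are illustrative):

    import hashlib

    def is_mine(container, obj, process, processes):
        # Same md5-mod split the expirer applies when processes > 0.
        if processes <= 0:
            return True
        obj_process = int(
            hashlib.md5('%s/%s' % (container, obj)).hexdigest(), 16)
        return obj_process % processes == process

    names = [('1400000000', '1400000000-AUTH_test/c/o%d' % i)
             for i in range(6)]
    mine = [o for c, o in names if is_mine(c, o, process=0, processes=3)]
    print('%d of %d names handled by process 0' % (len(mine), len(names)))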

View File

@ -121,7 +121,7 @@ class AccountController(Controller):
req, self.app.account_ring, account_partition, 'POST',
req.swift_entity_path, [headers] * len(accounts))
if resp.status_int == HTTP_NOT_FOUND and self.app.account_autocreate:
self.autocreate_account(req.environ, self.account_name)
self.autocreate_account(req, self.account_name)
resp = self.make_requests(
req, self.app.account_ring, account_partition, 'POST',
req.swift_entity_path, [headers] * len(accounts))

View File

@ -28,6 +28,7 @@ import os
import time
import functools
import inspect
import operator
from sys import exc_info
from swift import gettext_ as _
from urllib import quote
@ -1039,7 +1040,7 @@ class Controller(object):
{'method': method, 'path': path})
def make_requests(self, req, ring, part, method, path, headers,
query_string=''):
query_string='', overrides=None):
"""
Sends an HTTP request to multiple nodes and aggregates the results.
It attempts the primary nodes concurrently, then iterates over the
@ -1054,6 +1055,8 @@ class Controller(object):
:param headers: a list of dicts, where each dict represents one
backend request that should be made.
:param query_string: optional query string to send to the backend
:param overrides: optional return status override map used to override
the returned status of a request.
:returns: a swob.Response object
"""
start_nodes = ring.get_part_nodes(part)
@ -1072,13 +1075,18 @@ class Controller(object):
if self.have_quorum(statuses, len(start_nodes), req):
break
# give any pending requests *some* chance to finish
pile.waitall(self.app.post_quorum_timeout)
finished_quickly = pile.waitall(self.app.post_quorum_timeout)
for resp in finished_quickly:
if not resp:
continue
response.append(resp)
statuses.append(resp[0])
while len(response) < len(start_nodes):
response.append((HTTP_SERVICE_UNAVAILABLE, '', '', ''))
statuses, reasons, resp_headers, bodies = zip(*response)
return self.best_response(req, statuses, reasons, bodies,
'%s %s' % (self.server_type, req.method),
headers=resp_headers)
overrides=overrides, headers=resp_headers)
def _quorum_size(self, n, req=None):
"""
@ -1105,7 +1113,7 @@ class Controller(object):
return False
def best_response(self, req, statuses, reasons, bodies, server_type,
etag=None, headers=None):
etag=None, headers=None, overrides=None):
"""
Given a list of responses from several servers, choose the best to
return to the API.
@ -1119,26 +1127,58 @@ class Controller(object):
:param headers: headers of each response
:returns: swob.Response object with the correct status, body, etc. set
"""
resp = Response(request=req)
if len(statuses):
for hundred in (HTTP_OK, HTTP_MULTIPLE_CHOICES, HTTP_BAD_REQUEST):
hstatuses = \
[s for s in statuses if hundred <= s < hundred + 100]
if len(hstatuses) >= self._quorum_size(len(statuses), req):
status = max(hstatuses)
status_index = statuses.index(status)
resp.status = '%s %s' % (status, reasons[status_index])
resp.body = bodies[status_index]
if headers:
update_headers(resp, headers[status_index])
if etag:
resp.headers['etag'] = etag.strip('"')
return resp
self.app.logger.error(_('%(type)s returning 503 for %(statuses)s'),
{'type': server_type, 'statuses': statuses})
resp.status = '503 Internal Server Error'
resp = self._compute_quorum_response(
req, statuses, reasons, bodies, etag, headers)
if overrides and not resp:
faked_up_status_indices = set()
transformed = []
for (i, (status, reason, hdrs, body)) in enumerate(zip(
statuses, reasons, headers, bodies)):
if status in overrides:
faked_up_status_indices.add(i)
transformed.append((overrides[status], '', '', ''))
else:
transformed.append((status, reason, hdrs, body))
statuses, reasons, headers, bodies = zip(*transformed)
resp = self._compute_quorum_response(
req, statuses, reasons, bodies, etag, headers,
indices_to_avoid=faked_up_status_indices)
if not resp:
resp = Response(request=req)
self.app.logger.error(_('%(type)s returning 503 for %(statuses)s'),
{'type': server_type, 'statuses': statuses})
resp.status = '503 Internal Server Error'
return resp
def _compute_quorum_response(self, req, statuses, reasons, bodies, etag,
headers, indices_to_avoid=()):
if not statuses:
return None
for hundred in (HTTP_OK, HTTP_MULTIPLE_CHOICES, HTTP_BAD_REQUEST):
hstatuses = \
[(i, s) for i, s in enumerate(statuses)
if hundred <= s < hundred + 100]
if len(hstatuses) >= self._quorum_size(len(statuses), req):
resp = Response(request=req)
try:
status_index, status = max(
((i, stat) for i, stat in hstatuses
if i not in indices_to_avoid),
key=operator.itemgetter(1))
except ValueError:
# All statuses were indices to avoid
continue
resp.status = '%s %s' % (status, reasons[status_index])
resp.body = bodies[status_index]
if headers:
update_headers(resp, headers[status_index])
if etag:
resp.headers['etag'] = etag.strip('"')
return resp
return None
@public
def GET(self, req):
"""
@ -1159,7 +1199,7 @@ class Controller(object):
"""
return self.GETorHEAD(req)
def autocreate_account(self, env, account):
def autocreate_account(self, req, account):
"""
Autocreate an account
@ -1171,12 +1211,17 @@ class Controller(object):
headers = {'X-Timestamp': Timestamp(time.time()).internal,
'X-Trans-Id': self.trans_id,
'Connection': 'close'}
# transfer any x-account-sysmeta headers from original request
# to the autocreate PUT
headers.update((k, v)
for k, v in req.headers.iteritems()
if is_sys_meta('account', k))
resp = self.make_requests(Request.blank('/v1' + path),
self.app.account_ring, partition, 'PUT',
path, [headers] * len(nodes))
if is_success(resp.status_int):
self.app.logger.info('autocreate account %r' % path)
clear_info_cache(self.app, env, account)
clear_info_cache(self.app, req.environ, account)
else:
self.app.logger.warning('Could not autocreate account %r' % path)
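
best_response() now accepts an overrides map: when the raw statuses reach no quorum, selected statuses are rewritten (for example 404 treated as 204) and quorum is recomputed, with the faked-up entries excluded when picking which body to return. A small sketch of just the rewrite step (the status values are illustrative):

    def apply_overrides(statuses, overrides):
        # Rewrite statuses and remember which indices were faked up, as in
        # the override branch of best_response() above.
        faked = set()
        out = []
        for i, status in enumerate(statuses):
            if status in overrides:
                faked.add(i)
                out.append(overrides[status])
            else:
                out.append(status)
        return out, faked

    statuses = [404, 404, 204]
    new_statuses, faked = apply_overrides(statuses, {404: 204})
    print(new_statuses)   # [204, 204, 204]
    print(sorted(faked))  # [0, 1] -- these indices cannot supply the body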

View File

@ -136,7 +136,7 @@ class ContainerController(Controller):
account_partition, accounts, container_count = \
self.account_info(self.account_name, req)
if not accounts and self.app.account_autocreate:
self.autocreate_account(req.environ, self.account_name)
self.autocreate_account(req, self.account_name)
account_partition, accounts, container_count = \
self.account_info(self.account_name, req)
if not accounts:

View File

@ -42,7 +42,8 @@ from swift.common.utils import (
normalize_delete_at_timestamp, public, get_expirer_container)
from swift.common.bufferedhttp import http_connect
from swift.common.constraints import check_metadata, check_object_creation, \
check_copy_from_header
check_copy_from_header, check_destination_header, \
check_account_format
from swift.common import constraints
from swift.common.exceptions import ChunkReadTimeout, \
ChunkWriteTimeout, ConnectionTimeout, ListingIterNotFound, \
@ -57,7 +58,7 @@ from swift.proxy.controllers.base import Controller, delay_denial, \
from swift.common.swob import HTTPAccepted, HTTPBadRequest, HTTPNotFound, \
HTTPPreconditionFailed, HTTPRequestEntityTooLarge, HTTPRequestTimeout, \
HTTPServerError, HTTPServiceUnavailable, Request, \
HTTPClientDisconnect, HTTPNotImplemented
HTTPClientDisconnect
from swift.common.request_helpers import is_sys_or_user_meta, is_sys_meta, \
remove_items, copy_header_subset
@ -293,12 +294,8 @@ class ObjectController(Controller):
if not containers:
return HTTPNotFound(request=req)
try:
req, delete_at_container, delete_at_part, \
delete_at_nodes = self._config_obj_expiration(req)
except ValueError as e:
return HTTPBadRequest(request=req, content_type='text/plain',
body=str(e))
req, delete_at_container, delete_at_part, \
delete_at_nodes = self._config_obj_expiration(req)
# pass the policy index to storage nodes via req header
policy_index = req.headers.get('X-Backend-Storage-Policy-Index',
@ -421,34 +418,45 @@ class ObjectController(Controller):
try:
with Timeout(self.app.node_timeout):
if conn.resp:
return conn.resp
return (conn, conn.resp)
else:
return conn.getresponse()
return (conn, conn.getresponse())
except (Exception, Timeout):
self.app.exception_occurred(
conn.node, _('Object'),
_('Trying to get final status of PUT to %s') % req.path)
return (None, None)
pile = GreenAsyncPile(len(conns))
for conn in conns:
pile.spawn(get_conn_response, conn)
for response in pile:
def _handle_response(conn, response):
statuses.append(response.status)
reasons.append(response.reason)
bodies.append(response.read())
if response.status >= HTTP_INTERNAL_SERVER_ERROR:
self.app.error_occurred(
conn.node,
_('ERROR %(status)d %(body)s From Object Server '
're: %(path)s') %
{'status': response.status,
'body': bodies[-1][:1024], 'path': req.path})
elif is_success(response.status):
etags.add(response.getheader('etag').strip('"'))
for (conn, response) in pile:
if response:
statuses.append(response.status)
reasons.append(response.reason)
bodies.append(response.read())
if response.status >= HTTP_INTERNAL_SERVER_ERROR:
self.app.error_occurred(
conn.node,
_('ERROR %(status)d %(body)s From Object Server '
're: %(path)s') %
{'status': response.status,
'body': bodies[-1][:1024], 'path': req.path})
elif is_success(response.status):
etags.add(response.getheader('etag').strip('"'))
_handle_response(conn, response)
if self.have_quorum(statuses, len(nodes), req):
break
# give any pending requests *some* chance to finish
pile.waitall(self.app.post_quorum_timeout)
finished_quickly = pile.waitall(self.app.post_quorum_timeout)
for (conn, response) in finished_quickly:
if response:
_handle_response(conn, response)
while len(statuses) < len(nodes):
statuses.append(HTTP_SERVICE_UNAVAILABLE)
reasons.append('')
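The pattern above (collect responses until a quorum is reached, then give the stragglers *some* chance to finish) is sketched below with the standard library's concurrent.futures (Python 3) rather than Swift's eventlet-based GreenAsyncPile; the function and timeout names are illustrative only.

import concurrent.futures as cf

def collect_with_quorum(tasks, quorum, grace=0.5):
    # tasks: zero-argument callables, one per storage node
    pool = cf.ThreadPoolExecutor(max_workers=len(tasks))
    futures = [pool.submit(task) for task in tasks]
    results = {}
    # take results as they finish until we have a quorum
    for fut in cf.as_completed(futures):
        results[fut] = fut.result()
        if len(results) >= quorum:
            break
    # give any still-pending requests *some* chance to finish
    done, _pending = cf.wait(futures, timeout=grace)
    for fut in done:
        results.setdefault(fut, fut.result())
    pool.shutdown(wait=False)
    return list(results.values())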
@ -460,24 +468,11 @@ class ObjectController(Controller):
delete_at_part = None
delete_at_nodes = None
if 'x-delete-after' in req.headers:
try:
x_delete_after = int(req.headers['x-delete-after'])
except ValueError:
raise ValueError('Non-integer X-Delete-After')
req.headers['x-delete-at'] = normalize_delete_at_timestamp(
time.time() + x_delete_after)
req = constraints.check_delete_headers(req)
if 'x-delete-at' in req.headers:
try:
x_delete_at = int(normalize_delete_at_timestamp(
int(req.headers['x-delete-at'])))
except ValueError:
raise ValueError('Non-integer X-Delete-At')
if x_delete_at < time.time():
raise ValueError('X-Delete-At in past')
x_delete_at = int(normalize_delete_at_timestamp(
int(req.headers['x-delete-at'])))
req.environ.setdefault('swift.log_info', []).append(
'x-delete-at:%s' % x_delete_at)
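The validation that moved into constraints.check_delete_headers() boils down to the arithmetic below; normalize() is a stand-in for normalize_delete_at_timestamp() and the values are examples, not Swift's implementation.

import time

def normalize(ts):
    # delete-at values are stored as zero-padded integer seconds
    return '%010d' % min(max(0, float(ts)), 9999999999)

def delete_at_from_headers(headers, now=None):
    now = time.time() if now is None else now
    if 'x-delete-after' in headers:
        # X-Delete-After is just a relative spelling of X-Delete-At
        headers['x-delete-at'] = normalize(now + int(headers['x-delete-after']))
    if 'x-delete-at' in headers:
        x_delete_at = int(normalize(int(headers['x-delete-at'])))
        if x_delete_at < now:
            raise ValueError('X-Delete-At in past')
        return x_delete_at
    return None

delete_at_from_headers({'x-delete-after': '3600'}, now=1400000000)  # -> 1400003600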
@ -523,16 +518,23 @@ class ObjectController(Controller):
if not containers:
return HTTPNotFound(request=req)
try:
ml = req.message_length()
except ValueError as e:
return HTTPBadRequest(request=req, content_type='text/plain',
body=str(e))
except AttributeError as e:
return HTTPNotImplemented(request=req, content_type='text/plain',
body=str(e))
if ml is not None and ml > constraints.MAX_FILE_SIZE:
return HTTPRequestEntityTooLarge(request=req)
# Sometimes the 'content-type' header exists, but is set to None.
content_type_manually_set = True
detect_content_type = \
config_true_value(req.headers.get('x-detect-content-type'))
if detect_content_type or not req.headers.get('content-type'):
guessed_type, _junk = mimetypes.guess_type(req.path_info)
req.headers['Content-Type'] = guessed_type or \
'application/octet-stream'
if detect_content_type:
req.headers.pop('x-detect-content-type')
else:
content_type_manually_set = False
error_response = check_object_creation(req, self.object_name) or \
check_content_type(req)
if error_response:
return error_response
partition, nodes = obj_ring.get_nodes(
self.account_name, self.container_name, self.object_name)
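As a quick illustration of the content-type guessing moved up above (stdlib mimetypes, falling back to application/octet-stream when nothing can be guessed); the paths are placeholders.

import mimetypes

guessed, _enc = mimetypes.guess_type('/v1/AUTH_test/c/report.pdf')
print(guessed)                                  # 'application/pdf'
guessed, _enc = mimetypes.guess_type('/v1/AUTH_test/c/blob')
print(guessed or 'application/octet-stream')    # no extension -> fallback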
@ -566,23 +568,6 @@ class ObjectController(Controller):
else:
req.headers['X-Timestamp'] = Timestamp(time.time()).internal
# Sometimes the 'content-type' header exists, but is set to None.
content_type_manually_set = True
detect_content_type = \
config_true_value(req.headers.get('x-detect-content-type'))
if detect_content_type or not req.headers.get('content-type'):
guessed_type, _junk = mimetypes.guess_type(req.path_info)
req.headers['Content-Type'] = guessed_type or \
'application/octet-stream'
if detect_content_type:
req.headers.pop('x-detect-content-type')
else:
content_type_manually_set = False
error_response = check_object_creation(req, self.object_name) or \
check_content_type(req)
if error_response:
return error_response
if object_versions and not req.environ.get('swift_versioned_copy'):
if hresp.status_int != HTTP_NOT_FOUND:
# This is a version manifest and needs to be handled
@ -622,12 +607,15 @@ class ObjectController(Controller):
if req.environ.get('swift.orig_req_method', req.method) != 'POST':
req.environ.setdefault('swift.log_info', []).append(
'x-copy-from:%s' % source_header)
src_container_name, src_obj_name = check_copy_from_header(req)
ver, acct, _rest = req.split_path(2, 3, True)
if isinstance(acct, unicode):
acct = acct.encode('utf-8')
source_header = '/%s/%s/%s/%s' % (ver, acct,
src_container_name, src_obj_name)
src_account_name = req.headers.get('X-Copy-From-Account', None)
if src_account_name:
src_account_name = check_account_format(req, src_account_name)
else:
src_account_name = acct
src_container_name, src_obj_name = check_copy_from_header(req)
source_header = '/%s/%s/%s/%s' % (ver, src_account_name,
src_container_name, src_obj_name)
source_req = req.copy_get()
# make sure the source request uses its container_info
@ -636,8 +624,10 @@ class ObjectController(Controller):
source_req.headers['X-Newest'] = 'true'
orig_obj_name = self.object_name
orig_container_name = self.container_name
orig_account_name = self.account_name
self.object_name = src_obj_name
self.container_name = src_container_name
self.account_name = src_account_name
sink_req = Request.blank(req.path_info,
environ=req.environ, headers=req.headers)
source_resp = self.GET(source_req)
@ -655,6 +645,7 @@ class ObjectController(Controller):
return source_resp
self.object_name = orig_obj_name
self.container_name = orig_container_name
self.account_name = orig_account_name
data_source = iter(source_resp.app_iter)
sink_req.content_length = source_resp.content_length
if sink_req.content_length is None:
@ -669,6 +660,8 @@ class ObjectController(Controller):
# we no longer need the X-Copy-From header
del sink_req.headers['X-Copy-From']
if 'X-Copy-From-Account' in sink_req.headers:
del sink_req.headers['X-Copy-From-Account']
if not content_type_manually_set:
sink_req.headers['Content-Type'] = \
source_resp.headers['Content-Type']
@ -691,12 +684,8 @@ class ObjectController(Controller):
req = sink_req
try:
req, delete_at_container, delete_at_part, \
delete_at_nodes = self._config_obj_expiration(req)
except ValueError as e:
return HTTPBadRequest(request=req, content_type='text/plain',
body=str(e))
req, delete_at_container, delete_at_part, \
delete_at_nodes = self._config_obj_expiration(req)
node_iter = GreenthreadSafeIterator(
self.iter_nodes_local_first(obj_ring, partition))
@ -833,8 +822,9 @@ class ObjectController(Controller):
resp = self.best_response(req, statuses, reasons, bodies,
_('Object PUT'), etag=etag)
if source_header:
resp.headers['X-Copied-From'] = quote(
source_header.split('/', 3)[3])
acct, path = source_header.split('/', 3)[2:4]
resp.headers['X-Copied-From-Account'] = quote(acct)
resp.headers['X-Copied-From'] = quote(path)
if 'last-modified' in source_resp.headers:
resp.headers['X-Copied-From-Last-Modified'] = \
source_resp.headers['last-modified']
@ -867,11 +857,11 @@ class ObjectController(Controller):
lcontainer = object_versions.split('/')[0]
prefix_len = '%03x' % len(self.object_name)
lprefix = prefix_len + self.object_name + '/'
last_item = None
item_list = []
try:
for last_item in self._listing_iter(lcontainer, lprefix,
req.environ):
pass
for _item in self._listing_iter(lcontainer, lprefix,
req.environ):
item_list.append(_item)
except ListingIterNotFound:
# no worries, item_list is empty
pass
@ -879,15 +869,19 @@ class ObjectController(Controller):
return err.aresp
except ListingIterError:
return HTTPServerError(request=req)
if last_item:
while len(item_list) > 0:
previous_version = item_list.pop()
# there are older versions so copy the previous version to the
# current object and delete the previous version
orig_container = self.container_name
orig_obj = self.object_name
self.container_name = lcontainer
self.object_name = last_item['name'].encode('utf-8')
self.object_name = previous_version['name'].encode('utf-8')
copy_path = '/v1/' + self.account_name + '/' + \
self.container_name + '/' + self.object_name
copy_headers = {'X-Newest': 'True',
'Destination': orig_container + '/' + orig_obj
}
@ -897,6 +891,11 @@ class ObjectController(Controller):
creq = Request.blank(copy_path, headers=copy_headers,
environ=copy_environ)
copy_resp = self.COPY(creq)
if copy_resp.status_int == HTTP_NOT_FOUND:
# the version isn't there so we'll try with previous
self.container_name = orig_container
self.object_name = orig_obj
continue
if is_client_error(copy_resp.status_int):
# some user error, maybe permissions
return HTTPPreconditionFailed(request=req)
@ -905,7 +904,7 @@ class ObjectController(Controller):
return HTTPServiceUnavailable(request=req)
# reset these because the COPY changed them
self.container_name = lcontainer
self.object_name = last_item['name'].encode('utf-8')
self.object_name = previous_version['name'].encode('utf-8')
new_del_req = Request.blank(copy_path, environ=req.environ)
container_info = self.container_info(
self.account_name, self.container_name, req)
@ -922,6 +921,7 @@ class ObjectController(Controller):
# remove 'X-If-Delete-At', since it is not for the older copy
if 'X-If-Delete-At' in req.headers:
del req.headers['X-If-Delete-At']
break
if 'swift.authorize' in req.environ:
aresp = req.environ['swift.authorize'](req)
if aresp:
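The restore loop above, reduced to its shape: walk the version listing from newest to oldest and put back the first previous version that can still be copied; restore() and remove() are hypothetical stand-ins for the COPY and DELETE subrequests.

def restore_previous(versions, restore, remove):
    while versions:
        candidate = versions.pop()     # newest remaining previous version
        if not restore(candidate):     # e.g. the COPY came back 404
            continue                   # fall back to the next older version
        remove(candidate)              # delete the version just restored
        return candidate
    return None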
@ -945,9 +945,11 @@ class ObjectController(Controller):
headers = self._backend_requests(
req, len(nodes), container_partition, containers)
# When deleting objects treat a 404 status as 204.
status_overrides = {404: 204}
resp = self.make_requests(req, obj_ring,
partition, 'DELETE', req.swift_entity_path,
headers)
headers, overrides=status_overrides)
return resp
@public
@ -955,27 +957,25 @@ class ObjectController(Controller):
@delay_denial
def COPY(self, req):
"""HTTP COPY request handler."""
dest = req.headers.get('Destination')
if not dest:
if not req.headers.get('Destination'):
return HTTPPreconditionFailed(request=req,
body='Destination header required')
dest = unquote(dest)
if not dest.startswith('/'):
dest = '/' + dest
try:
_junk, dest_container, dest_object = dest.split('/', 2)
except ValueError:
return HTTPPreconditionFailed(
request=req,
body='Destination header must be of the form '
'<container name>/<object name>')
source = '/' + self.container_name + '/' + self.object_name
dest_account = self.account_name
if 'Destination-Account' in req.headers:
dest_account = req.headers.get('Destination-Account')
dest_account = check_account_format(req, dest_account)
req.headers['X-Copy-From-Account'] = self.account_name
self.account_name = dest_account
del req.headers['Destination-Account']
dest_container, dest_object = check_destination_header(req)
source = '/%s/%s' % (self.container_name, self.object_name)
self.container_name = dest_container
self.object_name = dest_object
# re-write the existing request as a PUT instead of creating a new one
# since this one is already attached to the posthooklogger
req.method = 'PUT'
req.path_info = '/v1/' + self.account_name + dest
req.path_info = '/v1/%s/%s/%s' % \
(dest_account, dest_container, dest_object)
req.headers['Content-Length'] = 0
req.headers['X-Copy-From'] = quote(source)
del req.headers['Destination']
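From a client's point of view the new headers look like this, sketched with Python 2's httplib in the same style as the functional tests; the endpoint, token and account names are placeholders.

import httplib

conn = httplib.HTTPConnection('127.0.0.1', 8080)

# server-side copy expressed as PUT plus X-Copy-From[-Account]
conn.request('PUT', '/v1/AUTH_dest/c/o', '',
             {'X-Auth-Token': '<token>',
              'Content-Length': '0',
              'X-Copy-From-Account': 'AUTH_src',
              'X-Copy-From': 'src_container/src_obj'})
resp = conn.getresponse()
resp.read()
print resp.status    # 201 on success

# the same copy expressed as COPY plus Destination[-Account]
conn.request('COPY', '/v1/AUTH_src/src_container/src_obj', '',
             {'X-Auth-Token': '<token>',
              'Destination-Account': 'AUTH_dest',
              'Destination': 'c/o'})
resp = conn.getresponse()
resp.read()
print resp.status    # 201 on success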

View File

@ -357,7 +357,8 @@ class Application(object):
# controller's method indicates it'd like to gather more
# information and try again later.
resp = req.environ['swift.authorize'](req)
if not resp:
if not resp and not req.headers.get('X-Copy-From-Account') \
and not req.headers.get('Destination-Account'):
# No resp means authorized, no delayed recheck required.
del req.environ['swift.authorize']
else:

View File

@ -1,3 +1,7 @@
# The order of packages is significant, because pip processes them in the order
# of appearance. Changing the order has an impact on the overall integration
# process, which may cause wedges in the gate later.
# Hacking already pins down pep8, pyflakes and flake8
hacking>=0.8.0,<0.9
coverage

View File

@ -83,10 +83,13 @@ normalized_urls = None
# If no config was read, we will fall back to old school env vars
swift_test_auth_version = None
swift_test_auth = os.environ.get('SWIFT_TEST_AUTH')
swift_test_user = [os.environ.get('SWIFT_TEST_USER'), None, None]
swift_test_key = [os.environ.get('SWIFT_TEST_KEY'), None, None]
swift_test_tenant = ['', '', '']
swift_test_perm = ['', '', '']
swift_test_user = [os.environ.get('SWIFT_TEST_USER'), None, None, '']
swift_test_key = [os.environ.get('SWIFT_TEST_KEY'), None, None, '']
swift_test_tenant = ['', '', '', '']
swift_test_perm = ['', '', '', '']
swift_test_domain = ['', '', '', '']
swift_test_user_id = ['', '', '', '']
swift_test_tenant_id = ['', '', '', '']
skip, skip2, skip3 = False, False, False
@ -432,6 +435,7 @@ def setup_package():
global swift_test_key
global swift_test_tenant
global swift_test_perm
global swift_test_domain
if config:
swift_test_auth_version = str(config.get('auth_version', '1'))
@ -488,8 +492,13 @@ def setup_package():
swift_test_user[2] = config['username3']
swift_test_tenant[2] = config['account']
swift_test_key[2] = config['password3']
if 'username4' in config:
swift_test_user[3] = config['username4']
swift_test_tenant[3] = config['account4']
swift_test_key[3] = config['password4']
swift_test_domain[3] = config['domain4']
for _ in range(3):
for _ in range(4):
swift_test_perm[_] = swift_test_tenant[_] + ':' \
+ swift_test_user[_]
@ -511,6 +520,15 @@ def setup_package():
print >>sys.stderr, \
'SKIPPING THIRD ACCOUNT FUNCTIONAL TESTS DUE TO NO CONFIG FOR THEM'
global skip_if_not_v3
skip_if_not_v3 = (swift_test_auth_version != '3'
or not all([not skip,
swift_test_user[3],
swift_test_key[3]]))
if not skip and skip_if_not_v3:
print >>sys.stderr, \
'SKIPPING FUNCTIONAL TESTS SPECIFIC TO AUTH VERSION 3'
get_cluster_info()
@ -549,10 +567,10 @@ class InternalServerError(Exception):
pass
url = [None, None, None]
token = [None, None, None]
parsed = [None, None, None]
conn = [None, None, None]
url = [None, None, None, None]
token = [None, None, None, None]
parsed = [None, None, None, None]
conn = [None, None, None, None]
def connection(url):
@ -579,7 +597,8 @@ def retry(func, *args, **kwargs):
# access our own account by default
url_account = kwargs.pop('url_account', use_account + 1) - 1
os_options = {'user_domain_name': swift_test_domain[use_account],
'project_domain_name': swift_test_domain[use_account]}
while attempts <= retries:
attempts += 1
try:
@ -590,7 +609,7 @@ def retry(func, *args, **kwargs):
snet=False,
tenant_name=swift_test_tenant[use_account],
auth_version=swift_test_auth_version,
os_options={})
os_options=os_options)
parsed[use_account] = conn[use_account] = None
if not parsed[use_account] or not conn[use_account]:
parsed[use_account], conn[use_account] = \

View File

@ -174,8 +174,10 @@ class Connection(object):
# unicode and this would cause troubles when doing
# no_safe_quote query.
self.storage_url = str('/%s/%s' % (x[3], x[4]))
self.account_name = str(x[4])
self.auth_user = auth_user
self.storage_token = storage_token
self.user_acl = '%s:%s' % (self.account, self.username)
self.http_connect()
return self.storage_url, self.storage_token
@ -664,6 +666,32 @@ class File(Base):
return self.conn.make_request('COPY', self.path, hdrs=headers,
parms=parms) == 201
def copy_account(self, dest_account, dest_cont, dest_file,
hdrs=None, parms=None, cfg=None):
if hdrs is None:
hdrs = {}
if parms is None:
parms = {}
if cfg is None:
cfg = {}
if 'destination' in cfg:
headers = {'Destination': cfg['destination']}
elif cfg.get('no_destination'):
headers = {}
else:
headers = {'Destination-Account': dest_account,
'Destination': '%s/%s' % (dest_cont, dest_file)}
headers.update(hdrs)
if 'Destination-Account' in headers:
headers['Destination-Account'] = \
urllib.quote(headers['Destination-Account'])
if 'Destination' in headers:
headers['Destination'] = urllib.quote(headers['Destination'])
return self.conn.make_request('COPY', self.path, hdrs=headers,
parms=parms) == 201
def delete(self, hdrs=None, parms=None):
if hdrs is None:
hdrs = {}

View File

@ -809,5 +809,33 @@ class TestAccount(unittest.TestCase):
self.assertEqual(resp.status, 400)
class TestAccountInNonDefaultDomain(unittest.TestCase):
def setUp(self):
if tf.skip or tf.skip2 or tf.skip_if_not_v3:
raise SkipTest('AUTH VERSION 3 SPECIFIC TEST')
def test_project_domain_id_header(self):
# make sure account exists (assumes account auto create)
def post(url, token, parsed, conn):
conn.request('POST', parsed.path, '',
{'X-Auth-Token': token})
return check_response(conn)
resp = retry(post, use_account=4)
resp.read()
self.assertEqual(resp.status, 204)
# account in non-default domain should have a project domain id
def head(url, token, parsed, conn):
conn.request('HEAD', parsed.path, '',
{'X-Auth-Token': token})
return check_response(conn)
resp = retry(head, use_account=4)
resp.read()
self.assertEqual(resp.status, 204)
self.assertTrue('X-Account-Project-Domain-Id' in resp.headers)
if __name__ == '__main__':
unittest.main()

View File

@ -1416,7 +1416,7 @@ class TestContainer(unittest.TestCase):
self.assertEquals(headers.get('x-storage-policy'),
policy['name'])
# and test recreate with-out specifiying Storage Policy
# and test recreate with-out specifying Storage Policy
resp = retry(put)
resp.read()
self.assertEqual(resp.status, 202)
@ -1511,5 +1511,179 @@ class TestContainer(unittest.TestCase):
policy['name'])
class BaseTestContainerACLs(unittest.TestCase):
# subclasses can change the account in which container
# is created/deleted by setUp/tearDown
account = 1
def _get_account(self, url, token, parsed, conn):
return parsed.path
def _get_tenant_id(self, url, token, parsed, conn):
account = parsed.path
return account.replace('/v1/AUTH_', '', 1)
def setUp(self):
if tf.skip or tf.skip2 or tf.skip_if_not_v3:
raise SkipTest('AUTH VERSION 3 SPECIFIC TEST')
self.name = uuid4().hex
def put(url, token, parsed, conn):
conn.request('PUT', parsed.path + '/' + self.name, '',
{'X-Auth-Token': token})
return check_response(conn)
resp = retry(put, use_account=self.account)
resp.read()
self.assertEqual(resp.status, 201)
def tearDown(self):
if tf.skip or tf.skip2 or tf.skip_if_not_v3:
raise SkipTest
def get(url, token, parsed, conn):
conn.request('GET', parsed.path + '/' + self.name + '?format=json',
'', {'X-Auth-Token': token})
return check_response(conn)
def delete(url, token, parsed, conn, obj):
conn.request('DELETE',
'/'.join([parsed.path, self.name, obj['name']]), '',
{'X-Auth-Token': token})
return check_response(conn)
while True:
resp = retry(get, use_account=self.account)
body = resp.read()
self.assert_(resp.status // 100 == 2, resp.status)
objs = json.loads(body)
if not objs:
break
for obj in objs:
resp = retry(delete, obj, use_account=self.account)
resp.read()
self.assertEqual(resp.status, 204)
def delete(url, token, parsed, conn):
conn.request('DELETE', parsed.path + '/' + self.name, '',
{'X-Auth-Token': token})
return check_response(conn)
resp = retry(delete, use_account=self.account)
resp.read()
self.assertEqual(resp.status, 204)
def _assert_cross_account_acl_granted(self, granted, grantee_account, acl):
'''
Check whether a given container ACL is granted when a user specified
by grantee_account attempts to access a container.
'''
# Obtain the first account's string
first_account = retry(self._get_account, use_account=self.account)
# Ensure we can't access the container with the grantee account
def get2(url, token, parsed, conn):
conn.request('GET', first_account + '/' + self.name, '',
{'X-Auth-Token': token})
return check_response(conn)
resp = retry(get2, use_account=grantee_account)
resp.read()
self.assertEqual(resp.status, 403)
def put2(url, token, parsed, conn):
conn.request('PUT', first_account + '/' + self.name + '/object',
'test object', {'X-Auth-Token': token})
return check_response(conn)
resp = retry(put2, use_account=grantee_account)
resp.read()
self.assertEqual(resp.status, 403)
# Post ACL to the container
def post(url, token, parsed, conn):
conn.request('POST', parsed.path + '/' + self.name, '',
{'X-Auth-Token': token,
'X-Container-Read': acl,
'X-Container-Write': acl})
return check_response(conn)
resp = retry(post, use_account=self.account)
resp.read()
self.assertEqual(resp.status, 204)
# Check access to container from grantee account with ACL in place
resp = retry(get2, use_account=grantee_account)
resp.read()
expected = 204 if granted else 403
self.assertEqual(resp.status, expected)
resp = retry(put2, use_account=grantee_account)
resp.read()
expected = 201 if granted else 403
self.assertEqual(resp.status, expected)
# Make the container private again
def post(url, token, parsed, conn):
conn.request('POST', parsed.path + '/' + self.name, '',
{'X-Auth-Token': token, 'X-Container-Read': '',
'X-Container-Write': ''})
return check_response(conn)
resp = retry(post, use_account=self.account)
resp.read()
self.assertEqual(resp.status, 204)
# Ensure we can't access the container with the grantee account again
resp = retry(get2, use_account=grantee_account)
resp.read()
self.assertEqual(resp.status, 403)
resp = retry(put2, use_account=grantee_account)
resp.read()
self.assertEqual(resp.status, 403)
class TestContainerACLsAccount1(BaseTestContainerACLs):
def test_cross_account_acl_names_with_user_in_non_default_domain(self):
# names in acls are disallowed when grantee is in a non-default domain
acl = '%s:%s' % (tf.swift_test_tenant[3], tf.swift_test_user[3])
self._assert_cross_account_acl_granted(False, 4, acl)
def test_cross_account_acl_ids_with_user_in_non_default_domain(self):
# ids are allowed in acls when grantee is in a non-default domain
tenant_id = retry(self._get_tenant_id, use_account=4)
acl = '%s:%s' % (tenant_id, '*')
self._assert_cross_account_acl_granted(True, 4, acl)
def test_cross_account_acl_names_in_default_domain(self):
# names are allowed in acls when grantee and project are in
# the default domain
acl = '%s:%s' % (tf.swift_test_tenant[1], tf.swift_test_user[1])
self._assert_cross_account_acl_granted(True, 2, acl)
def test_cross_account_acl_ids_in_default_domain(self):
# ids are allowed in acls when grantee and project are in
# the default domain
tenant_id = retry(self._get_tenant_id, use_account=2)
acl = '%s:%s' % (tenant_id, '*')
self._assert_cross_account_acl_granted(True, 2, acl)
class TestContainerACLsAccount4(BaseTestContainerACLs):
account = 4
def test_cross_account_acl_names_with_project_in_non_default_domain(self):
# names in acls are disallowed when project is in a non-default domain
acl = '%s:%s' % (tf.swift_test_tenant[0], tf.swift_test_user[0])
self._assert_cross_account_acl_granted(False, 1, acl)
def test_cross_account_acl_ids_with_project_in_non_default_domain(self):
# ids are allowed in acls when project is in a non-default domain
tenant_id = retry(self._get_tenant_id, use_account=1)
acl = '%s:%s' % (tenant_id, '*')
self._assert_cross_account_acl_granted(True, 1, acl)
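The rule these ACL tests exercise, written as a standalone predicate (an editor's reading of the tests, not the keystoneauth implementation): name-based ACL entries are honoured only when allow_names_in_acls is on (the default) and both the grantee's user domain and project domain are the default domain; id-based entries (tenant_id:user_id) work regardless of domain.

def names_allowed_in_acl(user_domain_id, project_domain_id,
                         allow_names_in_acls=True,
                         default_domain_id='default'):
    # this only gates *names*; ids are always usable
    if not allow_names_in_acls:
        return False
    return (user_domain_id == default_domain_id and
            project_domain_id == default_domain_id)

assert names_allowed_in_acl('default', 'default')
assert not names_allowed_in_acl('default', 'non-default')
assert not names_allowed_in_acl('default', 'default', allow_names_in_acls=False)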
if __name__ == '__main__':
unittest.main()

View File

@ -35,6 +35,7 @@ class TestObject(unittest.TestCase):
self.containers = []
self._create_container(self.container)
self._create_container(self.container, use_account=2)
self.obj = uuid4().hex
@ -47,7 +48,7 @@ class TestObject(unittest.TestCase):
resp.read()
self.assertEqual(resp.status, 201)
def _create_container(self, name=None, headers=None):
def _create_container(self, name=None, headers=None, use_account=1):
if not name:
name = uuid4().hex
self.containers.append(name)
@ -58,7 +59,7 @@ class TestObject(unittest.TestCase):
conn.request('PUT', parsed.path + '/' + name, '',
new_headers)
return check_response(conn)
resp = retry(put, name)
resp = retry(put, name, use_account=use_account)
resp.read()
self.assertEqual(resp.status, 201)
return name
@ -133,6 +134,45 @@ class TestObject(unittest.TestCase):
resp.read()
self.assertEquals(resp.status, 400)
def test_non_integer_x_delete_after(self):
def put(url, token, parsed, conn):
conn.request('PUT', '%s/%s/%s' % (parsed.path, self.container,
'non_integer_x_delete_after'),
'', {'X-Auth-Token': token,
'Content-Length': '0',
'X-Delete-After': '*'})
return check_response(conn)
resp = retry(put)
body = resp.read()
self.assertEquals(resp.status, 400)
self.assertEqual(body, 'Non-integer X-Delete-After')
def test_non_integer_x_delete_at(self):
def put(url, token, parsed, conn):
conn.request('PUT', '%s/%s/%s' % (parsed.path, self.container,
'non_integer_x_delete_at'),
'', {'X-Auth-Token': token,
'Content-Length': '0',
'X-Delete-At': '*'})
return check_response(conn)
resp = retry(put)
body = resp.read()
self.assertEquals(resp.status, 400)
self.assertEqual(body, 'Non-integer X-Delete-At')
def test_x_delete_at_in_the_past(self):
def put(url, token, parsed, conn):
conn.request('PUT', '%s/%s/%s' % (parsed.path, self.container,
'x_delete_at_in_the_past'),
'', {'X-Auth-Token': token,
'Content-Length': '0',
'X-Delete-At': '0'})
return check_response(conn)
resp = retry(put)
body = resp.read()
self.assertEquals(resp.status, 400)
self.assertEqual(body, 'X-Delete-At in past')
def test_copy_object(self):
if tf.skip:
raise SkipTest
@ -207,6 +247,116 @@ class TestObject(unittest.TestCase):
resp.read()
self.assertEqual(resp.status, 204)
def test_copy_between_accounts(self):
if tf.skip:
raise SkipTest
source = '%s/%s' % (self.container, self.obj)
dest = '%s/%s' % (self.container, 'test_copy')
# get contents of source
def get_source(url, token, parsed, conn):
conn.request('GET',
'%s/%s' % (parsed.path, source),
'', {'X-Auth-Token': token})
return check_response(conn)
resp = retry(get_source)
source_contents = resp.read()
self.assertEqual(resp.status, 200)
self.assertEqual(source_contents, 'test')
acct = tf.parsed[0].path.split('/', 2)[2]
# copy source to dest with X-Copy-From-Account
def put(url, token, parsed, conn):
conn.request('PUT', '%s/%s' % (parsed.path, dest), '',
{'X-Auth-Token': token,
'Content-Length': '0',
'X-Copy-From-Account': acct,
'X-Copy-From': source})
return check_response(conn)
# try to put, will not succeed
# user does not have permissions to read from source
resp = retry(put, use_account=2)
self.assertEqual(resp.status, 403)
# add acl to allow reading from source
def post(url, token, parsed, conn):
conn.request('POST', '%s/%s' % (parsed.path, self.container), '',
{'X-Auth-Token': token,
'X-Container-Read': tf.swift_test_perm[1]})
return check_response(conn)
resp = retry(post)
self.assertEqual(resp.status, 204)
# retry previous put, now should succeed
resp = retry(put, use_account=2)
self.assertEqual(resp.status, 201)
# contents of dest should be the same as source
def get_dest(url, token, parsed, conn):
conn.request('GET',
'%s/%s' % (parsed.path, dest),
'', {'X-Auth-Token': token})
return check_response(conn)
resp = retry(get_dest, use_account=2)
dest_contents = resp.read()
self.assertEqual(resp.status, 200)
self.assertEqual(dest_contents, source_contents)
# delete the copy
def delete(url, token, parsed, conn):
conn.request('DELETE', '%s/%s' % (parsed.path, dest), '',
{'X-Auth-Token': token})
return check_response(conn)
resp = retry(delete, use_account=2)
resp.read()
self.assertEqual(resp.status, 204)
# verify dest does not exist
resp = retry(get_dest, use_account=2)
resp.read()
self.assertEqual(resp.status, 404)
acct_dest = tf.parsed[1].path.split('/', 2)[2]
# copy source to dest with COPY
def copy(url, token, parsed, conn):
conn.request('COPY', '%s/%s' % (parsed.path, source), '',
{'X-Auth-Token': token,
'Destination-Account': acct_dest,
'Destination': dest})
return check_response(conn)
# try to copy, will not succeed
# user does not have permissions to write to destination
resp = retry(copy)
resp.read()
self.assertEqual(resp.status, 403)
# add acl to allow write to destination
def post(url, token, parsed, conn):
conn.request('POST', '%s/%s' % (parsed.path, self.container), '',
{'X-Auth-Token': token,
'X-Container-Write': tf.swift_test_perm[0]})
return check_response(conn)
resp = retry(post, use_account=2)
self.assertEqual(resp.status, 204)
# now copy will succeed
resp = retry(copy)
resp.read()
self.assertEqual(resp.status, 201)
# contents of dest should be the same as source
resp = retry(get_dest, use_account=2)
dest_contents = resp.read()
self.assertEqual(resp.status, 200)
self.assertEqual(dest_contents, source_contents)
# delete the copy
resp = retry(delete, use_account=2)
resp.read()
self.assertEqual(resp.status, 204)
def test_public_object(self):
if tf.skip:
raise SkipTest

View File

@ -25,6 +25,7 @@ import time
import unittest
import urllib
import uuid
from copy import deepcopy
import eventlet
from nose import SkipTest
@ -269,6 +270,8 @@ class TestAccount(Base):
containers)
def testQuotedWWWAuthenticateHeader(self):
# check that the www-authenticate header value with the swift realm
# is correctly quoted.
conn = Connection(tf.config)
conn.authenticate()
inserted_html = '<b>Hello World'
@ -277,9 +280,16 @@ class TestAccount(Base):
quoted_hax = urllib.quote(hax)
conn.connection.request('GET', '/v1/' + quoted_hax, None, {})
resp = conn.connection.getresponse()
resp_headers = resp.getheaders()
expected = ('www-authenticate', 'Swift realm="%s"' % quoted_hax)
self.assert_(expected in resp_headers)
resp_headers = dict(resp.getheaders())
self.assertTrue('www-authenticate' in resp_headers,
'www-authenticate not found in %s' % resp_headers)
actual = resp_headers['www-authenticate']
expected = 'Swift realm="%s"' % quoted_hax
# other middleware, e.g. auth_token, may also set www-authenticate
# headers, in which case the actual value will be a comma-separated list.
# check that expected value is among the actual values
self.assertTrue(expected in actual,
'%s not found in %s' % (expected, actual))
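For example, with an auth middleware ahead of Swift in the pipeline the header may already carry another challenge (values below are illustrative), which is why the assertion now checks membership rather than equality.

actual = 'Keystone uri="http://keystone.example.test:5000/", Swift realm="AUTH_abc"'
assert 'Swift realm="AUTH_abc"' in actual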
class TestAccountUTF8(Base2, TestAccount):
@ -790,9 +800,22 @@ class TestFileEnv(object):
def setUp(cls):
cls.conn = Connection(tf.config)
cls.conn.authenticate()
cls.account = Account(cls.conn, tf.config.get('account',
tf.config['username']))
# creating another account and connection
# for account to account copy tests
config2 = deepcopy(tf.config)
config2['account'] = tf.config['account2']
config2['username'] = tf.config['username2']
config2['password'] = tf.config['password2']
cls.conn2 = Connection(config2)
cls.conn2.authenticate()
cls.account = Account(cls.conn, tf.config.get('account',
tf.config['username']))
cls.account.delete_containers()
cls.account2 = cls.conn2.get_account()
cls.account2.delete_containers()
cls.container = cls.account.container(Utils.create_name())
if not cls.container.create():
@ -846,6 +869,62 @@ class TestFile(Base):
self.assert_(file_item.initialize())
self.assert_(metadata == file_item.metadata)
def testCopyAccount(self):
# makes sure to test encoded characters
source_filename = 'dealde%2Fl04 011e%204c8df/flash.png'
file_item = self.env.container.file(source_filename)
metadata = {Utils.create_ascii_name(): Utils.create_name()}
data = file_item.write_random()
file_item.sync_metadata(metadata)
dest_cont = self.env.account.container(Utils.create_name())
self.assert_(dest_cont.create())
acct = self.env.conn.account_name
# copy both from within and across containers
for cont in (self.env.container, dest_cont):
# copy both with and without initial slash
for prefix in ('', '/'):
dest_filename = Utils.create_name()
file_item = self.env.container.file(source_filename)
file_item.copy_account(acct,
'%s%s' % (prefix, cont),
dest_filename)
self.assert_(dest_filename in cont.files())
file_item = cont.file(dest_filename)
self.assert_(data == file_item.read())
self.assert_(file_item.initialize())
self.assert_(metadata == file_item.metadata)
dest_cont = self.env.account2.container(Utils.create_name())
self.assert_(dest_cont.create(hdrs={
'X-Container-Write': self.env.conn.user_acl
}))
acct = self.env.conn2.account_name
# copy both with and without initial slash
for prefix in ('', '/'):
dest_filename = Utils.create_name()
file_item = self.env.container.file(source_filename)
file_item.copy_account(acct,
'%s%s' % (prefix, dest_cont),
dest_filename)
self.assert_(dest_filename in dest_cont.files())
file_item = dest_cont.file(dest_filename)
self.assert_(data == file_item.read())
self.assert_(file_item.initialize())
self.assert_(metadata == file_item.metadata)
def testCopy404s(self):
source_filename = Utils.create_name()
file_item = self.env.container.file(source_filename)
@ -884,6 +963,77 @@ class TestFile(Base):
'%s%s' % (prefix, Utils.create_name()),
Utils.create_name()))
def testCopyAccount404s(self):
acct = self.env.conn.account_name
acct2 = self.env.conn2.account_name
source_filename = Utils.create_name()
file_item = self.env.container.file(source_filename)
file_item.write_random()
dest_cont = self.env.account.container(Utils.create_name())
self.assert_(dest_cont.create(hdrs={
'X-Container-Read': self.env.conn2.user_acl
}))
dest_cont2 = self.env.account2.container(Utils.create_name())
self.assert_(dest_cont2.create(hdrs={
'X-Container-Write': self.env.conn.user_acl,
'X-Container-Read': self.env.conn.user_acl
}))
for acct, cont in ((acct, dest_cont), (acct2, dest_cont2)):
for prefix in ('', '/'):
# invalid source container
source_cont = self.env.account.container(Utils.create_name())
file_item = source_cont.file(source_filename)
self.assert_(not file_item.copy_account(
acct,
'%s%s' % (prefix, self.env.container),
Utils.create_name()))
if acct == acct2:
# there is no such source container
# and foreign user can have no permission to read it
self.assert_status(403)
else:
self.assert_status(404)
self.assert_(not file_item.copy_account(
acct,
'%s%s' % (prefix, cont),
Utils.create_name()))
self.assert_status(404)
# invalid source object
file_item = self.env.container.file(Utils.create_name())
self.assert_(not file_item.copy_account(
acct,
'%s%s' % (prefix, self.env.container),
Utils.create_name()))
if acct == acct2:
# there is no such object
# and foreign user can have no permission to read it
self.assert_status(403)
else:
self.assert_status(404)
self.assert_(not file_item.copy_account(
acct,
'%s%s' % (prefix, cont),
Utils.create_name()))
self.assert_status(404)
# invalid destination container
file_item = self.env.container.file(source_filename)
self.assert_(not file_item.copy_account(
acct,
'%s%s' % (prefix, Utils.create_name()),
Utils.create_name()))
if acct == acct2:
# there is no such destination container
# and foreign user can have no permission to write there
self.assert_status(403)
else:
self.assert_status(404)
def testCopyNoDestinationHeader(self):
source_filename = Utils.create_name()
file_item = self.env.container.file(source_filename)
@ -938,6 +1088,49 @@ class TestFile(Base):
self.assert_(file_item.initialize())
self.assert_(metadata == file_item.metadata)
def testCopyFromAccountHeader(self):
acct = self.env.conn.account_name
src_cont = self.env.account.container(Utils.create_name())
self.assert_(src_cont.create(hdrs={
'X-Container-Read': self.env.conn2.user_acl
}))
source_filename = Utils.create_name()
file_item = src_cont.file(source_filename)
metadata = {}
for i in range(1):
metadata[Utils.create_ascii_name()] = Utils.create_name()
file_item.metadata = metadata
data = file_item.write_random()
dest_cont = self.env.account.container(Utils.create_name())
self.assert_(dest_cont.create())
dest_cont2 = self.env.account2.container(Utils.create_name())
self.assert_(dest_cont2.create(hdrs={
'X-Container-Write': self.env.conn.user_acl
}))
for cont in (src_cont, dest_cont, dest_cont2):
# copy both with and without initial slash
for prefix in ('', '/'):
dest_filename = Utils.create_name()
file_item = cont.file(dest_filename)
file_item.write(hdrs={'X-Copy-From-Account': acct,
'X-Copy-From': '%s%s/%s' % (
prefix,
src_cont.name,
source_filename)})
self.assert_(dest_filename in cont.files())
file_item = cont.file(dest_filename)
self.assert_(data == file_item.read())
self.assert_(file_item.initialize())
self.assert_(metadata == file_item.metadata)
def testCopyFromHeader404s(self):
source_filename = Utils.create_name()
file_item = self.env.container.file(source_filename)
@ -969,6 +1162,52 @@ class TestFile(Base):
self.env.container.name, source_filename)})
self.assert_status(404)
def testCopyFromAccountHeader404s(self):
acct = self.env.conn2.account_name
src_cont = self.env.account2.container(Utils.create_name())
self.assert_(src_cont.create(hdrs={
'X-Container-Read': self.env.conn.user_acl
}))
source_filename = Utils.create_name()
file_item = src_cont.file(source_filename)
file_item.write_random()
dest_cont = self.env.account.container(Utils.create_name())
self.assert_(dest_cont.create())
for prefix in ('', '/'):
# invalid source container
file_item = dest_cont.file(Utils.create_name())
self.assertRaises(ResponseError, file_item.write,
hdrs={'X-Copy-From-Account': acct,
'X-Copy-From': '%s%s/%s' %
(prefix,
Utils.create_name(),
source_filename)})
# looks like cached responses leak "not found"
# to un-authorized users, not going to fix it now, but...
self.assert_status([403, 404])
# invalid source object
file_item = self.env.container.file(Utils.create_name())
self.assertRaises(ResponseError, file_item.write,
hdrs={'X-Copy-From-Account': acct,
'X-Copy-From': '%s%s/%s' %
(prefix,
src_cont,
Utils.create_name())})
self.assert_status(404)
# invalid destination container
dest_cont = self.env.account.container(Utils.create_name())
file_item = dest_cont.file(Utils.create_name())
self.assertRaises(ResponseError, file_item.write,
hdrs={'X-Copy-From-Account': acct,
'X-Copy-From': '%s%s/%s' %
(prefix,
src_cont,
source_filename)})
self.assert_status(404)
def testNameLimit(self):
limit = load_constraint('max_object_name_length')
@ -1189,6 +1428,16 @@ class TestFile(Base):
cfg={'no_content_length': True})
self.assert_status(400)
# no content-length
self.assertRaises(ResponseError, file_item.write_random, file_length,
cfg={'no_content_length': True})
self.assert_status(411)
self.assertRaises(ResponseError, file_item.write_random, file_length,
hdrs={'transfer-encoding': 'gzip,chunked'},
cfg={'no_content_length': True})
self.assert_status(501)
# bad request types
#for req in ('LICK', 'GETorHEAD_base', 'container_info',
# 'best_response'):
@ -1591,6 +1840,30 @@ class TestDlo(Base):
file_contents,
"aaaaaaaaaabbbbbbbbbbccccccccccddddddddddeeeeeeeeeeffffffffff")
def test_copy_account(self):
# DLO uses the same account and same container only
acct = self.env.conn.account_name
# Adding a new segment, copying the manifest, and then deleting the
# segment proves that the new object is really the concatenated
# segments and not just a manifest.
f_segment = self.env.container.file("%s/seg_lowerf" %
(self.env.segment_prefix))
f_segment.write('ffffffffff')
try:
man1_item = self.env.container.file('man1')
man1_item.copy_account(acct,
self.env.container.name,
"copied-man1")
finally:
# try not to leave this around for other tests to stumble over
f_segment.delete()
file_item = self.env.container.file('copied-man1')
file_contents = file_item.read()
self.assertEqual(
file_contents,
"aaaaaaaaaabbbbbbbbbbccccccccccddddddddddeeeeeeeeeeffffffffff")
def test_copy_manifest(self):
# Copying the manifest should result in another manifest
try:
@ -1787,6 +2060,14 @@ class TestSloEnv(object):
def setUp(cls):
cls.conn = Connection(tf.config)
cls.conn.authenticate()
config2 = deepcopy(tf.config)
config2['account'] = tf.config['account2']
config2['username'] = tf.config['username2']
config2['password'] = tf.config['password2']
cls.conn2 = Connection(config2)
cls.conn2.authenticate()
cls.account2 = cls.conn2.get_account()
cls.account2.delete_containers()
if cls.slo_enabled is None:
cls.slo_enabled = 'slo' in cluster_info
@ -1969,6 +2250,29 @@ class TestSlo(Base):
copied_contents = copied.read(parms={'multipart-manifest': 'get'})
self.assertEqual(4 * 1024 * 1024 + 1, len(copied_contents))
def test_slo_copy_account(self):
acct = self.env.conn.account_name
# same account copy
file_item = self.env.container.file("manifest-abcde")
file_item.copy_account(acct, self.env.container.name, "copied-abcde")
copied = self.env.container.file("copied-abcde")
copied_contents = copied.read(parms={'multipart-manifest': 'get'})
self.assertEqual(4 * 1024 * 1024 + 1, len(copied_contents))
# copy to different account
acct = self.env.conn2.account_name
dest_cont = self.env.account2.container(Utils.create_name())
self.assert_(dest_cont.create(hdrs={
'X-Container-Write': self.env.conn.user_acl
}))
file_item = self.env.container.file("manifest-abcde")
file_item.copy_account(acct, dest_cont, "copied-abcde")
copied = dest_cont.file("copied-abcde")
copied_contents = copied.read(parms={'multipart-manifest': 'get'})
self.assertEqual(4 * 1024 * 1024 + 1, len(copied_contents))
def test_slo_copy_the_manifest(self):
file_item = self.env.container.file("manifest-abcde")
file_item.copy(self.env.container.name, "copied-abcde-manifest-only",
@ -1981,6 +2285,40 @@ class TestSlo(Base):
except ValueError:
self.fail("COPY didn't copy the manifest (invalid json on GET)")
def test_slo_copy_the_manifest_account(self):
acct = self.env.conn.account_name
# same account
file_item = self.env.container.file("manifest-abcde")
file_item.copy_account(acct,
self.env.container.name,
"copied-abcde-manifest-only",
parms={'multipart-manifest': 'get'})
copied = self.env.container.file("copied-abcde-manifest-only")
copied_contents = copied.read(parms={'multipart-manifest': 'get'})
try:
json.loads(copied_contents)
except ValueError:
self.fail("COPY didn't copy the manifest (invalid json on GET)")
# different account
acct = self.env.conn2.account_name
dest_cont = self.env.account2.container(Utils.create_name())
self.assert_(dest_cont.create(hdrs={
'X-Container-Write': self.env.conn.user_acl
}))
file_item.copy_account(acct,
dest_cont,
"copied-abcde-manifest-only",
parms={'multipart-manifest': 'get'})
copied = dest_cont.file("copied-abcde-manifest-only")
copied_contents = copied.read(parms={'multipart-manifest': 'get'})
try:
json.loads(copied_contents)
except ValueError:
self.fail("COPY didn't copy the manifest (invalid json on GET)")
def test_slo_get_the_manifest(self):
manifest = self.env.container.file("manifest-abcde")
got_body = manifest.read(parms={'multipart-manifest': 'get'})

View File

@ -4,7 +4,8 @@ auth_host = 127.0.0.1
auth_port = 8080
auth_ssl = no
auth_prefix = /auth/
## sample config for Swift with Keystone
## sample config for Swift with Keystone v2 API
# For keystone v3 change auth_version to 3 and auth_prefix to /v3/
#auth_version = 2
#auth_host = localhost
#auth_port = 5000
@ -25,6 +26,13 @@ password2 = testing2
username3 = tester3
password3 = testing3
# Fourth user is required for keystone v3 specific tests.
# Account must be in a non-default domain.
#account4 = test4
#username4 = tester4
#password4 = testing4
#domain4 = test-domain
collate = C
# Only necessary if a pre-existing server uses self-signed certificate

View File

@ -1061,7 +1061,7 @@ class TestAccountBrokerBeforeSPI(TestAccountBroker):
self.assertEqual(len(called), 1)
self.assert_('CREATE TABLE policy_stat' in called[0])
# nothing was commited
# nothing was committed
broker = AccountBroker(db_path, account='a')
with broker.get() as conn:
try:

View File

@ -134,21 +134,38 @@ class TestRecon(unittest.TestCase):
ringbuilder.add_dev({'id': 1, 'zone': 1, 'weight': 1,
'ip': '127.0.0.1', 'port': 10001,
'device': 'sda1', 'region': 0})
ringbuilder.add_dev({'id': 2, 'zone': 0, 'weight': 1,
'ip': '127.0.0.1', 'port': 10002,
'device': 'sda1', 'region': 1})
ringbuilder.add_dev({'id': 3, 'zone': 1, 'weight': 1,
'ip': '127.0.0.1', 'port': 10003,
'device': 'sda1', 'region': 1})
ringbuilder.rebalance()
ringbuilder.get_ring().save(self.tmpfile_name)
ips = self.recon_instance.get_devices(
None, self.swift_dir, self.ring_name)
None, None, self.swift_dir, self.ring_name)
self.assertEqual(
set([('127.0.0.1', 10000), ('127.0.0.1', 10001),
('127.0.0.1', 10002), ('127.0.0.1', 10003)]), ips)
ips = self.recon_instance.get_devices(
0, None, self.swift_dir, self.ring_name)
self.assertEqual(
set([('127.0.0.1', 10000), ('127.0.0.1', 10001)]), ips)
ips = self.recon_instance.get_devices(
0, self.swift_dir, self.ring_name)
1, None, self.swift_dir, self.ring_name)
self.assertEqual(
set([('127.0.0.1', 10002), ('127.0.0.1', 10003)]), ips)
ips = self.recon_instance.get_devices(
0, 0, self.swift_dir, self.ring_name)
self.assertEqual(set([('127.0.0.1', 10000)]), ips)
ips = self.recon_instance.get_devices(
1, self.swift_dir, self.ring_name)
self.assertEqual(set([('127.0.0.1', 10001)]), ips)
1, 1, self.swift_dir, self.ring_name)
self.assertEqual(set([('127.0.0.1', 10003)]), ips)
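The filtering behaviour the updated get_devices() signature implies (region first, then zone, with None meaning "no filter") can be captured in a few lines; this is an illustration consistent with the assertions above, not the recon code itself.

def filter_devices(devs, region=None, zone=None):
    ips = set()
    for dev in devs:
        if region is not None and dev['region'] != region:
            continue
        if zone is not None and dev['zone'] != zone:
            continue
        ips.add((dev['ip'], dev['port']))
    return ips

devs = [{'ip': '127.0.0.1', 'port': 10000, 'region': 0, 'zone': 0},
        {'ip': '127.0.0.1', 'port': 10001, 'region': 0, 'zone': 1},
        {'ip': '127.0.0.1', 'port': 10002, 'region': 1, 'zone': 0},
        {'ip': '127.0.0.1', 'port': 10003, 'region': 1, 'zone': 1}]
assert filter_devices(devs, region=0) == set([('127.0.0.1', 10000),
                                              ('127.0.0.1', 10001)])
assert filter_devices(devs, region=1, zone=1) == set([('127.0.0.1', 10003)])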
def test_get_ringmd5(self):
for server_type in ('account', 'container', 'object', 'object-1'):

View File

@ -38,29 +38,34 @@ class FakeApp(object):
self.check_no_query_string = check_no_query_string
def __call__(self, env, start_response):
if self.check_no_query_string and env.get('QUERY_STRING'):
raise Exception('Query string %s should have been discarded!' %
env['QUERY_STRING'])
body = ''
while True:
chunk = env['wsgi.input'].read()
if not chunk:
break
body += chunk
env['wsgi.input'] = StringIO(body)
self.requests.append(Request.blank('', environ=env))
if env.get('swift.authorize_override') and \
env.get('REMOTE_USER') != '.wsgi.pre_authed':
raise Exception(
'Invalid REMOTE_USER %r with swift.authorize_override' % (
env.get('REMOTE_USER'),))
if 'swift.authorize' in env:
resp = env['swift.authorize'](self.requests[-1])
if resp:
return resp(env, start_response)
status, headers, body = self.status_headers_body_iter.next()
return Response(status=status, headers=headers,
body=body)(env, start_response)
try:
if self.check_no_query_string and env.get('QUERY_STRING'):
raise Exception('Query string %s should have been discarded!' %
env['QUERY_STRING'])
body = ''
while True:
chunk = env['wsgi.input'].read()
if not chunk:
break
body += chunk
env['wsgi.input'] = StringIO(body)
self.requests.append(Request.blank('', environ=env))
if env.get('swift.authorize_override') and \
env.get('REMOTE_USER') != '.wsgi.pre_authed':
raise Exception(
'Invalid REMOTE_USER %r with swift.authorize_override' % (
env.get('REMOTE_USER'),))
if 'swift.authorize' in env:
resp = env['swift.authorize'](self.requests[-1])
if resp:
return resp(env, start_response)
status, headers, body = self.status_headers_body_iter.next()
return Response(status=status, headers=headers,
body=body)(env, start_response)
except EOFError:
start_response('499 Client Disconnect',
[('Content-Type', 'text/plain')])
return ['Client Disconnect\n']
class TestParseAttrs(unittest.TestCase):

View File

@ -18,12 +18,23 @@ import unittest
from swift.common.middleware import keystoneauth
from swift.common.swob import Request, Response
from swift.common.http import HTTP_FORBIDDEN
from swift.proxy.controllers.base import _get_cache_key
from test.unit import FakeLogger
UNKNOWN_ID = keystoneauth.UNKNOWN_ID
def _fake_token_info(version='2'):
if version == '2':
return {'access': 'fake_value'}
if version == '3':
return {'token': 'fake_value'}
class FakeApp(object):
def __init__(self, status_headers_body_iter=None):
self.calls = 0
self.call_contexts = []
self.status_headers_body_iter = status_headers_body_iter
if not self.status_headers_body_iter:
self.status_headers_body_iter = iter([('404 Not Found', {}, '')])
@ -35,6 +46,9 @@ class FakeApp(object):
resp = env['swift.authorize'](self.request)
if resp:
return resp(env, start_response)
context = {'method': self.request.method,
'headers': self.request.headers}
self.call_contexts.append(context)
status, headers, body = self.status_headers_body_iter.next()
return Response(status=status, headers=headers,
body=body)(env, start_response)
@ -51,12 +65,23 @@ class SwiftAuth(unittest.TestCase):
return Request.blank(path, headers=headers, **kwargs)
def _get_identity_headers(self, status='Confirmed', tenant_id='1',
tenant_name='acct', user='usr', role=''):
tenant_name='acct', project_domain_name='domA',
project_domain_id='99',
user_name='usr', user_id='42',
user_domain_name='domA', user_domain_id='99',
role='admin'):
return dict(X_IDENTITY_STATUS=status,
X_TENANT_ID=tenant_id,
X_TENANT_NAME=tenant_name,
X_PROJECT_ID=tenant_id,
X_PROJECT_NAME=tenant_name,
X_PROJECT_DOMAIN_ID=project_domain_id,
X_PROJECT_DOMAIN_NAME=project_domain_name,
X_ROLES=role,
X_USER_NAME=user)
X_USER_NAME=user_name,
X_USER_ID=user_id,
X_USER_DOMAIN_NAME=user_domain_name,
X_USER_DOMAIN_ID=user_domain_id)
def _get_successful_middleware(self):
response_iter = iter([('200 OK', {}, '')])
@ -172,8 +197,105 @@ class SwiftAuth(unittest.TestCase):
self.assertEqual(resp.status_int, 401)
self.assertTrue('Www-Authenticate' in resp.headers)
def test_project_domain_id_sysmeta_set(self):
proj_id = '12345678'
proj_domain_id = '13'
headers = self._get_identity_headers(tenant_id=proj_id,
project_domain_id=proj_domain_id)
account = self.test_auth._get_account_for_tenant(proj_id)
path = '/v1/' + account
# fake cached account info
_, info_key = _get_cache_key(account, None)
env = {info_key: {'status': 0, 'sysmeta': {}},
'keystone.token_info': _fake_token_info(version='3')}
req = Request.blank(path, environ=env, headers=headers)
req.method = 'POST'
headers_out = {'X-Account-Sysmeta-Project-Domain-Id': proj_domain_id}
fake_app = FakeApp(iter([('200 OK', headers_out, '')]))
test_auth = keystoneauth.filter_factory({})(fake_app)
resp = req.get_response(test_auth)
self.assertEqual(resp.status_int, 200)
self.assertEqual(len(fake_app.call_contexts), 1)
headers_sent = fake_app.call_contexts[0]['headers']
self.assertTrue('X-Account-Sysmeta-Project-Domain-Id' in headers_sent,
headers_sent)
self.assertEqual(headers_sent['X-Account-Sysmeta-Project-Domain-Id'],
proj_domain_id)
self.assertTrue('X-Account-Project-Domain-Id' in resp.headers)
self.assertEqual(resp.headers['X-Account-Project-Domain-Id'],
proj_domain_id)
class TestAuthorize(unittest.TestCase):
def test_project_domain_id_sysmeta_set_to_unknown(self):
proj_id = '12345678'
# token scoped to a different project
headers = self._get_identity_headers(tenant_id='87654321',
project_domain_id='default',
role='reselleradmin')
account = self.test_auth._get_account_for_tenant(proj_id)
path = '/v1/' + account
# fake cached account info
_, info_key = _get_cache_key(account, None)
env = {info_key: {'status': 0, 'sysmeta': {}},
'keystone.token_info': _fake_token_info(version='3')}
req = Request.blank(path, environ=env, headers=headers)
req.method = 'POST'
fake_app = FakeApp(iter([('200 OK', {}, '')]))
test_auth = keystoneauth.filter_factory({})(fake_app)
resp = req.get_response(test_auth)
self.assertEqual(resp.status_int, 200)
self.assertEqual(len(fake_app.call_contexts), 1)
headers_sent = fake_app.call_contexts[0]['headers']
self.assertTrue('X-Account-Sysmeta-Project-Domain-Id' in headers_sent,
headers_sent)
self.assertEqual(headers_sent['X-Account-Sysmeta-Project-Domain-Id'],
UNKNOWN_ID)
def test_project_domain_id_sysmeta_not_set(self):
proj_id = '12345678'
headers = self._get_identity_headers(tenant_id=proj_id, role='admin')
account = self.test_auth._get_account_for_tenant(proj_id)
path = '/v1/' + account
_, info_key = _get_cache_key(account, None)
# v2 token
env = {info_key: {'status': 0, 'sysmeta': {}},
'keystone.token_info': _fake_token_info(version='2')}
req = Request.blank(path, environ=env, headers=headers)
req.method = 'POST'
fake_app = FakeApp(iter([('200 OK', {}, '')]))
test_auth = keystoneauth.filter_factory({})(fake_app)
resp = req.get_response(test_auth)
self.assertEqual(resp.status_int, 200)
self.assertEqual(len(fake_app.call_contexts), 1)
headers_sent = fake_app.call_contexts[0]['headers']
self.assertFalse('X-Account-Sysmeta-Project-Domain-Id' in headers_sent,
headers_sent)
def test_project_domain_id_sysmeta_set_unknown_with_v2(self):
proj_id = '12345678'
# token scoped to a different project
headers = self._get_identity_headers(tenant_id='87654321',
role='reselleradmin')
account = self.test_auth._get_account_for_tenant(proj_id)
path = '/v1/' + account
_, info_key = _get_cache_key(account, None)
# v2 token
env = {info_key: {'status': 0, 'sysmeta': {}},
'keystone.token_info': _fake_token_info(version='2')}
req = Request.blank(path, environ=env, headers=headers)
req.method = 'POST'
fake_app = FakeApp(iter([('200 OK', {}, '')]))
test_auth = keystoneauth.filter_factory({})(fake_app)
resp = req.get_response(test_auth)
self.assertEqual(resp.status_int, 200)
self.assertEqual(len(fake_app.call_contexts), 1)
headers_sent = fake_app.call_contexts[0]['headers']
self.assertTrue('X-Account-Sysmeta-Project-Domain-Id' in headers_sent,
headers_sent)
self.assertEqual(headers_sent['X-Account-Sysmeta-Project-Domain-Id'],
UNKNOWN_ID)
class BaseTestAuthorize(unittest.TestCase):
def setUp(self):
self.test_auth = keystoneauth.filter_factory({})(FakeApp())
self.test_auth.logger = FakeLogger()
@ -188,18 +310,39 @@ class TestAuthorize(unittest.TestCase):
identity['HTTP_X_TENANT_ID'])
def _get_identity(self, tenant_id='tenant_id', tenant_name='tenant_name',
user_id='user_id', user_name='user_name', roles=None):
user_id='user_id', user_name='user_name', roles=None,
project_domain_name='domA', project_domain_id='foo',
user_domain_name='domA', user_domain_id='foo'):
if roles is None:
roles = []
if isinstance(roles, list):
roles = ','.join(roles)
return {'HTTP_X_USER_ID': user_id,
'HTTP_X_USER_NAME': user_name,
'HTTP_X_USER_DOMAIN_NAME': user_domain_name,
'HTTP_X_USER_DOMAIN_ID': user_domain_id,
'HTTP_X_TENANT_ID': tenant_id,
'HTTP_X_TENANT_NAME': tenant_name,
'HTTP_X_PROJECT_DOMAIN_ID': project_domain_id,
'HTTP_X_PROJECT_DOMAIN_NAME': project_domain_name,
'HTTP_X_ROLES': roles,
'HTTP_X_IDENTITY_STATUS': 'Confirmed'}
def _get_env_id(self, tenant_id='tenant_id', tenant_name='tenant_name',
user_id='user_id', user_name='user_name', roles=[],
project_domain_name='domA', project_domain_id='99',
user_domain_name='domA', user_domain_id='99',
auth_version='3'):
env = self._get_identity(tenant_id, tenant_name, user_id, user_name,
roles, project_domain_name,
project_domain_id, user_domain_name,
user_domain_id)
token_info = _fake_token_info(version=auth_version)
env.update({'keystone.token_info': token_info})
return self.test_auth._integral_keystone_identity(env)
class TestAuthorize(BaseTestAuthorize):
def _check_authenticate(self, account=None, identity=None, headers=None,
exception=None, acl=None, env=None, path=None):
if not identity:
@ -208,7 +351,10 @@ class TestAuthorize(unittest.TestCase):
account = self._get_account(identity)
if not path:
path = '/v1/%s/c' % account
default_env = {'REMOTE_USER': identity['HTTP_X_TENANT_ID']}
# fake cached account info
_, info_key = _get_cache_key(account, None)
default_env = {'REMOTE_USER': identity['HTTP_X_TENANT_ID'],
info_key: {'status': 200, 'sysmeta': {}}}
default_env.update(identity)
if env:
default_env.update(env)
@ -380,6 +526,49 @@ class TestAuthorize(unittest.TestCase):
['tenantXYZ:userA']),
None)
def test_cross_tenant_authorization_allow_names(self):
# tests that the allow_names arg does the right thing
self.assertEqual(
self.test_auth._authorize_cross_tenant(
'userID', 'userA', 'tenantID', 'tenantNAME',
['tenantNAME:userA'], allow_names=True),
'tenantNAME:userA')
self.assertEqual(
self.test_auth._authorize_cross_tenant(
'userID', 'userA', 'tenantID', 'tenantNAME',
['tenantNAME:userID'], allow_names=True),
'tenantNAME:userID')
self.assertEqual(
self.test_auth._authorize_cross_tenant(
'userID', 'userA', 'tenantID', 'tenantNAME',
['tenantID:userA'], allow_names=True),
'tenantID:userA')
self.assertEqual(
self.test_auth._authorize_cross_tenant(
'userID', 'userA', 'tenantID', 'tenantNAME',
['tenantID:userID'], allow_names=True),
'tenantID:userID')
self.assertEqual(
self.test_auth._authorize_cross_tenant(
'userID', 'userA', 'tenantID', 'tenantNAME',
['tenantNAME:userA'], allow_names=False),
None)
self.assertEqual(
self.test_auth._authorize_cross_tenant(
'userID', 'userA', 'tenantID', 'tenantNAME',
['tenantID:userA'], allow_names=False),
None)
self.assertEqual(
self.test_auth._authorize_cross_tenant(
'userID', 'userA', 'tenantID', 'tenantNAME',
['tenantNAME:userID'], allow_names=False),
None)
self.assertEqual(
self.test_auth._authorize_cross_tenant(
'userID', 'userA', 'tenantID', 'tenantNAME',
['tenantID:userID'], allow_names=False),
'tenantID:userID')
def test_delete_own_account_not_allowed(self):
roles = self.test_auth.operator_roles.split(',')
identity = self._get_identity(roles=roles)
@ -415,5 +604,576 @@ class TestAuthorize(unittest.TestCase):
authorize_resp = the_env['swift.authorize'](subreq)
self.assertEqual(authorize_resp, None)
def test_names_disallowed_in_acls_outside_default_domain(self):
id = self._get_identity(user_domain_id='non-default',
project_domain_id='non-default')
env = {'keystone.token_info': _fake_token_info(version='3')}
acl = '%s:%s' % (id['HTTP_X_TENANT_NAME'], id['HTTP_X_USER_NAME'])
self._check_authenticate(acl=acl, identity=id, env=env,
exception=HTTP_FORBIDDEN)
acl = '%s:%s' % (id['HTTP_X_TENANT_NAME'], id['HTTP_X_USER_ID'])
self._check_authenticate(acl=acl, identity=id, env=env,
exception=HTTP_FORBIDDEN)
acl = '%s:%s' % (id['HTTP_X_TENANT_ID'], id['HTTP_X_USER_NAME'])
self._check_authenticate(acl=acl, identity=id, env=env,
exception=HTTP_FORBIDDEN)
acl = '%s:%s' % (id['HTTP_X_TENANT_ID'], id['HTTP_X_USER_ID'])
self._check_authenticate(acl=acl, identity=id, env=env)
def test_names_allowed_in_acls_inside_default_domain(self):
id = self._get_identity(user_domain_id='default',
project_domain_id='default')
env = {'keystone.token_info': _fake_token_info(version='3')}
acl = '%s:%s' % (id['HTTP_X_TENANT_NAME'], id['HTTP_X_USER_NAME'])
self._check_authenticate(acl=acl, identity=id, env=env)
acl = '%s:%s' % (id['HTTP_X_TENANT_NAME'], id['HTTP_X_USER_ID'])
self._check_authenticate(acl=acl, identity=id, env=env)
acl = '%s:%s' % (id['HTTP_X_TENANT_ID'], id['HTTP_X_USER_NAME'])
self._check_authenticate(acl=acl, identity=id, env=env)
acl = '%s:%s' % (id['HTTP_X_TENANT_ID'], id['HTTP_X_USER_ID'])
self._check_authenticate(acl=acl, identity=id, env=env)
def test_names_allowed_in_acls_inside_default_domain_with_config(self):
conf = {'allow_names_in_acls': 'yes'}
self.test_auth = keystoneauth.filter_factory(conf)(FakeApp())
self.test_auth.logger = FakeLogger()
id = self._get_identity(user_domain_id='default',
project_domain_id='default')
env = {'keystone.token_info': _fake_token_info(version='3')}
acl = '%s:%s' % (id['HTTP_X_TENANT_NAME'], id['HTTP_X_USER_NAME'])
self._check_authenticate(acl=acl, identity=id, env=env)
acl = '%s:%s' % (id['HTTP_X_TENANT_NAME'], id['HTTP_X_USER_ID'])
self._check_authenticate(acl=acl, identity=id, env=env)
acl = '%s:%s' % (id['HTTP_X_TENANT_ID'], id['HTTP_X_USER_NAME'])
self._check_authenticate(acl=acl, identity=id, env=env)
acl = '%s:%s' % (id['HTTP_X_TENANT_ID'], id['HTTP_X_USER_ID'])
self._check_authenticate(acl=acl, identity=id, env=env)
def test_names_disallowed_in_acls_inside_default_domain(self):
conf = {'allow_names_in_acls': 'false'}
self.test_auth = keystoneauth.filter_factory(conf)(FakeApp())
self.test_auth.logger = FakeLogger()
id = self._get_identity(user_domain_id='default',
project_domain_id='default')
env = {'keystone.token_info': _fake_token_info(version='3')}
acl = '%s:%s' % (id['HTTP_X_TENANT_NAME'], id['HTTP_X_USER_NAME'])
self._check_authenticate(acl=acl, identity=id, env=env,
exception=HTTP_FORBIDDEN)
acl = '%s:%s' % (id['HTTP_X_TENANT_NAME'], id['HTTP_X_USER_ID'])
self._check_authenticate(acl=acl, identity=id, env=env,
exception=HTTP_FORBIDDEN)
acl = '%s:%s' % (id['HTTP_X_TENANT_ID'], id['HTTP_X_USER_NAME'])
self._check_authenticate(acl=acl, identity=id, env=env,
exception=HTTP_FORBIDDEN)
acl = '%s:%s' % (id['HTTP_X_TENANT_ID'], id['HTTP_X_USER_ID'])
self._check_authenticate(acl=acl, identity=id, env=env)
def test_integral_keystone_identity(self):
user = ('U_ID', 'U_NAME')
roles = ('ROLE1', 'ROLE2')
project = ('P_ID', 'P_NAME')
user_domain = ('UD_ID', 'UD_NAME')
project_domain = ('PD_ID', 'PD_NAME')
# no valid identity info in headers
req = Request.blank('/v/a/c/o')
data = self.test_auth._integral_keystone_identity(req.environ)
self.assertEqual(None, data)
# valid identity info in headers, but status unconfirmed
req.headers.update({'X-Identity-Status': 'Blah',
'X-Roles': '%s,%s' % roles,
'X-User-Id': user[0],
'X-User-Name': user[1],
'X-Tenant-Id': project[0],
'X-Tenant-Name': project[1],
'X-User-Domain-Id': user_domain[0],
'X-User-Domain-Name': user_domain[1],
'X-Project-Domain-Id': project_domain[0],
'X-Project-Domain-Name': project_domain[1]})
data = self.test_auth._integral_keystone_identity(req.environ)
self.assertEqual(None, data)
# valid identity info in headers, no token info in environ
req.headers.update({'X-Identity-Status': 'Confirmed'})
expected = {'user': user,
'tenant': project,
'roles': list(roles),
'user_domain': (None, None),
'project_domain': (None, None),
'auth_version': 0}
data = self.test_auth._integral_keystone_identity(req.environ)
self.assertEquals(expected, data)
# v2 token info in environ
req.environ['keystone.token_info'] = _fake_token_info(version='2')
expected = {'user': user,
'tenant': project,
'roles': list(roles),
'user_domain': (None, None),
'project_domain': (None, None),
'auth_version': 2}
data = self.test_auth._integral_keystone_identity(req.environ)
self.assertEquals(expected, data)
# v3 token info in environ
req.environ['keystone.token_info'] = _fake_token_info(version='3')
expected = {'user': user,
'tenant': project,
'roles': list(roles),
'user_domain': user_domain,
'project_domain': project_domain,
'auth_version': 3}
data = self.test_auth._integral_keystone_identity(req.environ)
self.assertEquals(expected, data)
def test_get_project_domain_id(self):
sysmeta = {}
info = {'sysmeta': sysmeta}
_, info_key = _get_cache_key('AUTH_1234', None)
env = {'PATH_INFO': '/v1/AUTH_1234',
info_key: info}
# account does not exist
info['status'] = 404
self.assertEqual(self.test_auth._get_project_domain_id(env),
(False, None))
info['status'] = 0
self.assertEqual(self.test_auth._get_project_domain_id(env),
(False, None))
# account exists, no project domain id in sysmeta
info['status'] = 200
self.assertEqual(self.test_auth._get_project_domain_id(env),
(True, None))
# account exists with project domain id in sysmeta
sysmeta['project-domain-id'] = 'default'
self.assertEqual(self.test_auth._get_project_domain_id(env),
(True, 'default'))
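# A simplified sketch of what _get_project_domain_id is checked for above
# (hypothetical helper name, not the middleware's code): the cached account
# info answers two questions -- does the account exist, and what project
# domain id, if any, has been recorded in its sysmeta.
def project_domain_from_account_info(info):
    # assume only a cached 2xx status means the account exists
    exists = bool(info) and 200 <= info.get('status', 0) < 300
    if not exists:
        return (False, None)
    return (True, info.get('sysmeta', {}).get('project-domain-id'))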
class TestIsNameAllowedInACL(BaseTestAuthorize):
def setUp(self):
super(TestIsNameAllowedInACL, self).setUp()
self.default_id = 'default'
def _assert_names_allowed(self, expected, user_domain_id=None,
req_project_domain_id=None,
sysmeta_project_domain_id=None,
scoped='account'):
project_name = 'foo'
account_id = '12345678'
account = self.test_auth._get_account_for_tenant(account_id)
parts = ('v1', account, None, None)
path = '/%s/%s' % parts[0:2]
sysmeta = {}
if sysmeta_project_domain_id:
sysmeta = {'project-domain-id': sysmeta_project_domain_id}
# pretend account exists
info = {'status': 200, 'sysmeta': sysmeta}
_, info_key = _get_cache_key(account, None)
req = Request.blank(path, environ={info_key: info})
if scoped == 'account':
project_name = 'account_name'
project_id = account_id
elif scoped == 'other':
project_name = 'other_name'
project_id = '87654321'
else:
# unscoped token
project_name, project_id, req_project_domain_id = None, None, None
if user_domain_id:
id = self._get_env_id(tenant_name=project_name,
tenant_id=project_id,
user_domain_id=user_domain_id,
project_domain_id=req_project_domain_id)
else:
# must be v2 token info
id = self._get_env_id(tenant_name=project_name,
tenant_id=project_id,
auth_version='2')
actual = self.test_auth._is_name_allowed_in_acl(req, parts, id)
self.assertEqual(actual, expected, '%s, %s, %s, %s'
% (user_domain_id, req_project_domain_id,
sysmeta_project_domain_id, scoped))
def test_is_name_allowed_in_acl_with_token_scoped_to_tenant(self):
# no user or project domain ids in request token so must be v2,
# user and project should be assumed to be in default domain
self._assert_names_allowed(True, user_domain_id=None,
req_project_domain_id=None,
sysmeta_project_domain_id=None)
self._assert_names_allowed(True, user_domain_id=None,
req_project_domain_id=None,
sysmeta_project_domain_id=self.default_id)
self._assert_names_allowed(True, user_domain_id=None,
req_project_domain_id=None,
sysmeta_project_domain_id=UNKNOWN_ID)
self._assert_names_allowed(True, user_domain_id=None,
req_project_domain_id=None,
sysmeta_project_domain_id='foo')
# user in default domain, project domain in token info takes precedence
self._assert_names_allowed(True, user_domain_id=self.default_id,
req_project_domain_id=self.default_id,
sysmeta_project_domain_id=None)
self._assert_names_allowed(True, user_domain_id=self.default_id,
req_project_domain_id=self.default_id,
sysmeta_project_domain_id=UNKNOWN_ID)
self._assert_names_allowed(True, user_domain_id=self.default_id,
req_project_domain_id=self.default_id,
sysmeta_project_domain_id='bar')
self._assert_names_allowed(False, user_domain_id=self.default_id,
req_project_domain_id='foo',
sysmeta_project_domain_id=None)
self._assert_names_allowed(False, user_domain_id=self.default_id,
req_project_domain_id='foo',
sysmeta_project_domain_id=self.default_id)
self._assert_names_allowed(False, user_domain_id=self.default_id,
req_project_domain_id='foo',
sysmeta_project_domain_id='foo')
# user in non-default domain so names should never be allowed
self._assert_names_allowed(False, user_domain_id='foo',
req_project_domain_id=self.default_id,
sysmeta_project_domain_id=None)
self._assert_names_allowed(False, user_domain_id='foo',
req_project_domain_id=self.default_id,
sysmeta_project_domain_id=self.default_id)
self._assert_names_allowed(False, user_domain_id='foo',
req_project_domain_id=self.default_id,
sysmeta_project_domain_id=UNKNOWN_ID)
self._assert_names_allowed(False, user_domain_id='foo',
req_project_domain_id=self.default_id,
sysmeta_project_domain_id='foo')
def test_is_name_allowed_in_acl_with_unscoped_token(self):
# user in default domain
self._assert_names_allowed(True, user_domain_id=self.default_id,
sysmeta_project_domain_id=None,
scoped=False)
self._assert_names_allowed(True, user_domain_id=self.default_id,
sysmeta_project_domain_id=self.default_id,
scoped=False)
self._assert_names_allowed(False, user_domain_id=self.default_id,
sysmeta_project_domain_id=UNKNOWN_ID,
scoped=False)
self._assert_names_allowed(False, user_domain_id=self.default_id,
sysmeta_project_domain_id='foo',
scoped=False)
# user in non-default domain so names should never be allowed
self._assert_names_allowed(False, user_domain_id='foo',
sysmeta_project_domain_id=None,
scoped=False)
self._assert_names_allowed(False, user_domain_id='foo',
sysmeta_project_domain_id=self.default_id,
scoped=False)
self._assert_names_allowed(False, user_domain_id='foo',
sysmeta_project_domain_id=UNKNOWN_ID,
scoped=False)
self._assert_names_allowed(False, user_domain_id='foo',
sysmeta_project_domain_id='foo',
scoped=False)
def test_is_name_allowed_in_acl_with_token_scoped_to_other_tenant(self):
# user and scoped tenant in default domain
self._assert_names_allowed(True, user_domain_id=self.default_id,
req_project_domain_id=self.default_id,
sysmeta_project_domain_id=None,
scoped='other')
self._assert_names_allowed(True, user_domain_id=self.default_id,
req_project_domain_id=self.default_id,
sysmeta_project_domain_id=self.default_id,
scoped='other')
self._assert_names_allowed(False, user_domain_id=self.default_id,
req_project_domain_id=self.default_id,
sysmeta_project_domain_id=UNKNOWN_ID,
scoped='other')
self._assert_names_allowed(False, user_domain_id=self.default_id,
req_project_domain_id=self.default_id,
sysmeta_project_domain_id='foo',
scoped='other')
# user in default domain, but scoped tenant in non-default domain
self._assert_names_allowed(False, user_domain_id=self.default_id,
req_project_domain_id='foo',
sysmeta_project_domain_id=None,
scoped='other')
self._assert_names_allowed(False, user_domain_id=self.default_id,
req_project_domain_id='foo',
sysmeta_project_domain_id=self.default_id,
scoped='other')
self._assert_names_allowed(False, user_domain_id=self.default_id,
req_project_domain_id='foo',
sysmeta_project_domain_id=UNKNOWN_ID,
scoped='other')
self._assert_names_allowed(False, user_domain_id=self.default_id,
req_project_domain_id='foo',
sysmeta_project_domain_id='foo',
scoped='other')
# user in non-default domain, scoped tenant in default domain
self._assert_names_allowed(False, user_domain_id='foo',
req_project_domain_id=self.default_id,
sysmeta_project_domain_id=None,
scoped='other')
self._assert_names_allowed(False, user_domain_id='foo',
req_project_domain_id=self.default_id,
sysmeta_project_domain_id=self.default_id,
scoped='other')
self._assert_names_allowed(False, user_domain_id='foo',
req_project_domain_id=self.default_id,
sysmeta_project_domain_id=UNKNOWN_ID,
scoped='other')
self._assert_names_allowed(False, user_domain_id='foo',
req_project_domain_id=self.default_id,
sysmeta_project_domain_id='foo',
scoped='other')
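# A condensed restatement of the rule the three tests above pin down (an
# illustrative sketch, not _is_name_allowed_in_acl itself): names in ACLs
# stay usable only while everything involved still looks like the default
# domain.
def names_allowed_sketch(default_id, user_domain_id, scoped_to_account,
                         token_project_domain_id, sysmeta_project_domain_id):
    if user_domain_id is None:
        # v2 token: no domain info, assume the default domain throughout
        return True
    if user_domain_id != default_id:
        return False
    if scoped_to_account:
        # the project domain carried by the token takes precedence
        return token_project_domain_id == default_id
    # unscoped token, or token scoped to some other project: the token's
    # project domain (if any) and the account's recorded domain must both
    # be the default domain or simply absent
    if token_project_domain_id not in (None, default_id):
        return False
    return sysmeta_project_domain_id in (None, default_id)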
class TestIsNameAllowedInACLWithConfiguredDomain(TestIsNameAllowedInACL):
def setUp(self):
super(TestIsNameAllowedInACLWithConfiguredDomain, self).setUp()
conf = {'default_domain_id': 'mydefault'}
self.test_auth = keystoneauth.filter_factory(conf)(FakeApp())
self.test_auth.logger = FakeLogger()
self.default_id = 'mydefault'
class TestSetProjectDomain(BaseTestAuthorize):
def _assert_set_project_domain(self, expected, account, req_project_id,
req_project_domain_id,
sysmeta_project_domain_id,
warning=False):
hdr = 'X-Account-Sysmeta-Project-Domain-Id'
# set up fake account info in req env
status = 0 if sysmeta_project_domain_id is None else 200
sysmeta = {}
if sysmeta_project_domain_id:
sysmeta['project-domain-id'] = sysmeta_project_domain_id
info = {'status': status, 'sysmeta': sysmeta}
_, info_key = _get_cache_key(account, None)
env = {info_key: info}
# create fake env identity
env_id = self._get_env_id(tenant_id=req_project_id,
project_domain_id=req_project_domain_id)
# reset fake logger
self.test_auth.logger = FakeLogger()
num_warnings = 0
# check account requests
path = '/v1/%s' % account
for method in ['PUT', 'POST']:
req = Request.blank(path, environ=env)
req.method = method
path_parts = req.split_path(1, 4, True)
self.test_auth._set_project_domain_id(req, path_parts, env_id)
if warning:
num_warnings += 1
warnings = self.test_auth.logger.get_lines_for_level('warning')
self.assertEqual(len(warnings), num_warnings)
self.assertTrue(warnings[-1].startswith('Inconsistent proj'))
if expected is not None:
self.assertTrue(hdr in req.headers)
self.assertEqual(req.headers[hdr], expected)
else:
self.assertFalse(hdr in req.headers, req.headers)
for method in ['GET', 'HEAD', 'DELETE', 'OPTIONS']:
req = Request.blank(path, environ=env)
req.method = method
self.test_auth._set_project_domain_id(req, path_parts, env_id)
self.assertFalse(hdr in req.headers)
# check container requests
path = '/v1/%s/c' % account
for method in ['PUT']:
req = Request.blank(path, environ=env)
req.method = method
path_parts = req.split_path(1, 4, True)
self.test_auth._set_project_domain_id(req, path_parts, env_id)
if warning:
num_warnings += 1
warnings = self.test_auth.logger.get_lines_for_level('warning')
self.assertEqual(len(warnings), num_warnings)
self.assertTrue(warnings[-1].startswith('Inconsistent proj'))
if expected is not None:
self.assertTrue(hdr in req.headers)
self.assertEqual(req.headers[hdr], expected)
else:
self.assertFalse(hdr in req.headers)
for method in ['POST', 'GET', 'HEAD', 'DELETE', 'OPTIONS']:
req = Request.blank(path, environ=env)
req.method = method
self.test_auth._set_project_domain_id(req, path_parts, env_id)
self.assertFalse(hdr in req.headers)
# never set for object requests
path = '/v1/%s/c/o' % account
for method in ['PUT', 'COPY', 'POST', 'GET', 'HEAD', 'DELETE',
'OPTIONS']:
req = Request.blank(path, environ=env)
req.method = method
path_parts = req.split_path(1, 4, True)
self.test_auth._set_project_domain_id(req, path_parts, env_id)
self.assertFalse(hdr in req.headers)
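# A small sketch of the request-type gate the helper above exercises
# (hypothetical function, not the middleware's code): the sysmeta header is
# only ever stamped on account PUT/POST and container PUT; reads, deletes
# and every object request are left untouched.
def may_stamp_project_domain(method, container, obj):
    if obj:
        return False
    if container:
        return method == 'PUT'
    return method in ('PUT', 'POST')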
def test_set_project_domain_id_new_account(self):
# scoped token with project domain info
self._assert_set_project_domain('test_id',
account='AUTH_1234',
req_project_id='1234',
req_project_domain_id='test_id',
sysmeta_project_domain_id=None)
# scoped v2 token without project domain id
self._assert_set_project_domain(None,
account='AUTH_1234',
req_project_id='1234',
req_project_domain_id=None,
sysmeta_project_domain_id=None)
# unscoped v2 token without project domain id
self._assert_set_project_domain(UNKNOWN_ID,
account='AUTH_1234',
req_project_id=None,
req_project_domain_id=None,
sysmeta_project_domain_id=None)
# token scoped on another project
self._assert_set_project_domain(UNKNOWN_ID,
account='AUTH_1234',
req_project_id='4321',
req_project_domain_id='default',
sysmeta_project_domain_id=None)
def test_set_project_domain_id_existing_v2_account(self):
# project domain id provided in scoped request token,
# update empty value
self._assert_set_project_domain('default',
account='AUTH_1234',
req_project_id='1234',
req_project_domain_id='default',
sysmeta_project_domain_id='')
# inconsistent project domain id provided in scoped request token,
# leave known value
self._assert_set_project_domain(None,
account='AUTH_1234',
req_project_id='1234',
req_project_domain_id='unexpected_id',
sysmeta_project_domain_id='',
warning=True)
# project domain id not provided, scoped request token,
# no change to empty value
self._assert_set_project_domain(None,
account='AUTH_1234',
req_project_id='1234',
req_project_domain_id=None,
sysmeta_project_domain_id='')
# unscoped request token, no change to empty value
self._assert_set_project_domain(None,
account='AUTH_1234',
req_project_id=None,
req_project_domain_id=None,
sysmeta_project_domain_id='')
# token scoped on another project,
# no change to empty value
self._assert_set_project_domain(None,
account='AUTH_1234',
req_project_id='4321',
req_project_domain_id=None,
sysmeta_project_domain_id='')
def test_set_project_domain_id_existing_account_unknown_domain(self):
# project domain id provided in scoped request token,
# set known value
self._assert_set_project_domain('test_id',
account='AUTH_1234',
req_project_id='1234',
req_project_domain_id='test_id',
sysmeta_project_domain_id=UNKNOWN_ID)
# project domain id not provided, scoped request token,
# set empty value
self._assert_set_project_domain('',
account='AUTH_1234',
req_project_id='1234',
req_project_domain_id=None,
sysmeta_project_domain_id=UNKNOWN_ID)
# project domain id not provided, unscoped request token,
# leave unknown value
self._assert_set_project_domain(None,
account='AUTH_1234',
req_project_id=None,
req_project_domain_id=None,
sysmeta_project_domain_id=UNKNOWN_ID)
# token scoped on another project, leave unknown value
self._assert_set_project_domain(None,
account='AUTH_1234',
req_project_id='4321',
req_project_domain_id='default',
sysmeta_project_domain_id=UNKNOWN_ID)
def test_set_project_domain_id_existing_known_domain(self):
# project domain id provided in scoped request token,
# leave known value
self._assert_set_project_domain(None,
account='AUTH_1234',
req_project_id='1234',
req_project_domain_id='test_id',
sysmeta_project_domain_id='test_id')
# inconsistent project domain id provided in scoped request token,
# leave known value
self._assert_set_project_domain(None,
account='AUTH_1234',
req_project_id='1234',
req_project_domain_id='unexpected_id',
sysmeta_project_domain_id='test_id',
warning=True)
# project domain id not provided, scoped request token,
# leave known value
self._assert_set_project_domain(None,
account='AUTH_1234',
req_project_id='1234',
req_project_domain_id=None,
sysmeta_project_domain_id='test_id')
# project domain id not provided, unscoped request token,
# leave known value
self._assert_set_project_domain(None,
account='AUTH_1234',
req_project_id=None,
req_project_domain_id=None,
sysmeta_project_domain_id='test_id')
# project domain id not provided, token scoped on another project,
# leave known value
self._assert_set_project_domain(None,
account='AUTH_1234',
req_project_id='4321',
req_project_domain_id='default',
sysmeta_project_domain_id='test_id')
if __name__ == '__main__':
unittest.main()

View File

@ -663,6 +663,44 @@ class TestTempURL(unittest.TestCase):
self.assertEquals(
self.app.request.headers['x-remove-this-except-this'], 'value2')
def test_allow_trumps_incoming_header_conflict(self):
self.tempurl = tempurl.filter_factory({
'incoming_remove_headers': 'x-conflict-header',
'incoming_allow_headers': 'x-conflict-header'})(self.auth)
method = 'GET'
expires = int(time() + 86400)
path = '/v1/a/c/o'
key = 'abc'
hmac_body = '%s\n%s\n%s' % (method, expires, path)
sig = hmac.new(key, hmac_body, sha1).hexdigest()
req = self._make_request(
path, keys=[key],
headers={'x-conflict-header': 'value'},
environ={'QUERY_STRING': 'temp_url_sig=%s&temp_url_expires=%s' % (
sig, expires)})
resp = req.get_response(self.tempurl)
self.assertEquals(resp.status_int, 404)
self.assertTrue('x-conflict-header' in self.app.request.headers)
def test_allow_trumps_incoming_header_startswith_conflict(self):
self.tempurl = tempurl.filter_factory({
'incoming_remove_headers': 'x-conflict-header-*',
'incoming_allow_headers': 'x-conflict-header-*'})(self.auth)
method = 'GET'
expires = int(time() + 86400)
path = '/v1/a/c/o'
key = 'abc'
hmac_body = '%s\n%s\n%s' % (method, expires, path)
sig = hmac.new(key, hmac_body, sha1).hexdigest()
req = self._make_request(
path, keys=[key],
headers={'x-conflict-header-test': 'value'},
environ={'QUERY_STRING': 'temp_url_sig=%s&temp_url_expires=%s' % (
sig, expires)})
resp = req.get_response(self.tempurl)
self.assertEquals(resp.status_int, 404)
self.assertTrue('x-conflict-header-test' in self.app.request.headers)
def test_removed_outgoing_header(self):
self.tempurl = tempurl.filter_factory({
'outgoing_remove_headers': 'x-test-header-one-a'})(self.auth)
@ -701,6 +739,50 @@ class TestTempURL(unittest.TestCase):
self.assertTrue('x-test-header-two-a' not in resp.headers)
self.assertEquals(resp.headers['x-test-header-two-b'], 'value3')
def test_allow_trumps_outgoing_header_conflict(self):
self.tempurl = tempurl.filter_factory({
'outgoing_remove_headers': 'x-conflict-header',
'outgoing_allow_headers': 'x-conflict-header'})(self.auth)
method = 'GET'
expires = int(time() + 86400)
path = '/v1/a/c/o'
key = 'abc'
hmac_body = '%s\n%s\n%s' % (method, expires, path)
sig = hmac.new(key, hmac_body, sha1).hexdigest()
req = self._make_request(
path, keys=[key],
headers={},
environ={'QUERY_STRING': 'temp_url_sig=%s&temp_url_expires=%s' % (
sig, expires)})
self.tempurl.app = FakeApp(iter([('200 Ok', {
'X-Conflict-Header': 'value'}, '123')]))
resp = req.get_response(self.tempurl)
self.assertEquals(resp.status_int, 200)
self.assertTrue('x-conflict-header' in resp.headers)
self.assertEqual(resp.headers['x-conflict-header'], 'value')
def test_allow_trumps_outgoing_header_startswith_conflict(self):
self.tempurl = tempurl.filter_factory({
'outgoing_remove_headers': 'x-conflict-header-*',
'outgoing_allow_headers': 'x-conflict-header-*'})(self.auth)
method = 'GET'
expires = int(time() + 86400)
path = '/v1/a/c/o'
key = 'abc'
hmac_body = '%s\n%s\n%s' % (method, expires, path)
sig = hmac.new(key, hmac_body, sha1).hexdigest()
req = self._make_request(
path, keys=[key],
headers={},
environ={'QUERY_STRING': 'temp_url_sig=%s&temp_url_expires=%s' % (
sig, expires)})
self.tempurl.app = FakeApp(iter([('200 Ok', {
'X-Conflict-Header-Test': 'value'}, '123')]))
resp = req.get_response(self.tempurl)
self.assertEquals(resp.status_int, 200)
self.assertTrue('x-conflict-header-test' in resp.headers)
self.assertEqual(resp.headers['x-conflict-header-test'], 'value')
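# For reference, the signature the requests above present, computed exactly
# the way these tests build it: an HMAC-SHA1 over "METHOD\nEXPIRES\nPATH"
# keyed with the account's temp URL key (Python 2, as in this test module).
import hmac
from hashlib import sha1
from time import time

def temp_url_sig(key, method, path, expires):
    hmac_body = '%s\n%s\n%s' % (method, expires, path)
    return hmac.new(key, hmac_body, sha1).hexdigest()

# e.g. temp_url_sig('abc', 'GET', '/v1/a/c/o', int(time() + 86400)) yields
# the value passed as temp_url_sig in the query string.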
def test_get_account(self):
self.assertEquals(self.tempurl._get_account({
'REQUEST_METHOD': 'HEAD', 'PATH_INFO': '/v1/a/c/o'}), 'a')

View File

@ -23,7 +23,7 @@ from test.unit import MockTrue
from swift.common.swob import HTTPBadRequest, Request, HTTPException
from swift.common.http import HTTP_REQUEST_ENTITY_TOO_LARGE, \
HTTP_BAD_REQUEST, HTTP_LENGTH_REQUIRED
HTTP_BAD_REQUEST, HTTP_LENGTH_REQUIRED, HTTP_NOT_IMPLEMENTED
from swift.common import constraints, utils
@ -125,20 +125,68 @@ class TestConstraints(unittest.TestCase):
'Content-Type': 'text/plain'}
self.assertEquals(constraints.check_object_creation(Request.blank(
'/', headers=headers), 'object_name'), None)
headers = {'Content-Length': str(constraints.MAX_FILE_SIZE + 1),
'Content-Type': 'text/plain'}
self.assertEquals(constraints.check_object_creation(
Request.blank('/', headers=headers), 'object_name').status_int,
HTTP_REQUEST_ENTITY_TOO_LARGE)
headers = {'Transfer-Encoding': 'chunked',
'Content-Type': 'text/plain'}
self.assertEquals(constraints.check_object_creation(Request.blank(
'/', headers=headers), 'object_name'), None)
headers = {'Transfer-Encoding': 'gzip',
'Content-Type': 'text/plain'}
self.assertEquals(constraints.check_object_creation(Request.blank(
'/', headers=headers), 'object_name').status_int,
HTTP_BAD_REQUEST)
headers = {'Content-Type': 'text/plain'}
self.assertEquals(constraints.check_object_creation(
Request.blank('/', headers=headers), 'object_name').status_int,
HTTP_LENGTH_REQUIRED)
headers = {'Content-Length': 'abc',
'Content-Type': 'text/plain'}
self.assertEquals(constraints.check_object_creation(Request.blank(
'/', headers=headers), 'object_name').status_int,
HTTP_BAD_REQUEST)
headers = {'Transfer-Encoding': 'gzip,chunked',
'Content-Type': 'text/plain'}
self.assertEquals(constraints.check_object_creation(Request.blank(
'/', headers=headers), 'object_name').status_int,
HTTP_NOT_IMPLEMENTED)
def test_check_object_creation_copy(self):
headers = {'Content-Length': '0',
'X-Copy-From': 'c/o2',
'Content-Type': 'text/plain'}
self.assertEquals(constraints.check_object_creation(Request.blank(
'/', headers=headers), 'object_name'), None)
headers = {'Content-Length': '1',
'X-Copy-From': 'c/o2',
'Content-Type': 'text/plain'}
self.assertEquals(constraints.check_object_creation(Request.blank(
'/', headers=headers), 'object_name').status_int,
HTTP_BAD_REQUEST)
headers = {'Transfer-Encoding': 'chunked',
'X-Copy-From': 'c/o2',
'Content-Type': 'text/plain'}
self.assertEquals(constraints.check_object_creation(Request.blank(
'/', headers=headers), 'object_name'), None)
# without a chunked transfer-encoding, a content-length header is required
headers = {'X-Copy-From': 'c/o2',
'Content-Type': 'text/plain'}
self.assertEquals(constraints.check_object_creation(Request.blank(
'/', headers=headers), 'object_name').status_int,
HTTP_LENGTH_REQUIRED)
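# A simplified restatement of the checks these cases drive (an illustrative
# sketch, not the real constraints.check_object_creation); max_file_size
# stands in for the configured limit and the return value is the HTTP status
# that would be raised, or None when the request is acceptable.
def validate_upload_sketch(content_length, transfer_encoding, copy_from=None,
                           max_file_size=5 * 1024 ** 3):
    if transfer_encoding is not None:
        codings = [c.strip() for c in transfer_encoding.split(',')]
        if codings != ['chunked']:
            # "chunked" combined with anything else is unsupported;
            # any other encoding is simply invalid
            return 501 if 'chunked' in codings else 400
    elif content_length is None:
        return 411  # length required
    if content_length is not None:
        try:
            length = int(content_length)
        except ValueError:
            return 400
        if length > max_file_size:
            return 413
        if copy_from and length != 0:
            return 400  # copy requests must not carry a body
    return None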
def test_check_object_creation_name_length(self):
headers = {'Transfer-Encoding': 'chunked',
'Content-Type': 'text/plain'}
@ -168,6 +216,158 @@ class TestConstraints(unittest.TestCase):
self.assertEquals(resp.status_int, HTTP_BAD_REQUEST)
self.assert_('Content-Type' in resp.body)
def test_check_object_creation_bad_delete_headers(self):
headers = {'Transfer-Encoding': 'chunked',
'Content-Type': 'text/plain',
'X-Delete-After': 'abc'}
resp = constraints.check_object_creation(
Request.blank('/', headers=headers), 'object_name')
self.assertEquals(resp.status_int, HTTP_BAD_REQUEST)
self.assert_('Non-integer X-Delete-After' in resp.body)
t = str(int(time.time() - 60))
headers = {'Transfer-Encoding': 'chunked',
'Content-Type': 'text/plain',
'X-Delete-At': t}
resp = constraints.check_object_creation(
Request.blank('/', headers=headers), 'object_name')
self.assertEquals(resp.status_int, HTTP_BAD_REQUEST)
self.assert_('X-Delete-At in past' in resp.body)
def test_check_delete_headers(self):
# X-Delete-After
headers = {'X-Delete-After': '60'}
resp = constraints.check_delete_headers(
Request.blank('/', headers=headers))
self.assertTrue(isinstance(resp, Request))
self.assertTrue('x-delete-at' in resp.headers)
headers = {'X-Delete-After': 'abc'}
try:
resp = constraints.check_delete_headers(
Request.blank('/', headers=headers))
except HTTPException as e:
self.assertEquals(e.status_int, HTTP_BAD_REQUEST)
self.assertTrue('Non-integer X-Delete-After' in e.body)
else:
self.fail("Should have failed with HTTPBadRequest")
headers = {'X-Delete-After': '60.1'}
try:
resp = constraints.check_delete_headers(
Request.blank('/', headers=headers))
except HTTPException as e:
self.assertEquals(e.status_int, HTTP_BAD_REQUEST)
self.assertTrue('Non-integer X-Delete-After' in e.body)
else:
self.fail("Should have failed with HTTPBadRequest")
headers = {'X-Delete-After': '-1'}
try:
resp = constraints.check_delete_headers(
Request.blank('/', headers=headers))
except HTTPException as e:
self.assertEquals(e.status_int, HTTP_BAD_REQUEST)
self.assertTrue('X-Delete-After in past' in e.body)
else:
self.fail("Should have failed with HTTPBadRequest")
# X-Delete-At
t = str(int(time.time() + 100))
headers = {'X-Delete-At': t}
resp = constraints.check_delete_headers(
Request.blank('/', headers=headers))
self.assertTrue(isinstance(resp, Request))
self.assertTrue('x-delete-at' in resp.headers)
self.assertEquals(resp.headers.get('X-Delete-At'), t)
headers = {'X-Delete-At': 'abc'}
try:
resp = constraints.check_delete_headers(
Request.blank('/', headers=headers))
except HTTPException as e:
self.assertEquals(e.status_int, HTTP_BAD_REQUEST)
self.assertTrue('Non-integer X-Delete-At' in e.body)
else:
self.fail("Should have failed with HTTPBadRequest")
t = str(int(time.time() + 100)) + '.1'
headers = {'X-Delete-At': t}
try:
resp = constraints.check_delete_headers(
Request.blank('/', headers=headers))
except HTTPException as e:
self.assertEquals(e.status_int, HTTP_BAD_REQUEST)
self.assertTrue('Non-integer X-Delete-At' in e.body)
else:
self.fail("Should have failed with HTTPBadRequest")
t = str(int(time.time()))
headers = {'X-Delete-At': t}
try:
resp = constraints.check_delete_headers(
Request.blank('/', headers=headers))
except HTTPException as e:
self.assertEquals(e.status_int, HTTP_BAD_REQUEST)
self.assertTrue('X-Delete-At in past' in e.body)
else:
self.fail("Should have failed with HTTPBadRequest")
t = str(int(time.time() - 1))
headers = {'X-Delete-At': t}
try:
resp = constraints.check_delete_headers(
Request.blank('/', headers=headers))
except HTTPException as e:
self.assertEquals(e.status_int, HTTP_BAD_REQUEST)
self.assertTrue('X-Delete-At in past' in e.body)
else:
self.fail("Should have failed with HTTPBadRequest")
def test_check_delete_headers_sets_delete_at(self):
t = time.time() + 1000
# check delete-at is passed through
headers = {'Content-Length': '0',
'Content-Type': 'text/plain',
'X-Delete-At': str(int(t))}
req = Request.blank('/', headers=headers)
constraints.check_delete_headers(req)
self.assertTrue('X-Delete-At' in req.headers)
self.assertEqual(req.headers['X-Delete-At'], str(int(t)))
# check delete-after is converted to delete-at
headers = {'Content-Length': '0',
'Content-Type': 'text/plain',
'X-Delete-After': '42'}
req = Request.blank('/', headers=headers)
with mock.patch('time.time', lambda: t):
constraints.check_delete_headers(req)
self.assertTrue('X-Delete-At' in req.headers)
expected = str(int(t) + 42)
self.assertEqual(req.headers['X-Delete-At'], expected)
# check delete-after takes precedence over delete-at
headers = {'Content-Length': '0',
'Content-Type': 'text/plain',
'X-Delete-After': '42',
'X-Delete-At': str(int(t) + 40)}
req = Request.blank('/', headers=headers)
with mock.patch('time.time', lambda: t):
constraints.check_delete_headers(req)
self.assertTrue('X-Delete-At' in req.headers)
self.assertEqual(req.headers['X-Delete-At'], expected)
headers = {'Content-Length': '0',
'Content-Type': 'text/plain',
'X-Delete-After': '42',
'X-Delete-At': str(int(t) + 44)}
req = Request.blank('/', headers=headers)
with mock.patch('time.time', lambda: t):
constraints.check_delete_headers(req)
self.assertTrue('X-Delete-At' in req.headers)
self.assertEqual(req.headers['X-Delete-At'], expected)
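# The conversion pinned down above, as a hypothetical stand-alone helper
# (not the real constraints.check_delete_headers): X-Delete-After is an
# offset that becomes an absolute X-Delete-At of now + delta, and when both
# headers are supplied the derived value wins.
import time

def effective_delete_at(headers, now=None):
    now = int(time.time() if now is None else now)
    if 'X-Delete-After' in headers:
        return str(now + int(headers['X-Delete-After']))
    return headers.get('X-Delete-At')

# effective_delete_at({'X-Delete-After': '42', 'X-Delete-At': '123'}, now=t)
# == str(int(t) + 42), matching the precedence asserted above.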
def test_check_mount(self):
self.assertFalse(constraints.check_mount('', ''))
with mock.patch("swift.common.utils.ismount", MockTrue()):
@ -260,6 +460,41 @@ class TestConstraints(unittest.TestCase):
self.assertRaises(HTTPException,
constraints.check_copy_from_header, req)
def test_validate_destination(self):
req = Request.blank(
'/v/a/c/o',
headers={'destination': 'c/o2'})
src_cont, src_obj = constraints.check_destination_header(req)
self.assertEqual(src_cont, 'c')
self.assertEqual(src_obj, 'o2')
req = Request.blank(
'/v/a/c/o',
headers={'destination': 'c/subdir/o2'})
src_cont, src_obj = constraints.check_destination_header(req)
self.assertEqual(src_cont, 'c')
self.assertEqual(src_obj, 'subdir/o2')
req = Request.blank(
'/v/a/c/o',
headers={'destination': '/c/o2'})
src_cont, src_obj = constraints.check_destination_header(req)
self.assertEqual(src_cont, 'c')
self.assertEqual(src_obj, 'o2')
def test_validate_bad_destination(self):
req = Request.blank(
'/v/a/c/o',
headers={'destination': 'bad_object'})
self.assertRaises(HTTPException,
constraints.check_destination_header, req)
def test_check_account_format(self):
req = Request.blank(
'/v/a/c/o',
headers={'X-Copy-From-Account': 'account/with/slashes'})
self.assertRaises(HTTPException,
constraints.check_account_format,
req, req.headers['X-Copy-From-Account'])
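# A simplified sketch of the parsing these tests describe (hypothetical
# helpers, not the real check_destination_header / check_copy_from_header /
# check_account_format): a leading slash is tolerated, the first path
# component is the container and the remainder -- slashes included -- is the
# object name, while an account name must not contain any slash at all.
def split_copy_path(value):
    parts = value.lstrip('/').split('/', 1)
    if len(parts) != 2 or not all(parts):
        raise ValueError('expected <container>/<object>, got %r' % value)
    return parts[0], parts[1]

def check_account_name(value):
    if '/' in value:
        raise ValueError('account name %r may not contain slashes' % value)
    return value

# split_copy_path('/c/o2') == ('c', 'o2')
# split_copy_path('c/subdir/o2') == ('c', 'subdir/o2')
# split_copy_path('bad_object') raises, as the bad-destination test expects.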
class TestConstraintsConfig(unittest.TestCase):

View File

@ -446,7 +446,7 @@ class TestTimestamp(unittest.TestCase):
self.assertTrue(float(timestamp) < maximum,
'%f is not smaller than %f given %r' % (
timestamp, maximum, value))
# direct comparision of timestamp works too
# direct comparison of timestamp works too
self.assertTrue(timestamp > minimum,
'%s is not bigger than %f given %r' % (
timestamp.normal, minimum, value))
@ -859,6 +859,24 @@ class TestUtils(unittest.TestCase):
real = utils.last_modified_date_to_timestamp(last_modified)
self.assertEqual(real, ts, "failed for %s" % last_modified)
def test_last_modified_date_to_timestamp_when_system_not_UTC(self):
try:
old_tz = os.environ.get('TZ')
# Western Argentina Summer Time. Found in glibc manual; this
# timezone always has a non-zero offset from UTC, so this test is
# always meaningful.
os.environ['TZ'] = 'WART4WARST,J1/0,J365/25'
self.assertEqual(utils.last_modified_date_to_timestamp(
'1970-01-01T00:00:00.000000'),
0.0)
finally:
if old_tz is not None:
os.environ['TZ'] = old_tz
else:
os.environ.pop('TZ')
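# One timezone-independent way to do this conversion (an illustrative
# sketch; not necessarily how swift.common.utils implements
# last_modified_date_to_timestamp): treat the value as UTC and count seconds
# from the epoch explicitly instead of going through the local timezone.
import calendar
import datetime

def iso8601_to_timestamp(value):
    dt = datetime.datetime.strptime(value, '%Y-%m-%dT%H:%M:%S.%f')
    return calendar.timegm(dt.timetuple()) + dt.microsecond / 1000000.0

# iso8601_to_timestamp('1970-01-01T00:00:00.000000') == 0.0 whatever TZ says.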
def test_backwards(self):
# Test swift.common.utils.backward

View File

@ -200,13 +200,23 @@ class TestWSGI(unittest.TestCase):
logger.info('testing')
self.assertEquals('proxy-server', log_name)
def test_get_socket_bad_values(self):
# first try with no port set
self.assertRaises(wsgi.ConfigFilePortError, wsgi.get_socket, {})
# next try with a bad port value set
self.assertRaises(wsgi.ConfigFilePortError, wsgi.get_socket,
{'bind_port': 'abc'})
self.assertRaises(wsgi.ConfigFilePortError, wsgi.get_socket,
{'bind_port': None})
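# The shape of the validation these assertions describe, as a simplified
# sketch (the exception class here is a local stand-in, not
# wsgi.ConfigFilePortError): a usable bind_port must be present and integral.
class _PortError(Exception):
    """Stand-in for wsgi.ConfigFilePortError in this sketch."""

def require_bind_port(conf):
    try:
        return int(conf['bind_port'])
    except (KeyError, TypeError, ValueError):
        raise _PortError('bind_port wrong value or missing in the config')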
def test_get_socket(self):
# stubs
conf = {}
ssl_conf = {
conf = {'bind_port': 54321}
ssl_conf = conf.copy()
ssl_conf.update({
'cert_file': '',
'key_file': '',
}
})
# mocks
class MockSocket(object):
@ -263,7 +273,7 @@ class TestWSGI(unittest.TestCase):
def test_address_in_use(self):
# stubs
conf = {}
conf = {'bind_port': 54321}
# mocks
def mock_listen(*args, **kwargs):

View File

@ -849,7 +849,7 @@ class TestReconciler(unittest.TestCase):
self.assertEqual(self.reconciler.stats['unavailable_container'], 1)
# we don't clean up anything
self.assertEqual(self.reconciler.stats['cleanup_object'], 0)
# and we definately should not pop_queue
# and we definitely should not pop_queue
self.assertFalse(deleted_container_entries)
self.assertEqual(self.reconciler.stats['retry'], 1)
@ -1001,7 +1001,7 @@ class TestReconciler(unittest.TestCase):
delete_headers.get('X-Backend-Storage-Policy-Index'), '1')
# and when we're done, we pop the entry from the queue
self.assertEqual(self.reconciler.stats['pop_queue'], 1)
# this mock recieved the name, it's encoded down in buffered_http
# this mock received the name, it's encoded down in buffered_http
self.assertEqual(deleted_container_entries,
[('.misplaced_objects', '3600', '1:/%s' % obj_name)])
self.assertEqual(self.reconciler.stats['success'], 1)

View File

@ -31,6 +31,7 @@ from swift.common.storage_policy import POLICIES
from test.unit.common import test_db_replicator
from test.unit import patch_policies
from contextlib import contextmanager
class TestReplicator(unittest.TestCase):
@ -341,6 +342,62 @@ class TestReplicatorSync(test_db_replicator.TestReplicatorSync):
remote_info['status_changed_at'],
info['status_changed_at']))
@contextmanager
def _wrap_merge_timestamps(self, broker, calls):
def fake_merge_timestamps(*args, **kwargs):
calls.append(args[0])
orig_merge_timestamps(*args, **kwargs)
orig_merge_timestamps = broker.merge_timestamps
broker.merge_timestamps = fake_merge_timestamps
try:
yield True
finally:
broker.merge_timestamps = orig_merge_timestamps
def test_sync_merge_timestamps(self):
ts = (Timestamp(t).internal for t in
itertools.count(int(time.time())))
# setup a local container
broker = self._get_broker('a', 'c', node_index=0)
put_timestamp = ts.next()
broker.initialize(put_timestamp, POLICIES.default.idx)
# setup remote container
remote_broker = self._get_broker('a', 'c', node_index=1)
remote_put_timestamp = ts.next()
remote_broker.initialize(remote_put_timestamp, POLICIES.default.idx)
# replicate, expect call to merge_timestamps on remote and local
daemon = replicator.ContainerReplicator({})
part, node = self._get_broker_part_node(remote_broker)
info = broker.get_replication_info()
local_calls = []
remote_calls = []
with self._wrap_merge_timestamps(broker, local_calls):
with self._wrap_merge_timestamps(broker, remote_calls):
success = daemon._repl_to_node(node, broker, part, info)
self.assertTrue(success)
self.assertEqual(1, len(remote_calls))
self.assertEqual(1, len(local_calls))
self.assertEqual(remote_put_timestamp,
broker.get_info()['put_timestamp'])
self.assertEqual(remote_put_timestamp,
remote_broker.get_info()['put_timestamp'])
# replicate again, no changes so expect no calls to merge_timestamps
info = broker.get_replication_info()
local_calls = []
remote_calls = []
with self._wrap_merge_timestamps(broker, local_calls):
with self._wrap_merge_timestamps(broker, remote_calls):
success = daemon._repl_to_node(node, broker, part, info)
self.assertTrue(success)
self.assertEqual(0, len(remote_calls))
self.assertEqual(0, len(local_calls))
self.assertEqual(remote_put_timestamp,
broker.get_info()['put_timestamp'])
self.assertEqual(remote_put_timestamp,
remote_broker.get_info()['put_timestamp'])
def test_sync_bogus_db_quarantines(self):
ts = (Timestamp(t).internal for t in
itertools.count(int(time.time())))
@ -851,6 +908,41 @@ class TestReplicatorSync(test_db_replicator.TestReplicatorSync):
a, c, name = path.lstrip('/').split('/')
self.assertEqual(most_recent_items[name], timestamp)
@contextmanager
def _wrap_update_reconciler_sync(self, broker, calls):
def wrapper_function(*args, **kwargs):
calls.append(args)
orig_function(*args, **kwargs)
orig_function = broker.update_reconciler_sync
broker.update_reconciler_sync = wrapper_function
try:
yield True
finally:
broker.update_reconciler_sync = orig_function
def test_post_replicate_hook(self):
ts = (Timestamp(t).internal for t in
itertools.count(int(time.time())))
broker = self._get_broker('a', 'c', node_index=0)
broker.initialize(ts.next(), 0)
broker.put_object('foo', ts.next(), 0, 'text/plain', 'xyz', deleted=0,
storage_policy_index=0)
info = broker.get_replication_info()
self.assertEqual(1, info['max_row'])
self.assertEqual(-1, broker.get_reconciler_sync())
daemon = replicator.ContainerReplicator({})
calls = []
with self._wrap_update_reconciler_sync(broker, calls):
daemon._post_replicate_hook(broker, info, [])
self.assertEqual(1, len(calls))
# repeated call to _post_replicate_hook with no change to info
# should not call update_reconciler_sync
calls = []
with self._wrap_update_reconciler_sync(broker, calls):
daemon._post_replicate_hook(broker, info, [])
self.assertEqual(0, len(calls))
if __name__ == '__main__':
unittest.main()

View File

@ -587,7 +587,7 @@ class TestContainerController(unittest.TestCase):
self.assertEqual(resp.headers['X-Backend-Storage-Policy-Index'],
str(non_default_policy.idx))
# put again without specifiying the storage policy
# put again without specifying the storage policy
req = Request.blank('/sda1/p/a/c', method='PUT', headers={
'X-Timestamp': ts.next(),
})

View File

@ -65,7 +65,9 @@ class TestObjectExpirer(TestCase):
'processes': 5,
'process': 1,
}
self.assertEqual((5, 1), x.get_process_values(vals))
x.get_process_values(vals)
self.assertEqual(x.processes, 5)
self.assertEqual(x.process, 1)
def test_get_process_values_from_config(self):
vals = {
@ -73,7 +75,9 @@ class TestObjectExpirer(TestCase):
'process': 1,
}
x = expirer.ObjectExpirer(vals)
self.assertEqual((5, 1), x.get_process_values({}))
x.get_process_values({})
self.assertEqual(x.processes, 5)
self.assertEqual(x.process, 1)
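# An assumption about what the processes/process pair is for (a sketch, not
# a claim about the expirer's exact implementation): N cooperating expirer
# processes shard the queue by hashing each entry, so any given entry is
# handled by exactly one of them without coordination.
import hashlib

def is_my_entry(name, process, processes):
    if processes <= 0:
        return True
    return int(hashlib.md5(name).hexdigest(), 16) % processes == process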
def test_get_process_values_negative_process(self):
vals = {
@ -129,11 +133,13 @@ class TestObjectExpirer(TestCase):
super(ObjectExpirer, self).__init__(conf)
self.processes = 3
self.deleted_objects = {}
self.obj_containers_in_order = []
def delete_object(self, actual_obj, timestamp, container, obj):
if container not in self.deleted_objects:
self.deleted_objects[container] = set()
self.deleted_objects[container].add(obj)
self.obj_containers_in_order.append(container)
class InternalClient(object):
@ -172,6 +178,7 @@ class TestObjectExpirer(TestCase):
self.assertEqual(containers[ukey].pop(),
deleted_objects[ukey].pop().decode('utf8'))
self.assertEqual(containers, deleted_objects)
self.assertEqual(len(set(x.obj_containers_in_order[:4])), 4)
def test_delete_object(self):
class InternalClient(object):
@ -451,18 +458,19 @@ class TestObjectExpirer(TestCase):
fake_swift = InternalClient(
[{'name': str(int(time() - 86400))}],
[{'name': '%d-actual-obj' % int(time() - 86400)}])
[{'name': '%d-acc/c/actual-obj' % int(time() - 86400)}])
x = expirer.ObjectExpirer({}, logger=self.logger, swift=fake_swift)
x.delete_actual_object = lambda o, t: None
x.pop_queue = lambda c, o: None
self.assertEqual(x.report_objects, 0)
x.run_once()
self.assertEqual(x.report_objects, 1)
self.assertEqual(
x.logger.log_dict['info'],
[(('Pass beginning; 1 possible containers; '
'2 possible objects',), {}),
(('Pass completed in 0s; 1 objects expired',), {})])
with mock.patch('swift.obj.expirer.MAX_OBJECTS_TO_CACHE', 0):
x.run_once()
self.assertEqual(x.report_objects, 1)
self.assertEqual(
x.logger.log_dict['info'],
[(('Pass beginning; 1 possible containers; '
'2 possible objects',), {}),
(('Pass completed in 0s; 1 objects expired',), {})])
def test_delete_actual_object_does_not_get_unicode(self):
class InternalClient(object):

View File

@ -554,6 +554,32 @@ class TestFuncs(unittest.TestCase):
self.assertEqual(base.have_quorum([404, 404], 2), True)
self.assertEqual(base.have_quorum([201, 404, 201, 201], 4), True)
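# The counting idea behind have_quorum as the assertions above use it (an
# illustrative sketch, not the Controller's code): responses are grouped by
# status class, and a class reaches quorum once a majority of the expected
# nodes fall into it.
def have_quorum_sketch(statuses, node_count):
    quorum = node_count // 2 + 1
    counts = {}
    for status in statuses:
        counts[status // 100] = counts.get(status // 100, 0) + 1
    return any(count >= quorum for count in counts.values())

# have_quorum_sketch([404, 404], 2) and
# have_quorum_sketch([201, 404, 201, 201], 4) are both True, as asserted.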
def test_best_response_overrides(self):
base = Controller(self.app)
responses = [
(302, 'Found', '', 'The resource has moved temporarily.'),
(100, 'Continue', '', ''),
(404, 'Not Found', '', 'Custom body'),
]
server_type = "Base DELETE"
req = Request.blank('/v1/a/c/o', method='DELETE')
statuses, reasons, headers, bodies = zip(*responses)
# First test that you can't make a quorum with only overridden
# responses
overrides = {302: 204, 100: 204}
resp = base.best_response(req, statuses, reasons, bodies, server_type,
headers=headers, overrides=overrides)
self.assertEqual(resp.status, '503 Internal Server Error')
# next make a 404 quorum and make sure the last delete (real) 404
# status is the one returned.
overrides = {100: 404}
resp = base.best_response(req, statuses, reasons, bodies, server_type,
headers=headers, overrides=overrides)
self.assertEqual(resp.status, '404 Not Found')
self.assertEqual(resp.body, 'Custom body')
def test_range_fast_forward(self):
req = Request.blank('/')
handler = GetOrHeadHandler(None, req, None, None, None, None, {})

View File

@ -237,6 +237,31 @@ class TestObjController(unittest.TestCase):
resp = req.get_response(self.app)
self.assertEquals(resp.status_int, 204)
def test_DELETE_half_not_found_statuses(self):
self.obj_ring.set_replicas(4)
req = swift.common.swob.Request.blank('/v1/a/c/o', method='DELETE')
with set_http_connect(404, 204, 404, 204):
resp = req.get_response(self.app)
self.assertEquals(resp.status_int, 204)
def test_DELETE_half_not_found_headers_and_body(self):
# Transformed responses have bogus bodies and headers, so make sure we
# send the client headers and body from a real node's response.
self.obj_ring.set_replicas(4)
status_codes = (404, 404, 204, 204)
bodies = ('not found', 'not found', '', '')
headers = [{}, {}, {'Pick-Me': 'yes'}, {'Pick-Me': 'yes'}]
req = swift.common.swob.Request.blank('/v1/a/c/o', method='DELETE')
with set_http_connect(*status_codes, body_iter=bodies,
headers=headers):
resp = req.get_response(self.app)
self.assertEquals(resp.status_int, 204)
self.assertEquals(resp.headers.get('Pick-Me'), 'yes')
self.assertEquals(resp.body, '')
def test_DELETE_not_found(self):
req = swift.common.swob.Request.blank('/v1/a/c/o', method='DELETE')
with set_http_connect(404, 404, 204):
@ -260,6 +285,152 @@ class TestObjController(unittest.TestCase):
resp = req.get_response(self.app)
self.assertEquals(resp.status_int, 202)
def test_POST_delete_at(self):
t = str(int(time.time() + 100))
req = swob.Request.blank('/v1/a/c/o', method='POST',
headers={'Content-Type': 'foo/bar',
'X-Delete-At': t})
post_headers = []
def capture_headers(ip, port, device, part, method, path, headers,
**kwargs):
if method == 'POST':
post_headers.append(headers)
x_newest_responses = [200] * self.obj_ring.replicas + \
[404] * self.obj_ring.max_more_nodes
post_resp = [200] * self.obj_ring.replicas
codes = x_newest_responses + post_resp
with set_http_connect(*codes, give_connect=capture_headers):
resp = req.get_response(self.app)
self.assertEquals(resp.status_int, 200)
for given_headers in post_headers:
self.assertEquals(given_headers.get('X-Delete-At'), t)
self.assertTrue('X-Delete-At-Host' in given_headers)
self.assertTrue('X-Delete-At-Device' in given_headers)
self.assertTrue('X-Delete-At-Partition' in given_headers)
self.assertTrue('X-Delete-At-Container' in given_headers)
def test_POST_non_int_delete_after(self):
t = str(int(time.time() + 100)) + '.1'
req = swob.Request.blank('/v1/a/c/o', method='POST',
headers={'Content-Type': 'foo/bar',
'X-Delete-After': t})
resp = req.get_response(self.app)
self.assertEqual(resp.status_int, 400)
self.assertEqual('Non-integer X-Delete-After', resp.body)
def test_POST_negative_delete_after(self):
req = swob.Request.blank('/v1/a/c/o', method='POST',
headers={'Content-Type': 'foo/bar',
'X-Delete-After': '-60'})
resp = req.get_response(self.app)
self.assertEqual(resp.status_int, 400)
self.assertEqual('X-Delete-After in past', resp.body)
def test_POST_delete_at_non_integer(self):
t = str(int(time.time() + 100)) + '.1'
req = swob.Request.blank('/v1/a/c/o', method='POST',
headers={'Content-Type': 'foo/bar',
'X-Delete-At': t})
resp = req.get_response(self.app)
self.assertEqual(resp.status_int, 400)
self.assertEqual('Non-integer X-Delete-At', resp.body)
def test_POST_delete_at_in_past(self):
t = str(int(time.time() - 100))
req = swob.Request.blank('/v1/a/c/o', method='POST',
headers={'Content-Type': 'foo/bar',
'X-Delete-At': t})
resp = req.get_response(self.app)
self.assertEqual(resp.status_int, 400)
self.assertEqual('X-Delete-At in past', resp.body)
def test_PUT_converts_delete_after_to_delete_at(self):
req = swob.Request.blank('/v1/a/c/o', method='PUT', body='',
headers={'Content-Type': 'foo/bar',
'X-Delete-After': '60'})
put_headers = []
def capture_headers(ip, port, device, part, method, path, headers,
**kwargs):
if method == 'PUT':
put_headers.append(headers)
codes = [201] * self.obj_ring.replicas
t = time.time()
with set_http_connect(*codes, give_connect=capture_headers):
with mock.patch('time.time', lambda: t):
resp = req.get_response(self.app)
self.assertEquals(resp.status_int, 201)
expected_delete_at = str(int(t) + 60)
for given_headers in put_headers:
self.assertEquals(given_headers.get('X-Delete-At'),
expected_delete_at)
self.assertTrue('X-Delete-At-Host' in given_headers)
self.assertTrue('X-Delete-At-Device' in given_headers)
self.assertTrue('X-Delete-At-Partition' in given_headers)
self.assertTrue('X-Delete-At-Container' in given_headers)
def test_PUT_non_int_delete_after(self):
t = str(int(time.time() + 100)) + '.1'
req = swob.Request.blank('/v1/a/c/o', method='PUT', body='',
headers={'Content-Type': 'foo/bar',
'X-Delete-After': t})
with set_http_connect():
resp = req.get_response(self.app)
self.assertEqual(resp.status_int, 400)
self.assertEqual('Non-integer X-Delete-After', resp.body)
def test_PUT_negative_delete_after(self):
req = swob.Request.blank('/v1/a/c/o', method='PUT', body='',
headers={'Content-Type': 'foo/bar',
'X-Delete-After': '-60'})
with set_http_connect():
resp = req.get_response(self.app)
self.assertEqual(resp.status_int, 400)
self.assertEqual('X-Delete-After in past', resp.body)
def test_PUT_delete_at(self):
t = str(int(time.time() + 100))
req = swob.Request.blank('/v1/a/c/o', method='PUT', body='',
headers={'Content-Type': 'foo/bar',
'X-Delete-At': t})
put_headers = []
def capture_headers(ip, port, device, part, method, path, headers,
**kwargs):
if method == 'PUT':
put_headers.append(headers)
codes = [201] * self.obj_ring.replicas
with set_http_connect(*codes, give_connect=capture_headers):
resp = req.get_response(self.app)
self.assertEquals(resp.status_int, 201)
for given_headers in put_headers:
self.assertEquals(given_headers.get('X-Delete-At'), t)
self.assertTrue('X-Delete-At-Host' in given_headers)
self.assertTrue('X-Delete-At-Device' in given_headers)
self.assertTrue('X-Delete-At-Partition' in given_headers)
self.assertTrue('X-Delete-At-Container' in given_headers)
def test_PUT_delete_at_non_integer(self):
t = str(int(time.time() - 100)) + '.1'
req = swob.Request.blank('/v1/a/c/o', method='PUT', body='',
headers={'Content-Type': 'foo/bar',
'X-Delete-At': t})
with set_http_connect():
resp = req.get_response(self.app)
self.assertEqual(resp.status_int, 400)
self.assertEqual('Non-integer X-Delete-At', resp.body)
def test_PUT_delete_at_in_past(self):
t = str(int(time.time() - 100))
req = swob.Request.blank('/v1/a/c/o', method='PUT', body='',
headers={'Content-Type': 'foo/bar',
'X-Delete-At': t})
with set_http_connect():
resp = req.get_response(self.app)
self.assertEqual(resp.status_int, 400)
self.assertEqual('X-Delete-At in past', resp.body)
def test_container_sync_put_x_timestamp_not_found(self):
test_indexes = [None] + [int(p) for p in POLICIES]
for policy_index in test_indexes:

View File

@ -183,6 +183,20 @@ def do_setup(the_object_server):
'x-trans-id': 'test'})
resp = conn.getresponse()
assert(resp.status == 201)
# Create another account
# used for account-to-account tests
ts = normalize_timestamp(time.time())
partition, nodes = prosrv.account_ring.get_nodes('a1')
for node in nodes:
conn = swift.proxy.controllers.obj.http_connect(node['ip'],
node['port'],
node['device'],
partition, 'PUT',
'/a1',
{'X-Timestamp': ts,
'x-trans-id': 'test'})
resp = conn.getresponse()
assert(resp.status == 201)
# Create containers, 1 per test policy
sock = connect_tcp(('localhost', prolis.getsockname()[1]))
fd = sock.makefile()
@ -192,6 +206,18 @@ def do_setup(the_object_server):
fd.flush()
headers = readuntil2crlfs(fd)
exp = 'HTTP/1.1 201'
assert headers[:len(exp)] == exp, "Expected '%s', encountered '%s'" % (
exp, headers[:len(exp)])
# Create container in other account
# used for account-to-account tests
sock = connect_tcp(('localhost', prolis.getsockname()[1]))
fd = sock.makefile()
fd.write('PUT /v1/a1/c1 HTTP/1.1\r\nHost: localhost\r\n'
'Connection: close\r\nX-Auth-Token: t\r\n'
'Content-Length: 0\r\n\r\n')
fd.flush()
headers = readuntil2crlfs(fd)
exp = 'HTTP/1.1 201'
assert headers[:len(exp)] == exp, "Expected '%s', encountered '%s'" % (
exp, headers[:len(exp)])
@ -295,6 +321,17 @@ def set_http_connect(*args, **kwargs):
return new_connect
def _make_callback_func(calls):
def callback(ipaddr, port, device, partition, method, path,
headers=None, query_string=None, ssl=False):
context = {}
context['method'] = method
context['path'] = path
context['headers'] = headers or {}
calls.append(context)
return callback
# tests
class TestController(unittest.TestCase):
@ -1534,7 +1571,7 @@ class TestObjectController(unittest.TestCase):
# HEAD HEAD GET GET HEAD GET GET GET PUT PUT
# PUT DEL DEL DEL
set_http_connect(200, 200, 200, 200, 200, 200, 200, 200, 201, 201,
201, 200, 200, 200,
201, 204, 204, 204,
give_connect=test_connect,
body_iter=body_iter,
headers={'x-versions-location': 'foo'})
@ -1546,6 +1583,63 @@ class TestObjectController(unittest.TestCase):
controller.DELETE(req)
self.assertEquals(test_errors, [])
@patch_policies([
StoragePolicy.from_conf(
REPL_POLICY, {'idx': 0, 'name': 'zero', 'is_default': True,
'object_ring': FakeRing()}),
StoragePolicy.from_conf(
REPL_POLICY, {'idx': 1, 'name': 'one', 'is_default': False,
'object_ring': FakeRing()})
])
def test_DELETE_on_expired_versioned_object(self):
methods = set()
def test_connect(ipaddr, port, device, partition, method, path,
headers=None, query_string=None):
methods.add((method, path))
def fake_container_info(account, container, req):
return {'status': 200, 'sync_key': None,
'meta': {}, 'cors': {'allow_origin': None,
'expose_headers': None,
'max_age': None},
'sysmeta': {}, 'read_acl': None, 'object_count': None,
'write_acl': None, 'versions': 'foo',
'partition': 1, 'bytes': None, 'storage_policy': '1',
'nodes': [{'zone': 0, 'ip': '10.0.0.0', 'region': 0,
'id': 0, 'device': 'sda', 'port': 1000},
{'zone': 1, 'ip': '10.0.0.1', 'region': 1,
'id': 1, 'device': 'sdb', 'port': 1001},
{'zone': 2, 'ip': '10.0.0.2', 'region': 0,
'id': 2, 'device': 'sdc', 'port': 1002}]}
def fake_list_iter(container, prefix, env):
object_list = [{'name': '1'}, {'name': '2'}, {'name': '3'}]
for obj in object_list:
yield obj
with save_globals():
controller = proxy_server.ObjectController(self.app,
'a', 'c', 'o')
controller.container_info = fake_container_info
controller._listing_iter = fake_list_iter
set_http_connect(404, 404, 404, # get for the previous version
200, 200, 200, # get for the pre-previous
201, 201, 201, # put move the pre-previous
204, 204, 204, # delete for the pre-previous
give_connect=test_connect)
req = Request.blank('/v1/a/c/o',
environ={'REQUEST_METHOD': 'DELETE'})
self.app.memcache.store = {}
self.app.update_request(req)
controller.DELETE(req)
exp_methods = [('GET', '/a/foo/3'),
('GET', '/a/foo/2'),
('PUT', '/a/c/o'),
('DELETE', '/a/foo/2')]
self.assertEquals(set(exp_methods), (methods))
def test_PUT_auto_content_type(self):
with save_globals():
controller = proxy_server.ObjectController(self.app, 'account',
@ -1611,6 +1705,7 @@ class TestObjectController(unittest.TestCase):
test_status_map((200, 200, 201, 201, 500), 201)
test_status_map((200, 200, 204, 404, 404), 404)
test_status_map((200, 200, 204, 500, 404), 503)
test_status_map((200, 200, 202, 202, 204), 204)
def test_PUT_connect_exceptions(self):
with save_globals():
@ -1888,9 +1983,9 @@ class TestObjectController(unittest.TestCase):
test_status_map((200, 200, 204, 204, 204), 204)
test_status_map((200, 200, 204, 204, 500), 204)
test_status_map((200, 200, 204, 404, 404), 404)
test_status_map((200, 200, 204, 500, 404), 503)
test_status_map((200, 204, 500, 500, 404), 503)
test_status_map((200, 200, 404, 404, 404), 404)
test_status_map((200, 200, 404, 404, 500), 404)
test_status_map((200, 200, 400, 400, 400), 400)
def test_HEAD(self):
with save_globals():
@ -2640,6 +2735,42 @@ class TestObjectController(unittest.TestCase):
(200, 200, 200, 204, 204, 204), 503,
raise_exc=True)
def test_PUT_error_limiting(self):
with save_globals():
controller = proxy_server.ObjectController(self.app, 'account',
'container', 'object')
controller.app.sort_nodes = lambda l: l
object_ring = controller.app.get_object_ring(None)
# acc con obj obj obj
self.assert_status_map(controller.PUT, (200, 200, 503, 200, 200),
200)
# 2, not 1, because assert_status_map() calls the method twice
self.assertEquals(object_ring.devs[0].get('errors', 0), 2)
self.assertEquals(object_ring.devs[1].get('errors', 0), 0)
self.assertEquals(object_ring.devs[2].get('errors', 0), 0)
self.assert_('last_error' in object_ring.devs[0])
self.assert_('last_error' not in object_ring.devs[1])
self.assert_('last_error' not in object_ring.devs[2])
def test_PUT_error_limiting_last_node(self):
with save_globals():
controller = proxy_server.ObjectController(self.app, 'account',
'container', 'object')
controller.app.sort_nodes = lambda l: l
object_ring = controller.app.get_object_ring(None)
# acc con obj obj obj
self.assert_status_map(controller.PUT, (200, 200, 200, 200, 503),
200)
# 2, not 1, because assert_status_map() calls the method twice
self.assertEquals(object_ring.devs[0].get('errors', 0), 0)
self.assertEquals(object_ring.devs[1].get('errors', 0), 0)
self.assertEquals(object_ring.devs[2].get('errors', 0), 2)
self.assert_('last_error' not in object_ring.devs[0])
self.assert_('last_error' not in object_ring.devs[1])
self.assert_('last_error' in object_ring.devs[2])
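# A simplified stand-in for the error-limiting bookkeeping these assertions
# inspect (not proxy_server's actual error_limit logic): a failed backend
# response bumps the device's 'errors' counter and records 'last_error', and
# a device whose recent error count passes a threshold can be skipped on
# later requests.
import time

def record_error(dev):
    dev['errors'] = dev.get('errors', 0) + 1
    dev['last_error'] = time.time()

def error_limited(dev, suppression_limit=10, suppression_interval=60):
    if dev.get('errors', 0) <= suppression_limit:
        return False
    return time.time() - dev.get('last_error', 0) < suppression_interval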
def test_acc_or_con_missing_returns_404(self):
with save_globals():
self.app.memcache = FakeMemcacheReturnsNone()
@ -2904,6 +3035,19 @@ class TestObjectController(unittest.TestCase):
self.assertEquals(resp.status_int, 201)
self.assertEquals(resp.headers['x-copied-from'], 'c/o')
def test_basic_put_with_x_copy_from_account(self):
req = Request.blank('/v1/a1/c1/o', environ={'REQUEST_METHOD': 'PUT'},
headers={'Content-Length': '0',
'X-Copy-From': 'c/o',
'X-Copy-From-Account': 'a'})
status_list = (200, 200, 200, 200, 200, 200, 200, 201, 201, 201)
# acct cont acc1 con1 objc objc objc obj obj obj
with self.controller_context(req, *status_list) as controller:
resp = controller.PUT(req)
self.assertEquals(resp.status_int, 201)
self.assertEquals(resp.headers['x-copied-from'], 'c/o')
self.assertEquals(resp.headers['x-copied-from-account'], 'a')
def test_basic_put_with_x_copy_from_across_container(self):
req = Request.blank('/v1/a/c/o', environ={'REQUEST_METHOD': 'PUT'},
headers={'Content-Length': '0',
@ -2915,6 +3059,19 @@ class TestObjectController(unittest.TestCase):
self.assertEquals(resp.status_int, 201)
self.assertEquals(resp.headers['x-copied-from'], 'c2/o')
def test_basic_put_with_x_copy_from_across_container_and_account(self):
req = Request.blank('/v1/a1/c1/o', environ={'REQUEST_METHOD': 'PUT'},
headers={'Content-Length': '0',
'X-Copy-From': 'c2/o',
'X-Copy-From-Account': 'a'})
status_list = (200, 200, 200, 200, 200, 200, 200, 201, 201, 201)
# acct cont acc1 con1 objc objc objc obj obj obj
with self.controller_context(req, *status_list) as controller:
resp = controller.PUT(req)
self.assertEquals(resp.status_int, 201)
self.assertEquals(resp.headers['x-copied-from'], 'c2/o')
self.assertEquals(resp.headers['x-copied-from-account'], 'a')
def test_copy_non_zero_content_length(self):
req = Request.blank('/v1/a/c/o', environ={'REQUEST_METHOD': 'PUT'},
headers={'Content-Length': '5',
@ -2925,6 +3082,17 @@ class TestObjectController(unittest.TestCase):
resp = controller.PUT(req)
self.assertEquals(resp.status_int, 400)
def test_copy_non_zero_content_length_with_account(self):
req = Request.blank('/v1/a1/c1/o', environ={'REQUEST_METHOD': 'PUT'},
headers={'Content-Length': '5',
'X-Copy-From': 'c/o',
'X-Copy-From-Account': 'a'})
status_list = (200, 200)
# acct cont
with self.controller_context(req, *status_list) as controller:
resp = controller.PUT(req)
self.assertEquals(resp.status_int, 400)
def test_copy_with_slashes_in_x_copy_from(self):
# extra source path parsing
req = Request.blank('/v1/a/c/o', environ={'REQUEST_METHOD': 'PUT'},
@ -2937,6 +3105,20 @@ class TestObjectController(unittest.TestCase):
self.assertEquals(resp.status_int, 201)
self.assertEquals(resp.headers['x-copied-from'], 'c/o/o2')
def test_copy_with_slashes_in_x_copy_from_and_account(self):
# extra source path parsing
req = Request.blank('/v1/a1/c1/o', environ={'REQUEST_METHOD': 'PUT'},
headers={'Content-Length': '0',
'X-Copy-From': 'c/o/o2',
'X-Copy-From-Account': 'a'})
status_list = (200, 200, 200, 200, 200, 200, 200, 201, 201, 201)
# acct cont acc1 con1 objc objc objc obj obj obj
with self.controller_context(req, *status_list) as controller:
resp = controller.PUT(req)
self.assertEquals(resp.status_int, 201)
self.assertEquals(resp.headers['x-copied-from'], 'c/o/o2')
self.assertEquals(resp.headers['x-copied-from-account'], 'a')
def test_copy_with_spaces_in_x_copy_from(self):
# space in source path
req = Request.blank('/v1/a/c/o', environ={'REQUEST_METHOD': 'PUT'},
@ -2949,6 +3131,20 @@ class TestObjectController(unittest.TestCase):
self.assertEquals(resp.status_int, 201)
self.assertEquals(resp.headers['x-copied-from'], 'c/o%20o2')
def test_copy_with_spaces_in_x_copy_from_and_account(self):
# space in source path
req = Request.blank('/v1/a1/c1/o', environ={'REQUEST_METHOD': 'PUT'},
headers={'Content-Length': '0',
'X-Copy-From': 'c/o%20o2',
'X-Copy-From-Account': 'a'})
status_list = (200, 200, 200, 200, 200, 200, 200, 201, 201, 201)
# acct cont acc1 con1 objc objc objc obj obj obj
with self.controller_context(req, *status_list) as controller:
resp = controller.PUT(req)
self.assertEquals(resp.status_int, 201)
self.assertEquals(resp.headers['x-copied-from'], 'c/o%20o2')
self.assertEquals(resp.headers['x-copied-from-account'], 'a')
def test_copy_with_leading_slash_in_x_copy_from(self):
# repeat tests with leading /
req = Request.blank('/v1/a/c/o', environ={'REQUEST_METHOD': 'PUT'},
@ -2961,6 +3157,20 @@ class TestObjectController(unittest.TestCase):
self.assertEquals(resp.status_int, 201)
self.assertEquals(resp.headers['x-copied-from'], 'c/o')
def test_copy_with_leading_slash_in_x_copy_from_and_account(self):
# repeat tests with leading /
req = Request.blank('/v1/a1/c1/o', environ={'REQUEST_METHOD': 'PUT'},
headers={'Content-Length': '0',
'X-Copy-From': '/c/o',
'X-Copy-From-Account': 'a'})
status_list = (200, 200, 200, 200, 200, 200, 200, 201, 201, 201)
# acct cont acc1 con1 objc objc objc obj obj obj
with self.controller_context(req, *status_list) as controller:
resp = controller.PUT(req)
self.assertEquals(resp.status_int, 201)
self.assertEquals(resp.headers['x-copied-from'], 'c/o')
self.assertEquals(resp.headers['x-copied-from-account'], 'a')
def test_copy_with_leading_slash_and_slashes_in_x_copy_from(self):
req = Request.blank('/v1/a/c/o', environ={'REQUEST_METHOD': 'PUT'},
headers={'Content-Length': '0',
@ -2972,6 +3182,19 @@ class TestObjectController(unittest.TestCase):
self.assertEquals(resp.status_int, 201)
self.assertEquals(resp.headers['x-copied-from'], 'c/o/o2')
def test_copy_with_leading_slash_and_slashes_in_x_copy_from_acct(self):
req = Request.blank('/v1/a1/c1/o', environ={'REQUEST_METHOD': 'PUT'},
headers={'Content-Length': '0',
'X-Copy-From': '/c/o/o2',
'X-Copy-From-Account': 'a'})
status_list = (200, 200, 200, 200, 200, 200, 200, 201, 201, 201)
# acct cont acc1 con1 objc objc objc obj obj obj
with self.controller_context(req, *status_list) as controller:
resp = controller.PUT(req)
self.assertEquals(resp.status_int, 201)
self.assertEquals(resp.headers['x-copied-from'], 'c/o/o2')
self.assertEquals(resp.headers['x-copied-from-account'], 'a')
def test_copy_with_no_object_in_x_copy_from(self):
req = Request.blank('/v1/a/c/o', environ={'REQUEST_METHOD': 'PUT'},
headers={'Content-Length': '0',
@ -2987,6 +3210,22 @@ class TestObjectController(unittest.TestCase):
self.fail('Invalid X-Copy-From did not raise '
'client error')
def test_copy_with_no_object_in_x_copy_from_and_account(self):
req = Request.blank('/v1/a1/c1/o', environ={'REQUEST_METHOD': 'PUT'},
headers={'Content-Length': '0',
'X-Copy-From': '/c',
'X-Copy-From-Account': 'a'})
status_list = (200, 200)
# acct cont
with self.controller_context(req, *status_list) as controller:
try:
controller.PUT(req)
except HTTPException as resp:
self.assertEquals(resp.status_int // 100, 4) # client error
else:
self.fail('Invalid X-Copy-From did not raise '
'client error')
def test_copy_server_error_reading_source(self):
req = Request.blank('/v1/a/c/o', environ={'REQUEST_METHOD': 'PUT'},
headers={'Content-Length': '0',
@ -2997,6 +3236,17 @@ class TestObjectController(unittest.TestCase):
resp = controller.PUT(req)
self.assertEquals(resp.status_int, 503)
def test_copy_server_error_reading_source_and_account(self):
req = Request.blank('/v1/a1/c1/o', environ={'REQUEST_METHOD': 'PUT'},
headers={'Content-Length': '0',
'X-Copy-From': '/c/o',
'X-Copy-From-Account': 'a'})
status_list = (200, 200, 200, 200, 503, 503, 503)
# acct cont acct cont objc objc objc
with self.controller_context(req, *status_list) as controller:
resp = controller.PUT(req)
self.assertEquals(resp.status_int, 503)
def test_copy_not_found_reading_source(self):
req = Request.blank('/v1/a/c/o', environ={'REQUEST_METHOD': 'PUT'},
headers={'Content-Length': '0',
@ -3008,6 +3258,18 @@ class TestObjectController(unittest.TestCase):
resp = controller.PUT(req)
self.assertEquals(resp.status_int, 404)
def test_copy_not_found_reading_source_and_account(self):
req = Request.blank('/v1/a1/c1/o', environ={'REQUEST_METHOD': 'PUT'},
headers={'Content-Length': '0',
'X-Copy-From': '/c/o',
'X-Copy-From-Account': 'a'})
# not found
status_list = (200, 200, 200, 200, 404, 404, 404)
# acct cont acct cont objc objc objc
with self.controller_context(req, *status_list) as controller:
resp = controller.PUT(req)
self.assertEquals(resp.status_int, 404)
def test_copy_with_some_missing_sources(self):
req = Request.blank('/v1/a/c/o', environ={'REQUEST_METHOD': 'PUT'},
headers={'Content-Length': '0',
@ -3018,6 +3280,17 @@ class TestObjectController(unittest.TestCase):
resp = controller.PUT(req)
self.assertEquals(resp.status_int, 201)
def test_copy_with_some_missing_sources_and_account(self):
req = Request.blank('/v1/a1/c1/o', environ={'REQUEST_METHOD': 'PUT'},
headers={'Content-Length': '0',
'X-Copy-From': '/c/o',
'X-Copy-From-Account': 'a'})
status_list = (200, 200, 200, 200, 404, 404, 200, 201, 201, 201)
# acct cont acct cont objc objc objc obj obj obj
with self.controller_context(req, *status_list) as controller:
resp = controller.PUT(req)
self.assertEquals(resp.status_int, 201)
def test_copy_with_object_metadata(self):
req = Request.blank('/v1/a/c/o', environ={'REQUEST_METHOD': 'PUT'},
headers={'Content-Length': '0',
@ -3033,6 +3306,22 @@ class TestObjectController(unittest.TestCase):
self.assertEquals(resp.headers.get('x-object-meta-ours'), 'okay')
self.assertEquals(resp.headers.get('x-delete-at'), '9876543210')
def test_copy_with_object_metadata_and_account(self):
req = Request.blank('/v1/a1/c1/o', environ={'REQUEST_METHOD': 'PUT'},
headers={'Content-Length': '0',
'X-Copy-From': '/c/o',
'X-Object-Meta-Ours': 'okay',
'X-Copy-From-Account': 'a'})
# test object metadata
status_list = (200, 200, 200, 200, 200, 200, 200, 201, 201, 201)
# acct cont acct cont objc objc objc obj obj obj
with self.controller_context(req, *status_list) as controller:
resp = controller.PUT(req)
self.assertEquals(resp.status_int, 201)
self.assertEquals(resp.headers.get('x-object-meta-test'), 'testing')
self.assertEquals(resp.headers.get('x-object-meta-ours'), 'okay')
self.assertEquals(resp.headers.get('x-delete-at'), '9876543210')
def test_copy_source_larger_than_max_file_size(self):
req = Request.blank('/v1/a/c/o', environ={'REQUEST_METHOD': 'PUT'},
headers={'Content-Length': '0',
@ -3070,6 +3359,19 @@ class TestObjectController(unittest.TestCase):
self.assertEquals(resp.status_int, 201)
self.assertEquals(resp.headers['x-copied-from'], 'c/o')
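# The COPY *_account variants below use a Destination-Account header to copy
# into account 'a1' while reading the source from account 'a'; as with the
# PUT-based copies above, the status lists carry two extra lookups for the
# second account's account and container info.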
def test_basic_COPY_account(self):
req = Request.blank('/v1/a/c/o',
environ={'REQUEST_METHOD': 'COPY'},
headers={'Destination': 'c1/o2',
'Destination-Account': 'a1'})
status_list = (200, 200, 200, 200, 200, 200, 200, 201, 201, 201)
# acct cont acct cont objc objc objc obj obj obj
with self.controller_context(req, *status_list) as controller:
resp = controller.COPY(req)
self.assertEquals(resp.status_int, 201)
self.assertEquals(resp.headers['x-copied-from'], 'c/o')
self.assertEquals(resp.headers['x-copied-from-account'], 'a')
def test_COPY_across_containers(self):
req = Request.blank('/v1/a/c/o',
environ={'REQUEST_METHOD': 'COPY'},
@ -3092,6 +3394,19 @@ class TestObjectController(unittest.TestCase):
self.assertEquals(resp.status_int, 201)
self.assertEquals(resp.headers['x-copied-from'], 'c/o/o2')
def test_COPY_account_source_with_slashes_in_name(self):
req = Request.blank('/v1/a/c/o/o2',
environ={'REQUEST_METHOD': 'COPY'},
headers={'Destination': 'c1/o',
'Destination-Account': 'a1'})
status_list = (200, 200, 200, 200, 200, 200, 200, 201, 201, 201)
# acct cont acct cont objc objc objc obj obj obj
with self.controller_context(req, *status_list) as controller:
resp = controller.COPY(req)
self.assertEquals(resp.status_int, 201)
self.assertEquals(resp.headers['x-copied-from'], 'c/o/o2')
self.assertEquals(resp.headers['x-copied-from-account'], 'a')
def test_COPY_destination_leading_slash(self):
req = Request.blank('/v1/a/c/o',
environ={'REQUEST_METHOD': 'COPY'},
@ -3103,6 +3418,19 @@ class TestObjectController(unittest.TestCase):
self.assertEquals(resp.status_int, 201)
self.assertEquals(resp.headers['x-copied-from'], 'c/o')
def test_COPY_account_destination_leading_slash(self):
req = Request.blank('/v1/a/c/o',
environ={'REQUEST_METHOD': 'COPY'},
headers={'Destination': '/c1/o',
'Destination-Account': 'a1'})
status_list = (200, 200, 200, 200, 200, 200, 200, 201, 201, 201)
# acct cont acct cont objc objc objc obj obj obj
with self.controller_context(req, *status_list) as controller:
resp = controller.COPY(req)
self.assertEquals(resp.status_int, 201)
self.assertEquals(resp.headers['x-copied-from'], 'c/o')
self.assertEquals(resp.headers['x-copied-from-account'], 'a')
def test_COPY_source_with_slashes_destination_leading_slash(self):
req = Request.blank('/v1/a/c/o/o2',
environ={'REQUEST_METHOD': 'COPY'},
@ -3114,14 +3442,35 @@ class TestObjectController(unittest.TestCase):
self.assertEquals(resp.status_int, 201)
self.assertEquals(resp.headers['x-copied-from'], 'c/o/o2')
def test_COPY_account_source_with_slashes_destination_leading_slash(self):
req = Request.blank('/v1/a/c/o/o2',
environ={'REQUEST_METHOD': 'COPY'},
headers={'Destination': '/c1/o',
'Destination-Account': 'a1'})
status_list = (200, 200, 200, 200, 200, 200, 200, 201, 201, 201)
# acct cont acct cont objc objc objc obj obj obj
with self.controller_context(req, *status_list) as controller:
resp = controller.COPY(req)
self.assertEquals(resp.status_int, 201)
self.assertEquals(resp.headers['x-copied-from'], 'c/o/o2')
self.assertEquals(resp.headers['x-copied-from-account'], 'a')
def test_COPY_no_object_in_destination(self):
req = Request.blank('/v1/a/c/o',
environ={'REQUEST_METHOD': 'COPY'},
headers={'Destination': 'c_o'})
status_list = [] # no requests needed
with self.controller_context(req, *status_list) as controller:
resp = controller.COPY(req)
self.assertEquals(resp.status_int, 412)
self.assertRaises(HTTPException, controller.COPY, req)
def test_COPY_account_no_object_in_destination(self):
req = Request.blank('/v1/a/c/o',
environ={'REQUEST_METHOD': 'COPY'},
headers={'Destination': 'c_o',
'Destination-Account': 'a1'})
status_list = [] # no requests needed
with self.controller_context(req, *status_list) as controller:
self.assertRaises(HTTPException, controller.COPY, req)
def test_COPY_server_error_reading_source(self):
req = Request.blank('/v1/a/c/o',
@ -3133,6 +3482,17 @@ class TestObjectController(unittest.TestCase):
resp = controller.COPY(req)
self.assertEquals(resp.status_int, 503)
def test_COPY_account_server_error_reading_source(self):
req = Request.blank('/v1/a/c/o',
environ={'REQUEST_METHOD': 'COPY'},
headers={'Destination': '/c1/o',
'Destination-Account': 'a1'})
status_list = (200, 200, 200, 200, 503, 503, 503)
# acct cont acct cont objc objc objc
with self.controller_context(req, *status_list) as controller:
resp = controller.COPY(req)
self.assertEquals(resp.status_int, 503)
def test_COPY_not_found_reading_source(self):
req = Request.blank('/v1/a/c/o',
environ={'REQUEST_METHOD': 'COPY'},
@ -3143,6 +3503,17 @@ class TestObjectController(unittest.TestCase):
resp = controller.COPY(req)
self.assertEquals(resp.status_int, 404)
def test_COPY_account_not_found_reading_source(self):
req = Request.blank('/v1/a/c/o',
environ={'REQUEST_METHOD': 'COPY'},
headers={'Destination': '/c1/o',
'Destination-Account': 'a1'})
status_list = (200, 200, 200, 200, 404, 404, 404)
# acct cont acct cont objc objc objc
with self.controller_context(req, *status_list) as controller:
resp = controller.COPY(req)
self.assertEquals(resp.status_int, 404)
def test_COPY_with_some_missing_sources(self):
req = Request.blank('/v1/a/c/o',
environ={'REQUEST_METHOD': 'COPY'},
@ -3153,6 +3524,17 @@ class TestObjectController(unittest.TestCase):
resp = controller.COPY(req)
self.assertEquals(resp.status_int, 201)
def test_COPY_account_with_some_missing_sources(self):
req = Request.blank('/v1/a/c/o',
environ={'REQUEST_METHOD': 'COPY'},
headers={'Destination': '/c1/o',
'Destination-Account': 'a1'})
status_list = (200, 200, 200, 200, 404, 404, 200, 201, 201, 201)
# acct cont acct cont objc objc objc obj obj obj
with self.controller_context(req, *status_list) as controller:
resp = controller.COPY(req)
self.assertEquals(resp.status_int, 201)
def test_COPY_with_metadata(self):
req = Request.blank('/v1/a/c/o',
environ={'REQUEST_METHOD': 'COPY'},
@ -3168,6 +3550,22 @@ class TestObjectController(unittest.TestCase):
self.assertEquals(resp.headers.get('x-object-meta-ours'), 'okay')
self.assertEquals(resp.headers.get('x-delete-at'), '9876543210')
def test_COPY_account_with_metadata(self):
req = Request.blank('/v1/a/c/o',
environ={'REQUEST_METHOD': 'COPY'},
headers={'Destination': '/c1/o',
'X-Object-Meta-Ours': 'okay',
'Destination-Account': 'a1'})
status_list = (200, 200, 200, 200, 200, 200, 200, 201, 201, 201)
# acct cont acct cont objc objc objc obj obj obj
with self.controller_context(req, *status_list) as controller:
resp = controller.COPY(req)
self.assertEquals(resp.status_int, 201)
self.assertEquals(resp.headers.get('x-object-meta-test'),
'testing')
self.assertEquals(resp.headers.get('x-object-meta-ours'), 'okay')
self.assertEquals(resp.headers.get('x-delete-at'), '9876543210')
def test_COPY_source_larger_than_max_file_size(self):
req = Request.blank('/v1/a/c/o',
environ={'REQUEST_METHOD': 'COPY'},
@ -3190,6 +3588,29 @@ class TestObjectController(unittest.TestCase):
resp = controller.COPY(req)
self.assertEquals(resp.status_int, 413)
def test_COPY_account_source_larger_than_max_file_size(self):
req = Request.blank('/v1/a/c/o',
environ={'REQUEST_METHOD': 'COPY'},
headers={'Destination': '/c1/o',
'Destination-Account': 'a1'})
class LargeResponseBody(object):
def __len__(self):
return constraints.MAX_FILE_SIZE + 1
def __getitem__(self, key):
return ''
copy_from_obj_body = LargeResponseBody()
status_list = (200, 200, 200, 200, 200)
# acct cont objc objc objc
kwargs = dict(body=copy_from_obj_body)
with self.controller_context(req, *status_list,
**kwargs) as controller:
resp = controller.COPY(req)
self.assertEquals(resp.status_int, 413)
def test_COPY_newest(self):
with save_globals():
controller = proxy_server.ObjectController(self.app, 'a', 'c', 'o')
@ -3208,6 +3629,25 @@ class TestObjectController(unittest.TestCase):
self.assertEquals(resp.headers['x-copied-from-last-modified'],
'3')
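# Cross-account COPY should still pick the newest source replica (as in
# test_COPY_newest above): with source object timestamps of '3', '2' and '1'
# the proxy copies the '3' replica and reports it via
# x-copied-from-last-modified.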
def test_COPY_account_newest(self):
with save_globals():
controller = proxy_server.ObjectController(self.app, 'a', 'c', 'o')
req = Request.blank('/v1/a/c/o',
environ={'REQUEST_METHOD': 'COPY'},
headers={'Destination': '/c1/o',
'Destination-Account': 'a1'})
req.account = 'a'
controller.object_name = 'o'
set_http_connect(200, 200, 200, 200, 200, 200, 200, 201, 201, 201,
# acct cont acct cont objc objc objc obj obj obj
timestamps=('1', '1', '1', '1', '3', '2', '1',
'4', '4', '4'))
self.app.memcache.store = {}
resp = controller.COPY(req)
self.assertEquals(resp.status_int, 201)
self.assertEquals(resp.headers['x-copied-from-last-modified'],
'3')
def test_COPY_delete_at(self):
with save_globals():
given_headers = {}
@ -3233,6 +3673,32 @@ class TestObjectController(unittest.TestCase):
self.assertTrue('X-Delete-At-Partition' in given_headers)
self.assertTrue('X-Delete-At-Container' in given_headers)
def test_COPY_account_delete_at(self):
with save_globals():
given_headers = {}
def fake_connect_put_node(nodes, part, path, headers,
logger_thread_locals):
given_headers.update(headers)
controller = proxy_server.ObjectController(self.app, 'a',
'c', 'o')
controller._connect_put_node = fake_connect_put_node
set_http_connect(200, 200, 200, 200, 200, 200, 200, 201, 201, 201)
self.app.memcache.store = {}
req = Request.blank('/v1/a/c/o',
environ={'REQUEST_METHOD': 'COPY'},
headers={'Destination': '/c1/o',
'Destination-Account': 'a1'})
self.app.update_request(req)
controller.COPY(req)
self.assertEquals(given_headers.get('X-Delete-At'), '9876543210')
self.assertTrue('X-Delete-At-Host' in given_headers)
self.assertTrue('X-Delete-At-Device' in given_headers)
self.assertTrue('X-Delete-At-Partition' in given_headers)
self.assertTrue('X-Delete-At-Container' in given_headers)
def test_chunked_put(self):
class ChunkedFile(object):
@ -4159,176 +4625,6 @@ class TestObjectController(unittest.TestCase):
finally:
time.time = orig_time
def test_POST_non_int_delete_after(self):
with save_globals():
controller = proxy_server.ObjectController(self.app, 'account',
'container', 'object')
set_http_connect(200, 200, 200, 200, 200, 202, 202, 202)
self.app.memcache.store = {}
req = Request.blank('/v1/a/c/o', {},
headers={'Content-Type': 'foo/bar',
'X-Delete-After': '60.1'})
self.app.update_request(req)
res = controller.POST(req)
self.assertEquals(res.status, '400 Bad Request')
self.assertTrue('Non-integer X-Delete-After' in res.body)
def test_POST_negative_delete_after(self):
with save_globals():
controller = proxy_server.ObjectController(self.app, 'account',
'container', 'object')
set_http_connect(200, 200, 200, 200, 200, 202, 202, 202)
self.app.memcache.store = {}
req = Request.blank('/v1/a/c/o', {},
headers={'Content-Type': 'foo/bar',
'X-Delete-After': '-60'})
self.app.update_request(req)
res = controller.POST(req)
self.assertEquals(res.status, '400 Bad Request')
self.assertTrue('X-Delete-At in past' in res.body)
def test_POST_delete_at(self):
with save_globals():
given_headers = {}
def fake_make_requests(req, ring, part, method, path, headers,
query_string=''):
given_headers.update(headers[0])
self.app.object_post_as_copy = False
controller = proxy_server.ObjectController(self.app, 'account',
'container', 'object')
controller.make_requests = fake_make_requests
set_http_connect(200, 200)
self.app.memcache.store = {}
t = str(int(time.time() + 100))
req = Request.blank('/v1/a/c/o', {},
headers={'Content-Type': 'foo/bar',
'X-Delete-At': t})
self.app.update_request(req)
controller.POST(req)
self.assertEquals(given_headers.get('X-Delete-At'), t)
self.assertTrue('X-Delete-At-Host' in given_headers)
self.assertTrue('X-Delete-At-Device' in given_headers)
self.assertTrue('X-Delete-At-Partition' in given_headers)
self.assertTrue('X-Delete-At-Container' in given_headers)
t = str(int(time.time() + 100)) + '.1'
req = Request.blank('/v1/a/c/o', {},
headers={'Content-Type': 'foo/bar',
'X-Delete-At': t})
self.app.update_request(req)
resp = controller.POST(req)
self.assertEquals(resp.status_int, 400)
self.assertTrue('Non-integer X-Delete-At' in resp.body)
t = str(int(time.time() - 100))
req = Request.blank('/v1/a/c/o', {},
headers={'Content-Type': 'foo/bar',
'X-Delete-At': t})
self.app.update_request(req)
resp = controller.POST(req)
self.assertEquals(resp.status_int, 400)
self.assertTrue('X-Delete-At in past' in resp.body)
def test_PUT_converts_delete_after_to_delete_at(self):
with save_globals():
controller = proxy_server.ObjectController(self.app, 'account',
'container', 'object')
set_http_connect(200, 200, 201, 201, 201)
self.app.memcache.store = {}
orig_time = time.time
try:
t = time.time()
time.time = lambda: t
req = Request.blank('/v1/a/c/o', {},
headers={'Content-Length': '0',
'Content-Type': 'foo/bar',
'X-Delete-After': '60'})
self.app.update_request(req)
res = controller.PUT(req)
self.assertEquals(res.status, '201 Fake')
self.assertEquals(req.headers.get('x-delete-at'),
str(int(t + 60)))
finally:
time.time = orig_time
def test_PUT_non_int_delete_after(self):
with save_globals():
controller = proxy_server.ObjectController(self.app, 'account',
'container', 'object')
set_http_connect(200, 200, 201, 201, 201)
self.app.memcache.store = {}
req = Request.blank('/v1/a/c/o', {},
headers={'Content-Length': '0',
'Content-Type': 'foo/bar',
'X-Delete-After': '60.1'})
self.app.update_request(req)
res = controller.PUT(req)
self.assertEquals(res.status, '400 Bad Request')
self.assertTrue('Non-integer X-Delete-After' in res.body)
def test_PUT_negative_delete_after(self):
with save_globals():
controller = proxy_server.ObjectController(self.app, 'account',
'container', 'object')
set_http_connect(200, 200, 201, 201, 201)
self.app.memcache.store = {}
req = Request.blank('/v1/a/c/o', {},
headers={'Content-Length': '0',
'Content-Type': 'foo/bar',
'X-Delete-After': '-60'})
self.app.update_request(req)
res = controller.PUT(req)
self.assertEquals(res.status, '400 Bad Request')
self.assertTrue('X-Delete-At in past' in res.body)
def test_PUT_delete_at(self):
with save_globals():
given_headers = {}
def fake_connect_put_node(nodes, part, path, headers,
logger_thread_locals):
given_headers.update(headers)
controller = proxy_server.ObjectController(self.app, 'account',
'container', 'object')
controller._connect_put_node = fake_connect_put_node
set_http_connect(200, 200)
self.app.memcache.store = {}
t = str(int(time.time() + 100))
req = Request.blank('/v1/a/c/o', {},
headers={'Content-Length': '0',
'Content-Type': 'foo/bar',
'X-Delete-At': t})
self.app.update_request(req)
controller.PUT(req)
self.assertEquals(given_headers.get('X-Delete-At'), t)
self.assertTrue('X-Delete-At-Host' in given_headers)
self.assertTrue('X-Delete-At-Device' in given_headers)
self.assertTrue('X-Delete-At-Partition' in given_headers)
self.assertTrue('X-Delete-At-Container' in given_headers)
t = str(int(time.time() + 100)) + '.1'
req = Request.blank('/v1/a/c/o', {},
headers={'Content-Length': '0',
'Content-Type': 'foo/bar',
'X-Delete-At': t})
self.app.update_request(req)
resp = controller.PUT(req)
self.assertEquals(resp.status_int, 400)
self.assertTrue('Non-integer X-Delete-At' in resp.body)
t = str(int(time.time() - 100))
req = Request.blank('/v1/a/c/o', {},
headers={'Content-Length': '0',
'Content-Type': 'foo/bar',
'X-Delete-At': t})
self.app.update_request(req)
resp = controller.PUT(req)
self.assertEquals(resp.status_int, 400)
self.assertTrue('X-Delete-At in past' in resp.body)
@patch_policies([
StoragePolicy.from_conf(
REPL_POLICY, {'idx': 0, 'name': 'zero', 'is_default': False,
@ -5230,6 +5526,47 @@ class TestContainerController(unittest.TestCase):
503, 201, 201), # put container success
201, missing_container=True)
def test_PUT_autocreate_account_with_sysmeta(self):
# x-account-sysmeta headers in a container PUT request should be
# transferred to the account autocreate PUT request
with save_globals():
controller = proxy_server.ContainerController(self.app, 'account',
'container')
def test_status_map(statuses, expected, headers=None, **kwargs):
set_http_connect(*statuses, **kwargs)
self.app.memcache.store = {}
req = Request.blank('/v1/a/c', {}, headers=headers)
req.content_length = 0
self.app.update_request(req)
res = controller.PUT(req)
expected = str(expected)
self.assertEquals(res.status[:len(expected)], expected)
self.app.account_autocreate = True
calls = []
callback = _make_callback_func(calls)
key, value = 'X-Account-Sysmeta-Blah', 'something'
headers = {key: value}
# all goes according to plan
test_status_map(
(404, 404, 404, # account_info fails on 404
201, 201, 201, # PUT account
200, # account_info success
201, 201, 201), # put container success
201, missing_container=True,
headers=headers,
give_connect=callback)
self.assertEqual(10, len(calls))
for call in calls[3:6]:
self.assertEqual('/account', call['path'])
self.assertTrue(key in call['headers'],
'%s call, key %s missing in headers %s' %
(call['method'], key, call['headers']))
self.assertEqual(value, call['headers'][key])
def test_POST(self):
with save_globals():
controller = proxy_server.ContainerController(self.app, 'account',
@ -6067,10 +6404,12 @@ class TestAccountController(unittest.TestCase):
account_ring=FakeRing(),
container_ring=FakeRing())
def assert_status_map(self, method, statuses, expected, env_expected=None):
def assert_status_map(self, method, statuses, expected, env_expected=None,
headers=None, **kwargs):
headers = headers or {}
with save_globals():
set_http_connect(*statuses)
req = Request.blank('/v1/a', {})
set_http_connect(*statuses, **kwargs)
req = Request.blank('/v1/a', {}, headers=headers)
self.app.update_request(req)
res = method(req)
self.assertEquals(res.status_int, expected)
@ -6229,6 +6568,33 @@ class TestAccountController(unittest.TestCase):
controller.POST,
(404, 404, 404, 403, 403, 403, 400, 400, 400), 400)
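# Account sysmeta should survive autocreation on POST: the initial POST, the
# autocreate PUT and the retried POST (nine backend calls in total) must all
# carry the X-Account-Sysmeta-* header, as checked below.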
def test_POST_autocreate_with_sysmeta(self):
with save_globals():
controller = proxy_server.AccountController(self.app, 'account')
self.app.memcache = FakeMemcacheReturnsNone()
# first test with autocreate being False
self.assertFalse(self.app.account_autocreate)
self.assert_status_map(controller.POST,
(404, 404, 404), 404)
# next turn it on and test account being created then updated
controller.app.account_autocreate = True
calls = []
callback = _make_callback_func(calls)
key, value = 'X-Account-Sysmeta-Blah', 'something'
headers = {key: value}
self.assert_status_map(
controller.POST,
(404, 404, 404, 202, 202, 202, 201, 201, 201), 201,
# POST, autocreate PUT, POST again
headers=headers,
give_connect=callback)
self.assertEqual(9, len(calls))
for call in calls:
self.assertTrue(key in call['headers'],
'%s call, key %s missing in headers %s' %
(call['method'], key, call['headers']))
self.assertEqual(value, call['headers'][key])
def test_connection_refused(self):
self.app.account_ring.get_nodes('account')
for dev in self.app.account_ring.devs:
@ -6310,6 +6676,12 @@ class TestAccountController(unittest.TestCase):
self.assert_status_map(controller.PUT, (201, -1, -1), 503)
self.assert_status_map(controller.PUT, (503, 503, -1), 503)
def test_PUT_status(self):
with save_globals():
self.app.allow_account_management = True
controller = proxy_server.AccountController(self.app, 'account')
self.assert_status_map(controller.PUT, (201, 201, 202), 202)
def test_PUT_metadata(self):
self.metadata_helper('PUT')

View File

@ -51,6 +51,7 @@ class FakeServerConnection(WSGIContext):
environ = {'REQUEST_METHOD': self.method}
req = Request.blank(self.path, environ, headers=self.req_headers,
body=self.data)
self.data = ''
self.resp = self._app_call(req.environ)
self.resp_iter = iter(self.resp)
if self._response_headers is None:
@ -66,7 +67,7 @@ class FakeServerConnection(WSGIContext):
return ContinueResponse()
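# Note: send() below appends to self.data instead of overwriting it, so a
# body delivered over several send() calls (presumably chunked transfers)
# reaches the fake server intact; the buffer is reset to '' once the
# accumulated data has been turned into a request (see self.data = '' above).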
def send(self, data):
self.data = data
self.data += data
def __call__(self, ipaddr, port, device, partition, method, path,
headers=None, query_string=None):