Adds option to set custom data buffer dir

Implements blueprint custom-disk-buffer

When pushing an image large enough to require chunking to either a Swift
or S3 backend, the chunks are buffered to /tmp.

This change allows a user to define an alternate directory via the
configuration files.

Change-Id: I2660b6e5a76cbfd43834384b129550b66bb9afce
Paul Bourke 2011-11-17 14:46:11 +00:00
parent 1f28b6ca7e
commit 49d67bd505
4 changed files with 50 additions and 2 deletions
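
For context, a minimal sketch (not Glance code) of the tempfile behaviour the
commit message refers to: NamedTemporaryFile with no dir argument falls back to
the platform's temporary directory (typically /tmp on Linux), while an explicit
dir places the buffer elsewhere. The glance-buffer- prefix and the scratch
directory are illustrative only.

import os
import tempfile

# Default: dir=None means the platform's temporary directory is used,
# which is where the chunk buffers currently end up (usually /tmp).
with tempfile.NamedTemporaryFile() as buf:
    print(buf.name)                    # e.g. /tmp/tmpXXXXXXXX

# With an explicit dir=..., the buffer lands in the given directory.
# The directory must already exist; one is created here just for the demo.
custom_dir = tempfile.mkdtemp(prefix="glance-buffer-")
with tempfile.NamedTemporaryFile(dir=custom_dir) as buf:
    print(buf.name)                    # .../glance-buffer-XXXXXX/tmpXXXXXXXX
os.rmdir(custom_dir)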

@@ -332,6 +332,18 @@ Glance write chunks to Swift? This amount of data is written
to a temporary disk buffer during the process of chunking
the image file, and the default is 200MB
* ``swift_store_object_buffer_dir=PATH``

  Optional. Default: ``the platform's default temporary directory``

  Can only be specified in configuration files.

  `This option is specific to the Swift storage backend.`

  When sending large images to Swift, what directory should be
  used to buffer the chunks? By default the platform's
  temporary directory will be used.

Configuring the S3 Storage Backend
~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~
@@ -408,6 +420,18 @@ Can only be specified in configuration files.
If true, Glance will attempt to create the bucket ``s3_store_bucket``
if it does not exist.
* ``s3_store_object_buffer_dir=PATH``

  Optional. Default: ``the platform's default temporary directory``

  Can only be specified in configuration files.

  `This option is specific to the S3 storage backend.`

  When sending images to S3, what directory should be
  used to buffer the chunks? By default the platform's
  temporary directory will be used.

Configuring the RBD Storage Backend
~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~
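
To make the documented semantics concrete, here is an illustrative (non-Glance)
sketch of how a store could resolve the buffer directory from its parsed
options, falling back to the platform default when the option is absent. The
function name, the options mapping, and the /srv/buf path are assumptions for
the example.

import tempfile

def resolve_buffer_dir(options, key):
    """Return the configured buffer directory, or None for the platform default."""
    # None tells tempfile.NamedTemporaryFile to use the platform's
    # temporary directory, matching the documented default above.
    return options.get(key) or None

print(resolve_buffer_dir({}, "swift_store_object_buffer_dir"))         # None
print(resolve_buffer_dir({"s3_store_object_buffer_dir": "/srv/buf"},
                         "s3_store_object_buffer_dir"))                # /srv/buf
print(tempfile.gettempdir())   # the "platform's default temporary directory"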

@@ -130,6 +130,11 @@ swift_store_large_object_chunk_size = 200
# Ex. https://example.com/v1.0/ -> https://snet-example.com/v1.0/
swift_enable_snet = False

# When sending large images to Swift, the chunks will be written to a
# temporary buffer on disk. By default the platform's temporary directory
# will be used. If required, an alternative directory can be specified here.
# swift_store_object_buffer_dir = /path/to/dir

# ============ S3 Store Options =============================
# Address where the S3 authentication service lives
@@ -153,6 +158,11 @@ s3_store_bucket = <lowercased 20-char aws access key>glance
# Do we create the bucket if it does not exist?
s3_store_create_bucket_on_put = False

# When sending images to S3, the data will first be written to a
# temporary buffer on disk. By default the platform's temporary directory
# will be used. If required, an alternative directory can be specified here.
# s3_store_object_buffer_dir = /path/to/dir

# ============ RBD Store Options =============================
# Ceph configuration file path

@@ -210,6 +210,12 @@ class Store(glance.store.base.Store):
        else:  # Defaults http
            self.full_s3_host = 'http://' + self.s3_host

        if self.options.get('s3_store_object_buffer_dir'):
            self.s3_store_object_buffer_dir = self.options.get(
                's3_store_object_buffer_dir')
        else:
            self.s3_store_object_buffer_dir = None

    def _option_get(self, param):
        result = self.options.get(param)
        if not result:
@@ -325,8 +331,9 @@ class Store(glance.store.base.Store):
        msg = _("Writing request body file to temporary file "
                "for %s") % loc.get_uri()
        logger.debug(msg)
        temp_file = tempfile.NamedTemporaryFile()
        tmpdir = self.s3_store_object_buffer_dir
        temp_file = tempfile.NamedTemporaryFile(dir=tmpdir)
        checksum = hashlib.md5()
        chunk = image_file.read(self.CHUNKSIZE)
        while chunk:
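
The S3 hunk above follows a buffer-then-checksum pattern: the whole request
body is spooled into one temporary file while an md5 digest is accumulated.
Below is a self-contained sketch of that pattern using only the stdlib;
CHUNKSIZE, the helper name, and the in-memory test stream are assumptions for
the example, not Glance's own values.

import hashlib
import io
import tempfile

CHUNKSIZE = 64 * 1024  # illustrative; the real store defines its own


def buffer_to_disk(image_file, buffer_dir=None):
    """Spool image_file into a temp file under buffer_dir, computing an md5."""
    temp_file = tempfile.NamedTemporaryFile(dir=buffer_dir)
    checksum = hashlib.md5()
    chunk = image_file.read(CHUNKSIZE)
    while chunk:
        checksum.update(chunk)
        temp_file.write(chunk)
        chunk = image_file.read(CHUNKSIZE)
    temp_file.flush()
    temp_file.seek(0)
    return temp_file, checksum.hexdigest()


buf, digest = buffer_to_disk(io.BytesIO(b"fake image data" * 4096))
print(buf.name, digest)
buf.close()   # the buffer file is removed on close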

@@ -215,6 +215,12 @@ class Store(glance.store.base.Store):
                    ) * (1024 * 1024)  # Size specified in MB in conf files
            else:
                self.large_object_chunk_size = DEFAULT_LARGE_OBJECT_CHUNK_SIZE

            if self.options.get('swift_store_object_buffer_dir'):
                self.swift_store_object_buffer_dir = (
                    self.options.get('swift_store_object_buffer_dir'))
            else:
                self.swift_store_object_buffer_dir = None
        except Exception, e:
            reason = _("Error in configuration options: %s") % e
            logger.error(reason)
@@ -359,8 +365,9 @@ class Store(glance.store.base.Store):
            total_chunks = int(math.ceil(
                float(image_size) / float(self.large_object_chunk_size)))
            checksum = hashlib.md5()
            tmp = self.swift_store_object_buffer_dir
            while bytes_left > 0:
                with tempfile.NamedTemporaryFile() as disk_buffer:
                with tempfile.NamedTemporaryFile(dir=tmp) as disk_buffer:
                    chunk_size = min(self.large_object_chunk_size,
                                     bytes_left)
                    logger.debug(_("Writing %(chunk_size)d bytes for "
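
Unlike the S3 path, the Swift hunk gives each chunk its own temporary buffer
inside the loop. Below is a rough stand-alone sketch of that chunking loop;
the chunk size, function name, and the upload_chunk callback are illustrative
assumptions rather than the driver's real interface.

import hashlib
import io
import math
import tempfile

CHUNK_SIZE = 4 * 1024  # illustrative; Glance configures this in MB


def put_chunked(image_file, image_size, buffer_dir=None, upload_chunk=None):
    """Split a stream into chunks, buffering each one on disk before upload."""
    total_chunks = int(math.ceil(float(image_size) / float(CHUNK_SIZE)))
    checksum = hashlib.md5()
    bytes_left = image_size
    for chunk_id in range(total_chunks):
        # Each chunk is spooled through its own temporary file in the
        # configured buffer directory (or the platform default if None).
        with tempfile.NamedTemporaryFile(dir=buffer_dir) as disk_buffer:
            chunk_size = min(CHUNK_SIZE, bytes_left)
            data = image_file.read(chunk_size)
            checksum.update(data)
            disk_buffer.write(data)
            disk_buffer.flush()
            disk_buffer.seek(0)
            if upload_chunk is not None:
                upload_chunk(disk_buffer, chunk_id)
            bytes_left -= chunk_size
    return checksum.hexdigest()


payload = b"y" * 10000
print(put_chunked(io.BytesIO(payload), len(payload)))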