Fix big job binary objects in mysql

Fixes bug: #1302579

Change-Id: I3acc13864c41127c341875e350b4611ff7035e29
This commit is contained in:
Sergey Reshetnyak 2014-04-04 23:54:10 +04:00
parent 2984d3c8da
commit 3e317ed73d
6 changed files with 43 additions and 5 deletions

View File

@ -115,14 +115,28 @@ To install into a virtual environment
check each option in sahara-venv/etc/sahara.conf, and make necessary changes
5. Create database schema:
5. If you use Sahara with a MySQL database, then to store big Job Binaries
in the Sahara internal database you must configure the maximum allowed
packet size. Edit ``my.cnf`` and change the parameter:
.. sourcecode:: ini
...
[mysqld]
...
max_allowed_packet = 256M
..
and restart the MySQL server.
6. Create database schema:
.. sourcecode:: console
$ sahara-venv/bin/python sahara-venv/bin/sahara-db-manage --config-file sahara-venv/etc/sahara.conf upgrade head
..
6. To start Sahara call:
7. To start Sahara call:
.. sourcecode:: console

View File

@ -33,3 +33,7 @@ class Base(object):
def __init__(self):
    """Attach the configured DB driver module to this instance."""
    # The driver is chosen at runtime via the ``db_driver`` option.
    driver_path = CONF.db_driver
    self.db = importutils.import_module(driver_path)
def is_mysql_avail():
    """Report whether the configured database backend is MySQL."""
    connection = CONF.database.connection
    return connection.startswith('mysql')

View File

@ -111,7 +111,7 @@ def upgrade():
sa.Column('tenant_id', sa.String(length=36),
nullable=True),
sa.Column('name', sa.String(length=80), nullable=False),
sa.Column('data', sa.LargeBinary(), nullable=True),
sa.Column('data', st.LargeBinary(), nullable=True),
sa.Column('datasize', sa.BIGINT(), nullable=True),
sa.PrimaryKeyConstraint('id'),
sa.UniqueConstraint('name', 'tenant_id'),

View File

@ -331,8 +331,7 @@ class JobBinaryInternal(mb.SaharaBase):
id = _id_column()
tenant_id = sa.Column(sa.String(36))
name = sa.Column(sa.String(80), nullable=False)
data = sa.orm.deferred(sa.Column(sa.LargeBinary))
data = sa.orm.deferred(sa.Column(st.LargeBinary()))
datasize = sa.Column(sa.BIGINT)

View File

@ -14,8 +14,10 @@
# limitations under the License.
import sqlalchemy as sa
from sqlalchemy.dialects import mysql
from sqlalchemy.ext import mutable
from sahara.db import base
from sahara.openstack.common import jsonutils
@ -108,3 +110,9 @@ def JsonDictType():
def JsonListType():
    """Returns an SQLAlchemy Column Type suitable to store a Json array."""
    # Wrap the JSON-encoded column type so in-place list mutations are
    # tracked by SQLAlchemy's change-detection machinery.
    list_type = MutableList.as_mutable(JsonEncoded)
    return list_type
def LargeBinary():
    """Return a binary column type appropriate for the active backend.

    On MySQL the generic BLOB is too small for big job binaries, so the
    dialect-specific LONGBLOB is used there; every other backend gets the
    generic SQLAlchemy LargeBinary type.
    """
    if not base.is_mysql_avail():
        return sa.LargeBinary
    return mysql.LONGBLOB

View File

@ -37,6 +37,8 @@ postgres=# create database openstack_citest with owner openstack_citest;
"""
import os
from oslo.config import cfg
from sahara.openstack.common.db.sqlalchemy import utils as db_utils
@ -323,6 +325,17 @@ class TestMigrations(base.BaseWalkMigrationTestCase, base.CommonTestsMixIn):
self.assertColumnsExists(engine, 'instances', instances_columns)
self.assertColumnCount(engine, 'instances', instances_columns)
self._data_001(engine, data)
def _data_001(self, engine, data):
    """Round-trip a 512kB random blob through job_binary_internal.

    Verifies that large binary payloads survive an insert/select cycle
    (the motivation for switching the column to LONGBLOB on MySQL).
    """
    payload_size = 512 * 1024  # 512kB
    payload = os.urandom(payload_size)
    table = db_utils.get_table(engine, 'job_binary_internal')
    engine.execute(table.insert(), data=payload, id='123', name='name')
    stored = engine.execute(table.select()).fetchone().data
    self.assertEqual(payload, stored)
    # Clean up so later migration checks start from an empty table.
    engine.execute(table.delete())
def _check_002(self, engine, data):
    # Migration 002 introduces no schema changes to verify yet, so
    # delegate to the 001 checks to confirm the schema is still intact.
    # currently, 002 is just a placeholder
    self._check_001(engine, data)