Database layer for Artifact Repository

Adds a Data Model (Tables: Artifact, ArtifactDependency, ArtifactTag,
ArtifactProperty, ArtifactBlob, ArtifactBlobLocation), Migrations and DB
API for SQLAlchemy. Adds wrappers for simple and registry DB drivers.

Adds appropriate unittests to DB API tests and to migration tests.

Implements-blueprint: artifact-repository

Co-Authored-By: Alexander Tivelkov <ativelkov@mirantis.com>
Co-Authored-By: Mike Fedosin <mfedosin@mirantis.com>
Co-Authored-By: Inessa Vasilevskaya <ivasilevskaya@mirantis.com>

Change-Id: Ia491a58956101a1c40f1bca95cd9efe432f13dce
This commit is contained in:
Alexander Tivelkov 2014-08-21 18:44:24 +04:00 committed by Mike Fedosin
parent 001940f129
commit b281eec8b3
12 changed files with 2661 additions and 2 deletions

View File

@ -0,0 +1,46 @@
# Copyright (c) 2015 Mirantis, Inc.
#
# Licensed under the Apache License, Version 2.0 (the "License"); you may
# not use this file except in compliance with the License. You may obtain
# a copy of the License at
#
# http://www.apache.org/licenses/LICENSE-2.0
#
# Unless required by applicable law or agreed to in writing, software
# distributed under the License is distributed on an "AS IS" BASIS, WITHOUT
# WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. See the
# License for the specific language governing permissions and limitations
# under the License.
import six
from glance.common import exception
class Showlevel(object):
    """Enumeration of artifact serialization depths.

    None - do not show additional properties and blobs with locations;
    Basic - show all artifact fields except dependencies;
    Direct - show all artifact fields with only direct dependencies;
    Transitive - show all artifact fields with all of dependencies.
    """
    NONE = 0
    BASIC = 1
    DIRECT = 2
    TRANSITIVE = 3

    _level_map = {'none': NONE, 'basic': BASIC, 'direct': DIRECT,
                  'transitive': TRANSITIVE}
    # dict comprehension works on both py2 and py3; six is not needed here
    _inverted_level_map = {v: k for k, v in _level_map.items()}

    @staticmethod
    def to_str(n):
        """Return the string name of numeric show level *n*."""
        try:
            return Showlevel._inverted_level_map[n]
        except KeyError:
            # Bug fix: pass the offending value so the exception message
            # template "Show level %(shl)s is not supported..." can format.
            raise exception.ArtifactUnsupportedShowLevel(shl=n)

    @staticmethod
    def from_str(str_value):
        """Return the numeric show level for the string *str_value*."""
        try:
            return Showlevel._level_map[str_value]
        except KeyError:
            raise exception.ArtifactUnsupportedShowLevel(shl=str_value)

View File

@ -450,3 +450,50 @@ class MetadefTagNotFound(NotFound):
class InvalidVersion(Invalid):
    message = _("Version is invalid: %(reason)s")


# Artifact-repository exceptions.  Each one only overrides the message
# template; behavior comes from the NotFound/Forbidden/Duplicate/Invalid
# bases defined earlier in this module.

class ArtifactNotFound(NotFound):
    # raised by the DB layer when an id (plus optional type) does not match
    message = _("Artifact with id=%(id)s was not found")


class ArtifactForbidden(Forbidden):
    # raised when visibility rules deny access to an existing artifact
    message = _("Artifact with id=%(id)s is not accessible")


class ArtifactDuplicateNameTypeVersion(Duplicate):
    message = _("Artifact with the specified type, name and version"
                " already exists")


class InvalidArtifactStateTransition(Invalid):
    # raised for a transition not allowed by the lifecycle state machine
    message = _("Artifact cannot change state from %(source)s to %(target)s")


class ArtifactDuplicateDirectDependency(Duplicate):
    message = _("Artifact with the specified type, name and version"
                " already has the direct dependency=%(dep)s")


class ArtifactDuplicateTransitiveDependency(Duplicate):
    message = _("Artifact with the specified type, name and version"
                " already has the transitive dependency=%(dep)s")


class ArtifactUnsupportedPropertyOperator(Invalid):
    # raised when a filter uses an operator missing from op_mappings
    message = _("Operator %(op)s is not supported")


class ArtifactUnsupportedShowLevel(Invalid):
    message = _("Show level %(shl)s is not supported in this operation")


class ArtifactPropertyValueNotFound(NotFound):
    message = _("Property's %(prop)s value has not been found")


class ArtifactInvalidPropertyParameter(Invalid):
    # e.g. a 'position' parameter combined with the IN operator
    message = _("Cannot use this parameter with the operator %(op)s")


# NOTE(review): this appears to duplicate InvalidArtifactStateTransition
# above (different kwarg names: curr/to vs source/target) — confirm which
# one callers actually raise before consolidating.
class ArtifactInvalidStateTransition(Invalid):
    message = _("Artifact state cannot be changed from %(curr)s to %(to)s")

View File

@ -1,4 +1,5 @@
# Copyright 2013 Red Hat, Inc.
# Copyright 2015 Mirantis, Inc.
# All Rights Reserved.
#
# Licensed under the Apache License, Version 2.0 (the "License"); you may
@ -32,6 +33,7 @@ import functools
from oslo_log import log as logging
from glance import artifacts
from glance.registry.client.v2 import api
@ -557,3 +559,51 @@ def metadef_tag_delete_namespace_content(
@_get_client
def metadef_tag_count(client, namespace_name, session=None):
    # Count tags in a namespace via the registry client; `session` is
    # accepted for DB-API signature parity but not forwarded.
    return client.metadef_tag_count(namespace_name=namespace_name)
@_get_client
def artifact_create(client, values,
                    type_name, type_version=None, session=None):
    """Proxy artifact creation to the registry client.

    `session` exists only for driver-API parity and is not forwarded.
    """
    call_args = {'values': values,
                 'type_name': type_name,
                 'type_version': type_version}
    return client.artifact_create(**call_args)
@_get_client
def artifact_update(client, values, artifact_id,
                    type_name, type_version=None, session=None):
    """Proxy an artifact update to the registry client."""
    call_args = {'values': values,
                 'artifact_id': artifact_id,
                 'type_name': type_name,
                 'type_version': type_version}
    return client.artifact_update(**call_args)
@_get_client
def artifact_delete(client, artifact_id,
                    type_name, type_version=None, session=None):
    """Proxy artifact deletion to the registry client."""
    call_args = {'artifact_id': artifact_id,
                 'type_name': type_name,
                 'type_version': type_version}
    return client.artifact_delete(**call_args)
@_get_client
def artifact_get(client, artifact_id,
                 type_name, type_version=None, session=None):
    """Proxy a single-artifact fetch to the registry client."""
    call_args = {'artifact_id': artifact_id,
                 'type_name': type_name,
                 'type_version': type_version}
    return client.artifact_get(**call_args)
@_get_client
def artifact_get_all(client, marker=None, limit=None, sort_key=None,
                     sort_dir=None, filters=None,
                     show_level=artifacts.Showlevel.NONE, session=None):
    """List artifacts through the registry client.

    :param marker: artifact id after which the page starts
    :param limit: maximum number of artifacts to return
    :param sort_key: attribute to sort by
    :param sort_dir: 'asc' or 'desc'
    :param filters: dict of filter conditions (defaults to no filtering)
    :param show_level: serialization depth (artifacts.Showlevel)
    """
    # Bug fix: this wrapper previously called client.artifact_create(),
    # so listing artifacts created one instead.  Also replaced the
    # mutable default `filters={}` with None to avoid shared state.
    return client.artifact_get_all(marker, limit, sort_key,
                                   sort_dir, filters or {}, show_level)
@_get_client
def artifact_publish(client, artifact_id,
                     type_name, type_version=None, session=None):
    """Proxy artifact publication to the registry client."""
    call_args = {'artifact_id': artifact_id,
                 'type_name': type_name,
                 'type_version': type_version}
    return client.artifact_publish(**call_args)

View File

@ -44,7 +44,13 @@ DATA = {
'tags': {},
'locations': [],
'tasks': {},
'task_info': {}
'task_info': {},
'artifacts': {},
'artifact_properties': {},
'artifact_tags': {},
'artifact_dependencies': {},
'artifact_blobs': {},
'artifact_blob_locations': {}
}
INDEX = 0
@ -80,7 +86,8 @@ def reset():
'tags': {},
'locations': [],
'tasks': {},
'task_info': {}
'task_info': {},
'artifacts': {}
}
@ -1921,6 +1928,96 @@ def metadef_tag_count(context, namespace_name):
return count
def _artifact_format(artifact_id, **values):
    """Return a fresh in-memory artifact record with all fields defaulted.

    Scalar fields default to None; collections start empty; both timestamp
    fields are set to "now".  Anything passed via **values overrides the
    defaults.
    """
    now = timeutils.utcnow()
    # scalar columns that simply default to None
    artifact = dict.fromkeys(
        ['type_name', 'type_version_prefix', 'type_version_suffix',
         'type_version_meta', 'version_prefix', 'version_suffix',
         'version_meta', 'description', 'visibility', 'state', 'owner',
         'scope', 'deleted_at'])
    artifact.update({
        'id': artifact_id,
        'tags': [],
        'properties': {},
        'blobs': [],
        'created_at': now,
        'updated_at': now,
        'deleted': False,
    })
    artifact.update(values)
    return artifact
@log_call
def artifact_create(context, values, type_name, type_version):
    """Register a new artifact in the in-memory store.

    Raises Duplicate for an already-used id and Invalid for a missing
    state or unexpected keys.  Returns a deep copy of the stored record.
    """
    global DATA
    artifact_id = values.get('id', str(uuid.uuid4()))
    if artifact_id in DATA['artifacts']:
        raise exception.Duplicate()
    if 'state' not in values:
        raise exception.Invalid('state is a required attribute')

    # only these top-level keys may be supplied by the caller
    allowed_keys = {'id', 'type_name', 'type_version', 'name', 'version',
                    'description', 'visibility', 'state', 'owner', 'scope'}
    incorrect_keys = set(values) - allowed_keys
    if incorrect_keys:
        raise exception.Invalid(
            'The keys %s are not valid' % str(incorrect_keys))

    artifact = _artifact_format(artifact_id, **values)
    DATA['artifacts'][artifact_id] = artifact
    return copy.deepcopy(artifact)
def _artifact_get(context, artifact_id, type_name,
                  type_version=None):
    """Look up an artifact in the in-memory store, checking its type.

    :raises exception.NotFound: when the id is unknown, the type (or the
        requested type version) does not match, or the artifact has been
        deleted.
    """
    try:
        artifact = DATA['artifacts'][artifact_id]
        # a type mismatch is reported exactly like a missing record
        if artifact['type_name'] != type_name or\
                (type_version is not None and
                 artifact['type_version'] != type_version):
            raise KeyError
    except KeyError:
        LOG.info(_LI('Could not find artifact %s') % artifact_id)
        raise exception.NotFound()

    if artifact['deleted_at']:
        # Bug fix: the message previously said "image" (copy/paste from
        # the image store); this is the artifact store.
        LOG.info(_LI('Unable to get deleted artifact'))
        raise exception.NotFound()

    return artifact
@log_call
def artifact_get(context, artifact_id,
                 type_name,
                 type_version=None, session=None):
    """Return a deep copy of the stored artifact record."""
    found = _artifact_get(context, artifact_id, type_name, type_version)
    return copy.deepcopy(found)
def _format_association(namespace, resource_type, association_values):
association = {
'namespace_id': namespace['id'],

View File

@ -3,6 +3,7 @@
# Copyright 2010-2011 OpenStack Foundation
# Copyright 2012 Justin Santa Barbara
# Copyright 2013 IBM Corp.
# Copyright 2015 Mirantis, Inc.
# All Rights Reserved.
#
# Licensed under the Apache License, Version 2.0 (the "License"); you may
@ -36,8 +37,10 @@ import sqlalchemy
import sqlalchemy.orm as sa_orm
import sqlalchemy.sql as sa_sql
from glance import artifacts as ga
from glance.common import exception
from glance.common import utils
from glance.db.sqlalchemy import artifacts
from glance.db.sqlalchemy.metadef_api import namespace as metadef_namespace_api
from glance.db.sqlalchemy.metadef_api import object as metadef_object_api
from glance.db.sqlalchemy.metadef_api import property as metadef_property_api
@ -1695,3 +1698,58 @@ def metadef_tag_count(context, namespace_name, session=None):
"""Get count of tags for a namespace, raise if ns doesn't exist."""
session = session or get_session()
return metadef_tag_api.count(context, namespace_name, session)
def artifact_create(context, values, type_name,
                    type_version=None, session=None):
    """Create an artifact of the given type and return its dict form."""
    db_session = session or get_session()
    return artifacts.create(context, values, db_session, type_name,
                            type_version)
def artifact_delete(context, artifact_id, type_name,
                    type_version=None, session=None):
    """Soft-delete an artifact and return its final dict form."""
    db_session = session or get_session()
    return artifacts.delete(context, artifact_id, db_session, type_name,
                            type_version)
def artifact_update(context, values, artifact_id, type_name,
                    type_version=None, session=None):
    """Update an existing artifact and return its dict form."""
    db_session = session or get_session()
    return artifacts.update(context, values, artifact_id, db_session,
                            type_name, type_version)
def artifact_get(context, artifact_id,
                 type_name=None,
                 type_version=None,
                 show_level=ga.Showlevel.BASIC,
                 session=None):
    """Fetch one artifact serialized to the requested show level."""
    db_session = session or get_session()
    return artifacts.get(context, artifact_id, db_session, type_name,
                         type_version, show_level)
def artifact_publish(context,
                     artifact_id,
                     type_name,
                     type_version=None,
                     session=None):
    """Make an artifact active, materializing its transitive deps."""
    db_session = session or get_session()
    return artifacts.publish(context, artifact_id, db_session, type_name,
                             type_version)
def artifact_get_all(context, marker=None, limit=None, sort_keys=None,
                     sort_dirs=None, filters=None,
                     show_level=ga.Showlevel.NONE, session=None):
    """List artifacts visible to this context, paginated and sorted."""
    db_session = session or get_session()
    return artifacts.get_all(context, db_session, marker, limit, sort_keys,
                             sort_dirs, filters, show_level)

View File

@ -0,0 +1,756 @@
# Copyright (c) 2015 Mirantis, Inc.
#
# Licensed under the Apache License, Version 2.0 (the "License"); you may
# not use this file except in compliance with the License. You may obtain
# a copy of the License at
#
# http://www.apache.org/licenses/LICENSE-2.0
#
# Unless required by applicable law or agreed to in writing, software
# distributed under the License is distributed on an "AS IS" BASIS, WITHOUT
# WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. See the
# License for the specific language governing permissions and limitations
# under the License.
import copy
import operator
import uuid
from enum import Enum
from oslo.config import cfg
from oslo.db import exception as db_exc
from oslo_utils import timeutils
import sqlalchemy
from sqlalchemy import and_
from sqlalchemy import or_
import sqlalchemy.orm as orm
from sqlalchemy.orm import joinedload
import glance.artifacts as ga
from glance.common import exception
from glance.common import semver_db
from glance.db.sqlalchemy import models_artifacts as models
from glance import i18n
from oslo_log import log as os_logging
LOG = os_logging.getLogger(__name__)
_LW = i18n._LW
_LE = i18n._LE
CONF = cfg.CONF
class Visibility(Enum):
    """Allowed values of the artifact ``visibility`` column.

    Compared against the stored string via ``.value``.
    """
    PRIVATE = 'private'
    PUBLIC = 'public'
    SHARED = 'shared'
class State(Enum):
    """Allowed values of the artifact ``state`` column."""
    CREATING = 'creating'
    ACTIVE = 'active'
    DEACTIVATED = 'deactivated'
    DELETED = 'deleted'
# Legal lifecycle transitions: each key maps to the list of states
# reachable from it.  DELETED is terminal.  Consulted by
# _validate_transition().
TRANSITIONS = {
    State.CREATING: [State.ACTIVE, State.DELETED],
    State.ACTIVE: [State.DEACTIVATED, State.DELETED],
    State.DEACTIVATED: [State.ACTIVE, State.DELETED],
    State.DELETED: []
}
def create(context, values, session, type_name, type_version=None):
    """Create a new artifact and return it as a dict."""
    artifact = _create_or_update(context, values, None, session,
                                 type_name, type_version)
    return _out(artifact)
def update(context, values, artifact_id, session,
           type_name, type_version=None):
    """Apply *values* to an existing artifact and return it as a dict."""
    artifact = _create_or_update(context, values, artifact_id, session,
                                 type_name, type_version)
    return _out(artifact)
def delete(context, artifact_id, session, type_name, type_version=None):
    """Soft-delete an artifact by moving it to the 'deleted' state."""
    deleted = _create_or_update(context, {'state': 'deleted'}, artifact_id,
                                session, type_name, type_version)
    return _out(deleted)
def _create_or_update(context, values, artifact_id, session, type_name,
                      type_version=None):
    """Create a new artifact (artifact_id is None) or update an existing one.

    All work happens inside one DB transaction.  *values* may carry the
    nested collections 'tags', 'properties', 'blobs' and 'dependencies',
    which are popped out and turned into child rows before the remaining
    scalar values are applied to the Artifact row.

    :raises exception.ArtifactDuplicateNameTypeVersion: on a unique-key
        collision for (type, name, version)
    """
    values = copy.deepcopy(values)
    with session.begin():
        _set_version_fields(values)
        _validate_values(values)
        _drop_protected_attrs(models.Artifact, values)
        if artifact_id:
            # update existing artifact
            state = values.get('state')
            show_level = ga.Showlevel.BASIC
            if state is not None:
                if state == 'active':
                    # activation stamps published_at and needs direct
                    # dependencies loaded for the later publish step
                    show_level = ga.Showlevel.DIRECT
                    values['published_at'] = timeutils.utcnow()
                if state == 'deleted':
                    values['deleted_at'] = timeutils.utcnow()
            artifact = _get(context, artifact_id, session, type_name,
                            type_version, show_level=show_level)
            _validate_transition(artifact.state,
                                 values.get('state') or artifact.state)
        else:
            # create new artifact
            artifact = models.Artifact()
            if 'id' not in values:
                artifact.id = str(uuid.uuid4())
            else:
                artifact.id = values['id']
        if 'tags' in values:
            tags = values.pop('tags')
            artifact.tags = _do_tags(artifact, tags)
        if 'properties' in values:
            properties = values.pop('properties', {})
            artifact.properties = _do_properties(artifact, properties)
        if 'blobs' in values:
            blobs = values.pop('blobs')
            artifact.blobs = _do_blobs(artifact, blobs)
        if 'dependencies' in values:
            dependencies = values.pop('dependencies')
            _do_dependencies(artifact, dependencies, session)
        # NOTE(review): 'publish' is not a member of the State enum
        # ('active' is what publish() sets), so this branch looks
        # unreachable — confirm the intended trigger value.
        if values.get('state', None) == 'publish':
            artifact.dependencies.extend(
                _do_transitive_dependencies(artifact, session))
        artifact.update(values)
        try:
            artifact.save(session=session)
        except db_exc.DBDuplicateEntry:
            LOG.warn(_LW("Artifact with the specified type, name and version "
                         "already exists"))
            raise exception.ArtifactDuplicateNameTypeVersion()

    return artifact
def get(context, artifact_id, session, type_name=None, type_version=None,
        show_level=ga.Showlevel.BASIC):
    """Fetch a single artifact and serialize it at *show_level*."""
    found = _get(context, artifact_id, session, type_name, type_version,
                 show_level)
    return _out(found, show_level)
def publish(context, artifact_id, session, type_name,
            type_version=None):
    """Move an artifact to the 'active' state.

    Because transitive dependencies are not initially created it has to be
    done manually by calling this function: activating the artifact makes
    _create_or_update materialize and save them in the DB.

    :returns: the artifact as a dict
    """
    return _out(_create_or_update(context, {'state': 'active'}, artifact_id,
                                  session, type_name, type_version))
def _validate_transition(source_state, target_state):
    """Raise InvalidArtifactStateTransition unless the move is allowed.

    A no-op transition (same state) is always permitted.
    """
    if target_state == source_state:
        return
    try:
        source = State(source_state)
        target = State(target_state)
    except ValueError:
        # one of the values is not a recognized state at all
        raise exception.InvalidArtifactStateTransition(source=source_state,
                                                       target=target_state)
    if target not in TRANSITIONS.get(source, []):
        raise exception.InvalidArtifactStateTransition(source=source,
                                                       target=target)
def _out(artifact, show_level=ga.Showlevel.BASIC, show_text_properties=True):
    """
    Transforms sqlalchemy object into dict depending on the show level.

    :param artifact: sql
    :param show_level: constant from Showlevel class
    :param show_text_properties: for performance optimization it's possible
    to disable loading of massive text properties
    :return: generated dict
    """
    res = artifact.to_dict(show_level=show_level,
                           show_text_properties=show_text_properties)
    if show_level >= ga.Showlevel.DIRECT:
        # group dependency rows by name into lists, recursing into each
        # target artifact one level (DIRECT) or fully (TRANSITIVE)
        dependencies = artifact.dependencies
        dependencies.sort(key=lambda elem: (elem.artifact_origin,
                                            elem.name, elem.position))
        res['dependencies'] = {}
        if show_level == ga.Showlevel.DIRECT:
            new_show_level = ga.Showlevel.BASIC
        else:
            new_show_level = ga.Showlevel.TRANSITIVE
        for dep in dependencies:
            # only dependencies originating at this artifact are emitted;
            # rows copied from transitive origins are skipped here
            if dep.artifact_origin == artifact.id:
                # make array
                for p in res['dependencies'].keys():
                    if p == dep.name:
                        # add value to array
                        res['dependencies'][p].append(
                            _out(dep.dest, new_show_level))
                        break
                else:
                    # create new array
                    deparr = []
                    deparr.append(_out(dep.dest, new_show_level))
                    res['dependencies'][dep.name] = deparr
    return res
def _get(context, artifact_id, session, type_name=None, type_version=None,
         show_level=ga.Showlevel.BASIC):
    """Load one Artifact row, eager-loading children per *show_level*.

    :raises exception.ArtifactNotFound: when no row matches the id/type
    :raises exception.ArtifactForbidden: when visibility rules deny access
    """
    values = dict(id=artifact_id)
    if type_name is not None:
        values['type_name'] = type_name
    if type_version is not None:
        values['type_version'] = type_version
    # convert a raw semver string filter into its DB column representation
    _set_version_fields(values)
    try:
        if show_level == ga.Showlevel.NONE:
            # minimal load: only tags
            query = session.query(models.Artifact) \
                .options(joinedload(models.Artifact.tags)) \
                .filter_by(**values)
        else:
            # full load: properties, tags and blobs with their locations
            query = session.query(models.Artifact) \
                .options(joinedload(models.Artifact.properties)) \
                .options(joinedload(models.Artifact.tags)) \
                .options(joinedload(models.Artifact.blobs).
                         joinedload(models.ArtifactBlob.locations)) \
                .filter_by(**values)

        artifact = query.one()
    except orm.exc.NoResultFound:
        LOG.warn(_LW("Artifact with id=%s not found") % artifact_id)
        raise exception.ArtifactNotFound(id=artifact_id)
    if not _check_visibility(context, artifact):
        LOG.warn(_LW("Artifact with id=%s is not accessible") % artifact_id)
        raise exception.ArtifactForbidden(id=artifact_id)
    return artifact
def get_all(context, session, marker=None, limit=None,
            sort_keys=None, sort_dirs=None, filters=None,
            show_level=ga.Showlevel.NONE):
    """List all visible artifacts.

    :returns: list of artifact dicts serialized at *show_level* (text
        properties are deferred for performance)
    """
    filters = filters or {}
    artifacts = _get_all(
        context, session, filters, marker,
        limit, sort_keys, sort_dirs, show_level)
    # Return a concrete list: on Python 3 `map` yields a one-shot lazy
    # iterator, which callers may try to iterate or serialize repeatedly.
    return [_out(ns, show_level, show_text_properties=False)
            for ns in artifacts]
def _get_all(context, session, filters=None, marker=None,
             limit=None, sort_keys=None, sort_dirs=None,
             show_level=ga.Showlevel.NONE):
    """Get all artifacts that match zero or more filters.

    :param filters: dict of filter keys and values.
    :param marker: artifact id after which to start page
    :param limit: maximum number of artifacts to return
    :param sort_keys: (attribute, property_type) tuples to sort by;
        property_type None means a plain Artifact column
    :param sort_dirs: directions in which results should be sorted (asc, desc)
    """
    filters = filters or {}

    query = _do_artifacts_query(context, session, show_level)
    basic_conds, tag_conds, prop_conds = _do_query_filters(filters)

    if basic_conds:
        for basic_condition in basic_conds:
            query = query.filter(and_(*basic_condition))

    if tag_conds:
        for tag_condition in tag_conds:
            query = query.join(models.ArtifactTag, aliased=True).filter(
                and_(*tag_condition))

    if prop_conds:
        for prop_condition in prop_conds:
            query = query.join(models.ArtifactProperty, aliased=True).filter(
                and_(*prop_condition))

    marker_artifact = None
    if marker is not None:
        marker_artifact = _get(context, marker, session, None, None)

    if sort_keys is None:
        sort_keys = [('created_at', None), ('id', None)]
        sort_dirs = ['desc', 'desc']
    else:
        # Bug fix: work on copies so the caller's lists are never mutated,
        # and default sort_dirs when only sort_keys was supplied — the old
        # code crashed with AttributeError on sort_dirs.append(None case).
        sort_keys = list(sort_keys)
        sort_dirs = (list(sort_dirs) if sort_dirs
                     else ['asc'] * len(sort_keys))
        for key in [('created_at', None), ('id', None)]:
            if key not in sort_keys:
                sort_keys.append(key)
                sort_dirs.append('desc')

    # Note(mfedosin): Kostyl to deal with situation that sqlalchemy cannot
    # work with composite keys correctly: expand 'version' into its three
    # underlying columns, all sorted in the same direction.
    if ('version', None) in sort_keys:
        i = sort_keys.index(('version', None))
        version_sort_dir = sort_dirs[i]
        sort_keys[i:i + 1] = [('version_prefix', None),
                              ('version_suffix', None),
                              ('version_meta', None)]
        sort_dirs[i:i + 1] = [version_sort_dir] * 3

    query = _do_paginate_query(query=query,
                               limit=limit,
                               sort_keys=sort_keys,
                               marker=marker_artifact,
                               sort_dirs=sort_dirs)

    return query.all()
def _do_paginate_query(query, sort_keys=None, sort_dirs=None,
                       marker=None, limit=None):
    """Apply ordering and marker-based (keyset) pagination to *query*.

    :param sort_keys: list of (attribute, property_type) tuples; a None
        property_type means a plain Artifact column, otherwise the key is
        a custom property stored in ArtifactProperty.<type>_value
    :param sort_dirs: per-key directions, 'asc' or 'desc'
    :param marker: Artifact row after which the page starts
    :param limit: maximum number of rows to return
    :raises ValueError: for an unknown sort direction
    """
    # Default the sort direction to ascending for every key
    if sort_dirs is None:
        sort_dirs = ['asc' for _sort_key in sort_keys]
    assert(len(sort_dirs) == len(sort_keys))

    # Add sorting
    for current_sort_key, current_sort_dir in zip(sort_keys, sort_dirs):
        try:
            sort_dir_func = {
                'asc': sqlalchemy.asc,
                'desc': sqlalchemy.desc,
            }[current_sort_dir]
        except KeyError:
            raise ValueError(_LE("Unknown sort direction, "
                                 "must be 'desc' or 'asc'"))

        if current_sort_key[1] is None:
            # sort by generic property
            query = query.order_by(sort_dir_func(getattr(
                models.Artifact,
                current_sort_key[0])))
        else:
            # sort by custom property
            prop_type = current_sort_key[1] + "_value"
            query = query.join(models.ArtifactProperty).\
                filter(
                    models.ArtifactProperty.name == current_sort_key[0]).\
                order_by(
                    sort_dir_func(getattr(models.ArtifactProperty,
                                          prop_type)))

    # Add pagination
    if marker is not None:
        marker_values = []
        for sort_key in sort_keys:
            # NOTE(review): this reads every sort attribute off the marker
            # Artifact row, including custom-property keys — confirm that
            # markers are only combined with generic columns.
            v = getattr(marker, sort_key[0])
            marker_values.append(v)

        # Build up an array of sort criteria as in the docstring
        criteria_list = []
        for i in range(len(sort_keys)):
            crit_attrs = []
            for j in range(i):
                if sort_keys[j][1] is None:
                    model_attr = getattr(models.Artifact, sort_keys[j][0])
                else:
                    model_attr = getattr(models.ArtifactProperty,
                                         sort_keys[j][1] + "_value")
                crit_attrs.append((model_attr == marker_values[j]))

            # Bug fix: the pivot column of each criterion must use index i;
            # the old code reused the inner loop variable j here, which is
            # undefined when i == 0 and the wrong column otherwise.
            if sort_keys[i][1] is None:
                model_attr = getattr(models.Artifact, sort_keys[i][0])
            else:
                model_attr = getattr(models.ArtifactProperty,
                                     sort_keys[i][1] + "_value")
            if sort_dirs[i] == 'desc':
                crit_attrs.append((model_attr < marker_values[i]))
            else:
                crit_attrs.append((model_attr > marker_values[i]))

            criteria = and_(*crit_attrs)
            criteria_list.append(criteria)

        f = or_(*criteria_list)
        query = query.filter(f)

    if limit is not None:
        query = query.limit(limit)

    return query
def _do_artifacts_query(context, session, show_level=ga.Showlevel.NONE):
    """Build the query to get all artifacts based on the context"""
    LOG.debug("context.is_admin=%(is_admin)s; context.owner=%(owner)s" %
              {'is_admin': context.is_admin, 'owner': context.owner})

    if show_level == ga.Showlevel.NONE:
        # lightest listing: only tags are eagerly loaded
        query = session.query(models.Artifact) \
            .options(joinedload(models.Artifact.tags))
    elif show_level == ga.Showlevel.BASIC:
        # eager-load properties (deferring their potentially large text
        # values), tags, and blobs with their locations
        query = session.query(models.Artifact) \
            .options(joinedload(models.Artifact.properties)
                     .defer(models.ArtifactProperty.text_value)) \
            .options(joinedload(models.Artifact.tags)) \
            .options(joinedload(models.Artifact.blobs).
                     joinedload(models.ArtifactBlob.locations))
    else:
        # other show_levels aren't supported
        msg = _LW("Show level %s is not supported in this "
                  "operation") % ga.Showlevel.to_str(show_level)
        LOG.warn(msg)
        raise exception.ArtifactUnsupportedShowLevel(shl=show_level)

    # If admin, return everything.
    if context.is_admin:
        return query
    else:
        # If regular user, return only public artifacts.
        # However, if context.owner has a value, return both
        # public and private artifacts of the context.owner.
        if context.owner is not None:
            query = query.filter(
                or_(models.Artifact.owner == context.owner,
                    models.Artifact.visibility == 'public'))
        else:
            query = query.filter(
                models.Artifact.visibility == 'public')
        return query
# Maps filter operator tokens to the Python comparison callables used to
# build SQLAlchemy column conditions in _do_query_filters().
op_mappings = {
    'EQ': operator.eq,
    'GT': operator.gt,
    'GE': operator.ge,
    'LT': operator.lt,
    'LE': operator.le,
    'NE': operator.ne,
    'IN': operator.eq  # it must be eq
}
def _do_query_filters(filters):
    """Translate the filters dict into SQLAlchemy condition lists.

    Each filter value is a dict with at least 'value', plus 'operator',
    'type' and optional 'position' for custom properties.  Known artifact
    columns are popped off *filters*; anything left is treated as a custom
    property filter.

    :returns: (basic_conds, tag_conds, prop_conds) — lists of condition
        lists, AND-ed per list and applied by the caller
    :raises exception.ArtifactInvalidPropertyParameter: IN with 'position'
    :raises exception.ArtifactUnsupportedPropertyOperator: unknown operator
    """
    basic_conds = []
    tag_conds = []
    prop_conds = []

    # don't show deleted artifacts
    basic_conds.append([models.Artifact.state != 'deleted'])

    visibility = filters.pop('visibility', None)
    if visibility is not None:
        # ignore operator. always consider it EQ
        basic_conds.append([models.Artifact.visibility == visibility['value']])

    type_name = filters.pop('type_name', None)
    if type_name is not None:
        # ignore operator. always consider it EQ
        basic_conds.append([models.Artifact.type_name == type_name['value']])

    type_version = filters.pop('type_version', None)
    if type_version is not None:
        # ignore operator. always consider it EQ
        # TODO(mfedosin) add support of LIKE operator
        type_version = semver_db.parse(type_version['value'])
        basic_conds.append([models.Artifact.type_version == type_version])

    name = filters.pop('name', None)
    if name is not None:
        # ignore operator. always consider it EQ
        basic_conds.append([models.Artifact.name == name['value']])

    version = filters.pop('version', None)
    if version is not None:
        # ignore operator. always consider it EQ
        # TODO(mfedosin) add support of LIKE operator
        version = semver_db.parse(version['value'])
        basic_conds.append([models.Artifact.version == version])

    state = filters.pop('state', None)
    if state is not None:
        # ignore operator. always consider it EQ
        basic_conds.append([models.Artifact.state == state['value']])

    owner = filters.pop('owner', None)
    if owner is not None:
        # ignore operator. always consider it EQ
        basic_conds.append([models.Artifact.owner == owner['value']])

    id_list = filters.pop('id_list', None)
    if id_list is not None:
        basic_conds.append([models.Artifact.id.in_(id_list['value'])])

    name_list = filters.pop('name_list', None)
    if name_list is not None:
        basic_conds.append([models.Artifact.name.in_(name_list['value'])])

    tags = filters.pop('tags', None)
    if tags is not None:
        # each requested tag becomes its own join condition (AND semantics)
        for tag in tags['value']:
            tag_conds.append([models.ArtifactTag.value == tag])

    # process remaining filters
    for filtername, filtervalue in filters.items():
        db_prop_op = filtervalue['operator']
        db_prop_value = filtervalue['value']
        db_prop_type = filtervalue['type'] + "_value"
        db_prop_position = filtervalue.get('position')

        conds = [models.ArtifactProperty.name == filtername]

        if db_prop_op in op_mappings:
            fn = op_mappings[db_prop_op]
            result = fn(getattr(models.ArtifactProperty, db_prop_type),
                        db_prop_value)
            cond = [result,
                    models.ArtifactProperty.position == db_prop_position]
            if db_prop_op == 'IN':
                # IN matches any element of an array property, so a fixed
                # position makes no sense
                if db_prop_position is not None:
                    msg = _LE("Cannot use this parameter with "
                              "the operator IN")
                    LOG.error(msg)
                    raise exception.ArtifactInvalidPropertyParameter(op='IN')
                cond = [result,
                        models.ArtifactProperty.position >= 0]
        else:
            msg = _LE("Operator %s is not supported") % db_prop_op
            LOG.error(msg)
            raise exception.ArtifactUnsupportedPropertyOperator(op=db_prop_op)

        conds.extend(cond)
        prop_conds.append(conds)
    return basic_conds, tag_conds, prop_conds
def _do_tags(artifact, new_tags):
    """Return the tag rows the artifact should keep after applying *new_tags*.

    Existing rows whose value appears in *new_tags* are reused untouched;
    the remaining values get fresh ArtifactTag rows.
    """
    tags_to_update = []
    # Bug fix: work on a copy — the old code called new_tags.remove() and
    # mutated the caller's list as a side effect.
    remaining = list(new_tags)
    # don't touch existing tags
    for tag in artifact.tags:
        if tag.value in remaining:
            tags_to_update.append(tag)
            remaining.remove(tag.value)
    # add new tags
    for tag in remaining:
        db_tag = models.ArtifactTag()
        db_tag.value = tag
        tags_to_update.append(db_tag)
    return tags_to_update
def _do_property(propname, prop, position=None):
    """Build an ArtifactProperty row from a {'type', 'value'} dict.

    The value lands in the column named '<type>_value'; *position* is set
    for array-property elements and left None for scalars.
    """
    db_prop = models.ArtifactProperty()
    db_prop.name = propname
    db_prop.position = position
    setattr(db_prop, prop['type'] + "_value", prop['value'])
    return db_prop
def _do_properties(artifact, new_properties):
    """Return the property rows to keep after applying *new_properties*.

    Existing rows whose name is not being redefined survive untouched;
    each new entry becomes one row per scalar, or one row per element for
    'array'-typed properties.
    """
    # keep properties that are not being (re)defined
    props_to_update = [prop for prop in artifact.properties
                       if prop.name not in new_properties]

    for propname, prop in new_properties.items():
        if prop['type'] == 'array':
            props_to_update.extend(
                _do_property(propname, arrprop, pos)
                for pos, arrprop in enumerate(prop['value']))
        else:
            props_to_update.append(_do_property(propname, prop))
    return props_to_update
def _do_blobs(artifact, new_blobs):
    """Return the blob rows to keep after applying *new_blobs*.

    *new_blobs* maps a blob name to a list of blob dicts (one per
    position).  Existing rows with an unmentioned name are kept as-is;
    rows matching (name, position) are updated in place; everything else
    becomes a new ArtifactBlob row.
    """
    blobs_to_update = []

    # don't touch existing blobs
    for blob in artifact.blobs:
        if blob.name not in new_blobs:
            blobs_to_update.append(blob)

    for blobname, blobs in new_blobs.items():
        for pos, blob in enumerate(blobs):
            for db_blob in artifact.blobs:
                if db_blob.name == blobname and db_blob.position == pos:
                    # update existing blobs
                    db_blob.size = blob['size']
                    db_blob.checksum = blob['checksum']
                    db_blob.item_key = blob['item_key']
                    db_blob.locations = _do_locations(db_blob,
                                                      blob['locations'])
                    blobs_to_update.append(db_blob)
                    break
            else:
                # create new blob (no existing row matched name+position)
                db_blob = models.ArtifactBlob()
                db_blob.name = blobname
                db_blob.size = blob['size']
                db_blob.checksum = blob['checksum']
                db_blob.item_key = blob['item_key']
                db_blob.position = pos
                db_blob.locations = _do_locations(db_blob, blob['locations'])
                blobs_to_update.append(db_blob)
    return blobs_to_update
def _do_locations(blob, new_locations):
    """Return the location rows for *blob* after applying *new_locations*.

    A row whose value already exists is refreshed in place (position and
    status); unseen values get new ArtifactBlobLocation rows.
    """
    locs_to_update = []
    for pos, loc in enumerate(new_locations):
        existing = next((db_loc for db_loc in blob.locations
                         if db_loc.value == loc['value']), None)
        if existing is not None:
            # update existing location
            existing.position = pos
            existing.status = loc['status']
            locs_to_update.append(existing)
        else:
            # create new location
            new_loc = models.ArtifactBlobLocation()
            new_loc.value = loc['value']
            new_loc.status = loc['status']
            new_loc.position = pos
            locs_to_update.append(new_loc)
    return locs_to_update
def _do_dependencies(artifact, new_dependencies, session):
    """Replace the artifact's dependency rows with *new_dependencies*.

    *new_dependencies* maps a dependency name to a list of destination
    artifact ids.  All former dependency rows are deleted and fresh
    direct-dependency rows are attached to the artifact.
    """
    deps_to_update = []
    # small check that all dependencies are new
    if artifact.dependencies is not None:
        for db_dep in artifact.dependencies:
            for dep in new_dependencies.keys():
                if db_dep.name == dep:
                    # NOTE(review): this only warns and then proceeds to
                    # replace the rows; ArtifactDuplicateDirectDependency
                    # exists but is never raised here — confirm intent.
                    msg = _LW("Artifact with the specified type, name "
                              "and versions already has the direct "
                              "dependency=%s") % dep
                    LOG.warn(msg)

    # change values of former dependency: drop every old row first
    for dep in artifact.dependencies:
        session.delete(dep)
    artifact.dependencies = []

    for depname, depvalues in new_dependencies.items():
        for pos, depvalue in enumerate(depvalues):
            db_dep = models.ArtifactDependency()
            db_dep.name = depname
            db_dep.artifact_source = artifact.id
            db_dep.artifact_dest = depvalue
            db_dep.artifact_origin = artifact.id
            db_dep.is_direct = True
            db_dep.position = pos
            deps_to_update.append(db_dep)
    artifact.dependencies = deps_to_update
def _do_transitive_dependencies(artifact, session):
    """Build the transitive dependency rows for *artifact*.

    For every direct dependency, copies that target's own direct
    dependency rows as is_direct=False rows originating at the target.

    :raises exception.ArtifactDuplicateTransitiveDependency: if a target
        already carries transitive rows (they were created before)
    """
    deps_to_update = []
    for dependency in artifact.dependencies:
        depvalue = dependency.artifact_dest
        transitdeps = session.query(models.ArtifactDependency). \
            filter_by(artifact_source=depvalue).all()
        for transitdep in transitdeps:
            if not transitdep.is_direct:
                # transitive dependencies are already created
                # Bug fix: dependency ids are 36-char string UUIDs, so the
                # old "%d" placeholder raised TypeError; use %s.
                msg = _LW("Artifact with the specified type, "
                          "name and version already has the "
                          "direct dependency=%s") % transitdep.id
                LOG.warn(msg)
                raise exception.ArtifactDuplicateTransitiveDependency(
                    dep=transitdep.id)

            db_dep = models.ArtifactDependency()
            db_dep.name = transitdep['name']
            db_dep.artifact_source = artifact.id
            db_dep.artifact_dest = transitdep.artifact_dest
            db_dep.artifact_origin = transitdep.artifact_source
            db_dep.is_direct = False
            db_dep.position = transitdep.position
            deps_to_update.append(db_dep)
    return deps_to_update
def _check_visibility(context, artifact):
    """Return True when *context* is allowed to see *artifact*.

    Admins and ownerless artifacts are always visible; public artifacts
    are visible to everyone; private ones only to their owner.  Sharing
    is not implemented yet, so 'shared' artifacts are never visible here.
    """
    if context.is_admin or not artifact.owner:
        return True

    visibility = artifact.visibility
    if visibility == Visibility.PUBLIC.value:
        return True
    if visibility == Visibility.PRIVATE.value:
        return bool(context.owner and context.owner == artifact.owner)
    # 'shared' (and anything unexpected) is denied
    return False
def _set_version_fields(values):
    """Convert raw semver strings in *values* to their DB representation.

    Mutates *values* in place for the 'type_version' and 'version' keys.
    """
    for field in ('type_version', 'version'):
        if field in values:
            values[field] = semver_db.parse(values[field])
def _validate_values(values):
    """Raise exception.Invalid if state/visibility hold unknown values."""
    checks = (
        ('state', State, "Invalid artifact state '%s'"),
        ('visibility', Visibility, "Invalid artifact visibility '%s'"),
    )
    for key, enum_cls, err_fmt in checks:
        if key in values:
            try:
                enum_cls(values[key])
            except ValueError:
                raise exception.Invalid(err_fmt % values[key])
    # TODO(mfedosin): it's an idea to validate tags someday
    # (check that all tags match the regexp)
def _drop_protected_attrs(model_class, values):
    """
    Removed protected attributes from values dictionary using the models
    __protected_attributes__ field.
    """
    for attr in model_class.__protected_attributes__:
        values.pop(attr, None)

View File

@ -52,6 +52,9 @@ BigInteger = lambda: sqlalchemy.types.BigInteger()
PickleType = lambda: sqlalchemy.types.PickleType()
Numeric = lambda: sqlalchemy.types.Numeric()
def from_migration_import(module_name, fromlist):
"""
Import a migration file and return the module

View File

@ -0,0 +1,206 @@
# Copyright (c) 2015 Mirantis, Inc.
#
# Licensed under the Apache License, Version 2.0 (the "License"); you may
# not use this file except in compliance with the License. You may obtain
# a copy of the License at
#
# http://www.apache.org/licenses/LICENSE-2.0
#
# Unless required by applicable law or agreed to in writing, software
# distributed under the License is distributed on an "AS IS" BASIS, WITHOUT
# WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. See the
# License for the specific language governing permissions and limitations
# under the License.
from sqlalchemy.schema import (Column, ForeignKey, Index, MetaData, Table)
from glance.db.sqlalchemy.migrate_repo.schema import (
BigInteger, Boolean, DateTime, Integer, Numeric, String, Text,
create_tables) # noqa
def define_artifacts_table(meta):
    """Return the Table definition for the top-level 'artifacts' table.

    Semantic versions ('version' and 'type_version') are stored split
    into numeric prefix, string suffix and metadata columns so they can
    be compared and sorted at the SQL level.
    """
    artifacts = Table('artifacts',
                      meta,
                      Column('id', String(36), primary_key=True,
                             nullable=False),
                      Column('name', String(255), nullable=False),
                      Column('type_name', String(255), nullable=False),
                      Column('type_version_prefix', BigInteger(),
                             nullable=False),
                      Column('type_version_suffix', String(255)),
                      Column('type_version_meta', String(255)),
                      Column('version_prefix', BigInteger(), nullable=False),
                      Column('version_suffix', String(255)),
                      Column('version_meta', String(255)),
                      Column('description', Text()),
                      Column('visibility', String(32), nullable=False),
                      Column('state', String(32), nullable=False),
                      Column('owner', String(255), nullable=False),
                      Column('created_at', DateTime(), nullable=False),
                      Column('updated_at', DateTime(),
                             nullable=False),
                      Column('deleted_at', DateTime()),
                      Column('published_at', DateTime()),
                      mysql_engine='InnoDB',
                      extend_existing=True)

    # Secondary indexes for the common lookups: by name+version, by type,
    # and by the state/owner/visibility filter columns.
    Index('ix_artifact_name_and_version', artifacts.c.name,
          artifacts.c.version_prefix, artifacts.c.version_suffix)
    Index('ix_artifact_type', artifacts.c.type_name,
          artifacts.c.type_version_prefix, artifacts.c.type_version_suffix)
    Index('ix_artifact_state', artifacts.c.state)
    Index('ix_artifact_owner', artifacts.c.owner)
    Index('ix_artifact_visibility', artifacts.c.visibility)

    return artifacts
def define_artifact_tags_table(meta):
    """Return the Table definition for free-form artifact tags."""
    artifact_tags = Table('artifact_tags',
                          meta,
                          Column('id', String(36), primary_key=True,
                                 nullable=False),
                          Column('artifact_id', String(36),
                                 ForeignKey('artifacts.id'), nullable=False),
                          Column('value', String(255), nullable=False),
                          Column('created_at', DateTime(), nullable=False),
                          Column('updated_at', DateTime(),
                                 nullable=False),
                          mysql_engine='InnoDB',
                          extend_existing=True)

    # Indexed both alone and with the tag value for tag-filter queries.
    Index('ix_artifact_tags_artifact_id', artifact_tags.c.artifact_id)
    Index('ix_artifact_tags_artifact_id_tag_value',
          artifact_tags.c.artifact_id, artifact_tags.c.value)

    return artifact_tags
def define_artifact_dependencies_table(meta):
    """Return the Table definition for artifact dependency links.

    Each row links artifact_source -> artifact_dest. artifact_origin
    records the artifact whose dependency closure the row belongs to,
    and is_direct presumably distinguishes direct from transitive rows
    (confirm against the DB API code that writes them).
    """
    artifact_dependencies = Table('artifact_dependencies',
                                  meta,
                                  Column('id', String(36), primary_key=True,
                                         nullable=False),
                                  Column('artifact_source', String(36),
                                         ForeignKey('artifacts.id'),
                                         nullable=False),
                                  Column('artifact_dest', String(36),
                                         ForeignKey('artifacts.id'),
                                         nullable=False),
                                  Column('artifact_origin', String(36),
                                         ForeignKey('artifacts.id'),
                                         nullable=False),
                                  Column('is_direct', Boolean(),
                                         nullable=False),
                                  Column('position', Integer()),
                                  Column('name', String(36)),
                                  Column('created_at', DateTime(),
                                         nullable=False),
                                  Column('updated_at', DateTime(),
                                         nullable=False),
                                  mysql_engine='InnoDB',
                                  extend_existing=True)

    Index('ix_artifact_dependencies_source_id',
          artifact_dependencies.c.artifact_source)
    # FIX: dropped a stray trailing comma that turned this statement into
    # a throwaway one-element tuple.
    Index('ix_artifact_dependencies_dest_id',
          artifact_dependencies.c.artifact_dest)
    Index('ix_artifact_dependencies_origin_id',
          artifact_dependencies.c.artifact_origin)
    Index('ix_artifact_dependencies_direct_dependencies',
          artifact_dependencies.c.artifact_source,
          artifact_dependencies.c.is_direct)

    return artifact_dependencies
def define_artifact_blobs_table(meta):
    """Return the Table definition for artifact binary blob metadata."""
    artifact_blobs = Table('artifact_blobs',
                           meta,
                           Column('id', String(36), primary_key=True,
                                  nullable=False),
                           Column('artifact_id', String(36),
                                  ForeignKey('artifacts.id'),
                                  nullable=False),
                           Column('size', BigInteger(), nullable=False),
                           Column('checksum', String(32)),
                           Column('name', String(255), nullable=False),
                           Column('item_key', String(329)),
                           Column('position', Integer()),
                           Column('created_at', DateTime(), nullable=False),
                           Column('updated_at', DateTime(),
                                  nullable=False),
                           mysql_engine='InnoDB',
                           extend_existing=True)

    Index('ix_artifact_blobs_artifact_id',
          artifact_blobs.c.artifact_id)
    Index('ix_artifact_blobs_name',
          artifact_blobs.c.name)

    return artifact_blobs
def define_artifact_properties_table(meta):
    """Return the Table definition for typed artifact properties.

    A property value lives in one of the typed *_value columns;
    'position' is populated for elements of array-valued properties
    (see the models' Artifact.to_dict serialization).
    """
    artifact_properties = Table('artifact_properties',
                                meta,
                                Column('id', String(36),
                                       primary_key=True,
                                       nullable=False),
                                Column('artifact_id', String(36),
                                       ForeignKey('artifacts.id'),
                                       nullable=False),
                                Column('name', String(255),
                                       nullable=False),
                                Column('string_value', String(255)),
                                Column('int_value', Integer()),
                                Column('numeric_value', Numeric()),
                                Column('bool_value', Boolean()),
                                Column('text_value', Text()),
                                Column('created_at', DateTime(),
                                       nullable=False),
                                Column('updated_at', DateTime(),
                                       nullable=False),
                                Column('position', Integer()),
                                mysql_engine='InnoDB',
                                extend_existing=True)

    Index('ix_artifact_properties_artifact_id',
          artifact_properties.c.artifact_id)
    Index('ix_artifact_properties_name', artifact_properties.c.name)

    return artifact_properties
def define_artifact_blob_locations_table(meta):
    """Return the Table definition for blob storage locations."""
    artifact_blob_locations = Table('artifact_blob_locations',
                                    meta,
                                    Column('id', String(36),
                                           primary_key=True,
                                           nullable=False),
                                    Column('blob_id', String(36),
                                           ForeignKey('artifact_blobs.id'),
                                           nullable=False),
                                    # the location URI/value itself
                                    Column('value', Text(), nullable=False),
                                    Column('created_at', DateTime(),
                                           nullable=False),
                                    Column('updated_at', DateTime(),
                                           nullable=False),
                                    Column('position', Integer()),
                                    Column('status', String(36),
                                           nullable=True),
                                    mysql_engine='InnoDB',
                                    extend_existing=True)

    Index('ix_artifact_blob_locations_blob_id',
          artifact_blob_locations.c.blob_id)

    return artifact_blob_locations
def upgrade(migrate_engine):
    """Create all artifact-repository tables for this migration."""
    meta = MetaData()
    meta.bind = migrate_engine
    # order matters: dependency table references artifacts via FKs
    table_definitions = (define_artifacts_table,
                         define_artifact_tags_table,
                         define_artifact_properties_table,
                         define_artifact_blobs_table,
                         define_artifact_blob_locations_table,
                         define_artifact_dependencies_table)
    create_tables([define(meta) for define in table_definitions])

View File

@ -0,0 +1,336 @@
# Copyright (c) 2015 Mirantis, Inc.
#
# Licensed under the Apache License, Version 2.0 (the "License"); you may
# not use this file except in compliance with the License. You may obtain
# a copy of the License at
#
# http://www.apache.org/licenses/LICENSE-2.0
#
# Unless required by applicable law or agreed to in writing, software
# distributed under the License is distributed on an "AS IS" BASIS, WITHOUT
# WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. See the
# License for the specific language governing permissions and limitations
# under the License.
import uuid
from oslo_db.sqlalchemy import models
from oslo_utils import timeutils
from sqlalchemy import BigInteger
from sqlalchemy import Boolean
from sqlalchemy import Column
from sqlalchemy import DateTime
from sqlalchemy.ext import declarative
from sqlalchemy import ForeignKey
from sqlalchemy import Index
from sqlalchemy import Integer
from sqlalchemy import Numeric
from sqlalchemy.orm import backref
from sqlalchemy.orm import composite
from sqlalchemy.orm import relationship
from sqlalchemy import String
from sqlalchemy import Text
import glance.artifacts as ga
from glance.common import semver_db
from glance import i18n
from oslo_log import log as os_logging
BASE = declarative.declarative_base()
LOG = os_logging.getLogger(__name__)
_LW = i18n._LW
class ArtifactBase(models.ModelBase, models.TimestampMixin):
    """Base class for Artifact Models."""

    __table_args__ = {'mysql_engine': 'InnoDB'}
    __table_initialized__ = False
    # Timestamps are owned by the DB layer and must not be set by callers;
    # they are stripped from user-supplied values before writes.
    __protected_attributes__ = set([
        "created_at", "updated_at"])

    created_at = Column(DateTime, default=lambda: timeutils.utcnow(),
                        nullable=False)
    updated_at = Column(DateTime, default=lambda: timeutils.utcnow(),
                        nullable=False, onupdate=lambda: timeutils.utcnow())

    def save(self, session=None):
        """Persist this model, defaulting to the global DB session."""
        # imported locally to avoid a circular import with the api module
        from glance.db.sqlalchemy import api as db_api
        super(ArtifactBase, self).save(session or db_api.get_session())

    # dict-like views over the instance state
    def keys(self):
        return self.__dict__.keys()

    def values(self):
        return self.__dict__.values()

    def items(self):
        return self.__dict__.items()

    def to_dict(self):
        """Return a dict mapping column names to their current values."""
        d = {}
        for c in self.__table__.columns:
            d[c.name] = self[c.name]
        return d
def _parse_property_type_value(prop, show_text_properties=True):
columns = [
'int_value',
'string_value',
'bool_value',
'numeric_value']
if show_text_properties:
columns.append('text_value')
for prop_type in columns:
if getattr(prop, prop_type) is not None:
return prop_type.rpartition('_')[0], getattr(prop, prop_type)
return None, None
class Artifact(BASE, ArtifactBase):
    """ORM model for an artifact and its serialization to a dict."""

    __tablename__ = 'artifacts'
    __table_args__ = (
        Index('ix_artifact_name_and_version', 'name', 'version_prefix',
              'version_suffix'),
        Index('ix_artifact_type', 'type_name', 'type_version_prefix',
              'type_version_suffix'),
        Index('ix_artifact_state', 'state'),
        Index('ix_artifact_owner', 'owner'),
        Index('ix_artifact_visibility', 'visibility'),
        {'mysql_engine': 'InnoDB'})
    # publication/deletion timestamps are also managed by the DB layer
    __protected_attributes__ = ArtifactBase.__protected_attributes__.union(
        set(['published_at', 'deleted_at']))

    id = Column(String(36), primary_key=True,
                default=lambda: str(uuid.uuid4()))
    name = Column(String(255), nullable=False)
    type_name = Column(String(255), nullable=False)
    # semver values are split into prefix/suffix/meta columns and exposed
    # through DBVersion composites
    type_version_prefix = Column(BigInteger, nullable=False)
    type_version_suffix = Column(String(255))
    type_version_meta = Column(String(255))
    type_version = composite(semver_db.DBVersion, type_version_prefix,
                             type_version_suffix, type_version_meta)
    version_prefix = Column(BigInteger, nullable=False)
    version_suffix = Column(String(255))
    version_meta = Column(String(255))
    version = composite(semver_db.DBVersion, version_prefix,
                        version_suffix, version_meta)
    description = Column(Text)
    visibility = Column(String(32), nullable=False)
    state = Column(String(32), nullable=False)
    owner = Column(String(255), nullable=False)
    published_at = Column(DateTime)
    deleted_at = Column(DateTime)

    def to_dict(self, show_level=ga.Showlevel.BASIC,
                show_text_properties=True):
        """Serialize the artifact to a plain dict.

        With show_level NONE only the core fields and tags are returned;
        otherwise properties and blobs (with locations) are included.
        Text properties can be suppressed via show_text_properties.
        """
        d = super(Artifact, self).to_dict()
        # the split version columns are replaced by single semver strings
        for key in ('type_version_prefix', 'type_version_suffix',
                    'type_version_meta', 'version_prefix',
                    'version_suffix', 'version_meta'):
            d.pop(key)
        d['type_version'] = str(self.type_version)
        d['version'] = str(self.version)
        d['tags'] = [tag.value for tag in self.tags]
        if show_level == ga.Showlevel.NONE:
            return d

        properties = {}
        # sort properties so array elements come out in position order
        self.properties.sort(key=lambda elem: (elem.name, elem.position))
        for prop in self.properties:
            proptype, propvalue = _parse_property_type_value(
                prop, show_text_properties)
            if proptype is None:
                continue
            if prop.position is not None:
                # positional property: collect values into an array entry
                arr = properties.setdefault(prop.name,
                                            dict(type='array', value=[]))
                arr['value'].append(dict(type=proptype, value=propvalue))
            else:
                # scalar property
                properties[prop.name] = dict(type=proptype, value=propvalue)
        d['properties'] = properties

        blobs = {}
        # sort blobs (and each blob's locations) by position
        self.blobs.sort(key=lambda elem: elem.position)
        for blob in self.blobs:
            blob.locations.sort(key=lambda elem: elem.position)
            locations = [dict(value=loc.value, status=loc.status)
                         for loc in blob.locations]
            blobs.setdefault(blob.name, []).append(
                dict(size=blob.size,
                     checksum=blob.checksum,
                     locations=locations,
                     item_key=blob.item_key))
        d['blobs'] = blobs
        return d
class ArtifactDependency(BASE, ArtifactBase):
    """Directed dependency edge between two artifacts.

    artifact_source depends on artifact_dest; artifact_origin points at
    the artifact whose dependency set this row belongs to. is_direct
    presumably marks direct (vs. transitive) edges -- confirm against
    the DB API code that writes these rows.
    """
    __tablename__ = 'artifact_dependencies'
    __table_args__ = (Index('ix_artifact_dependencies_source_id',
                            'artifact_source'),
                      Index('ix_artifact_dependencies_origin_id',
                            'artifact_origin'),
                      Index('ix_artifact_dependencies_dest_id',
                            'artifact_dest'),
                      Index('ix_artifact_dependencies_direct_dependencies',
                            'artifact_source', 'is_direct'),
                      {'mysql_engine': 'InnoDB'})

    id = Column(String(36), primary_key=True, nullable=False,
                default=lambda: str(uuid.uuid4()))
    artifact_source = Column(String(36), ForeignKey('artifacts.id'),
                             nullable=False)
    artifact_dest = Column(String(36), ForeignKey('artifacts.id'),
                           nullable=False)
    artifact_origin = Column(String(36), ForeignKey('artifacts.id'),
                             nullable=False)
    is_direct = Column(Boolean, nullable=False)
    position = Column(Integer)
    name = Column(String(36))

    # deleting a source artifact cascades to its dependency rows
    source = relationship('Artifact',
                          backref=backref('dependencies', cascade="all, "
                                                                  "delete"),
                          foreign_keys="ArtifactDependency.artifact_source")
    dest = relationship('Artifact',
                        foreign_keys="ArtifactDependency.artifact_dest")
    origin = relationship('Artifact',
                          foreign_keys="ArtifactDependency.artifact_origin")
class ArtifactTag(BASE, ArtifactBase):
    """Free-form tag value attached to an artifact."""
    __tablename__ = 'artifact_tags'
    __table_args__ = (Index('ix_artifact_tags_artifact_id', 'artifact_id'),
                      Index('ix_artifact_tags_artifact_id_tag_value',
                            'artifact_id', 'value'),
                      {'mysql_engine': 'InnoDB'},)

    id = Column(String(36), primary_key=True, nullable=False,
                default=lambda: str(uuid.uuid4()))
    artifact_id = Column(String(36), ForeignKey('artifacts.id'),
                         nullable=False)
    # tags are removed together with their owning artifact
    artifact = relationship(Artifact,
                            backref=backref('tags',
                                            cascade="all, delete-orphan"))
    value = Column(String(255), nullable=False)
class ArtifactProperty(BASE, ArtifactBase):
    """Typed name/value property of an artifact.

    The value occupies one of the typed *_value columns; serialization
    picks the first non-None column (see _parse_property_type_value).
    'position' is non-None for elements of array-valued properties.
    """
    __tablename__ = 'artifact_properties'
    __table_args__ = (
        Index('ix_artifact_properties_artifact_id', 'artifact_id'),
        Index('ix_artifact_properties_name', 'name'),
        {'mysql_engine': 'InnoDB'},)

    id = Column(String(36), primary_key=True, nullable=False,
                default=lambda: str(uuid.uuid4()))
    artifact_id = Column(String(36), ForeignKey('artifacts.id'),
                         nullable=False)
    artifact = relationship(Artifact,
                            backref=backref('properties',
                                            cascade="all, delete-orphan"))
    name = Column(String(255), nullable=False)
    string_value = Column(String(255))
    int_value = Column(Integer)
    numeric_value = Column(Numeric)
    bool_value = Column(Boolean)
    text_value = Column(Text)
    position = Column(Integer)
class ArtifactBlob(BASE, ArtifactBase):
    """Metadata record for a binary blob attached to an artifact."""
    __tablename__ = 'artifact_blobs'
    __table_args__ = (
        Index('ix_artifact_blobs_artifact_id', 'artifact_id'),
        Index('ix_artifact_blobs_name', 'name'),
        {'mysql_engine': 'InnoDB'},)

    id = Column(String(36), primary_key=True, nullable=False,
                default=lambda: str(uuid.uuid4()))
    artifact_id = Column(String(36), ForeignKey('artifacts.id'),
                         nullable=False)
    name = Column(String(255), nullable=False)
    item_key = Column(String(329))
    size = Column(BigInteger(), nullable=False)
    checksum = Column(String(32))
    # ordering index among blobs sharing the same name
    position = Column(Integer)
    artifact = relationship(Artifact,
                            backref=backref('blobs',
                                            cascade="all, delete-orphan"))
class ArtifactBlobLocation(BASE, ArtifactBase):
    """Storage location value for an artifact blob."""
    __tablename__ = 'artifact_blob_locations'
    __table_args__ = (Index('ix_artifact_blob_locations_blob_id',
                            'blob_id'),
                      {'mysql_engine': 'InnoDB'})

    id = Column(String(36), primary_key=True, nullable=False,
                default=lambda: str(uuid.uuid4()))
    blob_id = Column(String(36), ForeignKey('artifact_blobs.id'),
                     nullable=False)
    value = Column(Text, nullable=False)
    position = Column(Integer)
    status = Column(String(36), default='active', nullable=True)
    # locations are removed together with their owning blob
    blob = relationship(ArtifactBlob,
                        backref=backref('locations',
                                        cascade="all, delete-orphan"))
def register_models(engine):
    """Create database tables for all artifact models on *engine*."""
    # creation order: referenced tables before the tables referencing them
    for model in (Artifact, ArtifactTag, ArtifactProperty,
                  ArtifactBlob, ArtifactBlobLocation, ArtifactDependency):
        model.metadata.create_all(engine)
def unregister_models(engine):
    """Drop database tables for all artifact models on *engine*."""
    # drop order is the reverse of creation to respect FK constraints
    for model in (ArtifactDependency, ArtifactBlobLocation, ArtifactBlob,
                  ArtifactProperty, ArtifactTag, Artifact):
        model.metadata.drop_all(engine)

View File

@ -0,0 +1,905 @@
# Copyright (c) 2015 Mirantis, Inc.
#
# Licensed under the Apache License, Version 2.0 (the "License"); you may
# not use this file except in compliance with the License. You may obtain
# a copy of the License at
#
# http://www.apache.org/licenses/LICENSE-2.0
#
# Unless required by applicable law or agreed to in writing, software
# distributed under the License is distributed on an "AS IS" BASIS, WITHOUT
# WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. See the
# License for the specific language governing permissions and limitations
# under the License.
import uuid
import glance.artifacts as ga
from glance.common import exception as exc
from glance import context
import glance.tests.functional.db as db_tests
from glance.tests import utils as test_utils
UUID1, UUID2 = ('80cc6551-9db4-42aa-bb58-51c48757f285',
'f89c675a-e01c-436c-a384-7d2e784fb2d9')
TYPE_NAME = u'TestArtifactType'
TYPE_VERSION = u'1.0.0'
class ArtifactsTestDriver(test_utils.BaseTestCase):
    """Base test driver: wires up the DB API and seeds two artifacts."""

    def setUp(self):
        super(ArtifactsTestDriver, self).setUp()
        context_cls = context.RequestContext
        # one admin and one plain-user context for permission scenarios
        self.adm_context = context_cls(is_admin=True,
                                       auth_token='user:user:admin',
                                       tenant='admin-tenant')
        self.context = context_cls(is_admin=False,
                                   auth_token='user:user:user',
                                   tenant='test-tenant')
        self.db_api = db_tests.get_db(self.config)
        db_tests.reset_db(self.db_api)
        self.create_test_artifacts()

    def create_test_artifacts(self):
        """Create two public artifacts; the second depends on the first."""
        dependency = {'2->1': [UUID1]}
        self.db_api.artifact_create(self.adm_context,
                                    get_fixture(id=UUID1,
                                                name="TestArtifact1",
                                                visibility="public"),
                                    TYPE_NAME,
                                    TYPE_VERSION)
        self.db_api.artifact_create(self.adm_context,
                                    get_fixture(id=UUID2,
                                                name="TestArtifact2",
                                                visibility="public",
                                                dependencies=dependency),
                                    TYPE_NAME,
                                    TYPE_VERSION)
        # cached for tests that compare timestamps of the seeded rows
        self.art1 = self.db_api.artifact_get(self.context, UUID1, TYPE_NAME,
                                             TYPE_VERSION)
        self.art2 = self.db_api.artifact_get(self.context, UUID2, TYPE_NAME,
                                             TYPE_VERSION)
class ArtifactTests(object):
def test_artifact_create(self):
    """Creating an artifact echoes back its name/type fields."""
    artifact = get_fixture()
    created = self.db_api.artifact_create(self.context, artifact,
                                          TYPE_NAME, TYPE_VERSION)
    self.assertIsNotNone(created)
    self.assertEqual(artifact['name'], created['name'])
    self.assertEqual(artifact['type_name'], created['type_name'])
    self.assertEqual(artifact['type_version'], created['type_version'])
def test_artifact_create_none_valued_props(self):
    """Properties with a None value are dropped on create."""
    artifact = get_fixture()
    artifact['properties']['lylyly'] = dict(value=None, type='int')
    artifact['properties']['hihihi'] = dict(value=5, type='int')
    created = self.db_api.artifact_create(self.context, artifact,
                                          TYPE_NAME, TYPE_VERSION)
    self.assertIsNotNone(created)
    self.assertIn('hihihi', created['properties'])
    self.assertNotIn('lylyly', created['properties'])
def test_artifact_update(self):
    """A field update changes the value and bumps updated_at."""
    fixture = {'name': 'UpdatedName'}
    updated = self.db_api.artifact_update(self.context, fixture, UUID1,
                                          TYPE_NAME, TYPE_VERSION)
    self.assertIsNotNone(updated)
    self.assertEqual('UpdatedName', updated['name'])
    self.assertNotEqual(updated['created_at'], updated['updated_at'])
def test_artifact_create_same_version_different_users(self):
    """Different tenants may own artifacts with identical versions."""
    tenant1 = str(uuid.uuid4())
    tenant2 = str(uuid.uuid4())
    ctx1 = context.RequestContext(is_admin=False, tenant=tenant1)
    ctx2 = context.RequestContext(is_admin=False, tenant=tenant2)
    artifact1 = get_fixture(owner=tenant1)
    artifact2 = get_fixture(owner=tenant2)
    self.db_api.artifact_create(ctx1, artifact1,
                                TYPE_NAME, TYPE_VERSION)
    # the second create must not raise a duplicate error
    self.assertIsNotNone(
        self.db_api.artifact_create(ctx2, artifact2,
                                    TYPE_NAME, TYPE_VERSION))
def test_artifact_create_same_version_deleted(self):
    """A deleted artifact's version can be reused, repeatedly."""
    artifact1 = get_fixture()
    artifact2 = get_fixture(state='deleted')
    artifact3 = get_fixture(state='deleted')
    self.db_api.artifact_create(self.context, artifact1,
                                TYPE_NAME, TYPE_VERSION)
    self.assertIsNotNone(
        self.db_api.artifact_create(self.context, artifact2,
                                    TYPE_NAME, TYPE_VERSION))
    self.assertIsNotNone(
        self.db_api.artifact_create(self.context, artifact3,
                                    TYPE_NAME, TYPE_VERSION))
def test_artifact_get(self):
    """Fetching by id returns the seeded artifact's fields."""
    res = self.db_api.artifact_get(self.context, UUID1,
                                   TYPE_NAME, TYPE_VERSION)
    self.assertEqual('TestArtifact1', res['name'])
    self.assertEqual('TestArtifactType', res['type_name'])
    self.assertEqual('1.0.0', res['type_version'])
    self.assertEqual('10.0.3-alpha+some-date', res['version'])
    self.assertEqual('creating', res['state'])
    self.assertEqual('test-tenant', res['owner'])
def test_artifact_get_owned(self):
    """A non-public artifact is readable by its owner only."""
    tenant1 = str(uuid.uuid4())
    tenant2 = str(uuid.uuid4())
    ctx1 = context.RequestContext(is_admin=False, tenant=tenant1)
    ctx2 = context.RequestContext(is_admin=False, tenant=tenant2)
    artifact = get_fixture(owner=tenant1)
    created = self.db_api.artifact_create(ctx1, artifact,
                                          TYPE_NAME, TYPE_VERSION)
    self.assertIsNotNone(self.db_api.artifact_get(ctx1, created['id'],
                                                  TYPE_NAME, TYPE_VERSION))
    # another tenant is rejected with ArtifactForbidden
    self.assertRaises(exc.ArtifactForbidden, self.db_api.artifact_get,
                      ctx2, created['id'], TYPE_NAME, TYPE_VERSION)
def test_artifact_get_public(self):
    """A public artifact is readable by any tenant."""
    tenant1 = str(uuid.uuid4())
    tenant2 = str(uuid.uuid4())
    ctx1 = context.RequestContext(is_admin=False, tenant=tenant1)
    ctx2 = context.RequestContext(is_admin=False, tenant=tenant2)
    artifact = get_fixture(owner=tenant1, visibility='public')
    created = self.db_api.artifact_create(ctx1, artifact,
                                          TYPE_NAME, TYPE_VERSION)
    self.assertIsNotNone(self.db_api.artifact_get(ctx1, created['id'],
                                                  TYPE_NAME, TYPE_VERSION))
    self.assertIsNotNone(self.db_api.artifact_get(ctx2, created['id'],
                                                  TYPE_NAME, TYPE_VERSION))
def test_artifact_update_state(self):
    """Only legal state transitions are accepted.

    Path exercised: creating -> active -> deactivated -> active ->
    deleted; transitions outside that order raise
    InvalidArtifactStateTransition, and 'deleted' is terminal.
    """
    res = self.db_api.artifact_update(self.context, {'state': 'active'},
                                      UUID1, TYPE_NAME, TYPE_VERSION)
    self.assertEqual('active', res['state'])
    # cannot go back to 'creating' once activated
    self.assertRaises(exc.InvalidArtifactStateTransition,
                      self.db_api.artifact_update, self.context,
                      {'state': 'creating'}, UUID1,
                      TYPE_NAME, TYPE_VERSION)
    res = self.db_api.artifact_update(self.context,
                                      {'state': 'deactivated'}, UUID1,
                                      TYPE_NAME, TYPE_VERSION)
    self.assertEqual('deactivated', res['state'])
    res = self.db_api.artifact_update(self.context, {'state': 'active'},
                                      UUID1, TYPE_NAME, TYPE_VERSION)
    self.assertEqual('active', res['state'])
    res = self.db_api.artifact_update(self.context, {'state': 'deleted'},
                                      UUID1, TYPE_NAME, TYPE_VERSION)
    self.assertEqual('deleted', res['state'])
    # 'deleted' is terminal: no transition out of it is allowed
    self.assertRaises(exc.InvalidArtifactStateTransition,
                      self.db_api.artifact_update, self.context,
                      {'state': 'active'}, UUID1,
                      TYPE_NAME, TYPE_VERSION)
    self.assertRaises(exc.InvalidArtifactStateTransition,
                      self.db_api.artifact_update, self.context,
                      {'state': 'deactivated'}, UUID1,
                      TYPE_NAME, TYPE_VERSION)
    self.assertRaises(exc.InvalidArtifactStateTransition,
                      self.db_api.artifact_update, self.context,
                      {'state': 'creating'}, UUID1,
                      TYPE_NAME, TYPE_VERSION)
def test_artifact_update_tags(self):
    """Updating tags leaves exactly the supplied tag set."""
    res = self.db_api.artifact_update(self.context,
                                      {'tags': ['gagaga', 'lalala']},
                                      UUID1, TYPE_NAME, TYPE_VERSION)
    self.assertEqual(set(['gagaga', 'lalala']), set(res['tags']))
def test_artifact_update_properties(self):
    """An update stores every supplied property under its name."""
    new_properties = {'properties': {
        'propname1': {
            'type': 'string',
            'value': 'qeqeqe'},
        'propname2': {
            'type': 'int',
            'value': 6},
        'propname3': {
            'type': 'int',
            'value': '5'},
        'proparray': {
            'type': 'string',
            'value': 'notarray'
        }}
    }
    res = self.db_api.artifact_update(self.context,
                                      new_properties,
                                      UUID1, TYPE_NAME, TYPE_VERSION)
    bd_properties = res['properties']
    self.assertEqual(4, len(bd_properties))
    for prop in bd_properties:
        self.assertIn(prop, new_properties['properties'])
def test_artifact_update_blobs(self):
    """An update stores every supplied blob under its name."""
    new_blobs = {'blobs': {
        'blob1': [{
            'size': 2600000,
            'checksum': 'abc',
            'item_key': 'some',
            'locations': [
                {'value': 'URL11',
                 'status': 'active'},
                {'value': 'URL12',
                 'status': 'active'}]
        }, {
            'size': 200000,
            'checksum': 'abc',
            'item_key': 'some',
            'locations': [
                {'value': 'newURL21',
                 'status': 'active'},
                {'value': 'URL22',
                 'status': 'passive'}]
        }
        ],
        'blob2': [{
            'size': 120000,
            'checksum': 'abc',
            'item_key': 'some',
            'locations': [
                {'value': 'URL21',
                 'status': 'active'},
                {'value': 'URL22',
                 'status': 'active'}]
        }, {
            'size': 300000,
            'checksum': 'abc',
            'item_key': 'some',
            'locations': [
                {'value': 'URL21',
                 'status': 'active'},
                {'value': 'bl1URL2',
                 'status': 'passive'}]
        }
        ]
    }
    }
    res = self.db_api.artifact_update(self.context,
                                      new_blobs,
                                      UUID1, TYPE_NAME, TYPE_VERSION)
    bd_blobs = res['blobs']
    self.assertEqual(2, len(bd_blobs))
    for blob in bd_blobs:
        self.assertIn(blob, new_blobs['blobs'])
def test_artifact_create_with_dependency(self):
    """Dependencies can be set at create time and extended on update."""
    dependencies = {"new->2": [UUID2]}
    artifact = get_fixture(dependencies=dependencies)
    res = self.db_api.artifact_create(self.context, artifact,
                                      TYPE_NAME, TYPE_VERSION)
    self.assertIsNotNone(res)
    created = self.db_api. \
        artifact_get(self.context, res['id'], TYPE_NAME, TYPE_VERSION,
                     show_level=ga.Showlevel.DIRECT)
    bd_dependencies = created['dependencies']
    self.assertEqual(1, len(bd_dependencies))
    # now try to update artifact with the same dependency
    new_dependencies = {"dependencies": {"new->2": [UUID2],
                                         "new->3": [UUID2]}}
    res = self.db_api.artifact_update(self.context,
                                      new_dependencies,
                                      UUID1, TYPE_NAME, TYPE_VERSION)
    retrieved = self.db_api.artifact_get(
        self.context, res['id'],
        TYPE_NAME, TYPE_VERSION, show_level=ga.Showlevel.DIRECT)
    self.assertEqual(2, len(retrieved["dependencies"]))
def test_artifact_create_transitive_dependencies(self):
    """Publish an artifact and read it back at TRANSITIVE show level.

    Verifies that the full chain new -> TestArtifact2 -> TestArtifact1
    is serialized, including properties, blobs and tags at every level.
    """
    def _expected_properties():
        # every fixture carries the same property set
        return {
            'propname1': {'type': 'string', 'value': 'tututu'},
            'propname2': {'type': 'int', 'value': 5},
            'propname3': {'type': 'string', 'value': 'vavava'},
            'proparray': {'type': 'array',
                          'value': [{'type': 'int', 'value': 6},
                                    {'type': 'string',
                                     'value': 'rerere'}]},
        }

    def _expected_blobs():
        # every fixture carries the same pair of blobs under 'blob1'
        return {
            'blob1': [{'size': 1600000,
                       'checksum': 'abc',
                       'item_key': 'some',
                       'locations': [{'value': 'URL11',
                                      'status': 'active'},
                                     {'value': 'URL12',
                                      'status': 'active'}]},
                      {'size': 100000,
                       'checksum': 'abc',
                       'item_key': 'some',
                       'locations': [{'value': 'URL21',
                                      'status': 'active'},
                                     {'value': 'URL22',
                                      'status': 'active'}]}],
        }

    dependencies = {"new->2": [UUID2]}
    artifact = get_fixture(dependencies=dependencies, id='new')
    res = self.db_api.artifact_create(self.context, artifact,
                                      TYPE_NAME, TYPE_VERSION)
    self.assertIsNotNone(res)
    created = self.db_api.artifact_get(self.context, res['id'], TYPE_NAME,
                                       TYPE_VERSION,
                                       show_level=ga.Showlevel.DIRECT)
    bd_dependencies = created['dependencies']
    self.assertEqual(1, len(bd_dependencies))

    res = self.db_api.artifact_publish(
        self.context,
        res['id'], TYPE_NAME, TYPE_VERSION
    )
    res = self.db_api.artifact_get(self.context, res['id'], TYPE_NAME,
                                   TYPE_VERSION,
                                   show_level=ga.Showlevel.TRANSITIVE)
    self.assertIsNotNone(res.pop('created_at'))
    self.assertIsNotNone(res.pop('updated_at'))

    # NOTE(mfedosin): tags is a set, so we have to check it separately
    tags = res.pop('tags', None)
    self.assertIsNotNone(tags)
    self.assertEqual(set(['gugugu', 'lalala']), set(tags))
    tags = res['dependencies']['new->2'][0].pop('tags', None)
    self.assertIsNotNone(tags)
    self.assertEqual(set(['gugugu', 'lalala']), set(tags))
    tags = res['dependencies']['new->2'][0]['dependencies']['2->1'][0] \
        .pop('tags', None)
    self.assertIsNotNone(tags)
    self.assertEqual(set(['gugugu', 'lalala']), set(tags))

    artifact1 = {
        'id': UUID1,
        'created_at': self.art1['created_at'],
        'updated_at': self.art1['updated_at'],
        'published_at': None,
        'deleted_at': None,
        'dependencies': {},
        'name': u'TestArtifact1',
        'description': None,
        'type_name': TYPE_NAME,
        'type_version': TYPE_VERSION,
        'version': u'10.0.3-alpha+some-date',
        'visibility': 'public',
        'state': u'creating',
        'owner': u'test-tenant',
        'properties': _expected_properties(),
        'blobs': _expected_blobs(),
    }
    artifact2 = {
        'id': UUID2,
        'created_at': self.art2['created_at'],
        'updated_at': self.art2['updated_at'],
        'published_at': None,
        'deleted_at': None,
        'name': u'TestArtifact2',
        'description': None,
        'type_name': TYPE_NAME,
        'type_version': TYPE_VERSION,
        'version': u'10.0.3-alpha+some-date',
        'visibility': 'public',
        'state': u'creating',
        'owner': u'test-tenant',
        'properties': _expected_properties(),
        'blobs': _expected_blobs(),
        'dependencies': {'2->1': [artifact1]},
    }
    expected = {
        'id': 'new',
        'name': u'SomeArtifact',
        'description': None,
        'type_name': TYPE_NAME,
        'type_version': TYPE_VERSION,
        'version': u'10.0.3-alpha+some-date',
        'visibility': u'private',
        'state': u'active',
        'owner': u'test-tenant',
        'published_at': None,
        'deleted_at': None,
        'properties': _expected_properties(),
        'blobs': _expected_blobs(),
        'dependencies': {'new->2': [artifact2]},
    }

    self.assertIsNotNone(res['published_at'])
    expected['published_at'] = res['published_at']
    # FIX: dict.iteritems() is Python 2 only and the paired value was
    # never used; iterating the keys works on both Python 2 and 3
    for key in expected:
        self.assertEqual(expected[key], res[key])
def test_artifact_get_all(self):
    """Creating one more artifact raises the listing count to three."""
    extra = get_fixture(name='new_artifact')
    self.db_api.artifact_create(self.context, extra,
                                TYPE_NAME, TYPE_VERSION)
    listed = self.db_api.artifact_get_all(self.context)
    self.assertEqual(3, len(listed))
def test_artifact_sort_order(self):
    """Artifacts sort by full semantic-version precedence rules."""
    arts = [get_fixture(version='1.2.3-alpha.4.df.00f'),
            get_fixture(version='1.2.2'),
            get_fixture(version='1.2.3+some-metadata'),
            get_fixture(version='1.2.4'),
            get_fixture(version='1.2.3-release.2'),
            get_fixture(version='1.2.3-release.1+metadata'),
            get_fixture(version='1.2.3-final'),
            get_fixture(version='1.2.3-alpha.14.df.00f')]
    for art in arts:
        self.db_api.artifact_create(self.context, art, TYPE_NAME,
                                    TYPE_VERSION)
    artifacts = self.db_api.artifact_get_all(self.context,
                                             sort_keys=[('version',
                                                         None)],
                                             sort_dirs=['asc'])
    expected_versions = [
        '1.2.2',
        '1.2.3-alpha.4.df.00f',
        '1.2.3-alpha.14.df.00f',
        '1.2.3-final',
        '1.2.3-release.1+metadata',
        '1.2.3-release.2',
        '1.2.3+some-metadata',
        '1.2.4']
    # FIX: xrange() is Python 2 only; enumerate works on both and
    # reads better than index arithmetic
    for i, expected in enumerate(expected_versions):
        self.assertEqual(expected, artifacts[i]['version'])
def test_artifact_get_all_show_level(self):
    """get_all honors the BASIC show level and rejects DIRECT."""
    artifacts = self.db_api.artifact_get_all(self.context)
    self.assertEqual(2, len(artifacts))
    # default listing carries no 'properties' key at all
    self.assertRaises(KeyError, lambda: artifacts[0]['properties'])
    artifacts = self.db_api. \
        artifact_get_all(self.context,
                         show_level=ga.Showlevel.BASIC)
    self.assertEqual(2, len(artifacts))
    self.assertEqual(4, len(artifacts[0]['properties']))
    # listings cannot resolve dependencies: DIRECT is unsupported here
    self.assertRaises(exc.ArtifactUnsupportedShowLevel,
                      self.db_api.artifact_get_all, self.context,
                      show_level=ga.Showlevel.DIRECT)
def test_artifact_get_all_tags(self):
    """Listings can be filtered by tag value."""
    artifact = get_fixture(name='new_artifact',
                           tags=['qwerty', 'uiop'])
    self.db_api.artifact_create(self.context, artifact,
                                TYPE_NAME, TYPE_VERSION)
    artifacts = self.db_api.artifact_get_all(self.context)
    self.assertEqual(3, len(artifacts))
    # no artifact carries this tag
    filters = {'tags': {
        'value': ['notag'],
    }}
    artifacts = self.db_api.artifact_get_all(self.context, filters=filters)
    self.assertEqual(0, len(artifacts))
    # the two seeded artifacts match (their fixture tags include 'lalala')
    filters = {'tags': {
        'value': ['lalala'],
    }}
    artifacts = self.db_api.artifact_get_all(self.context, filters=filters)
    self.assertEqual(2, len(artifacts))
    for artifact in artifacts:
        self.assertIn(artifact['name'], ['TestArtifact1', 'TestArtifact2'])
    def test_artifact_get_all_properties(self):
        """Check filtering of artifact_get_all by typed property values."""
        # Create an artifact whose properties partially overlap the two
        # pre-created fixtures (which both carry propname2 == 5).
        artifact = get_fixture(
            name='new_artifact',
            properties={
                'newprop2': {
                    'type': 'string',
                    'value': 'tututu'},
                'propname2': {
                    'type': 'int',
                    'value': 3},
                'propname3': {
                    'type': 'string',
                    'value': 'vavava'},
                'proptext': {
                    'type': 'text',
                    'value': 'bebebe' * 100},
                'proparray': {
                    'type': 'array',
                    'value': [
                        {'type': 'int',
                         'value': 17},
                        {'type': 'string',
                         'value': 'rerere'}
                    ]
                }})
        self.db_api.artifact_create(self.context, artifact,
                                    TYPE_NAME, TYPE_VERSION)

        # GT filter: only the two fixtures with propname2 == 5 match
        # (the new artifact has propname2 == 3).
        filters = {'propname2': {
            'value': 4,
            'operator': 'GT',
            'type': 'int'}}
        artifacts = self.db_api.artifact_get_all(self.context, filters=filters)
        self.assertEqual(2, len(artifacts))
        for artifact in artifacts:
            self.assertIn(artifact['name'], ['TestArtifact1', 'TestArtifact2'])

        # position hasn't been set
        filters = {'proparray': {
            'value': 6,
            'operator': 'LE',
            'type': 'int'}}
        artifacts = self.db_api.artifact_get_all(self.context, filters=filters)
        self.assertEqual(0, len(artifacts))
        for artifact in artifacts:
            self.assertIn(artifact['name'], ['TestArtifact1', 'TestArtifact2'])

        # position has been set
        filters = {'proparray': {
            'value': 6,
            'position': 0,
            'operator': 'LE',
            'type': 'int'}}
        artifacts = self.db_api.artifact_get_all(self.context, filters=filters)
        self.assertEqual(2, len(artifacts))
        for artifact in artifacts:
            self.assertIn(artifact['name'], ['TestArtifact1', 'TestArtifact2'])

        # IN operator matches without an explicit position.
        filters = {'proparray': {
            'value': 6,
            'operator': 'IN',
            'type': 'int'}}
        artifacts = self.db_api.artifact_get_all(self.context, filters=filters)
        self.assertEqual(2, len(artifacts))
        for artifact in artifacts:
            self.assertIn(artifact['name'], ['TestArtifact1', 'TestArtifact2'])

        # At BASIC show level the 'proptext' (text-typed) property is
        # not returned with the artifact.
        filters = {'name': {'value': 'new_artifact'}}
        artifacts = self.db_api.artifact_get_all(self.context,
                                                 filters=filters,
                                                 show_level=ga.Showlevel.BASIC)
        self.assertEqual(1, len(artifacts))
        artifact = artifacts[0]
        self.assertEqual('new_artifact', artifact['name'])
        for prop in artifact['properties'].keys():
            self.assertNotEqual('proptext', prop)

        # Unknown operators are rejected with a specific exception.
        filters = {'propname2': {
            'value': 4,
            'operator': 'FOO',
            'type': 'int'}}
        self.assertRaises(
            exc.ArtifactUnsupportedPropertyOperator,
            self.db_api.artifact_get_all, self.context, filters=filters)
def test_artifact_delete(self):
res = self.db_api.artifact_delete(self.context, UUID1,
TYPE_NAME, TYPE_VERSION)
self.assertEqual('TestArtifact1', res['name'])
self.assertEqual('deleted', res['state'])
self.assertIsNotNone(res['deleted_at'])
artifacts = self.db_api.artifact_get_all(self.context)
self.assertEqual(1, len(artifacts))
def test_artifact_delete_property(self):
new_properties = {'properties': {
'proparray': {'value': [],
'type': 'array'}
}
}
res = self.db_api.artifact_update(self.context,
new_properties,
UUID1, TYPE_NAME, TYPE_VERSION)
bd_properties = res['properties']
self.assertEqual(3, len(bd_properties))
expected = {
'propname1': {
'type': 'string',
'value': 'tututu'},
'propname2': {
'type': 'int',
'value': 5},
'propname3': {
'type': 'string',
'value': 'vavava'}
}
for prop in bd_properties:
self.assertIn(prop, expected)
    def test_artifact_delete_blob(self):
        """Check that updating a blob name to an empty list deletes it.

        First adds 'blob2'/'blob3' via artifact_update (keeping the
        fixture's 'blob1'), then deletes 'blob1' by updating it to [].
        """
        # Blobs to add on top of the fixture's original 'blob1'.
        new_blobs = {'blobs': {
            'blob2': [{
                'size': 2600000,
                'checksum': 'abc',
                'item_key': 'some',
                'locations': [
                    {'value': 'URL11',
                     'status': 'active'},
                    {'value': 'URL12',
                     'status': 'active'}]
            }, {
                'size': 200000,
                'checksum': 'abc',
                'item_key': 'some',
                'locations': [
                    {'value': 'newURL21',
                     'status': 'active'},
                    {'value': 'URL22',
                     'status': 'passive'}]
            }
            ],
            'blob3': [{
                'size': 120000,
                'checksum': 'abc',
                'item_key': 'some',
                'locations': [
                    {'value': 'URL21',
                     'status': 'active'},
                    {'value': 'URL22',
                     'status': 'active'}]
            }, {
                'size': 300000,
                'checksum': 'abc',
                'item_key': 'some',
                'locations': [
                    {'value': 'URL21',
                     'status': 'active'},
                    {'value': 'bl1URL2',
                     'status': 'passive'}]
            }
            ]
        }
        }
        # After the first update all three blobs are expected: the
        # fixture's 'blob1' plus the two added above.
        expected = {'blobs': {
            'blob1': [{
                'size': 1600000,
                'checksum': 'abc',
                'item_key': 'some',
                'locations': [
                    {'value': 'URL11',
                     'status': 'active'},
                    {'value': 'URL12',
                     'status': 'active'}]
            }, {
                'size': 100000,
                'checksum': 'abc',
                'item_key': 'some',
                'locations': [
                    {'value': 'URL21',
                     'status': 'active'},
                    {'value': 'URL22',
                     'status': 'active'}]
            }
            ],
            'blob2': [{
                'size': 2600000,
                'checksum': 'abc',
                'item_key': 'some',
                'locations': [
                    {'value': 'URL11',
                     'status': 'active'},
                    {'value': 'URL12',
                     'status': 'active'}]
            }, {
                'size': 200000,
                'checksum': 'abc',
                'item_key': 'some',
                'locations': [
                    {'value': 'newURL21',
                     'status': 'active'},
                    {'value': 'URL22',
                     'status': 'passive'}]
            }
            ],
            'blob3': [{
                'size': 120000,
                'checksum': 'abc',
                'item_key': 'some',
                'locations': [
                    {'value': 'URL21',
                     'status': 'active'},
                    {'value': 'URL22',
                     'status': 'active'}]
            }, {
                'size': 300000,
                'checksum': 'abc',
                'item_key': 'some',
                'locations': [
                    {'value': 'URL21',
                     'status': 'active'},
                    {'value': 'bl1URL2',
                     'status': 'passive'}]
            }
            ]
        }
        }
        res = self.db_api.artifact_update(self.context,
                                          new_blobs,
                                          UUID1, TYPE_NAME, TYPE_VERSION)
        bd_blobs = res['blobs']
        self.assertEqual(3, len(bd_blobs))
        for blob in bd_blobs:
            self.assertIn(blob, expected['blobs'])

        # An empty list for 'blob1' means "delete this blob".
        del_blobs = {'blobs': {
            'blob1': []}
        }
        res = self.db_api.artifact_update(self.context,
                                          del_blobs,
                                          UUID1, TYPE_NAME, TYPE_VERSION)
        bd_blobs = res['blobs']
        self.assertEqual(2, len(bd_blobs))
        for blob in bd_blobs:
            self.assertIn(blob, new_blobs['blobs'])
def get_fixture(**kwargs):
    """Build a plain-dict artifact fixture for the DB API tests.

    Keyword arguments override the corresponding default fields, e.g.
    ``get_fixture(version='1.2.3')`` or ``get_fixture(tags=[...])``.
    """
    default_properties = {
        'propname1': {
            'type': 'string',
            'value': 'tututu'},
        'propname2': {
            'type': 'int',
            'value': 5},
        'propname3': {
            'type': 'string',
            'value': 'vavava'},
        'proparray': {
            'type': 'array',
            'value': [
                {'type': 'int',
                 'value': 6},
                {'type': 'string',
                 'value': 'rerere'}
            ]
        }
    }
    default_blobs = {
        'blob1': [{
            'size': 1600000,
            'checksum': 'abc',
            'item_key': 'some',
            'locations': [
                {'value': 'URL11',
                 'status': 'active'},
                {'value': 'URL12',
                 'status': 'active'}]
        }, {
            'size': 100000,
            'checksum': 'abc',
            'item_key': 'some',
            'locations': [
                {'value': 'URL21',
                 'status': 'active'},
                {'value': 'URL22',
                 'status': 'active'}]
        }
        ]
    }
    fixture = {
        'name': u'SomeArtifact',
        'type_name': TYPE_NAME,
        'type_version': TYPE_VERSION,
        'version': u'10.0.3-alpha+some-date',
        'visibility': u'private',
        'state': u'creating',
        'owner': u'test-tenant',
        'tags': ['lalala', 'gugugu'],
        'properties': default_properties,
        'blobs': default_blobs,
    }
    fixture.update(kwargs)
    return fixture

View File

@ -20,9 +20,11 @@ from oslo_db import options
from glance.common import exception
import glance.db.sqlalchemy.api
from glance.db.sqlalchemy import models as db_models
from glance.db.sqlalchemy import models_artifacts as artifact_models
from glance.db.sqlalchemy import models_metadef as metadef_models
import glance.tests.functional.db as db_tests
from glance.tests.functional.db import base
from glance.tests.functional.db import base_artifacts
from glance.tests.functional.db import base_metadef
CONF = cfg.CONF
@ -45,6 +47,11 @@ def reset_db_metadef(db_api):
metadef_models.register_models(db_api.get_engine())
def reset_db_artifacts(db_api):
    """Drop and re-create the artifact tables for a clean test run."""
    artifact_models.unregister_models(db_api.get_engine())
    artifact_models.register_models(db_api.get_engine())
class TestSqlAlchemyDriver(base.TestDriver,
base.DriverTests,
base.FunctionalInitWrapper):
@ -153,6 +160,14 @@ class TestSqlAlchemyQuota(base.DriverQuotaTests,
self.addCleanup(db_tests.reset)
class TestArtifacts(base_artifacts.ArtifactsTestDriver,
                    base_artifacts.ArtifactTests):
    # Runs the shared artifact DB API tests against the SQLAlchemy driver.
    def setUp(self):
        # Point the shared tests at this driver and ensure the artifact
        # tables are re-created before, and reset after, each test.
        db_tests.load(get_db, reset_db_artifacts)
        super(TestArtifacts, self).setUp()
        self.addCleanup(db_tests.reset)
class TestMetadefSqlAlchemyDriver(base_metadef.TestMetadefDriver,
base_metadef.MetadefDriverTests,
base.FunctionalInitWrapper):

View File

@ -51,7 +51,9 @@ from glance.db import migration
from glance.db.sqlalchemy import migrate_repo
from glance.db.sqlalchemy.migrate_repo.schema import from_migration_import
from glance.db.sqlalchemy import models
from glance.db.sqlalchemy import models_artifacts
from glance.db.sqlalchemy import models_metadef
from glance import i18n
_ = i18n._
@ -1495,6 +1497,142 @@ class MigrationsMixin(test_migrations.WalkVersionsMixin):
self.assertFalse(index_exist('namespace_id',
metadef_tags.name, engine))
    def _pre_upgrade_041(self, engine):
        """Verify that none of the artifact tables exist before migration 041."""
        self.assertRaises(sqlalchemy.exc.NoSuchTableError,
                          db_utils.get_table, engine,
                          'artifacts')
        self.assertRaises(sqlalchemy.exc.NoSuchTableError,
                          db_utils.get_table, engine,
                          'artifact_tags')
        self.assertRaises(sqlalchemy.exc.NoSuchTableError,
                          db_utils.get_table, engine,
                          'artifact_properties')
        self.assertRaises(sqlalchemy.exc.NoSuchTableError,
                          db_utils.get_table, engine,
                          'artifact_blobs')
        self.assertRaises(sqlalchemy.exc.NoSuchTableError,
                          db_utils.get_table, engine,
                          'artifact_dependencies')
        # NOTE(review): the migration creates 'artifact_blob_locations';
        # 'artifact_locations' here looks like a different name — confirm.
        self.assertRaises(sqlalchemy.exc.NoSuchTableError,
                          db_utils.get_table, engine,
                          'artifact_locations')
    def _check_041(self, engine, data):
        """Verify the schema created by migration 041 (artifact tables).

        Each table is checked for an exact set of indices and columns via
        ``assert_table``.
        """
        # -- artifacts ----------------------------------------------------
        artifacts_indices = [('ix_artifact_name_and_version',
                              ['name', 'version_prefix', 'version_suffix']),
                             ('ix_artifact_type',
                              ['type_name',
                               'type_version_prefix',
                               'type_version_suffix']),
                             ('ix_artifact_state', ['state']),
                             ('ix_artifact_visibility', ['visibility']),
                             ('ix_artifact_owner', ['owner'])]
        artifacts_columns = ['id',
                             'name',
                             'type_name',
                             'type_version_prefix',
                             'type_version_suffix',
                             'type_version_meta',
                             'version_prefix',
                             'version_suffix',
                             'version_meta',
                             'description',
                             'visibility',
                             'state',
                             'owner',
                             'created_at',
                             'updated_at',
                             'deleted_at',
                             'published_at']
        self.assert_table(engine, 'artifacts', artifacts_indices,
                          artifacts_columns)

        # -- artifact_tags ------------------------------------------------
        tags_indices = [('ix_artifact_tags_artifact_id', ['artifact_id']),
                        ('ix_artifact_tags_artifact_id_tag_value',
                         ['artifact_id',
                          'value'])]
        tags_columns = ['id',
                        'artifact_id',
                        'value',
                        'created_at',
                        'updated_at']
        self.assert_table(engine, 'artifact_tags', tags_indices, tags_columns)

        # -- artifact_properties ------------------------------------------
        prop_indices = [
            ('ix_artifact_properties_artifact_id', ['artifact_id']),
            ('ix_artifact_properties_name', ['name'])]
        prop_columns = ['id',
                        'artifact_id',
                        'name',
                        'string_value',
                        'int_value',
                        'numeric_value',
                        'bool_value',
                        'text_value',
                        'created_at',
                        'updated_at',
                        'position']
        self.assert_table(engine, 'artifact_properties', prop_indices,
                          prop_columns)

        # -- artifact_blobs -----------------------------------------------
        blobs_indices = [
            ('ix_artifact_blobs_artifact_id', ['artifact_id']),
            ('ix_artifact_blobs_name', ['name'])]
        blobs_columns = ['id',
                         'artifact_id',
                         'size',
                         'checksum',
                         'name',
                         'item_key',
                         'position',
                         'created_at',
                         'updated_at']
        self.assert_table(engine, 'artifact_blobs', blobs_indices,
                          blobs_columns)

        # -- artifact_dependencies ----------------------------------------
        dependencies_indices = [
            ('ix_artifact_dependencies_source_id', ['artifact_source']),
            ('ix_artifact_dependencies_direct_dependencies',
             ['artifact_source', 'is_direct']),
            ('ix_artifact_dependencies_dest_id', ['artifact_dest']),
            ('ix_artifact_dependencies_origin_id', ['artifact_origin'])]
        dependencies_columns = ['id',
                                'artifact_source',
                                'artifact_dest',
                                'artifact_origin',
                                'is_direct',
                                'position',
                                'name',
                                'created_at',
                                'updated_at']
        self.assert_table(engine, 'artifact_dependencies',
                          dependencies_indices,
                          dependencies_columns)

        # -- artifact_blob_locations --------------------------------------
        locations_indices = [
            ('ix_artifact_blob_locations_blob_id', ['blob_id'])]
        locations_columns = ['id',
                             'blob_id',
                             'value',
                             'created_at',
                             'updated_at',
                             'position',
                             'status']
        self.assert_table(engine, 'artifact_blob_locations', locations_indices,
                          locations_columns)
    def assert_table(self, engine, table_name, indices, columns):
        """Assert that a table has exactly the given indices and columns.

        :param engine: SQLAlchemy engine to reflect the table from
        :param table_name: name of the table to inspect
        :param indices: list of (index_name, [column_name, ...]) pairs
        :param columns: list of expected column names
        """
        table = db_utils.get_table(engine, table_name)
        index_data = [(index.name, index.columns.keys()) for index in
                      table.indexes]
        column_data = [column.name for column in table.columns]
        # instead of calling assertItemsEqual, which is not present in py26
        # asserting equality of lengths and sorted collections
        self.assertEqual(len(columns), len(column_data))
        self.assertEqual(sorted(columns), sorted(column_data))
        self.assertEqual(len(indices), len(index_data))
        self.assertEqual(sorted(indices), sorted(index_data))
class TestMysqlMigrations(test_base.MySQLOpportunisticTestCase,
MigrationsMixin):
@ -1537,6 +1675,8 @@ class ModelsMigrationSyncMixin(object):
    def get_metadata(self):
        """Merge metadef and artifact tables into the main models metadata.

        Folds every table from the metadef and artifact declarative bases
        into ``models.BASE.metadata`` so callers see one combined metadata
        object covering all tables.
        """
        for table in models_metadef.BASE_DICT.metadata.sorted_tables:
            models.BASE.metadata._add_table(table.name, table.schema, table)
        for table in models_artifacts.BASE.metadata.sorted_tables:
            models.BASE.metadata._add_table(table.name, table.schema, table)
        return models.BASE.metadata
def get_engine(self):