Plugin link handlers now return 409 on existing URL

The previous behaviour allowed multiple links with the same URL to be
created, which happened when plugin tasks were re-run.

Now a 409 error is returned when a plugin- or cluster-level link is
created or updated with a non-unique URL value.
Plugin developers should handle this exception themselves
in their plugin tasks.

DocImpact
Closes-Bug: #1538209

Change-Id: I1de1e9f3957cbe11e91cd8cd4806ac1cd714d630
Ilya Kutukov 2016-01-28 19:49:06 +03:00
parent f38238175f
commit dcd4757e24
12 changed files with 517 additions and 45 deletions
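Since link-creating plugin tasks that are re-run will now receive a 409,
a minimal sketch of the handling plugin developers are expected to add is
shown below (the endpoint path, the use of the requests library, and the
helper name are illustrative assumptions, not part of this change):

import requests

NAILGUN_API = 'http://10.20.0.2:8000/api/v1'  # assumed master node API root

def ensure_cluster_plugin_link(cluster_id, title, url):
    """Create a cluster plugin link, tolerating task re-runs."""
    resp = requests.post(
        '{0}/clusters/{1}/plugin_links'.format(NAILGUN_API, cluster_id),
        json={'title': title, 'url': url})
    if resp.status_code == 409:
        # The link already exists from a previous run; treat it as success.
        return None
    resp.raise_for_status()
    return resp.json()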

View File

@@ -16,14 +16,14 @@
from nailgun.api.v1.handlers import base
from nailgun.api.v1.handlers.base import content
from nailgun.api.v1.validators import plugin_link
from nailgun.api.v1.validators import cluster_plugin_link
from nailgun import errors
from nailgun import objects
class ClusterPluginLinkHandler(base.SingleHandler):
validator = plugin_link.PluginLinkValidator
validator = cluster_plugin_link.ClusterPluginLinkValidator
single = objects.ClusterPluginLink
def GET(self, cluster_id, obj_id):
@@ -46,7 +46,6 @@ class ClusterPluginLinkHandler(base.SingleHandler):
* 404 (object not found in db)
"""
obj = self.get_object_or_404(self.single, obj_id)
data = self.checked_data(
self.validator.validate_update,
instance=obj
@@ -78,7 +77,7 @@ class ClusterPluginLinkHandler(base.SingleHandler):
class ClusterPluginLinkCollectionHandler(base.CollectionHandler):
collection = objects.ClusterPluginLinkCollection
validator = plugin_link.PluginLinkValidator
validator = cluster_plugin_link.ClusterPluginLinkValidator
@content
def GET(self, cluster_id):
@@ -99,7 +98,7 @@ class ClusterPluginLinkCollectionHandler(base.CollectionHandler):
:http: * 201 (object successfully created)
* 400 (invalid object data specified)
"""
data = self.checked_data()
data = self.checked_data(cluster_id=cluster_id)
try:
new_obj = self.collection.create_with_cluster_id(data, cluster_id)

View File

@@ -56,8 +56,7 @@ class PluginLinkHandler(base.SingleHandler):
obj = self._get_plugin_link_object(plugin_id, obj_id)
data = self.checked_data(
self.validator.validate_update,
instance=obj
)
instance=obj)
self.single.update(obj, data)
return self.single.to_json(obj)

View File

@@ -0,0 +1,57 @@
# -*- coding: utf-8 -*-
# Copyright 2016 Mirantis, Inc.
#
# Licensed under the Apache License, Version 2.0 (the "License"); you may
# not use this file except in compliance with the License. You may obtain
# a copy of the License at
#
# http://www.apache.org/licenses/LICENSE-2.0
#
# Unless required by applicable law or agreed to in writing, software
# distributed under the License is distributed on an "AS IS" BASIS, WITHOUT
# WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. See the
# License for the specific language governing permissions and limitations
# under the License.
from nailgun.api.v1.validators.base import BasicValidator
from nailgun.api.v1.validators.json_schema import plugin_link
from nailgun import errors
from nailgun import objects
class ClusterPluginLinkValidator(BasicValidator):
collection_schema = plugin_link.PLUGIN_LINKS_SCHEMA
@classmethod
def validate(cls, data, **kwargs):
parsed = super(ClusterPluginLinkValidator, cls).validate(data)
cls.validate_schema(parsed, plugin_link.PLUGIN_LINK_SCHEMA)
if objects.ClusterPluginLinkCollection.filter_by(
None,
url=parsed['url'],
cluster_id=kwargs['cluster_id']
).first():
raise errors.AlreadyExists(
"Cluster plugin link with URL {0} and cluster ID={1} already "
"exists".format(parsed['url'], kwargs['cluster_id']),
log_message=True)
return parsed
@classmethod
def validate_update(cls, data, instance):
parsed = super(ClusterPluginLinkValidator, cls).validate(data)
cls.validate_schema(parsed, plugin_link.PLUGIN_LINK_UPDATE_SCHEMA)
cluster_id = parsed.get('cluster_id', instance.cluster_id)
if objects.ClusterPluginLinkCollection.filter_by_not(
objects.ClusterPluginLinkCollection.filter_by(
None,
url=parsed.get('url', instance.url),
cluster_id=cluster_id,
),
id=instance.id
).first():
raise errors.AlreadyExists(
"Cluster plugin link with URL {0} and cluster ID={1} already "
"exists".format(parsed['url'], cluster_id),
log_message=True)
return parsed

View File

@@ -13,9 +13,10 @@
# License for the specific language governing permissions and limitations
# under the License.
from nailgun.api.v1.validators.base import BasicValidator
from nailgun.api.v1.validators.json_schema import plugin_link
from nailgun import errors
from nailgun import objects
class PluginLinkValidator(BasicValidator):
@@ -28,17 +29,29 @@ class PluginLinkValidator(BasicValidator):
parsed,
plugin_link.PLUGIN_LINK_SCHEMA
)
if objects.PluginLinkCollection.filter_by(
None,
url=parsed['url']
).first():
raise errors.AlreadyExists(
"Plugin link with URL {0} already exists".format(
parsed['url']),
log_message=True)
return parsed
@classmethod
def validate_update(cls, data, instance):
parsed = super(PluginLinkValidator, cls).validate(data)
cls.validate_schema(
parsed,
plugin_link.PLUGIN_LINK_UPDATE_SCHEMA
)
cls.validate_schema(parsed, plugin_link.PLUGIN_LINK_UPDATE_SCHEMA)
if objects.PluginLinkCollection.filter_by_not(
objects.PluginLinkCollection.filter_by(
None,
url=parsed.get('url', instance.url)
),
id=instance.id
).first():
raise errors.AlreadyExists(
"Plugin link with URL {0} already exists".format(
parsed.get('url', instance.url)),
log_message=True)
return parsed
@classmethod
def validate_create(cls, data):
return cls.validate(data)

View File

@@ -20,14 +20,63 @@ Create Date: 2016-04-08 15:20:43.989472
"""
from alembic import op
import sqlalchemy as sa
# revision identifiers, used by Alembic.
revision = 'c6edea552f1e'
down_revision = '675105097a69'
def upgrade():
pass
upgrade_plugin_links_constraints()
def downgrade():
pass
downgrade_plugin_links_constraints()
def upgrade_plugin_links_constraints():
connection = op.get_bind()
# plugin links
plugin_links_remove_duplicates_query = sa.text("""
DELETE FROM plugin_links
WHERE id
NOT IN (
SELECT MIN(id)
FROM plugin_links
GROUP BY url
)
""")
connection.execute(plugin_links_remove_duplicates_query)
op.create_unique_constraint(
'plugin_links_url_uc',
'plugin_links',
['url'])
# cluster plugin links
cluster_plugin_links_remove_duplicates_query = sa.text("""
DELETE FROM cluster_plugin_links
WHERE id
NOT IN (
SELECT MIN(id)
FROM cluster_plugin_links
GROUP BY cluster_id,url
)
""")
connection.execute(cluster_plugin_links_remove_duplicates_query)
op.create_unique_constraint(
'cluster_plugin_links_cluster_id_url_uc',
'cluster_plugin_links',
['cluster_id', 'url'])
def downgrade_plugin_links_constraints():
op.drop_constraint('cluster_plugin_links_cluster_id_url_uc',
'cluster_plugin_links')
op.drop_constraint('plugin_links_url_uc', 'plugin_links')

View File

@@ -14,24 +14,26 @@
# License for the specific language governing permissions and limitations
# under the License.
from sqlalchemy import Boolean
from sqlalchemy import Column
from sqlalchemy import ForeignKey
from sqlalchemy import Integer
from sqlalchemy import Text
import sqlalchemy as sa
from nailgun.db.sqlalchemy.models.base import Base
class ClusterPluginLink(Base):
__tablename__ = 'cluster_plugin_links'
id = Column(Integer, primary_key=True)
cluster_id = Column(
Integer,
ForeignKey('clusters.id', ondelete='CASCADE'),
__table_args__ = (
sa.UniqueConstraint(
'cluster_id',
'url',
name='cluster_plugin_links_cluster_id_url_uc'),
)
id = sa.Column(sa.Integer, primary_key=True)
cluster_id = sa.Column(
sa.Integer,
sa.ForeignKey('clusters.id', ondelete='CASCADE'),
nullable=False
)
title = Column(Text, nullable=False)
url = Column(Text, nullable=False)
description = Column(Text)
hidden = Column(Boolean, default=False)
title = sa.Column(sa.Text, nullable=False)
url = sa.Column(sa.Text, nullable=False)
description = sa.Column(sa.Text)
hidden = sa.Column(sa.Boolean, default=False)

View File

@@ -14,24 +14,20 @@
# License for the specific language governing permissions and limitations
# under the License.
from sqlalchemy import Boolean
from sqlalchemy import Column
from sqlalchemy import ForeignKey
from sqlalchemy import Integer
from sqlalchemy import Text
import sqlalchemy as sa
from nailgun.db.sqlalchemy.models.base import Base
class PluginLink(Base):
__tablename__ = 'plugin_links'
id = Column(Integer, primary_key=True)
plugin_id = Column(
Integer,
ForeignKey('plugins.id', ondelete='CASCADE'),
id = sa.Column(sa.Integer, primary_key=True)
plugin_id = sa.Column(
sa.Integer,
sa.ForeignKey('plugins.id', ondelete='CASCADE'),
nullable=False
)
title = Column(Text, nullable=False)
url = Column(Text, nullable=False)
description = Column(Text)
hidden = Column(Boolean, default=False)
title = sa.Column(sa.Text, nullable=False)
url = sa.Column(sa.Text, nullable=False, unique=True)
description = sa.Column(sa.Text)
hidden = sa.Column(sa.Boolean, default=False)

View File

@@ -27,6 +27,10 @@ class TestAssignmentHandlers(BaseIntegrationTest):
cluster_kwargs={"api": True},
nodes_kwargs=[{}]
)
self.second_cluster = self.env.create(
cluster_kwargs={"api": True},
nodes_kwargs=[{}]
)
self.link_data = {
'title': 'test title',
'url': 'http://test.com/url',
@@ -65,6 +69,45 @@ class TestAssignmentHandlers(BaseIntegrationTest):
plugin_link.description
)
def test_cluster_plugin_link_creation_fail_duplicate(self):
self.env.create_cluster_plugin_link(
cluster_id=self.cluster['id'],
url='http://uniq1.com'
)
resp = self.app.post(
reverse(
'ClusterPluginLinkCollectionHandler',
kwargs={'cluster_id': self.cluster['id']}
),
params=jsonutils.dumps({
'title': 'My Plugin',
'url': 'http://uniq1.com'
}),
headers=self.default_headers,
expect_errors=True
)
self.assertEqual(409, resp.status_code)
def test_creation_not_fail_on_duplicate_in_different_clusters(self):
resp = self.app.post(
reverse(
'ClusterPluginLinkCollectionHandler',
kwargs={'cluster_id': self.cluster['id']}
),
params=jsonutils.dumps(self.link_data),
headers=self.default_headers
)
self.assertEqual(201, resp.status_code)
resp = self.app.post(
reverse(
'ClusterPluginLinkCollectionHandler',
kwargs={'cluster_id': self.second_cluster['id']}
),
params=jsonutils.dumps(self.link_data),
headers=self.default_headers
)
self.assertEqual(201, resp.status_code)
def test_cluster_plugin_link_fail_creation(self):
resp = self.app.post(
reverse(

View File

@@ -79,6 +79,50 @@ class TestHandlers(BaseIntegrationTest):
)
self.assertEqual(404, resp.status_code)
def test_cluster_plugin_link_fail_duplicate(self):
self.env.create_cluster_plugin_link(
cluster_id=self.cluster.id,
url='http://uniq1.com'
)
existing_plugin_link2 = self.env.create_cluster_plugin_link(
cluster_id=self.cluster.id,
url='http://uniq2.com'
)
resp = self.app.put(
reverse(
'ClusterPluginLinkHandler',
kwargs={'cluster_id': self.cluster['id'],
'obj_id': existing_plugin_link2.id}
),
jsonutils.dumps({'url': 'http://uniq1.com'}),
headers=self.default_headers,
expect_errors=True
)
self.assertEqual(409, resp.status_code)
def test_plugin_link_update_with_same_url_ok(self):
existing_plugin_link = self.env.create_cluster_plugin_link(
cluster_id=self.cluster.id,
url='http://uniq1.com'
)
resp = self.app.put(
reverse(
'ClusterPluginLinkHandler',
kwargs={'cluster_id': self.cluster.id,
'obj_id': existing_plugin_link.id}
),
jsonutils.dumps({
'url': 'http://uniq1.com',
'title': 'new name'
}),
headers=self.default_headers
)
self.assertEqual(200, resp.status_code)
def test_cluster_plugin_link_delete(self):
resp = self.app.delete(
reverse(

View File

@@ -62,6 +62,27 @@ class TestAssignmentHandlers(BaseIntegrationTest):
plugin_link.description
)
def test_plugin_link_creation_fail_duplicate(self):
self.env.create_plugin_link(
plugin_id=self.plugin.id,
url='http://uniq1.com'
)
resp = self.app.post(
reverse(
'PluginLinkCollectionHandler',
kwargs={
'plugin_id': self.plugin['id']
}
),
params=jsonutils.dumps({
'title': 'My Plugin',
'url': 'http://uniq1.com'
}),
headers=self.default_headers,
expect_errors=True
)
self.assertEqual(409, resp.status_code)
def test_plugin_link_fail_creation(self):
resp = self.app.post(
reverse(

View File

@@ -80,6 +80,50 @@ class TestHandlers(BaseIntegrationTest):
)
self.assertEqual(404, resp.status_code)
def test_plugin_link_fail_duplicate(self):
self.env.create_plugin_link(
plugin_id=self.plugin.id,
url='http://uniq1.com'
)
existing_plugin_link2 = self.env.create_plugin_link(
plugin_id=self.plugin.id,
url='http://uniq2.com'
)
resp = self.app.put(
reverse(
'PluginLinkHandler',
kwargs={'plugin_id': self.plugin['id'],
'obj_id': existing_plugin_link2.id}
),
jsonutils.dumps({'url': 'http://uniq1.com'}),
headers=self.default_headers,
expect_errors=True
)
self.assertEqual(409, resp.status_code)
def test_plugin_link_update_with_same_url_ok(self):
existing_plugin_link = self.env.create_plugin_link(
plugin_id=self.plugin.id,
url='http://uniq1.com'
)
resp = self.app.put(
reverse(
'PluginLinkHandler',
kwargs={'plugin_id': self.plugin['id'],
'obj_id': existing_plugin_link.id}
),
jsonutils.dumps({
'url': 'http://uniq1.com',
'title': 'new name'
}),
headers=self.default_headers
)
self.assertEqual(200, resp.status_code)
def test_plugin_link_delete(self):
resp = self.app.delete(
reverse(

View File

@@ -13,13 +13,46 @@
# under the License.
import alembic
from oslo_serialization import jsonutils
import sqlalchemy as sa
from nailgun.db import db
from nailgun.db import dropdb
from nailgun.db.migration import ALEMBIC_CONFIG
from nailgun.test import base
_prepare_revision = '675105097a69'
_test_revision = 'c6edea552f1e'
JSON_TASKS = [
{
'id': 'post_deployment_end',
'type': 'stage',
'requires': ['post_deployment_start']
},
{
'id': 'primary-controller',
'parameters': {'strategy': {'type': 'one_by_one'}},
'required_for': ['deploy_end'],
'requires': ['deploy_start'],
'role': ['primary-controller'], # legacy notation should be converted
# to `roles`
'type': 'group'
},
{
'id': 'cross-dep-test',
'type': 'puppet',
'cross-depended-by': ['a', 'b'],
'cross-depends': ['c', 'd']
},
{
'id': 'custom-test',
'type': 'puppet',
'test_pre': {'k': 'v'},
'test_post': {'k': 'v'}
}
]
def setup_module():
dropdb()
@@ -29,4 +62,176 @@ def setup_module():
def prepare():
pass
meta = base.reflect_db_metadata()
result = db.execute(
meta.tables['releases'].insert(),
[{
'name': 'test_name',
'version': '2015.1-8.0',
'operating_system': 'ubuntu',
'state': 'available',
}])
release_id = result.inserted_primary_key[0]
cluster_ids = []
for cluster_name in ['test_env1', 'test_env2']:
result = db.execute(
meta.tables['clusters'].insert(),
[{
'name': cluster_name,
'release_id': release_id,
'mode': 'ha_compact',
'status': 'new',
'net_provider': 'neutron',
'grouping': 'roles',
'fuel_version': '10.0'
}])
cluster_ids.append(result.inserted_primary_key[0])
result = db.execute(
meta.tables['plugins'].insert(),
[{
'name': 'test_plugin_a',
'title': 'Test plugin A',
'version': '2.0.0',
'description': 'Test plugin A for Fuel',
'homepage': 'http://fuel_plugins.test_plugin.com',
'package_version': '4.0.0',
'groups': jsonutils.dumps(['tgroup']),
'authors': jsonutils.dumps(['tauthor']),
'licenses': jsonutils.dumps(['tlicense']),
'releases': jsonutils.dumps([
{'repository_path': 'repositories/ubuntu'}
]),
'deployment_tasks': jsonutils.dumps(JSON_TASKS),
'fuel_version': jsonutils.dumps(['8.0']),
'network_roles_metadata': jsonutils.dumps([{
'id': 'admin/vip',
'default_mapping': 'fuelweb_admin',
'properties': {
'subnet': True,
'gateway': False,
'vip': [
{
'name': 'my-vip1',
'namespace': 'my-namespace1',
},
{
'name': 'my-vip2',
'namespace': 'my-namespace2',
}
]
}
}])
}]
)
plugin_a_id = result.inserted_primary_key[0]
result = db.execute(
meta.tables['plugins'].insert(),
[{
'name': 'test_plugin_b',
'title': 'Test plugin B',
'version': '2.0.0',
'description': 'Test plugin B for Fuel',
'homepage': 'http://fuel_plugins.test_plugin.com',
'package_version': '4.0.0',
'groups': jsonutils.dumps(['tgroup']),
'authors': jsonutils.dumps(['tauthor']),
'licenses': jsonutils.dumps(['tlicense']),
'releases': jsonutils.dumps([
{'repository_path': 'repositories/ubuntu'}
]),
'fuel_version': jsonutils.dumps(['8.0']),
'network_roles_metadata': jsonutils.dumps([{
'id': 'admin/vip',
'default_mapping': 'fuelweb_admin',
'properties': {
'subnet': True,
'gateway': False,
'vip': [
{
'name': 'my-vip3',
'namespace': 'my-namespace3',
},
{
'name': 'my-vip4',
'namespace': 'my-namespace4',
}
]
}
}])
}]
)
plugin_b_id = result.inserted_primary_key[0]
db.execute(
meta.tables['cluster_plugin_links'].insert(),
[
{
'cluster_id': cluster_ids[0],
'title': 'title',
'url': 'http://www.zzz.com',
'description': 'description',
'hidden': False
},
# same URL but in a different cluster; should survive the migration
{
'cluster_id': cluster_ids[1],
'title': 'title',
'url': 'http://www.zzz.com',
'description': 'description_duplicate',
'hidden': False
},
# duplicate of the first entry (same cluster and URL); should be
# deleted during migration
{
'cluster_id': cluster_ids[0],
'title': 'title',
'url': 'http://www.zzz.com',
'description': 'description',
'hidden': False
}
]
)
db.execute(
meta.tables['plugin_links'].insert(),
[
{
'plugin_id': plugin_a_id,
'title': 'title',
'url': 'http://www.zzz.com',
'description': 'description',
'hidden': False
},
# this is a duplicate by URL, should be deleted during migration
{
'plugin_id': plugin_b_id,
'title': 'title',
'url': 'http://www.zzz.com',
'description': 'description_duplicate',
'hidden': False
}
]
)
db.commit()
class TestPluginLinksConstraints(base.BaseAlembicMigrationTest):
# see initial data in setup section
def test_plugin_links_duplicate_cleanup(self):
links_count = db.execute(
sa.select(
[sa.func.count(self.meta.tables['plugin_links'].c.id)]
)).fetchone()[0]
self.assertEqual(links_count, 1)
def test_cluster_plugin_links_duplicate_cleanup(self):
links_count = db.execute(
sa.select(
[sa.func.count(self.meta.tables['cluster_plugin_links'].c.id)]
)).fetchone()[0]
self.assertEqual(links_count, 2)