diff --git a/.zuul.yaml b/.zuul.yaml
index 07c6316d34..acbf02ec95 100644
--- a/.zuul.yaml
+++ b/.zuul.yaml
@@ -93,6 +93,7 @@
 - openstack-infra/devstack-gate
 - openstack/manila
 - openstack/manila-image-elements
+ - openstack/manila-tempest-plugin
 - openstack/python-manilaclient
 - openstack/tempest
@@ -106,6 +107,7 @@
 - openstack-infra/devstack-gate
 - openstack/manila
 - openstack/manila-image-elements
+ - openstack/manila-tempest-plugin
 - openstack/python-manilaclient
 - openstack/tempest
@@ -119,6 +121,7 @@
 - openstack-infra/devstack-gate
 - openstack/manila
 - openstack/manila-image-elements
+ - openstack/manila-tempest-plugin
 - openstack/python-manilaclient
 - openstack/tempest
@@ -132,6 +135,7 @@
 - openstack-infra/devstack-gate
 - openstack/devstack-plugin-glusterfs
 - openstack/manila
+ - openstack/manila-tempest-plugin
 - openstack/python-manilaclient
 - openstack/tempest
@@ -145,6 +149,7 @@
 - openstack-infra/devstack-gate
 - openstack/devstack-plugin-glusterfs
 - openstack/manila
+ - openstack/manila-tempest-plugin
 - openstack/python-manilaclient
 - openstack/tempest
@@ -158,6 +163,7 @@
 - openstack-infra/devstack-gate
 - openstack/devstack-plugin-glusterfs
 - openstack/manila
+ - openstack/manila-tempest-plugin
 - openstack/python-manilaclient
 - openstack/tempest
@@ -171,6 +177,7 @@
 - openstack-infra/devstack-gate
 - openstack/devstack-plugin-glusterfs
 - openstack/manila
+ - openstack/manila-tempest-plugin
 - openstack/python-manilaclient
 - openstack/tempest
@@ -184,6 +191,7 @@
 - openstack-infra/devstack-gate
 - openstack/devstack-plugin-hdfs
 - openstack/manila
+ - openstack/manila-tempest-plugin
 - openstack/python-manilaclient
 - openstack/tempest
@@ -197,6 +205,7 @@
 - openstack-infra/devstack-gate
 - openstack/manila
 - openstack/manila-image-elements
+ - openstack/manila-tempest-plugin
 - openstack/python-manilaclient
 - openstack/tempest
@@ -210,6 +219,7 @@
 - openstack-infra/devstack-gate
 - openstack/manila
 - openstack/manila-image-elements
+ - openstack/manila-tempest-plugin
 - openstack/python-manilaclient
 - openstack/tempest
@@ -223,6 +233,7 @@
 - openstack-infra/devstack-gate
 - openstack/manila
 - openstack/manila-image-elements
+ - openstack/manila-tempest-plugin
 - openstack/python-manilaclient
 - openstack/tempest
@@ -236,6 +247,7 @@
 - openstack-infra/devstack-gate
 - openstack/manila
 - openstack/manila-image-elements
+ - openstack/manila-tempest-plugin
 - openstack/python-manilaclient
 - openstack/tempest
@@ -249,6 +261,7 @@
 - openstack-infra/devstack-gate
 - openstack/manila
 - openstack/manila-image-elements
+ - openstack/manila-tempest-plugin
 - openstack/python-manilaclient
 - openstack/tempest
@@ -263,6 +276,7 @@
 - openstack-infra/devstack-gate
 - openstack/devstack-plugin-ceph
 - openstack/manila
+ - openstack/manila-tempest-plugin
 - openstack/python-manilaclient
 - openstack/tempest
@@ -277,6 +291,7 @@
 - openstack-infra/devstack-gate
 - openstack/devstack-plugin-ceph
 - openstack/manila
+ - openstack/manila-tempest-plugin
 - openstack/python-manilaclient
 - openstack/tempest
@@ -289,6 +304,7 @@
 required-projects:
 - openstack-infra/devstack-gate
 - openstack/manila
+ - openstack/manila-tempest-plugin
 - openstack/python-manilaclient
 - openstack/tempest
@@ -302,6 +318,7 @@
 required-projects:
 - openstack-infra/devstack-gate
 - openstack/manila
+ - openstack/manila-tempest-plugin
 - openstack/python-manilaclient
 - openstack/tempest
@@ -316,6 +333,7 @@
 - openstack-infra/devstack-gate
 - openstack/devstack-plugin-ceph
 - openstack/manila
+ - openstack/manila-tempest-plugin
 - openstack/python-manilaclient
 - openstack/tempest
@@ -330,6 +348,7 @@
 - openstack-infra/devstack-gate
 - openstack/devstack-plugin-ceph
 - openstack/manila
+ - openstack/manila-tempest-plugin
 - openstack/python-manilaclient
 - openstack/tempest
diff --git a/contrib/ci/post_test_hook.sh b/contrib/ci/post_test_hook.sh
index 02a2e06612..b7188c8fbf 100755
--- a/contrib/ci/post_test_hook.sh
+++ b/contrib/ci/post_test_hook.sh
@@ -329,7 +329,7 @@ for port in ${UDP_PORTS[*]}; do
 done
 echo "Running tempest manila test suites"
-sudo -H -u $USER tox -eall-plugin $MANILA_TESTS -- --concurrency=$MANILA_TEMPEST_CONCURRENCY
+sudo -H -u $USER tox -eall -- $MANILA_TESTS --concurrency=$MANILA_TEMPEST_CONCURRENCY
 RETVAL=$?
@@ -351,7 +351,7 @@ if [[ "$DRIVER" == "dummy" ]]; then
 manila type-key default set driver_handles_share_servers=False
 echo "Running tempest manila test suites for DHSS=False mode"
- sudo -H -u $USER tox -eall-plugin $MANILA_TESTS -- --concurrency=$MANILA_TEMPEST_CONCURRENCY
+ sudo -H -u $USER tox -eall -- $MANILA_TESTS --concurrency=$MANILA_TEMPEST_CONCURRENCY
 RETVAL2=$?
 save_tempest_results 2
diff --git a/devstack/settings b/devstack/settings
index 8e06673bb6..c9fd8cb4b2 100644
--- a/devstack/settings
+++ b/devstack/settings
@@ -192,6 +192,13 @@ MANILA_DATA_NODE_IP=${MANILA_DATA_NODE_IP:=$MANILA_ADMIN_NET_RANGE}
 # Data Service copy validation
 MANILA_DATA_COPY_CHECK_HASH=${MANILA_DATA_COPY_CHECK_HASH:=True}
+MANILA_TEMPEST_PLUGIN_PATH=$DEST/manila-tempest-plugin
+if [[ $TEMPEST_PLUGINS != 0 ]] ; then
+ TEMPEST_PLUGINS="$MANILA_TEMPEST_PLUGIN_PATH $TEMPEST_PLUGINS"
+else
+ TEMPEST_PLUGINS=$MANILA_TEMPEST_PLUGIN_PATH
+fi
+
 # Enable manila services
 # ----------------------
 # We have to add Manila to enabled services for screen_it to work
diff --git a/manila_tempest_tests/README.rst b/manila_tempest_tests/README.rst
deleted file mode 100644
index 7569d9610e..0000000000
--- a/manila_tempest_tests/README.rst
+++ /dev/null
@@ -1,6 +0,0 @@
-====================
-Tempest Integration
-====================
-
-This directory contains Tempest tests to cover Manila project.
-
diff --git a/manila_tempest_tests/__init__.py b/manila_tempest_tests/__init__.py
deleted file mode 100644
index e69de29bb2..0000000000
diff --git a/manila_tempest_tests/clients.py b/manila_tempest_tests/clients.py
deleted file mode 100644
index c49b7e809e..0000000000
--- a/manila_tempest_tests/clients.py
+++ /dev/null
@@ -1,32 +0,0 @@
-# Copyright (c) 2016 Hewlett-Packard Enterprise Development Company, L.P.
-#
-# Licensed under the Apache License, Version 2.0 (the "License"); you may not
-# use this file except in compliance with the License. You may obtain a copy of
-# the License at
-#
-# http://www.apache.org/licenses/LICENSE-2.0
-#
-# Unless required by applicable law or agreed to in writing, software
-# distributed under the License is distributed on an "AS IS" BASIS, WITHOUT
-# WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. See the
-# License for the specific language governing permissions and limitations under
-# the License.
- -from tempest import config -from tempest.lib.services import clients - - -CONF = config.CONF - - -class Clients(clients.ServiceClients): - """Tempest stable service clients and loaded plugins service clients""" - - def __init__(self, credentials, service=None): - """Emulate the interface of Tempest's clients.Manager""" - # Identity settings - if CONF.identity.auth_version == 'v2': - identity_uri = CONF.identity.uri - else: - identity_uri = CONF.identity.uri_v3 - super(Clients, self).__init__(credentials, identity_uri) diff --git a/manila_tempest_tests/common/__init__.py b/manila_tempest_tests/common/__init__.py deleted file mode 100644 index e69de29bb2..0000000000 diff --git a/manila_tempest_tests/common/constants.py b/manila_tempest_tests/common/constants.py deleted file mode 100644 index 25cf4ee7b2..0000000000 --- a/manila_tempest_tests/common/constants.py +++ /dev/null @@ -1,85 +0,0 @@ -# Licensed under the Apache License, Version 2.0 (the "License"); you may -# not use this file except in compliance with the License. You may obtain -# a copy of the License at -# -# http://www.apache.org/licenses/LICENSE-2.0 -# -# Unless required by applicable law or agreed to in writing, software -# distributed under the License is distributed on an "AS IS" BASIS, WITHOUT -# WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. See the -# License for the specific language governing permissions and limitations -# under the License. - -# Shares -STATUS_ERROR = 'error' -STATUS_AVAILABLE = 'available' -STATUS_ERROR_DELETING = 'error_deleting' -STATUS_MIGRATING = 'migrating' - -TEMPEST_MANILA_PREFIX = 'tempest-manila' - -# Replication -REPLICATION_STYLE_READABLE = 'readable' -REPLICATION_STYLE_WRITABLE = 'writable' -REPLICATION_STYLE_DR = 'dr' -REPLICATION_TYPE_CHOICES = ( - REPLICATION_STYLE_READABLE, - REPLICATION_STYLE_WRITABLE, - REPLICATION_STYLE_DR, -) -REPLICATION_PROMOTION_CHOICES = ( - REPLICATION_STYLE_READABLE, - REPLICATION_STYLE_DR, -) -REPLICATION_STATE_ACTIVE = 'active' -REPLICATION_STATE_IN_SYNC = 'in_sync' -REPLICATION_STATE_OUT_OF_SYNC = 'out_of_sync' - -# Access Rules -RULE_STATE_ACTIVE = 'active' -RULE_STATE_OUT_OF_SYNC = 'out_of_sync' -RULE_STATE_ERROR = 'error' - -TASK_STATE_MIGRATION_STARTING = 'migration_starting' -TASK_STATE_MIGRATION_IN_PROGRESS = 'migration_in_progress' -TASK_STATE_MIGRATION_COMPLETING = 'migration_completing' -TASK_STATE_MIGRATION_SUCCESS = 'migration_success' -TASK_STATE_MIGRATION_ERROR = 'migration_error' -TASK_STATE_MIGRATION_CANCELLED = 'migration_cancelled' -TASK_STATE_MIGRATION_DRIVER_STARTING = 'migration_driver_starting' -TASK_STATE_MIGRATION_DRIVER_IN_PROGRESS = 'migration_driver_in_progress' -TASK_STATE_MIGRATION_DRIVER_PHASE1_DONE = 'migration_driver_phase1_done' -TASK_STATE_DATA_COPYING_STARTING = 'data_copying_starting' -TASK_STATE_DATA_COPYING_IN_PROGRESS = 'data_copying_in_progress' -TASK_STATE_DATA_COPYING_COMPLETING = 'data_copying_completing' -TASK_STATE_DATA_COPYING_COMPLETED = 'data_copying_completed' -TASK_STATE_DATA_COPYING_CANCELLED = 'data_copying_cancelled' -TASK_STATE_DATA_COPYING_ERROR = 'data_copying_error' - -# Revert to snapshot -REVERT_TO_SNAPSHOT_MICROVERSION = '2.27' -REVERT_TO_SNAPSHOT_SUPPORT = 'revert_to_snapshot_support' -STATUS_RESTORING = 'restoring' -STATUS_REVERTING = 'reverting' -STATUS_REVERTING_ERROR = 'reverting_error' - -# Share groups -MIN_SHARE_GROUP_MICROVERSION = '2.31' -SHARE_GROUP_SIMPLE_KEYS = { - 'id', 'name', 'links', -} -SHARE_GROUP_DETAIL_REQUIRED_KEYS = { - 'id', 'name', 'description', 
'created_at', 'status', 'share_types', - 'project_id', 'host', 'links', 'share_group_type_id', -} -SHARE_GROUP_SNAPSHOT_SIMPLE_KEYS = { - 'id', 'name', 'links', -} -SHARE_GROUP_SNAPSHOT_DETAIL_REQUIRED_KEYS = { - 'id', 'name', 'description', 'created_at', 'status', 'project_id', - 'links', 'share_group_id', 'members', -} - -SHARE_GROUP_TYPE_REQUIRED_KEYS = { - 'id', 'name', 'share_types', 'is_public', 'group_specs', -} diff --git a/manila_tempest_tests/common/remote_client.py b/manila_tempest_tests/common/remote_client.py deleted file mode 100644 index 45f85d85f7..0000000000 --- a/manila_tempest_tests/common/remote_client.py +++ /dev/null @@ -1,92 +0,0 @@ -# Licensed under the Apache License, Version 2.0 (the "License"); you may -# not use this file except in compliance with the License. You may obtain -# a copy of the License at -# -# http://www.apache.org/licenses/LICENSE-2.0 -# -# Unless required by applicable law or agreed to in writing, software -# distributed under the License is distributed on an "AS IS" BASIS, WITHOUT -# WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. See the -# License for the specific language governing permissions and limitations -# under the License. - -import six -import sys - -from oslo_log import log - -from tempest import config -from tempest.lib.common import ssh -from tempest.lib.common.utils import test_utils -import tempest.lib.exceptions - -CONF = config.CONF - -LOG = log.getLogger(__name__) - - -def debug_ssh(function): - """Decorator to generate extra debug info in case of ssh failure""" - def wrapper(self, *args, **kwargs): - try: - return function(self, *args, **kwargs) - except tempest.lib.exceptions.SSHTimeout: - try: - original_exception = sys.exc_info() - caller = test_utils.find_test_caller() or "not found" - if self.server: - msg = 'Caller: %s. 
Timeout trying to ssh to server %s' - LOG.debug(msg, caller, self.server) - if self.log_console and self.servers_client: - try: - msg = 'Console log for server %s: %s' - console_log = ( - self.servers_client.get_console_output( - self.server['id'])['output']) - LOG.debug(msg, self.server['id'], console_log) - except Exception: - msg = 'Could not get console_log for server %s' - LOG.debug(msg, self.server['id']) - # re-raise the original ssh timeout exception - six.reraise(*original_exception) - finally: - # Delete the traceback to avoid circular references - _, _, trace = original_exception - del trace - return wrapper - - -class RemoteClient(object): - - def __init__(self, ip_address, username, password=None, pkey=None, - server=None, servers_client=None): - """Executes commands in a VM over ssh - - :param ip_address: IP address to ssh to - :param username: ssh username - :param password: ssh password (optional) - :param pkey: ssh public key (optional) - :param server: server dict, used for debugging purposes - :param servers_client: servers client, used for debugging purposes - """ - self.server = server - self.servers_client = servers_client - self.log_console = CONF.compute_feature_enabled.console_output - - self.ssh_client = ssh.Client(ip_address, username, password, pkey=pkey) - - @debug_ssh - def exec_command(self, cmd): - # Shell options below add more clearness on failures, - # path is extended for some non-cirros guest oses (centos7) - cmd = CONF.validation.ssh_shell_prologue + " " + cmd - LOG.debug("Remote command: %s", cmd) - return self.ssh_client.exec_command(cmd) - - @debug_ssh - def validate_authentication(self): - """Validate ssh connection and authentication - - This method raises an Exception when the validation fails. - """ - self.ssh_client.test_connection_auth() diff --git a/manila_tempest_tests/config.py b/manila_tempest_tests/config.py deleted file mode 100644 index e33dba8b34..0000000000 --- a/manila_tempest_tests/config.py +++ /dev/null @@ -1,247 +0,0 @@ -# Copyright 2014 Mirantis Inc. -# All Rights Reserved. -# -# Licensed under the Apache License, Version 2.0 (the "License"); you may -# not use this file except in compliance with the License. You may obtain -# a copy of the License at -# -# http://www.apache.org/licenses/LICENSE-2.0 -# -# Unless required by applicable law or agreed to in writing, software -# distributed under the License is distributed on an "AS IS" BASIS, WITHOUT -# WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. See the -# License for the specific language governing permissions and limitations -# under the License. - -from __future__ import print_function - -from oslo_config import cfg - -service_option = cfg.BoolOpt("manila", - default=True, - help="Whether or not manila is expected to be " - "available") - -share_group = cfg.OptGroup(name="share", title="Share Service Options") - -ShareGroup = [ - cfg.StrOpt("min_api_microversion", - default="2.0", - help="The minimum api microversion is configured to be the " - "value of the minimum microversion supported by Manila."), - cfg.StrOpt("max_api_microversion", - default="2.42", - help="The maximum api microversion is configured to be the " - "value of the latest microversion supported by Manila."), - cfg.StrOpt("region", - default="", - help="The share region name to use. If empty, the value " - "of identity.region is used instead. 
If no such region " - "is found in the service catalog, the first found one is " - "used."), - cfg.StrOpt("catalog_type", - default="share", - help="Catalog type of the Share service."), - cfg.StrOpt('endpoint_type', - default='publicURL', - choices=['public', 'admin', 'internal', - 'publicURL', 'adminURL', 'internalURL'], - help="The endpoint type to use for the share service."), - cfg.BoolOpt("multitenancy_enabled", - default=True, - help="This option used to determine backend driver type, " - "multitenant driver uses share-networks, but " - "single-tenant doesn't."), - cfg.BoolOpt("create_networks_when_multitenancy_enabled", - default=True, - help="This option is used only when other " - "'multitenancy_enabled' option is set to 'True'. " - "If this one is set to True, then tempest will create " - "neutron networks for each new manila share-network " - "it creates. Else it will use manila share-networks with " - "empty values (case of StandAloneNetworkPlugin and " - "NeutronSingleNetworkPlugin)."), - cfg.ListOpt("enable_protocols", - default=["nfs", "cifs"], - help="First value of list is protocol by default, " - "items of list show enabled protocols at all."), - cfg.ListOpt("enable_ip_rules_for_protocols", - default=["nfs", "cifs", ], - help="Selection of protocols, that should " - "be covered with ip rule tests"), - cfg.ListOpt("enable_user_rules_for_protocols", - default=[], - help="Selection of protocols, that should " - "be covered with user rule tests"), - cfg.ListOpt("enable_cert_rules_for_protocols", - default=["glusterfs", ], - help="Protocols that should be covered with cert rule tests."), - cfg.ListOpt("enable_cephx_rules_for_protocols", - default=["cephfs", ], - help="Protocols to be covered with cephx rule tests."), - cfg.StrOpt("username_for_user_rules", - default="Administrator", - help="Username, that will be used in user tests."), - cfg.ListOpt("enable_ro_access_level_for_protocols", - default=["nfs", ], - help="List of protocols to run tests with ro access level."), - - # Capabilities - cfg.StrOpt("capability_storage_protocol", - deprecated_name="storage_protocol", - default="NFS_CIFS", - help="Backend protocol to target when creating volume types."), - cfg.BoolOpt("capability_snapshot_support", - help="Defines extra spec that satisfies specific back end " - "capability called 'snapshot_support' and will be used " - "for setting up custom share type. Defaults to value of " - "other config option 'run_snapshot_tests'."), - cfg.BoolOpt("capability_create_share_from_snapshot_support", - help="Defines extra spec that satisfies specific back end " - "capability called 'create_share_from_snapshot_support' " - "and will be used for setting up a custom share type. " - "Defaults to the value of run_snapshot_tests. Set it to " - "False if the driver being tested does not support " - "creating shares from snapshots."), - cfg.BoolOpt("capability_revert_to_snapshot_support", - deprecated_for_removal=True, - deprecated_reason="Redundant configuration option. Please use " - "'run_revert_to_snapshot_tests' config " - "option instead.", - help="Defines extra spec that satisfies specific back end " - "capability called 'revert_to_snapshot_support' " - "and will be used for setting up custom share type. " - "Defaults to the value of run_revert_to_snapshot_tests."), - cfg.StrOpt("capability_sg_consistent_snapshot_support", - choices=["host", "pool", None], - help="Backend capability to create consistent snapshots of " - "share group members. 
Will be used with creation " - "of new share group types as group spec."), - cfg.StrOpt("share_network_id", - default="", - help="Some backend drivers requires share network " - "for share creation. Share network id, that will be " - "used for shares. If not set, it won't be used."), - cfg.StrOpt("alt_share_network_id", - default="", - help="Share network id, that will be used for shares" - " in alt tenant. If not set, it won't be used"), - cfg.StrOpt("admin_share_network_id", - default="", - help="Share network id, that will be used for shares" - " in admin tenant. If not set, it won't be used"), - cfg.BoolOpt("multi_backend", - default=False, - help="Runs Manila multi-backend tests."), - cfg.ListOpt("backend_names", - default=[], - help="Names of share backends, that will be used with " - "multibackend tests. Tempest will use first two values."), - cfg.IntOpt("share_creation_retry_number", - default=0, - help="Defines number of retries for share creation. " - "It is useful to avoid failures caused by unstable " - "environment."), - cfg.IntOpt("build_interval", - default=3, - help="Time in seconds between share availability checks."), - cfg.IntOpt("build_timeout", - default=500, - help="Timeout in seconds to wait for a share to become" - "available."), - cfg.BoolOpt("suppress_errors_in_cleanup", - default=False, - help="Whether to suppress errors with clean up operation " - "or not. There are cases when we may want to skip " - "such errors and catch only test errors."), - - # Switching ON/OFF test suites filtered by features - cfg.BoolOpt("run_quota_tests", - default=True, - help="Defines whether to run quota tests or not."), - cfg.BoolOpt("run_extend_tests", - default=True, - help="Defines whether to run share extend tests or not. " - "Disable this feature if used driver doesn't " - "support it."), - cfg.BoolOpt("run_shrink_tests", - default=True, - help="Defines whether to run share shrink tests or not. " - "Disable this feature if used driver doesn't " - "support it."), - cfg.BoolOpt("run_snapshot_tests", - default=True, - help="Defines whether to run tests that use share snapshots " - "or not. Disable this feature if used driver doesn't " - "support it."), - cfg.BoolOpt("run_revert_to_snapshot_tests", - default=False, - help="Defines whether to run tests that revert shares " - "to snapshots or not. Enable this feature if used " - "driver supports it."), - cfg.BoolOpt("run_share_group_tests", - default=True, - deprecated_name="run_consistency_group_tests", - help="Defines whether to run share group tests or not."), - cfg.BoolOpt("run_replication_tests", - default=False, - help="Defines whether to run replication tests or not. " - "Enable this feature if the driver is configured " - "for replication."), - cfg.BoolOpt("run_multiple_share_replicas_tests", - default=True, - help="Defines whether to run multiple replicas creation test " - "or not. 
Enable this if the driver can create more than " - "one replica for a share."), - cfg.BoolOpt("run_host_assisted_migration_tests", - deprecated_name="run_migration_tests", - default=False, - help="Enable or disable host-assisted migration tests."), - cfg.BoolOpt("run_driver_assisted_migration_tests", - deprecated_name="run_migration_tests", - default=False, - help="Enable or disable driver-assisted migration tests."), - cfg.BoolOpt("run_migration_with_preserve_snapshots_tests", - default=False, - help="Enable or disable migration with " - "preserve_snapshots tests set to True."), - cfg.BoolOpt("run_manage_unmanage_tests", - default=False, - help="Defines whether to run manage/unmanage tests or not. " - "These test may leave orphaned resources, so be careful " - "enabling this opt."), - cfg.BoolOpt("run_manage_unmanage_snapshot_tests", - default=False, - help="Defines whether to run manage/unmanage snapshot tests " - "or not. These tests may leave orphaned resources, so be " - "careful enabling this opt."), - cfg.BoolOpt("run_mount_snapshot_tests", - default=False, - help="Enable or disable mountable snapshot tests."), - - cfg.StrOpt("image_with_share_tools", - default="manila-service-image-master", - help="Image name for vm booting with nfs/smb clients tool."), - cfg.StrOpt("image_username", - default="manila", - help="Image username."), - cfg.StrOpt("image_password", - help="Image password. Should be used for " - "'image_with_share_tools' without Nova Metadata support."), - cfg.StrOpt("client_vm_flavor_ref", - default="100", - help="Flavor used for client vm in scenario tests."), - cfg.IntOpt("migration_timeout", - default=1500, - help="Time to wait for share migration before " - "timing out (seconds)."), - cfg.StrOpt("default_share_type_name", - help="Default share type name to use in tempest tests."), - cfg.StrOpt("backend_replication_type", - default='none', - choices=['none', 'writable', 'readable', 'dr'], - help="Specify the replication type supported by the backend."), - cfg.IntOpt("share_size", - default=1, - help="Default size in GB for shares created by share tests."), -] diff --git a/manila_tempest_tests/plugin.py b/manila_tempest_tests/plugin.py deleted file mode 100644 index 0c32ce1183..0000000000 --- a/manila_tempest_tests/plugin.py +++ /dev/null @@ -1,75 +0,0 @@ -# Copyright 2015 Deutsche Telekom AG -# All Rights Reserved. -# -# Licensed under the Apache License, Version 2.0 (the "License"); you may -# not use this file except in compliance with the License. You may obtain -# a copy of the License at -# -# http://www.apache.org/licenses/LICENSE-2.0 -# -# Unless required by applicable law or agreed to in writing, software -# distributed under the License is distributed on an "AS IS" BASIS, WITHOUT -# WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. See the -# License for the specific language governing permissions and limitations -# under the License. 
- - -import os - -from tempest import config -from tempest.test_discover import plugins - -from manila_tempest_tests import config as config_share - - -class ManilaTempestPlugin(plugins.TempestPlugin): - def load_tests(self): - base_path = os.path.split(os.path.dirname( - os.path.abspath(__file__)))[0] - test_dir = "manila_tempest_tests/tests" - full_test_dir = os.path.join(base_path, test_dir) - return full_test_dir, base_path - - def register_opts(self, conf): - conf.register_opt(config_share.service_option, - group='service_available') - conf.register_group(config_share.share_group) - conf.register_opts(config_share.ShareGroup, group='share') - - # NOTE(vponomaryov): Set options 'capability_snapshot_support' and - # 'capability_create_share_from_snapshot_support' to opt - # 'run_snapshot_tests' if not configured. - if conf.share.capability_snapshot_support is None: - conf.set_default( - "capability_snapshot_support", - conf.share.run_snapshot_tests, - group="share", - ) - if conf.share.capability_create_share_from_snapshot_support is None: - conf.set_default( - "capability_create_share_from_snapshot_support", - conf.share.run_snapshot_tests, - group="share", - ) - - def get_opt_lists(self): - return [(config_share.share_group.name, config_share.ShareGroup), - ('service_available', [config_share.service_option])] - - def get_service_clients(self): - shares_config = config.service_client_config('share') - v1_params = { - 'name': 'share_v1', - 'service_version': 'share.v1', - 'module_path': 'manila_tempest_tests.services.share.json', - 'client_names': ['SharesClient'], - } - v2_params = { - 'name': 'share_v2', - 'service_version': 'share.v2', - 'module_path': 'manila_tempest_tests.services.share.v2', - 'client_names': ['SharesV2Client'], - } - v1_params.update(shares_config) - v2_params.update(shares_config) - return [v1_params, v2_params] diff --git a/manila_tempest_tests/services/__init__.py b/manila_tempest_tests/services/__init__.py deleted file mode 100644 index e69de29bb2..0000000000 diff --git a/manila_tempest_tests/services/share/__init__.py b/manila_tempest_tests/services/share/__init__.py deleted file mode 100644 index 8e9e9df898..0000000000 --- a/manila_tempest_tests/services/share/__init__.py +++ /dev/null @@ -1,18 +0,0 @@ -# Copyright (c) 2016 Hewlett-Packard Enterprise Development Company, L.P. -# -# Licensed under the Apache License, Version 2.0 (the "License"); you may not -# use this file except in compliance with the License. You may obtain a copy of -# the License at -# -# http://www.apache.org/licenses/LICENSE-2.0 -# -# Unless required by applicable law or agreed to in writing, software -# distributed under the License is distributed on an "AS IS" BASIS, WITHOUT -# WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. See the -# License for the specific language governing permissions and limitations under -# the License. - -from manila_tempest_tests.services.share import json as v1 -from manila_tempest_tests.services.share.v2 import json as v2 - -__all__ = ['v1', 'v2'] diff --git a/manila_tempest_tests/services/share/json/__init__.py b/manila_tempest_tests/services/share/json/__init__.py deleted file mode 100644 index c92c80bcdd..0000000000 --- a/manila_tempest_tests/services/share/json/__init__.py +++ /dev/null @@ -1,17 +0,0 @@ -# Copyright (c) 2016 Hewlett-Packard Enterprise Development Company, L.P. -# -# Licensed under the Apache License, Version 2.0 (the "License"); you may not -# use this file except in compliance with the License. 
You may obtain a copy of -# the License at -# -# http://www.apache.org/licenses/LICENSE-2.0 -# -# Unless required by applicable law or agreed to in writing, software -# distributed under the License is distributed on an "AS IS" BASIS, WITHOUT -# WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. See the -# License for the specific language governing permissions and limitations under -# the License. - -from manila_tempest_tests.services.share.json.shares_client import SharesClient - -__all__ = ['SharesClient'] diff --git a/manila_tempest_tests/services/share/json/shares_client.py b/manila_tempest_tests/services/share/json/shares_client.py deleted file mode 100644 index 69aeba8b59..0000000000 --- a/manila_tempest_tests/services/share/json/shares_client.py +++ /dev/null @@ -1,760 +0,0 @@ -# Copyright 2014 Mirantis Inc. -# All Rights Reserved. -# -# Licensed under the Apache License, Version 2.0 (the "License"); you may -# not use this file except in compliance with the License. You may obtain -# a copy of the License at -# -# http://www.apache.org/licenses/LICENSE-2.0 -# -# Unless required by applicable law or agreed to in writing, software -# distributed under the License is distributed on an "AS IS" BASIS, WITHOUT -# WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. See the -# License for the specific language governing permissions and limitations -# under the License. - -import json -import time - -import six -from six.moves.urllib import parse as urlparse - -from tempest import config -from tempest.lib.common import rest_client -from tempest.lib.common.utils import data_utils -from tempest.lib import exceptions - -from manila_tempest_tests import share_exceptions - -CONF = config.CONF - - -class SharesClient(rest_client.RestClient): - """Tempest REST client for Manila. - - It handles shares and access to it in OpenStack. 
- """ - - def __init__(self, auth_provider, **kwargs): - super(SharesClient, self).__init__(auth_provider, **kwargs) - self.share_protocol = None - if CONF.share.enable_protocols: - self.share_protocol = CONF.share.enable_protocols[0] - self.share_network_id = CONF.share.share_network_id - self.share_size = CONF.share.share_size - - def create_share(self, share_protocol=None, size=None, - name=None, snapshot_id=None, description=None, - metadata=None, share_network_id=None, - share_type_id=None, is_public=False): - metadata = metadata or {} - if name is None: - name = data_utils.rand_name("tempest-created-share") - if description is None: - description = data_utils.rand_name("tempest-created-share-desc") - if size is None: - size = self.share_size - if share_protocol is None: - share_protocol = self.share_protocol - if share_protocol is None: - raise share_exceptions.ShareProtocolNotSpecified() - post_body = { - "share": { - "share_proto": share_protocol, - "description": description, - "snapshot_id": snapshot_id, - "name": name, - "size": size, - "metadata": metadata, - "is_public": is_public, - } - } - if share_network_id: - post_body["share"]["share_network_id"] = share_network_id - if share_type_id: - post_body["share"]["share_type"] = share_type_id - body = json.dumps(post_body) - resp, body = self.post("shares", body) - self.expected_success(200, resp.status) - return self._parse_resp(body) - - def delete_share(self, share_id): - resp, body = self.delete("shares/%s" % share_id) - self.expected_success(202, resp.status) - return body - - def manage_share(self, service_host, protocol, export_path, - share_type_id, name=None, description=None): - post_body = { - "share": { - "export_path": export_path, - "service_host": service_host, - "protocol": protocol, - "share_type": share_type_id, - "name": name, - "description": description, - } - } - body = json.dumps(post_body) - resp, body = self.post("os-share-manage", body) - self.expected_success(200, resp.status) - return self._parse_resp(body) - - def unmanage_share(self, share_id): - resp, body = self.post( - "os-share-unmanage/%s/unmanage" % share_id, None) - self.expected_success(202, resp.status) - return body - - def list_shares(self, detailed=False, params=None): - """Get list of shares w/o filters.""" - uri = 'shares/detail' if detailed else 'shares' - uri += '?%s' % urlparse.urlencode(params) if params else '' - resp, body = self.get(uri) - self.expected_success(200, resp.status) - return self._parse_resp(body) - - def list_shares_with_detail(self, params=None): - """Get detailed list of shares w/o filters.""" - return self.list_shares(detailed=True, params=params) - - def get_share(self, share_id): - resp, body = self.get("shares/%s" % share_id) - self.expected_success(200, resp.status) - return self._parse_resp(body) - - def create_access_rule(self, share_id, access_type="ip", - access_to="0.0.0.0", access_level=None): - post_body = { - "os-allow_access": { - "access_type": access_type, - "access_to": access_to, - "access_level": access_level, - } - } - body = json.dumps(post_body) - resp, body = self.post("shares/%s/action" % share_id, body) - self.expected_success(200, resp.status) - return self._parse_resp(body) - - def list_access_rules(self, share_id): - body = {"os-access_list": None} - resp, body = self.post("shares/%s/action" % share_id, json.dumps(body)) - self.expected_success(200, resp.status) - return self._parse_resp(body) - - def delete_access_rule(self, share_id, rule_id): - post_body = { - "os-deny_access": { - 
"access_id": rule_id, - } - } - body = json.dumps(post_body) - resp, body = self.post("shares/%s/action" % share_id, body) - self.expected_success(202, resp.status) - return body - - def extend_share(self, share_id, new_size): - post_body = { - "os-extend": { - "new_size": new_size, - } - } - body = json.dumps(post_body) - resp, body = self.post("shares/%s/action" % share_id, body) - self.expected_success(202, resp.status) - return body - - def shrink_share(self, share_id, new_size): - post_body = { - "os-shrink": { - "new_size": new_size, - } - } - body = json.dumps(post_body) - resp, body = self.post("shares/%s/action" % share_id, body) - self.expected_success(202, resp.status) - return body - - def create_snapshot(self, share_id, name=None, description=None, - force=False): - if name is None: - name = data_utils.rand_name("tempest-created-share-snap") - if description is None: - description = data_utils.rand_name( - "tempest-created-share-snap-desc") - post_body = { - "snapshot": { - "name": name, - "force": force, - "description": description, - "share_id": share_id, - } - } - body = json.dumps(post_body) - resp, body = self.post("snapshots", body) - self.expected_success(202, resp.status) - return self._parse_resp(body) - - def get_snapshot(self, snapshot_id): - resp, body = self.get("snapshots/%s" % snapshot_id) - self.expected_success(200, resp.status) - return self._parse_resp(body) - - def list_snapshots(self, detailed=False, params=None): - """Get list of share snapshots w/o filters.""" - uri = 'snapshots/detail' if detailed else 'snapshots' - uri += '?%s' % urlparse.urlencode(params) if params else '' - resp, body = self.get(uri) - self.expected_success(200, resp.status) - return self._parse_resp(body) - - def list_snapshots_with_detail(self, params=None): - """Get detailed list of share snapshots w/o filters.""" - return self.list_snapshots(detailed=True, params=params) - - def delete_snapshot(self, snap_id): - resp, body = self.delete("snapshots/%s" % snap_id) - self.expected_success(202, resp.status) - return body - - def wait_for_share_status(self, share_id, status): - """Waits for a share to reach a given status.""" - body = self.get_share(share_id) - share_name = body['name'] - share_status = body['status'] - start = int(time.time()) - - while share_status != status: - time.sleep(self.build_interval) - body = self.get_share(share_id) - share_status = body['status'] - if share_status == status: - return - elif 'error' in share_status.lower(): - raise share_exceptions.ShareBuildErrorException( - share_id=share_id) - - if int(time.time()) - start >= self.build_timeout: - message = ('Share %s failed to reach %s status within ' - 'the required time (%s s).' % - (share_name, status, self.build_timeout)) - raise exceptions.TimeoutException(message) - - def wait_for_snapshot_status(self, snapshot_id, status): - """Waits for a snapshot to reach a given status.""" - body = self.get_snapshot(snapshot_id) - snapshot_name = body['name'] - snapshot_status = body['status'] - start = int(time.time()) - - while snapshot_status != status: - time.sleep(self.build_interval) - body = self.get_snapshot(snapshot_id) - snapshot_status = body['status'] - if 'error' in snapshot_status: - raise share_exceptions.SnapshotBuildErrorException( - snapshot_id=snapshot_id) - - if int(time.time()) - start >= self.build_timeout: - message = ('Share Snapshot %s failed to reach %s status ' - 'within the required time (%s s).' 
% - (snapshot_name, status, self.build_timeout)) - raise exceptions.TimeoutException(message) - - def wait_for_access_rule_status(self, share_id, rule_id, status): - """Waits for an access rule to reach a given status.""" - rule_status = "new" - start = int(time.time()) - while rule_status != status: - time.sleep(self.build_interval) - rules = self.list_access_rules(share_id) - for rule in rules: - if rule["id"] in rule_id: - rule_status = rule['state'] - break - if 'error' in rule_status: - raise share_exceptions.AccessRuleBuildErrorException( - rule_id=rule_id) - - if int(time.time()) - start >= self.build_timeout: - message = ('Share Access Rule %s failed to reach %s status ' - 'within the required time (%s s).' % - (rule_id, status, self.build_timeout)) - raise exceptions.TimeoutException(message) - - def default_quotas(self, tenant_id): - resp, body = self.get("os-quota-sets/%s/defaults" % tenant_id) - self.expected_success(200, resp.status) - return self._parse_resp(body) - - def show_quotas(self, tenant_id, user_id=None): - uri = "os-quota-sets/%s" % tenant_id - if user_id is not None: - uri += "?user_id=%s" % user_id - resp, body = self.get(uri) - self.expected_success(200, resp.status) - return self._parse_resp(body) - - def reset_quotas(self, tenant_id, user_id=None): - uri = "os-quota-sets/%s" % tenant_id - if user_id is not None: - uri += "?user_id=%s" % user_id - resp, body = self.delete(uri) - self.expected_success(202, resp.status) - return body - - def update_quotas(self, tenant_id, user_id=None, shares=None, - snapshots=None, gigabytes=None, snapshot_gigabytes=None, - share_networks=None, force=True): - uri = "os-quota-sets/%s" % tenant_id - if user_id is not None: - uri += "?user_id=%s" % user_id - - put_body = {"tenant_id": tenant_id} - if force: - put_body["force"] = "true" - if shares is not None: - put_body["shares"] = shares - if snapshots is not None: - put_body["snapshots"] = snapshots - if gigabytes is not None: - put_body["gigabytes"] = gigabytes - if snapshot_gigabytes is not None: - put_body["snapshot_gigabytes"] = snapshot_gigabytes - if share_networks is not None: - put_body["share_networks"] = share_networks - put_body = json.dumps({"quota_set": put_body}) - - resp, body = self.put(uri, put_body) - self.expected_success(200, resp.status) - return self._parse_resp(body) - - def get_limits(self): - resp, body = self.get("limits") - self.expected_success(200, resp.status) - return self._parse_resp(body) - - def is_resource_deleted(self, *args, **kwargs): - """Verifies whether provided resource deleted or not. 
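
Note: wait_for_share_status, wait_for_snapshot_status and wait_for_access_rule_status above all follow the same poll-until-status-or-timeout loop. A minimal, generic sketch of that pattern is shown below; the function and exception names are illustrative and do not come from the removed client.

```python
import time


def wait_for_status(show_resource, resource_id, wanted_status,
                    build_interval=3, build_timeout=500):
    """Poll ``show_resource`` until the resource reaches ``wanted_status``."""
    start = time.time()
    while True:
        status = show_resource(resource_id)['status']
        if status == wanted_status:
            return
        if 'error' in status.lower():
            # Fail fast instead of waiting out the timeout.
            raise RuntimeError('%s went to status %s' % (resource_id, status))
        if time.time() - start >= build_timeout:
            raise RuntimeError('%s did not reach %s within %s seconds'
                               % (resource_id, wanted_status, build_timeout))
        time.sleep(build_interval)

# Example usage (hypothetical): wait_for_status(client.get_share,
#                                               share['id'], 'available')
```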
- - :param kwargs: dict with expected keys 'share_id', 'snapshot_id', - :param kwargs: 'sn_id', 'ss_id', 'vt_id' and 'server_id' - :raises share_exceptions.InvalidResource - """ - if "share_id" in kwargs: - if "rule_id" in kwargs: - rule_id = kwargs.get("rule_id") - share_id = kwargs.get("share_id") - rules = self.list_access_rules(share_id) - for rule in rules: - if rule["id"] == rule_id: - return False - return True - else: - return self._is_resource_deleted( - self.get_share, kwargs.get("share_id")) - elif "snapshot_id" in kwargs: - return self._is_resource_deleted( - self.get_snapshot, kwargs.get("snapshot_id")) - elif "sn_id" in kwargs: - return self._is_resource_deleted( - self.get_share_network, kwargs.get("sn_id")) - elif "ss_id" in kwargs: - return self._is_resource_deleted( - self.get_security_service, kwargs.get("ss_id")) - elif "vt_id" in kwargs: - return self._is_resource_deleted( - self.get_volume_type, kwargs.get("vt_id")) - elif "st_id" in kwargs: - return self._is_resource_deleted( - self.get_share_type, kwargs.get("st_id")) - elif "server_id" in kwargs: - return self._is_resource_deleted( - self.show_share_server, kwargs.get("server_id")) - else: - raise share_exceptions.InvalidResource( - message=six.text_type(kwargs)) - - def _is_resource_deleted(self, func, res_id): - try: - res = func(res_id) - except exceptions.NotFound: - return True - if res.get('status') in ['error_deleting', 'error']: - # Resource has "error_deleting" status and can not be deleted. - resource_type = func.__name__.split('_', 1)[-1] - raise share_exceptions.ResourceReleaseFailed( - res_type=resource_type, res_id=res_id) - return False - - def wait_for_resource_deletion(self, *args, **kwargs): - """Waits for a resource to be deleted.""" - start_time = int(time.time()) - while True: - if self.is_resource_deleted(*args, **kwargs): - return - if int(time.time()) - start_time >= self.build_timeout: - raise exceptions.TimeoutException - time.sleep(self.build_interval) - - def list_extensions(self): - resp, extensions = self.get("extensions") - self.expected_success(200, resp.status) - return self._parse_resp(extensions) - - def update_share(self, share_id, name=None, desc=None, is_public=None): - body = {"share": {}} - if name is not None: - body["share"].update({"display_name": name}) - if desc is not None: - body["share"].update({"display_description": desc}) - if is_public is not None: - body["share"].update({"is_public": is_public}) - body = json.dumps(body) - resp, body = self.put("shares/%s" % share_id, body) - self.expected_success(200, resp.status) - return self._parse_resp(body) - - def rename_snapshot(self, snapshot_id, name, desc=None): - body = {"snapshot": {"display_name": name}} - if desc is not None: - body["snapshot"].update({"display_description": desc}) - body = json.dumps(body) - resp, body = self.put("snapshots/%s" % snapshot_id, body) - self.expected_success(200, resp.status) - return self._parse_resp(body) - - def reset_state(self, s_id, status="error", s_type="shares"): - """Resets the state of a share or a snapshot. - - status: available, error, creating, deleting, error_deleting - s_type: shares, snapshots - """ - body = {"os-reset_status": {"status": status}} - body = json.dumps(body) - resp, body = self.post("%s/%s/action" % (s_type, s_id), body) - self.expected_success(202, resp.status) - return body - - def force_delete(self, s_id, s_type="shares"): - """Force delete share or snapshot. 
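
Note: is_resource_deleted and _is_resource_deleted above implement the companion check used by wait_for_resource_deletion: a NotFound answer means the resource is gone, while an error status means it never will be. The condensed sketch below is hypothetical and only mirrors that logic.

```python
from tempest.lib import exceptions


def is_deleted(show_resource, resource_id):
    """Return True once ``show_resource`` raises NotFound for the id."""
    try:
        resource = show_resource(resource_id)
    except exceptions.NotFound:
        return True
    if resource.get('status') in ('error_deleting', 'error'):
        # The resource is stuck; waiting longer cannot help.
        raise RuntimeError('%s cannot be deleted (status=%s)'
                           % (resource_id, resource['status']))
    return False
```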
- - s_type: shares, snapshots - """ - body = {"os-force_delete": None} - body = json.dumps(body) - resp, body = self.post("%s/%s/action" % (s_type, s_id), body) - self.expected_success(202, resp.status) - return body - -############### - - def list_services(self, params=None): - """List services.""" - uri = 'os-services' - if params: - uri += '?%s' % urlparse.urlencode(params) - resp, body = self.get(uri) - self.expected_success(200, resp.status) - return self._parse_resp(body) - -############### - - def _update_metadata(self, share_id, metadata=None, method="post"): - uri = "shares/%s/metadata" % share_id - if metadata is None: - metadata = {} - post_body = {"metadata": metadata} - body = json.dumps(post_body) - if method is "post": - resp, metadata = self.post(uri, body) - if method is "put": - resp, metadata = self.put(uri, body) - self.expected_success(200, resp.status) - return self._parse_resp(metadata) - - def set_metadata(self, share_id, metadata=None): - return self._update_metadata(share_id, metadata) - - def update_all_metadata(self, share_id, metadata=None): - return self._update_metadata(share_id, metadata, method="put") - - def delete_metadata(self, share_id, key): - resp, body = self.delete("shares/%s/metadata/%s" % (share_id, key)) - self.expected_success(200, resp.status) - return body - - def get_metadata(self, share_id): - resp, body = self.get("shares/%s/metadata" % share_id) - self.expected_success(200, resp.status) - return self._parse_resp(body) - -############### - - def create_security_service(self, ss_type="ldap", **kwargs): - # ss_type: ldap, kerberos, active_directory - # kwargs: name, description, dns_ip, server, domain, user, password - post_body = {"type": ss_type} - post_body.update(kwargs) - body = json.dumps({"security_service": post_body}) - resp, body = self.post("security-services", body) - self.expected_success(200, resp.status) - return self._parse_resp(body) - - def update_security_service(self, ss_id, **kwargs): - # ss_id - id of security-service entity - # kwargs: dns_ip, server, domain, user, password, name, description - # for 'active' status can be changed - # only 'name' and 'description' fields - body = json.dumps({"security_service": kwargs}) - resp, body = self.put("security-services/%s" % ss_id, body) - self.expected_success(200, resp.status) - return self._parse_resp(body) - - def get_security_service(self, ss_id): - resp, body = self.get("security-services/%s" % ss_id) - self.expected_success(200, resp.status) - return self._parse_resp(body) - - def list_security_services(self, detailed=False, params=None): - uri = "security-services" - if detailed: - uri += '/detail' - if params: - uri += "?%s" % urlparse.urlencode(params) - resp, body = self.get(uri) - self.expected_success(200, resp.status) - return self._parse_resp(body) - - def delete_security_service(self, ss_id): - resp, body = self.delete("security-services/%s" % ss_id) - self.expected_success(202, resp.status) - return body - -############### - - def create_share_network(self, **kwargs): - # kwargs: name, description - # + for neutron: neutron_net_id, neutron_subnet_id - body = json.dumps({"share_network": kwargs}) - resp, body = self.post("share-networks", body) - self.expected_success(200, resp.status) - return self._parse_resp(body) - - def update_share_network(self, sn_id, **kwargs): - # kwargs: name, description - # + for neutron: neutron_net_id, neutron_subnet_id - body = json.dumps({"share_network": kwargs}) - resp, body = self.put("share-networks/%s" % sn_id, body) - 
self.expected_success(200, resp.status) - return self._parse_resp(body) - - def get_share_network(self, sn_id): - resp, body = self.get("share-networks/%s" % sn_id) - self.expected_success(200, resp.status) - return self._parse_resp(body) - - def list_share_networks(self): - resp, body = self.get("share-networks") - self.expected_success(200, resp.status) - return self._parse_resp(body) - - def list_share_networks_with_detail(self, params=None): - """List the details of all shares.""" - uri = "share-networks/detail" - if params: - uri += "?%s" % urlparse.urlencode(params) - resp, body = self.get(uri) - self.expected_success(200, resp.status) - return self._parse_resp(body) - - def delete_share_network(self, sn_id): - resp, body = self.delete("share-networks/%s" % sn_id) - self.expected_success(202, resp.status) - return body - -############### - - def _map_security_service_and_share_network(self, sn_id, ss_id, - action="add"): - # sn_id: id of share_network_entity - # ss_id: id of security service entity - # action: add, remove - data = { - "%s_security_service" % action: { - "security_service_id": ss_id, - } - } - body = json.dumps(data) - resp, body = self.post("share-networks/%s/action" % sn_id, body) - self.expected_success(200, resp.status) - return self._parse_resp(body) - - def add_sec_service_to_share_network(self, sn_id, ss_id): - body = self._map_security_service_and_share_network(sn_id, ss_id) - return body - - def remove_sec_service_from_share_network(self, sn_id, ss_id): - body = self._map_security_service_and_share_network( - sn_id, ss_id, "remove") - return body - - def list_sec_services_for_share_network(self, sn_id): - resp, body = self.get("security-services?share_network_id=%s" % sn_id) - self.expected_success(200, resp.status) - return self._parse_resp(body) - -############### - - def list_share_types(self, params=None): - uri = 'types' - if params is not None: - uri += '?%s' % urlparse.urlencode(params) - resp, body = self.get(uri) - self.expected_success(200, resp.status) - return self._parse_resp(body) - - def create_share_type(self, name, is_public=True, **kwargs): - post_body = { - 'name': name, - 'extra_specs': kwargs.get('extra_specs'), - 'os-share-type-access:is_public': is_public, - } - post_body = json.dumps({'share_type': post_body}) - resp, body = self.post('types', post_body) - self.expected_success(200, resp.status) - return self._parse_resp(body) - - def delete_share_type(self, share_type_id): - resp, body = self.delete("types/%s" % share_type_id) - self.expected_success(202, resp.status) - return body - - def get_share_type(self, share_type_id): - resp, body = self.get("types/%s" % share_type_id) - self.expected_success(200, resp.status) - return self._parse_resp(body) - - def add_access_to_share_type(self, share_type_id, project_id): - uri = 'types/%s/action' % share_type_id - post_body = {'project': project_id} - post_body = json.dumps({'addProjectAccess': post_body}) - resp, body = self.post(uri, post_body) - self.expected_success(202, resp.status) - return body - - def remove_access_from_share_type(self, share_type_id, project_id): - uri = 'types/%s/action' % share_type_id - post_body = {'project': project_id} - post_body = json.dumps({'removeProjectAccess': post_body}) - resp, body = self.post(uri, post_body) - self.expected_success(202, resp.status) - return body - - def list_access_to_share_type(self, share_type_id): - uri = 'types/%s/os-share-type-access' % share_type_id - resp, body = self.get(uri) - # [{"share_type_id": "%st_id%", "project_id": 
"%project_id%"}, ] - self.expected_success(200, resp.status) - return self._parse_resp(body) - -############### - - def create_share_type_extra_specs(self, share_type_id, extra_specs): - url = "types/%s/extra_specs" % share_type_id - post_body = json.dumps({'extra_specs': extra_specs}) - resp, body = self.post(url, post_body) - self.expected_success(200, resp.status) - return self._parse_resp(body) - - def get_share_type_extra_spec(self, share_type_id, extra_spec_name): - uri = "types/%s/extra_specs/%s" % (share_type_id, extra_spec_name) - resp, body = self.get(uri) - self.expected_success(200, resp.status) - return self._parse_resp(body) - - def get_share_type_extra_specs(self, share_type_id, params=None): - uri = "types/%s/extra_specs" % share_type_id - if params is not None: - uri += '?%s' % urlparse.urlencode(params) - resp, body = self.get(uri) - self.expected_success(200, resp.status) - return self._parse_resp(body) - - def update_share_type_extra_spec(self, share_type_id, spec_name, - spec_value): - uri = "types/%s/extra_specs/%s" % (share_type_id, spec_name) - extra_spec = {spec_name: spec_value} - post_body = json.dumps(extra_spec) - resp, body = self.put(uri, post_body) - self.expected_success(200, resp.status) - return self._parse_resp(body) - - def update_share_type_extra_specs(self, share_type_id, extra_specs): - uri = "types/%s/extra_specs" % share_type_id - extra_specs = {"extra_specs": extra_specs} - post_body = json.dumps(extra_specs) - resp, body = self.post(uri, post_body) - self.expected_success(200, resp.status) - return self._parse_resp(body) - - def delete_share_type_extra_spec(self, share_type_id, extra_spec_name): - uri = "types/%s/extra_specs/%s" % (share_type_id, extra_spec_name) - resp, body = self.delete(uri) - self.expected_success(202, resp.status) - return body - -############### - - def list_share_servers(self, search_opts=None): - """Get list of share servers.""" - uri = "share-servers" - if search_opts: - uri += "?%s" % urlparse.urlencode(search_opts) - resp, body = self.get(uri) - self.expected_success(200, resp.status) - return self._parse_resp(body) - - def delete_share_server(self, share_server_id): - """Delete share server by its ID.""" - uri = "share-servers/%s" % share_server_id - resp, body = self.delete(uri) - self.expected_success(202, resp.status) - return body - - def show_share_server(self, share_server_id): - """Get share server info.""" - uri = "share-servers/%s" % share_server_id - resp, body = self.get(uri) - self.expected_success(200, resp.status) - return self._parse_resp(body) - - def show_share_server_details(self, share_server_id): - """Get share server details only.""" - uri = "share-servers/%s/details" % share_server_id - resp, body = self.get(uri) - self.expected_success(200, resp.status) - return self._parse_resp(body) - -############### - - def list_pools(self, detail=False, search_opts=None): - """Get list of scheduler pools.""" - uri = 'scheduler-stats/pools' - if detail: - uri += '/detail' - if search_opts: - uri += "?%s" % urlparse.urlencode(search_opts) - resp, body = self.get(uri) - self.expected_success(200, resp.status) - return json.loads(body) - -############### - - def list_availability_zones(self): - """Get list of availability zones.""" - uri = 'os-availability-zone' - resp, body = self.get(uri) - self.expected_success(200, resp.status) - return self._parse_resp(body) diff --git a/manila_tempest_tests/services/share/v2/__init__.py b/manila_tempest_tests/services/share/v2/__init__.py deleted file mode 100644 index 
301a8fda1e..0000000000 --- a/manila_tempest_tests/services/share/v2/__init__.py +++ /dev/null @@ -1,18 +0,0 @@ -# Copyright (c) 2016 Hewlett-Packard Enterprise Development Company, L.P. -# -# Licensed under the Apache License, Version 2.0 (the "License"); you may not -# use this file except in compliance with the License. You may obtain a copy of -# the License at -# -# http://www.apache.org/licenses/LICENSE-2.0 -# -# Unless required by applicable law or agreed to in writing, software -# distributed under the License is distributed on an "AS IS" BASIS, WITHOUT -# WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. See the -# License for the specific language governing permissions and limitations under -# the License. - -from manila_tempest_tests.services.share.v2.json.shares_client import \ - SharesV2Client - -__all__ = ['SharesV2Client'] diff --git a/manila_tempest_tests/services/share/v2/json/__init__.py b/manila_tempest_tests/services/share/v2/json/__init__.py deleted file mode 100644 index e69de29bb2..0000000000 diff --git a/manila_tempest_tests/services/share/v2/json/shares_client.py b/manila_tempest_tests/services/share/v2/json/shares_client.py deleted file mode 100644 index fdcb3cb70a..0000000000 --- a/manila_tempest_tests/services/share/v2/json/shares_client.py +++ /dev/null @@ -1,1735 +0,0 @@ -# Copyright 2015 Andrew Kerr -# All Rights Reserved. -# -# Licensed under the Apache License, Version 2.0 (the "License"); you may -# not use this file except in compliance with the License. You may obtain -# a copy of the License at -# -# http://www.apache.org/licenses/LICENSE-2.0 -# -# Unless required by applicable law or agreed to in writing, software -# distributed under the License is distributed on an "AS IS" BASIS, WITHOUT -# WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. See the -# License for the specific language governing permissions and limitations -# under the License. - -import json -import six -import time - -from six.moves.urllib import parse as urlparse -from tempest import config -from tempest.lib.common.utils import data_utils -from tempest.lib import exceptions - -from manila_tempest_tests.common import constants -from manila_tempest_tests.services.share.json import shares_client -from manila_tempest_tests import share_exceptions -from manila_tempest_tests import utils - -CONF = config.CONF -LATEST_MICROVERSION = CONF.share.max_api_microversion -EXPERIMENTAL = {'X-OpenStack-Manila-API-Experimental': 'True'} - - -class SharesV2Client(shares_client.SharesClient): - """Tempest REST client for Manila. - - It handles shares and access to it in OpenStack. 
- """ - api_version = 'v2' - - def __init__(self, auth_provider, **kwargs): - super(SharesV2Client, self).__init__(auth_provider, **kwargs) - self.API_MICROVERSIONS_HEADER = 'x-openstack-manila-api-version' - - def inject_microversion_header(self, headers, version, - extra_headers=False): - """Inject the required manila microversion header.""" - new_headers = self.get_headers() - new_headers[self.API_MICROVERSIONS_HEADER] = version - if extra_headers and headers: - new_headers.update(headers) - elif headers: - new_headers = headers - return new_headers - - # Overwrite all http verb calls to inject the micro version header - def post(self, url, body, headers=None, extra_headers=False, - version=LATEST_MICROVERSION): - headers = self.inject_microversion_header(headers, version, - extra_headers=extra_headers) - return super(SharesV2Client, self).post(url, body, headers=headers) - - def get(self, url, headers=None, extra_headers=False, - version=LATEST_MICROVERSION): - headers = self.inject_microversion_header(headers, version, - extra_headers=extra_headers) - return super(SharesV2Client, self).get(url, headers=headers) - - def delete(self, url, headers=None, body=None, extra_headers=False, - version=LATEST_MICROVERSION): - headers = self.inject_microversion_header(headers, version, - extra_headers=extra_headers) - return super(SharesV2Client, self).delete(url, headers=headers, - body=body) - - def patch(self, url, body, headers=None, extra_headers=False, - version=LATEST_MICROVERSION): - headers = self.inject_microversion_header(headers, version, - extra_headers=extra_headers) - return super(SharesV2Client, self).patch(url, body, headers=headers) - - def put(self, url, body, headers=None, extra_headers=False, - version=LATEST_MICROVERSION): - headers = self.inject_microversion_header(headers, version, - extra_headers=extra_headers) - return super(SharesV2Client, self).put(url, body, headers=headers) - - def head(self, url, headers=None, extra_headers=False, - version=LATEST_MICROVERSION): - headers = self.inject_microversion_header(headers, version, - extra_headers=extra_headers) - return super(SharesV2Client, self).head(url, headers=headers) - - def copy(self, url, headers=None, extra_headers=False, - version=LATEST_MICROVERSION): - headers = self.inject_microversion_header(headers, version, - extra_headers=extra_headers) - return super(SharesV2Client, self).copy(url, headers=headers) - - def reset_state(self, s_id, status="error", s_type="shares", - headers=None, version=LATEST_MICROVERSION, - action_name=None): - """Resets the state of a share, snapshot, cg, or a cgsnapshot. - - status: available, error, creating, deleting, error_deleting - s_type: shares, share_instances, snapshots, consistency-groups, - cgsnapshots. - """ - if action_name is None: - if utils.is_microversion_gt(version, "2.6"): - action_name = 'reset_status' - else: - action_name = 'os-reset_status' - body = {action_name: {"status": status}} - body = json.dumps(body) - resp, body = self.post("%s/%s/action" % (s_type, s_id), body, - headers=headers, extra_headers=True, - version=version) - self.expected_success(202, resp.status) - return body - - def force_delete(self, s_id, s_type="shares", headers=None, - version=LATEST_MICROVERSION, action_name=None): - """Force delete share or snapshot. 
- - s_type: shares, snapshots - """ - if action_name is None: - if utils.is_microversion_gt(version, "2.6"): - action_name = 'force_delete' - else: - action_name = 'os-force_delete' - body = {action_name: None} - body = json.dumps(body) - resp, body = self.post("%s/%s/action" % (s_type, s_id), body, - headers=headers, extra_headers=True, - version=version) - self.expected_success(202, resp.status) - return body - - def send_microversion_request(self, version=None, script_name=None): - """Prepare and send the HTTP GET Request to the base URL. - - Extracts the base URL from the shares_client endpoint and makes a GET - request with the microversions request header. - :param version: The string to send for the value of the microversion - header, or None to omit the header. - :param script_name: The first part of the URL (v1 or v2), or None to - omit it. - """ - - headers = self.get_headers() - url, headers, body = self.auth_provider.auth_request( - 'GET', 'shares', headers, None, self.filters) - url = '/'.join(url.split('/')[:3]) + '/' - if script_name: - url += script_name + '/' - if version: - headers[self.API_MICROVERSIONS_HEADER] = version - resp, resp_body = self.raw_request(url, 'GET', headers=headers) - self.response_checker('GET', resp, resp_body) - resp_body = json.loads(resp_body) - return resp, resp_body - - def is_resource_deleted(self, *args, **kwargs): - """Verifies whether provided resource deleted or not. - - :param kwargs: dict with expected keys 'share_id', 'snapshot_id', - :param kwargs: 'sn_id', 'ss_id', 'vt_id' and 'server_id' - :raises share_exceptions.InvalidResource - """ - if "share_instance_id" in kwargs: - return self._is_resource_deleted( - self.get_share_instance, kwargs.get("share_instance_id")) - elif "share_group_id" in kwargs: - return self._is_resource_deleted( - self.get_share_group, kwargs.get("share_group_id")) - elif "share_group_snapshot_id" in kwargs: - return self._is_resource_deleted( - self.get_share_group_snapshot, - kwargs.get("share_group_snapshot_id")) - elif "share_group_type_id" in kwargs: - return self._is_resource_deleted( - self.get_share_group_type, kwargs.get("share_group_type_id")) - elif "replica_id" in kwargs: - return self._is_resource_deleted( - self.get_share_replica, kwargs.get("replica_id")) - elif "message_id" in kwargs: - return self._is_resource_deleted( - self.get_message, kwargs.get("message_id")) - else: - return super(SharesV2Client, self).is_resource_deleted( - *args, **kwargs) - -############### - - def create_share(self, share_protocol=None, size=None, - name=None, snapshot_id=None, description=None, - metadata=None, share_network_id=None, - share_type_id=None, is_public=False, - share_group_id=None, availability_zone=None, - version=LATEST_MICROVERSION, experimental=False): - headers = EXPERIMENTAL if experimental else None - metadata = metadata or {} - if name is None: - name = data_utils.rand_name("tempest-created-share") - if description is None: - description = data_utils.rand_name("tempest-created-share-desc") - if size is None: - size = self.share_size - if share_protocol is None: - share_protocol = self.share_protocol - if share_protocol is None: - raise share_exceptions.ShareProtocolNotSpecified() - post_body = { - "share": { - "share_proto": share_protocol, - "description": description, - "snapshot_id": snapshot_id, - "name": name, - "size": size, - "metadata": metadata, - "is_public": is_public, - } - } - if availability_zone: - post_body["share"]["availability_zone"] = availability_zone - if share_network_id: - 
post_body["share"]["share_network_id"] = share_network_id - if share_type_id: - post_body["share"]["share_type"] = share_type_id - if share_group_id: - post_body["share"]["share_group_id"] = share_group_id - body = json.dumps(post_body) - resp, body = self.post("shares", body, headers=headers, - extra_headers=experimental, version=version) - self.expected_success(200, resp.status) - return self._parse_resp(body) - - def list_shares(self, detailed=False, params=None, - version=LATEST_MICROVERSION, experimental=False): - """Get list of shares w/o filters.""" - headers = EXPERIMENTAL if experimental else None - uri = 'shares/detail' if detailed else 'shares' - uri += '?%s' % urlparse.urlencode(params) if params else '' - resp, body = self.get(uri, headers=headers, extra_headers=experimental, - version=version) - self.expected_success(200, resp.status) - return self._parse_resp(body) - - def list_shares_with_detail(self, params=None, - version=LATEST_MICROVERSION, - experimental=False): - """Get detailed list of shares w/o filters.""" - return self.list_shares(detailed=True, params=params, - version=version, experimental=experimental) - - def get_share(self, share_id, version=LATEST_MICROVERSION, - experimental=False): - headers = EXPERIMENTAL if experimental else None - resp, body = self.get("shares/%s" % share_id, headers=headers, - extra_headers=experimental, version=version) - self.expected_success(200, resp.status) - return self._parse_resp(body) - - def get_share_export_location( - self, share_id, export_location_uuid, version=LATEST_MICROVERSION): - resp, body = self.get( - "shares/%(share_id)s/export_locations/%(el_uuid)s" % { - "share_id": share_id, "el_uuid": export_location_uuid}, - version=version) - self.expected_success(200, resp.status) - return self._parse_resp(body) - - def list_share_export_locations( - self, share_id, version=LATEST_MICROVERSION): - resp, body = self.get( - "shares/%(share_id)s/export_locations" % {"share_id": share_id}, - version=version) - self.expected_success(200, resp.status) - return self._parse_resp(body) - - def delete_share(self, share_id, params=None, - version=LATEST_MICROVERSION): - uri = "shares/%s" % share_id - uri += '?%s' % (urlparse.urlencode(params) if params else '') - resp, body = self.delete(uri, version=version) - self.expected_success(202, resp.status) - return body - -############### - - def get_instances_of_share(self, share_id, version=LATEST_MICROVERSION): - resp, body = self.get("shares/%s/instances" % share_id, - version=version) - self.expected_success(200, resp.status) - return self._parse_resp(body) - - def list_share_instances(self, version=LATEST_MICROVERSION, - params=None): - uri = 'share_instances' - uri += '?%s' % urlparse.urlencode(params) if params else '' - resp, body = self.get(uri, version=version) - self.expected_success(200, resp.status) - return self._parse_resp(body) - - def get_share_instance(self, instance_id, version=LATEST_MICROVERSION): - resp, body = self.get("share_instances/%s" % instance_id, - version=version) - self.expected_success(200, resp.status) - return self._parse_resp(body) - - def get_share_instance_export_location( - self, instance_id, export_location_uuid, - version=LATEST_MICROVERSION): - resp, body = self.get( - "share_instances/%(instance_id)s/export_locations/%(el_uuid)s" % { - "instance_id": instance_id, "el_uuid": export_location_uuid}, - version=version) - self.expected_success(200, resp.status) - return self._parse_resp(body) - - def list_share_instance_export_locations( - self, 
instance_id, version=LATEST_MICROVERSION): - resp, body = self.get( - "share_instances/%s/export_locations" % instance_id, - version=version) - self.expected_success(200, resp.status) - return self._parse_resp(body) - - def wait_for_share_instance_status(self, instance_id, status, - version=LATEST_MICROVERSION): - """Waits for a share instance to reach a given status.""" - body = self.get_share_instance(instance_id, version=version) - instance_status = body['status'] - start = int(time.time()) - - while instance_status != status: - time.sleep(self.build_interval) - body = self.get_share_instance(instance_id, version=version) - instance_status = body['status'] - if instance_status == status: - return - elif 'error' in instance_status.lower(): - raise share_exceptions.ShareInstanceBuildErrorException( - id=instance_id) - - if int(time.time()) - start >= self.build_timeout: - message = ('Share instance %s failed to reach %s status within' - ' the required time (%s s).' % - (instance_id, status, self.build_timeout)) - raise exceptions.TimeoutException(message) - - def wait_for_share_status(self, share_id, status, status_attr='status', - version=LATEST_MICROVERSION): - """Waits for a share to reach a given status.""" - body = self.get_share(share_id, version=version) - share_status = body[status_attr] - start = int(time.time()) - - while share_status != status: - time.sleep(self.build_interval) - body = self.get_share(share_id, version=version) - share_status = body[status_attr] - if share_status == status: - return - elif 'error' in share_status.lower(): - raise share_exceptions.ShareBuildErrorException( - share_id=share_id) - - if int(time.time()) - start >= self.build_timeout: - message = ("Share's %(status_attr)s failed to transition to " - "%(status)s within the required time %(seconds)s."
% - {"status_attr": status_attr, "status": status, - "seconds": self.build_timeout}) - raise exceptions.TimeoutException(message) - -############### - - def extend_share(self, share_id, new_size, version=LATEST_MICROVERSION, - action_name=None): - if action_name is None: - if utils.is_microversion_gt(version, "2.6"): - action_name = 'extend' - else: - action_name = 'os-extend' - post_body = { - action_name: { - "new_size": new_size, - } - } - body = json.dumps(post_body) - resp, body = self.post( - "shares/%s/action" % share_id, body, version=version) - self.expected_success(202, resp.status) - return body - - def shrink_share(self, share_id, new_size, version=LATEST_MICROVERSION, - action_name=None): - if action_name is None: - if utils.is_microversion_gt(version, "2.6"): - action_name = 'shrink' - else: - action_name = 'os-shrink' - post_body = { - action_name: { - "new_size": new_size, - } - } - body = json.dumps(post_body) - resp, body = self.post( - "shares/%s/action" % share_id, body, version=version) - self.expected_success(202, resp.status) - return body - -############### - - def manage_share(self, service_host, protocol, export_path, - share_type_id, name=None, description=None, - is_public=False, version=LATEST_MICROVERSION, - url=None): - post_body = { - "share": { - "export_path": export_path, - "service_host": service_host, - "protocol": protocol, - "share_type": share_type_id, - "name": name, - "description": description, - "is_public": is_public, - } - } - if url is None: - if utils.is_microversion_gt(version, "2.6"): - url = 'shares/manage' - else: - url = 'os-share-manage' - body = json.dumps(post_body) - resp, body = self.post(url, body, version=version) - self.expected_success(200, resp.status) - return self._parse_resp(body) - - def unmanage_share(self, share_id, version=LATEST_MICROVERSION, url=None, - action_name=None, body=None): - if url is None: - if utils.is_microversion_gt(version, "2.6"): - url = 'shares' - else: - url = 'os-share-unmanage' - if action_name is None: - if utils.is_microversion_gt(version, "2.6"): - action_name = 'action' - else: - action_name = 'unmanage' - if body is None and utils.is_microversion_gt(version, "2.6"): - body = json.dumps({'unmanage': {}}) - resp, body = self.post( - "%(url)s/%(share_id)s/%(action_name)s" % { - 'url': url, 'share_id': share_id, 'action_name': action_name}, - body, - version=version) - self.expected_success(202, resp.status) - return body - -############### - - def create_snapshot(self, share_id, name=None, description=None, - force=False, version=LATEST_MICROVERSION): - if name is None: - name = data_utils.rand_name("tempest-created-share-snap") - if description is None: - description = data_utils.rand_name( - "tempest-created-share-snap-desc") - post_body = { - "snapshot": { - "name": name, - "force": force, - "description": description, - "share_id": share_id, - } - } - body = json.dumps(post_body) - resp, body = self.post("snapshots", body, version=version) - self.expected_success(202, resp.status) - return self._parse_resp(body) - - def get_snapshot(self, snapshot_id, version=LATEST_MICROVERSION): - resp, body = self.get("snapshots/%s" % snapshot_id, version=version) - self.expected_success(200, resp.status) - return self._parse_resp(body) - - def list_snapshots(self, detailed=False, params=None, - version=LATEST_MICROVERSION): - """Get list of share snapshots w/o filters.""" - uri = 'snapshots/detail' if detailed else 'snapshots' - uri += '?%s' % urlparse.urlencode(params) if params else '' - resp, body = 
self.get(uri, version=version) - self.expected_success(200, resp.status) - return self._parse_resp(body) - - def list_snapshots_for_share(self, share_id, detailed=False, - version=LATEST_MICROVERSION): - """Get list of snapshots for given share.""" - uri = ('snapshots/detail?share_id=%s' % share_id - if detailed else 'snapshots?share_id=%s' % share_id) - resp, body = self.get(uri, version=version) - self.expected_success(200, resp.status) - return self._parse_resp(body) - - def list_snapshots_with_detail(self, params=None, - version=LATEST_MICROVERSION): - """Get detailed list of share snapshots w/o filters.""" - return self.list_snapshots(detailed=True, params=params, - version=version) - - def delete_snapshot(self, snap_id, version=LATEST_MICROVERSION): - resp, body = self.delete("snapshots/%s" % snap_id, version=version) - self.expected_success(202, resp.status) - return body - - def wait_for_snapshot_status(self, snapshot_id, status, - version=LATEST_MICROVERSION): - """Waits for a snapshot to reach a given status.""" - body = self.get_snapshot(snapshot_id, version=version) - snapshot_name = body['name'] - snapshot_status = body['status'] - start = int(time.time()) - - while snapshot_status != status: - time.sleep(self.build_interval) - body = self.get_snapshot(snapshot_id, version=version) - snapshot_status = body['status'] - if 'error' in snapshot_status: - raise (share_exceptions. - SnapshotBuildErrorException(snapshot_id=snapshot_id)) - - if int(time.time()) - start >= self.build_timeout: - message = ('Share Snapshot %s failed to reach %s status ' - 'within the required time (%s s).' % - (snapshot_name, status, self.build_timeout)) - raise exceptions.TimeoutException(message) - - def manage_snapshot(self, share_id, provider_location, - name=None, description=None, - version=LATEST_MICROVERSION, - driver_options=None): - if name is None: - name = data_utils.rand_name("tempest-manage-snapshot") - if description is None: - description = data_utils.rand_name("tempest-manage-snapshot-desc") - post_body = { - "snapshot": { - "share_id": share_id, - "provider_location": provider_location, - "name": name, - "description": description, - "driver_options": driver_options if driver_options else {}, - } - } - url = 'snapshots/manage' - body = json.dumps(post_body) - resp, body = self.post(url, body, version=version) - self.expected_success(202, resp.status) - return self._parse_resp(body) - - def unmanage_snapshot(self, snapshot_id, version=LATEST_MICROVERSION, - body=None): - url = 'snapshots' - action_name = 'action' - if body is None: - body = json.dumps({'unmanage': {}}) - resp, body = self.post( - "%(url)s/%(snapshot_id)s/%(action_name)s" % { - 'url': url, 'snapshot_id': snapshot_id, - 'action_name': action_name}, - body, - version=version) - self.expected_success(202, resp.status) - return body - -############### - - def revert_to_snapshot(self, share_id, snapshot_id, - version=LATEST_MICROVERSION): - url = 'shares/%s/action' % share_id - body = json.dumps({'revert': {'snapshot_id': snapshot_id}}) - resp, body = self.post(url, body, version=version) - self.expected_success(202, resp.status) - return self._parse_resp(body) - -############### - - def create_share_type_extra_specs(self, share_type_id, extra_specs, - version=LATEST_MICROVERSION): - url = "types/%s/extra_specs" % share_type_id - post_body = json.dumps({'extra_specs': extra_specs}) - resp, body = self.post(url, post_body, version=version) - self.expected_success(200, resp.status) - return self._parse_resp(body) - - def 
get_share_type_extra_spec(self, share_type_id, extra_spec_name, - version=LATEST_MICROVERSION): - uri = "types/%s/extra_specs/%s" % (share_type_id, extra_spec_name) - resp, body = self.get(uri, version=version) - self.expected_success(200, resp.status) - return self._parse_resp(body) - - def get_share_type_extra_specs(self, share_type_id, params=None, - version=LATEST_MICROVERSION): - uri = "types/%s/extra_specs" % share_type_id - if params is not None: - uri += '?%s' % urlparse.urlencode(params) - resp, body = self.get(uri, version=version) - self.expected_success(200, resp.status) - return self._parse_resp(body) - - def update_share_type_extra_spec(self, share_type_id, spec_name, - spec_value, version=LATEST_MICROVERSION): - uri = "types/%s/extra_specs/%s" % (share_type_id, spec_name) - extra_spec = {spec_name: spec_value} - post_body = json.dumps(extra_spec) - resp, body = self.put(uri, post_body, version=version) - self.expected_success(200, resp.status) - return self._parse_resp(body) - - def update_share_type_extra_specs(self, share_type_id, extra_specs, - version=LATEST_MICROVERSION): - uri = "types/%s/extra_specs" % share_type_id - extra_specs = {"extra_specs": extra_specs} - post_body = json.dumps(extra_specs) - resp, body = self.post(uri, post_body, version=version) - self.expected_success(200, resp.status) - return self._parse_resp(body) - - def delete_share_type_extra_spec(self, share_type_id, extra_spec_name, - version=LATEST_MICROVERSION): - uri = "types/%s/extra_specs/%s" % (share_type_id, extra_spec_name) - resp, body = self.delete(uri, version=version) - self.expected_success(202, resp.status) - return body - -############### - - def get_snapshot_instance(self, instance_id, version=LATEST_MICROVERSION): - resp, body = self.get("snapshot-instances/%s" % instance_id, - version=version) - self.expected_success(200, resp.status) - return self._parse_resp(body) - - def list_snapshot_instances(self, detail=False, snapshot_id=None, - version=LATEST_MICROVERSION): - """Get list of share snapshot instances.""" - uri = "snapshot-instances%s" % ('/detail' if detail else '') - if snapshot_id is not None: - uri += '?snapshot_id=%s' % snapshot_id - resp, body = self.get(uri, version=version) - self.expected_success(200, resp.status) - return self._parse_resp(body) - - def reset_snapshot_instance_status(self, instance_id, - status=constants.STATUS_AVAILABLE, - version=LATEST_MICROVERSION): - """Reset the status.""" - uri = 'snapshot-instances/%s/action' % instance_id - post_body = { - 'reset_status': { - 'status': status - } - } - body = json.dumps(post_body) - resp, body = self.post(uri, body, extra_headers=True, version=version) - self.expected_success(202, resp.status) - return self._parse_resp(body) - - def wait_for_snapshot_instance_status(self, instance_id, expected_status): - """Waits for a snapshot instance status to reach a given status.""" - body = self.get_snapshot_instance(instance_id) - instance_status = body['status'] - start = int(time.time()) - - while instance_status != expected_status: - time.sleep(self.build_interval) - body = self.get_snapshot_instance(instance_id) - instance_status = body['status'] - if instance_status == expected_status: - return - if 'error' in instance_status: - raise share_exceptions.SnapshotInstanceBuildErrorException( - id=instance_id) - - if int(time.time()) - start >= self.build_timeout: - message = ('The status of snapshot instance %(id)s failed to ' - 'reach %(expected_status)s status within the ' - 'required time (%(time)ss). 
Current ' - 'status: %(current_status)s.' % - { - 'expected_status': expected_status, - 'time': self.build_timeout, - 'id': instance_id, - 'current_status': instance_status, - }) - raise exceptions.TimeoutException(message) - - def get_snapshot_instance_export_location( - self, instance_id, export_location_uuid, - version=LATEST_MICROVERSION): - resp, body = self.get( - "snapshot-instances/%(instance_id)s/export-locations/%(" - "el_uuid)s" % { - "instance_id": instance_id, - "el_uuid": export_location_uuid}, - version=version) - self.expected_success(200, resp.status) - return self._parse_resp(body) - - def list_snapshot_instance_export_locations( - self, instance_id, version=LATEST_MICROVERSION): - resp, body = self.get( - "snapshot-instances/%s/export-locations" % instance_id, - version=version) - self.expected_success(200, resp.status) - return self._parse_resp(body) - -############### - - def _get_access_action_name(self, version, action): - if utils.is_microversion_gt(version, "2.6"): - return action.split('os-')[-1] - return action - - def create_access_rule(self, share_id, access_type="ip", - access_to="0.0.0.0", access_level=None, - version=LATEST_MICROVERSION, action_name=None): - post_body = { - self._get_access_action_name(version, 'os-allow_access'): { - "access_type": access_type, - "access_to": access_to, - "access_level": access_level, - } - } - body = json.dumps(post_body) - resp, body = self.post( - "shares/%s/action" % share_id, body, version=version) - self.expected_success(200, resp.status) - return self._parse_resp(body) - - def list_access_rules(self, share_id, version=LATEST_MICROVERSION, - action_name=None): - body = {self._get_access_action_name(version, 'os-access_list'): None} - resp, body = self.post( - "shares/%s/action" % share_id, json.dumps(body), version=version) - self.expected_success(200, resp.status) - return self._parse_resp(body) - - def delete_access_rule(self, share_id, rule_id, - version=LATEST_MICROVERSION, action_name=None): - post_body = { - self._get_access_action_name(version, 'os-deny_access'): { - "access_id": rule_id, - } - } - body = json.dumps(post_body) - resp, body = self.post( - "shares/%s/action" % share_id, body, version=version) - self.expected_success(202, resp.status) - return body - -############### - - def list_availability_zones(self, url='availability-zones', - version=LATEST_MICROVERSION): - """Get list of availability zones.""" - if url is None: - if utils.is_microversion_gt(version, "2.6"): - url = 'availability-zones' - else: - url = 'os-availability-zone' - resp, body = self.get(url, version=version) - self.expected_success(200, resp.status) - return self._parse_resp(body) - -############### - - def list_services(self, params=None, url=None, - version=LATEST_MICROVERSION): - """List services.""" - if url is None: - if utils.is_microversion_gt(version, "2.6"): - url = 'services' - else: - url = 'os-services' - if params: - url += '?%s' % urlparse.urlencode(params) - resp, body = self.get(url, version=version) - self.expected_success(200, resp.status) - return self._parse_resp(body) - -############### - - def list_share_types(self, params=None, default=False, - version=LATEST_MICROVERSION): - uri = 'types' - if default: - uri += '/default' - if params is not None: - uri += '?%s' % urlparse.urlencode(params) - resp, body = self.get(uri, version=version) - self.expected_success(200, resp.status) - return self._parse_resp(body) - - def create_share_type(self, name, is_public=True, - version=LATEST_MICROVERSION, **kwargs): - if 
utils.is_microversion_gt(version, "2.6"): - is_public_keyname = 'share_type_access:is_public' - else: - is_public_keyname = 'os-share-type-access:is_public' - post_body = { - 'name': name, - 'extra_specs': kwargs.get('extra_specs'), - is_public_keyname: is_public, - } - if kwargs.get('description'): - post_body['description'] = kwargs.get('description') - post_body = json.dumps({'share_type': post_body}) - resp, body = self.post('types', post_body, version=version) - self.expected_success(200, resp.status) - return self._parse_resp(body) - - def delete_share_type(self, share_type_id, version=LATEST_MICROVERSION): - resp, body = self.delete("types/%s" % share_type_id, version=version) - self.expected_success(202, resp.status) - return body - - def get_share_type(self, share_type_id, version=LATEST_MICROVERSION): - resp, body = self.get("types/%s" % share_type_id, version=version) - self.expected_success(200, resp.status) - return self._parse_resp(body) - - def list_access_to_share_type(self, share_type_id, - version=LATEST_MICROVERSION, - action_name=None): - if action_name is None: - if utils.is_microversion_gt(version, "2.6"): - action_name = 'share_type_access' - else: - action_name = 'os-share-type-access' - url = 'types/%(st_id)s/%(action_name)s' % { - 'st_id': share_type_id, 'action_name': action_name} - resp, body = self.get(url, version=version) - # [{"share_type_id": "%st_id%", "project_id": "%project_id%"}, ] - self.expected_success(200, resp.status) - return self._parse_resp(body) - -############### - - @staticmethod - def _get_quotas_url(version): - if utils.is_microversion_gt(version, "2.6"): - return 'quota-sets' - return 'os-quota-sets' - - @staticmethod - def _get_quotas_url_arguments_as_str(user_id=None, share_type=None): - args_str = '' - if not (user_id is None or share_type is None): - args_str = "?user_id=%s&share_type=%s" % (user_id, share_type) - elif user_id is not None: - args_str = "?user_id=%s" % user_id - elif share_type is not None: - args_str = "?share_type=%s" % share_type - return args_str - - def default_quotas(self, tenant_id, url=None, version=LATEST_MICROVERSION): - if url is None: - url = self._get_quotas_url(version) - url += '/%s' % tenant_id - resp, body = self.get("%s/defaults" % url, version=version) - self.expected_success(200, resp.status) - return self._parse_resp(body) - - def show_quotas(self, tenant_id, user_id=None, share_type=None, url=None, - version=LATEST_MICROVERSION): - if url is None: - url = self._get_quotas_url(version) - url += '/%s' % tenant_id - url += self._get_quotas_url_arguments_as_str(user_id, share_type) - resp, body = self.get(url, version=version) - self.expected_success(200, resp.status) - return self._parse_resp(body) - - def reset_quotas(self, tenant_id, user_id=None, share_type=None, url=None, - version=LATEST_MICROVERSION): - if url is None: - url = self._get_quotas_url(version) - url += '/%s' % tenant_id - url += self._get_quotas_url_arguments_as_str(user_id, share_type) - resp, body = self.delete(url, version=version) - self.expected_success(202, resp.status) - return body - - def detail_quotas(self, tenant_id, user_id=None, share_type=None, url=None, - version=LATEST_MICROVERSION): - if url is None: - url = self._get_quotas_url(version) - url += '/%s/detail' % tenant_id - url += self._get_quotas_url_arguments_as_str(user_id, share_type) - resp, body = self.get(url, version=version) - self.expected_success(200, resp.status) - return self._parse_resp(body) - - def update_quotas(self, tenant_id, user_id=None, 
shares=None, - snapshots=None, gigabytes=None, snapshot_gigabytes=None, - share_networks=None, - share_groups=None, share_group_snapshots=None, - force=True, share_type=None, - url=None, version=LATEST_MICROVERSION): - if url is None: - url = self._get_quotas_url(version) - url += '/%s' % tenant_id - url += self._get_quotas_url_arguments_as_str(user_id, share_type) - - put_body = {"tenant_id": tenant_id} - if force: - put_body["force"] = "true" - if shares is not None: - put_body["shares"] = shares - if snapshots is not None: - put_body["snapshots"] = snapshots - if gigabytes is not None: - put_body["gigabytes"] = gigabytes - if snapshot_gigabytes is not None: - put_body["snapshot_gigabytes"] = snapshot_gigabytes - if share_networks is not None: - put_body["share_networks"] = share_networks - if share_groups is not None: - put_body["share_groups"] = share_groups - if share_group_snapshots is not None: - put_body["share_group_snapshots"] = share_group_snapshots - put_body = json.dumps({"quota_set": put_body}) - - resp, body = self.put(url, put_body, version=version) - self.expected_success(200, resp.status) - return self._parse_resp(body) - -############### - - def create_share_group(self, name=None, description=None, - share_group_type_id=None, share_type_ids=(), - share_network_id=None, - source_share_group_snapshot_id=None, - availability_zone=None, - version=LATEST_MICROVERSION): - """Create a new share group.""" - uri = 'share-groups' - post_body = {} - if name: - post_body['name'] = name - if description: - post_body['description'] = description - if share_group_type_id: - post_body['share_group_type_id'] = share_group_type_id - if share_type_ids: - post_body['share_types'] = share_type_ids - if source_share_group_snapshot_id: - post_body['source_share_group_snapshot_id'] = ( - source_share_group_snapshot_id) - if share_network_id: - post_body['share_network_id'] = share_network_id - if availability_zone: - post_body['availability_zone'] = availability_zone - body = json.dumps({'share_group': post_body}) - - resp, body = self.post(uri, body, headers=EXPERIMENTAL, - extra_headers=True, version=version) - - self.expected_success(202, resp.status) - return self._parse_resp(body) - - def delete_share_group(self, share_group_id, version=LATEST_MICROVERSION): - """Delete a share group.""" - uri = 'share-groups/%s' % share_group_id - resp, body = self.delete(uri, headers=EXPERIMENTAL, - extra_headers=True, version=version) - self.expected_success(202, resp.status) - return self._parse_resp(body) - - def list_share_groups(self, detailed=False, params=None, - version=LATEST_MICROVERSION): - """Get list of share groups w/o filters.""" - uri = 'share-groups%s' % ('/detail' if detailed else '') - uri += '?%s' % (urlparse.urlencode(params) if params else '') - resp, body = self.get(uri, headers=EXPERIMENTAL, extra_headers=True, - version=version) - self.expected_success(200, resp.status) - return self._parse_resp(body) - - def get_share_group(self, share_group_id, version=LATEST_MICROVERSION): - """Get share group info.""" - uri = 'share-groups/%s' % share_group_id - resp, body = self.get(uri, headers=EXPERIMENTAL, extra_headers=True, - version=version) - self.expected_success(200, resp.status) - return self._parse_resp(body) - - def update_share_group(self, share_group_id, name=None, description=None, - version=LATEST_MICROVERSION, **kwargs): - """Update an existing share group.""" - uri = 'share-groups/%s' % share_group_id - post_body = {} - if name: - post_body['name'] = name - if description: 
- post_body['description'] = description - if kwargs: - post_body.update(kwargs) - body = json.dumps({'share_group': post_body}) - - resp, body = self.put(uri, body, headers=EXPERIMENTAL, - extra_headers=True, version=version) - - self.expected_success(200, resp.status) - return self._parse_resp(body) - - def share_group_reset_state(self, share_group_id, status='error', - version=LATEST_MICROVERSION): - self.reset_state(share_group_id, status=status, s_type='groups', - headers=EXPERIMENTAL, version=version) - - def share_group_force_delete(self, share_group_id, - version=LATEST_MICROVERSION): - self.force_delete(share_group_id, s_type='share-groups', - headers=EXPERIMENTAL, version=version) - - def wait_for_share_group_status(self, share_group_id, status): - """Waits for a share group to reach a given status.""" - body = self.get_share_group(share_group_id) - sg_name = body['name'] - sg_status = body['status'] - start = int(time.time()) - - while sg_status != status: - time.sleep(self.build_interval) - body = self.get_share_group(share_group_id) - sg_status = body['status'] - if 'error' in sg_status and status != 'error': - raise share_exceptions.ShareGroupBuildErrorException( - share_group_id=share_group_id) - - if int(time.time()) - start >= self.build_timeout: - sg_name = sg_name or share_group_id - message = ('Share Group %s failed to reach %s status ' - 'within the required time (%s s). ' - 'Current status: %s' % - (sg_name, status, self.build_timeout, sg_status)) - raise exceptions.TimeoutException(message) - -############### - - def create_share_group_type(self, name=None, share_types=(), - is_public=None, group_specs=None, - version=LATEST_MICROVERSION): - """Create a new share group type.""" - uri = 'share-group-types' - post_body = {} - if isinstance(share_types, (tuple, list)): - share_types = list(share_types) - else: - share_types = [share_types] - if name is not None: - post_body['name'] = name - if share_types: - post_body['share_types'] = share_types - if is_public is not None: - post_body['is_public'] = is_public - if group_specs: - post_body['group_specs'] = group_specs - body = json.dumps({'share_group_type': post_body}) - resp, body = self.post(uri, body, headers=EXPERIMENTAL, - extra_headers=True, version=version) - self.expected_success(200, resp.status) - return self._parse_resp(body) - - def list_share_group_types(self, detailed=False, params=None, - version=LATEST_MICROVERSION): - """Get list of share group types.""" - uri = 'share-group-types%s' % ('/detail' if detailed else '') - uri += '?%s' % (urlparse.urlencode(params) if params else '') - resp, body = self.get(uri, headers=EXPERIMENTAL, extra_headers=True, - version=version) - self.expected_success(200, resp.status) - return self._parse_resp(body) - - def get_share_group_type(self, share_group_type_id, - version=LATEST_MICROVERSION): - """Get share group type info.""" - uri = 'share-group-types/%s' % share_group_type_id - resp, body = self.get(uri, headers=EXPERIMENTAL, extra_headers=True, - version=version) - self.expected_success(200, resp.status) - return self._parse_resp(body) - - def get_default_share_group_type(self, version=LATEST_MICROVERSION): - """Get default share group type info.""" - uri = 'share-group-types/default' - resp, body = self.get(uri, headers=EXPERIMENTAL, extra_headers=True, - version=version) - self.expected_success(200, resp.status) - return self._parse_resp(body) - - def delete_share_group_type(self, share_group_type_id, - version=LATEST_MICROVERSION): - """Delete an existing share 
group type.""" - uri = 'share-group-types/%s' % share_group_type_id - resp, body = self.delete(uri, headers=EXPERIMENTAL, - extra_headers=True, version=version) - self.expected_success(204, resp.status) - return self._parse_resp(body) - - def add_access_to_share_group_type(self, share_group_type_id, project_id, - version=LATEST_MICROVERSION): - uri = 'share-group-types/%s/action' % share_group_type_id - post_body = {'project': project_id} - post_body = json.dumps({'addProjectAccess': post_body}) - resp, body = self.post(uri, post_body, headers=EXPERIMENTAL, - extra_headers=True, version=version) - self.expected_success(202, resp.status) - return self._parse_resp(body) - - def remove_access_from_share_group_type(self, share_group_type_id, - project_id, - version=LATEST_MICROVERSION): - uri = 'share-group-types/%s/action' % share_group_type_id - post_body = {'project': project_id} - post_body = json.dumps({'removeProjectAccess': post_body}) - resp, body = self.post(uri, post_body, headers=EXPERIMENTAL, - extra_headers=True, version=version) - self.expected_success(202, resp.status) - return self._parse_resp(body) - - def list_access_to_share_group_type(self, share_group_type_id, - version=LATEST_MICROVERSION): - uri = 'share-group-types/%s/access' % share_group_type_id - resp, body = self.get(uri, headers=EXPERIMENTAL, extra_headers=True, - version=version) - self.expected_success(200, resp.status) - return self._parse_resp(body) - -############### - - def create_share_group_type_specs(self, share_group_type_id, - group_specs_dict, - version=LATEST_MICROVERSION): - url = "share-group-types/%s/group-specs" % share_group_type_id - post_body = json.dumps({'group_specs': group_specs_dict}) - resp, body = self.post(url, post_body, headers=EXPERIMENTAL, - extra_headers=True, version=version) - self.expected_success(200, resp.status) - return self._parse_resp(body) - - def get_share_group_type_spec(self, share_group_type_id, group_spec_key, - version=LATEST_MICROVERSION): - uri = "group-types/%s/group_specs/%s" % ( - share_group_type_id, group_spec_key) - resp, body = self.get(uri, headers=EXPERIMENTAL, extra_headers=True, - version=version) - self.expected_success(200, resp.status) - return self._parse_resp(body) - - def list_share_group_type_specs(self, share_group_type_id, params=None, - version=LATEST_MICROVERSION): - uri = "share-group-types/%s/group_specs" % share_group_type_id - if params is not None: - uri += '?%s' % urlparse.urlencode(params) - resp, body = self.get(uri, headers=EXPERIMENTAL, extra_headers=True, - version=version) - self.expected_success(200, resp.status) - return self._parse_resp(body) - - def update_share_group_type_spec(self, share_group_type_id, group_spec_key, - group_spec_value, - version=LATEST_MICROVERSION): - uri = "share-group-types/%s/group-specs/%s" % ( - share_group_type_id, group_spec_key) - group_spec = {group_spec_key: group_spec_value} - post_body = json.dumps(group_spec) - resp, body = self.put(uri, post_body, headers=EXPERIMENTAL, - extra_headers=True, version=version) - self.expected_success(200, resp.status) - return self._parse_resp(body) - - def update_share_group_type_specs(self, share_group_type_id, - group_specs_dict, - version=LATEST_MICROVERSION): - return self.create_share_group_type_specs( - share_group_type_id, group_specs_dict, version=version) - - def delete_share_group_type_spec(self, share_type_id, group_spec_key, - version=LATEST_MICROVERSION): - uri = "share-group-types/%s/group-specs/%s" % ( - share_type_id, group_spec_key) - resp, 
body = self.delete(uri, headers=EXPERIMENTAL, extra_headers=True, - version=version) - self.expected_success(204, resp.status) - return body - -############### - - def create_share_group_snapshot(self, share_group_id, name=None, - description=None, - version=LATEST_MICROVERSION): - """Create a new share group snapshot of an existing share group.""" - uri = 'share-group-snapshots' - post_body = {'share_group_id': share_group_id} - if name: - post_body['name'] = name - if description: - post_body['description'] = description - body = json.dumps({'share_group_snapshot': post_body}) - resp, body = self.post(uri, body, headers=EXPERIMENTAL, - extra_headers=True, version=version) - self.expected_success(202, resp.status) - return self._parse_resp(body) - - def delete_share_group_snapshot(self, share_group_snapshot_id, - version=LATEST_MICROVERSION): - """Delete an existing share group snapshot.""" - uri = 'share-group-snapshots/%s' % share_group_snapshot_id - resp, body = self.delete(uri, headers=EXPERIMENTAL, - extra_headers=True, version=version) - self.expected_success(202, resp.status) - return body - - def list_share_group_snapshots(self, detailed=False, params=None, - version=LATEST_MICROVERSION): - """Get list of share group snapshots w/o filters.""" - uri = 'share-group-snapshots%s' % ('/detail' if detailed else '') - uri += '?%s' % (urlparse.urlencode(params) if params else '') - resp, body = self.get(uri, headers=EXPERIMENTAL, extra_headers=True, - version=version) - self.expected_success(200, resp.status) - return self._parse_resp(body) - - def get_share_group_snapshot(self, share_group_snapshot_id, - version=LATEST_MICROVERSION): - """Get share group snapshot info.""" - uri = 'share-group-snapshots/%s' % share_group_snapshot_id - resp, body = self.get(uri, headers=EXPERIMENTAL, extra_headers=True, - version=version) - self.expected_success(200, resp.status) - return self._parse_resp(body) - - def update_share_group_snapshot(self, share_group_snapshot_id, name=None, - description=None, - version=LATEST_MICROVERSION): - """Update an existing share group snapshot.""" - uri = 'share-group-snapshots/%s' % share_group_snapshot_id - post_body = {} - if name: - post_body['name'] = name - if description: - post_body['description'] = description - body = json.dumps({'share_group_snapshot': post_body}) - resp, body = self.put(uri, body, headers=EXPERIMENTAL, - extra_headers=True, version=version) - self.expected_success(200, resp.status) - return self._parse_resp(body) - - def share_group_snapshot_reset_state(self, share_group_snapshot_id, - status='error', - version=LATEST_MICROVERSION): - self.reset_state( - share_group_snapshot_id, status=status, - s_type='group-snapshots', headers=EXPERIMENTAL, version=version) - - def share_group_snapshot_force_delete(self, share_group_snapshot_id, - version=LATEST_MICROVERSION): - self.force_delete( - share_group_snapshot_id, s_type='share-group-snapshots', - headers=EXPERIMENTAL, version=version) - - def wait_for_share_group_snapshot_status(self, share_group_snapshot_id, - status): - """Waits for a share group snapshot to reach a given status.""" - body = self.get_share_group_snapshot(share_group_snapshot_id) - sg_snapshot_name = body['name'] - sg_snapshot_status = body['status'] - start = int(time.time()) - - while sg_snapshot_status != status: - time.sleep(self.build_interval) - body = self.get_share_group_snapshot(share_group_snapshot_id) - sg_snapshot_status = body['status'] - if 'error' in sg_snapshot_status and status != 'error': - raise 
share_exceptions.ShareGroupSnapshotBuildErrorException( - share_group_snapshot_id=share_group_snapshot_id) - - if int(time.time()) - start >= self.build_timeout: - message = ('Share Group Snapshot %s failed to reach %s status ' - 'within the required time (%s s).' % - (sg_snapshot_name, status, self.build_timeout)) - raise exceptions.TimeoutException(message) - -############### - - def migrate_share(self, share_id, host, - force_host_assisted_migration=False, - new_share_network_id=None, writable=False, - preserve_metadata=False, preserve_snapshots=False, - nondisruptive=False, new_share_type_id=None, - version=LATEST_MICROVERSION): - - body = { - 'migration_start': { - 'host': host, - 'force_host_assisted_migration': force_host_assisted_migration, - 'new_share_network_id': new_share_network_id, - 'new_share_type_id': new_share_type_id, - 'writable': writable, - 'preserve_metadata': preserve_metadata, - 'preserve_snapshots': preserve_snapshots, - 'nondisruptive': nondisruptive, - } - } - - body = json.dumps(body) - return self.post('shares/%s/action' % share_id, body, - headers=EXPERIMENTAL, extra_headers=True, - version=version) - - def migration_complete(self, share_id, version=LATEST_MICROVERSION, - action_name='migration_complete'): - post_body = { - action_name: None, - } - body = json.dumps(post_body) - return self.post('shares/%s/action' % share_id, body, - headers=EXPERIMENTAL, extra_headers=True, - version=version) - - def migration_cancel(self, share_id, version=LATEST_MICROVERSION, - action_name='migration_cancel'): - post_body = { - action_name: None, - } - body = json.dumps(post_body) - return self.post('shares/%s/action' % share_id, body, - headers=EXPERIMENTAL, extra_headers=True, - version=version) - - def migration_get_progress(self, share_id, version=LATEST_MICROVERSION, - action_name='migration_get_progress'): - post_body = { - action_name: None, - } - body = json.dumps(post_body) - result = self.post('shares/%s/action' % share_id, body, - headers=EXPERIMENTAL, extra_headers=True, - version=version) - return json.loads(result[1]) - - def reset_task_state( - self, share_id, task_state, version=LATEST_MICROVERSION, - action_name='reset_task_state'): - post_body = { - action_name: { - 'task_state': task_state, - } - } - body = json.dumps(post_body) - return self.post('shares/%s/action' % share_id, body, - headers=EXPERIMENTAL, extra_headers=True, - version=version) - - def wait_for_migration_status(self, share_id, dest_host, status_to_wait, - version=LATEST_MICROVERSION): - """Waits for a share to migrate to a certain host.""" - statuses = ((status_to_wait,) - if not isinstance(status_to_wait, (tuple, list, set)) - else status_to_wait) - share = self.get_share(share_id, version=version) - migration_timeout = CONF.share.migration_timeout - start = int(time.time()) - while share['task_state'] not in statuses: - time.sleep(self.build_interval) - share = self.get_share(share_id, version=version) - if share['task_state'] in statuses: - break - elif share['task_state'] == 'migration_error': - raise share_exceptions.ShareMigrationException( - share_id=share['id'], src=share['host'], dest=dest_host) - elif int(time.time()) - start >= migration_timeout: - message = ('Share %(share_id)s failed to reach a status in' - '%(status)s when migrating from host %(src)s to ' - 'host %(dest)s within the required time ' - '%(timeout)s.' 
% { - 'src': share['host'], - 'dest': dest_host, - 'share_id': share['id'], - 'timeout': self.build_timeout, - 'status': six.text_type(statuses), - }) - raise exceptions.TimeoutException(message) - return share - -################ - - def create_share_replica(self, share_id, availability_zone=None, - version=LATEST_MICROVERSION): - """Add a share replica of an existing share.""" - uri = "share-replicas" - post_body = { - 'share_id': share_id, - 'availability_zone': availability_zone, - } - - body = json.dumps({'share_replica': post_body}) - resp, body = self.post(uri, body, - headers=EXPERIMENTAL, - extra_headers=True, - version=version) - self.expected_success(202, resp.status) - return self._parse_resp(body) - - def get_share_replica(self, replica_id, version=LATEST_MICROVERSION): - """Get the details of share_replica.""" - resp, body = self.get("share-replicas/%s" % replica_id, - headers=EXPERIMENTAL, - extra_headers=True, - version=version) - self.expected_success(200, resp.status) - return self._parse_resp(body) - - def list_share_replicas(self, share_id=None, version=LATEST_MICROVERSION): - """Get list of replicas.""" - uri = "share-replicas/detail" - uri += ("?share_id=%s" % share_id) if share_id is not None else '' - resp, body = self.get(uri, headers=EXPERIMENTAL, - extra_headers=True, version=version) - self.expected_success(200, resp.status) - return self._parse_resp(body) - - def list_share_replicas_summary(self, share_id=None, - version=LATEST_MICROVERSION): - """Get summary list of replicas.""" - uri = "share-replicas" - uri += ("?share_id=%s" % share_id) if share_id is not None else '' - resp, body = self.get(uri, headers=EXPERIMENTAL, - extra_headers=True, version=version) - self.expected_success(200, resp.status) - return self._parse_resp(body) - - def delete_share_replica(self, replica_id, version=LATEST_MICROVERSION): - """Delete share_replica.""" - uri = "share-replicas/%s" % replica_id - resp, body = self.delete(uri, - headers=EXPERIMENTAL, - extra_headers=True, - version=version) - self.expected_success(202, resp.status) - return body - - def promote_share_replica(self, replica_id, expected_status=202, - version=LATEST_MICROVERSION): - """Promote a share replica to active state.""" - uri = "share-replicas/%s/action" % replica_id - post_body = { - 'promote': None, - } - body = json.dumps(post_body) - resp, body = self.post(uri, body, - headers=EXPERIMENTAL, - extra_headers=True, - version=version) - self.expected_success(expected_status, resp.status) - return self._parse_resp(body) - - def wait_for_share_replica_status(self, replica_id, expected_status, - status_attr='status'): - """Waits for a replica's status_attr to reach a given status.""" - body = self.get_share_replica(replica_id) - replica_status = body[status_attr] - start = int(time.time()) - - while replica_status != expected_status: - time.sleep(self.build_interval) - body = self.get_share_replica(replica_id) - replica_status = body[status_attr] - if replica_status == expected_status: - return - if ('error' in replica_status - and expected_status != constants.STATUS_ERROR): - raise share_exceptions.ShareInstanceBuildErrorException( - id=replica_id) - - if int(time.time()) - start >= self.build_timeout: - message = ('The %(status_attr)s of Replica %(id)s failed to ' - 'reach %(expected_status)s status within the ' - 'required time (%(time)ss). Current ' - '%(status_attr)s: %(current_status)s.' 
% - { - 'status_attr': status_attr, - 'expected_status': expected_status, - 'time': self.build_timeout, - 'id': replica_id, - 'current_status': replica_status, - }) - raise exceptions.TimeoutException(message) - - def reset_share_replica_status(self, replica_id, - status=constants.STATUS_AVAILABLE, - version=LATEST_MICROVERSION): - """Reset the status.""" - uri = 'share-replicas/%s/action' % replica_id - post_body = { - 'reset_status': { - 'status': status - } - } - body = json.dumps(post_body) - resp, body = self.post(uri, body, - headers=EXPERIMENTAL, - extra_headers=True, - version=version) - self.expected_success(202, resp.status) - return self._parse_resp(body) - - def reset_share_replica_state(self, replica_id, - state=constants.REPLICATION_STATE_ACTIVE, - version=LATEST_MICROVERSION): - """Reset the replication state of a replica.""" - uri = 'share-replicas/%s/action' % replica_id - post_body = { - 'reset_replica_state': { - 'replica_state': state - } - } - body = json.dumps(post_body) - resp, body = self.post(uri, body, - headers=EXPERIMENTAL, - extra_headers=True, - version=version) - self.expected_success(202, resp.status) - return self._parse_resp(body) - - def resync_share_replica(self, replica_id, expected_result=202, - version=LATEST_MICROVERSION): - """Force an immediate resync of the replica.""" - uri = 'share-replicas/%s/action' % replica_id - post_body = { - 'resync': None - } - body = json.dumps(post_body) - resp, body = self.post(uri, body, - headers=EXPERIMENTAL, - extra_headers=True, - version=version) - self.expected_success(expected_result, resp.status) - return self._parse_resp(body) - - def force_delete_share_replica(self, replica_id, - version=LATEST_MICROVERSION): - """Force delete a replica.""" - uri = 'share-replicas/%s/action' % replica_id - post_body = { - 'force_delete': None - } - body = json.dumps(post_body) - resp, body = self.post(uri, body, - headers=EXPERIMENTAL, - extra_headers=True, - version=version) - self.expected_success(202, resp.status) - return self._parse_resp(body) - - def list_share_networks(self, detailed=False, params=None, - version=LATEST_MICROVERSION): - """Get list of share networks w/o filters.""" - uri = 'share-networks/detail' if detailed else 'share-networks' - uri += '?%s' % urlparse.urlencode(params) if params else '' - resp, body = self.get(uri, version=version) - self.expected_success(200, resp.status) - return self._parse_resp(body) - - def list_share_networks_with_detail(self, params=None, - version=LATEST_MICROVERSION): - """Get detailed list of share networks w/o filters.""" - return self.list_share_networks( - detailed=True, params=params, version=version) - - def get_share_network(self, share_network_id, version=LATEST_MICROVERSION): - resp, body = self.get("share-networks/%s" % share_network_id, - version=version) - self.expected_success(200, resp.status) - return self._parse_resp(body) - -################ - - def create_snapshot_access_rule(self, snapshot_id, access_type="ip", - access_to="0.0.0.0/0"): - body = { - "allow_access": { - "access_type": access_type, - "access_to": access_to - } - } - resp, body = self.post("snapshots/%s/action" % snapshot_id, - json.dumps(body), version=LATEST_MICROVERSION) - self.expected_success(202, resp.status) - return self._parse_resp(body) - - def get_snapshot_access_rule(self, snapshot_id, rule_id): - resp, body = self.get("snapshots/%s/access-list" % snapshot_id, - version=LATEST_MICROVERSION) - body = self._parse_resp(body) - found_rules = filter(lambda x: x['id'] == rule_id, 
body) - - return found_rules[0] if len(found_rules) > 0 else None - - def wait_for_snapshot_access_rule_status(self, snapshot_id, rule_id, - expected_state='active'): - rule = self.get_snapshot_access_rule(snapshot_id, rule_id) - state = rule['state'] - start = int(time.time()) - - while state != expected_state: - time.sleep(self.build_interval) - rule = self.get_snapshot_access_rule(snapshot_id, rule_id) - state = rule['state'] - if state == expected_state: - return - if 'error' in state: - raise share_exceptions.AccessRuleBuildErrorException( - snapshot_id) - - if int(time.time()) - start >= self.build_timeout: - message = ('The status of snapshot access rule %(id)s failed ' - 'to reach %(expected_state)s state within the ' - 'required time (%(time)ss). Current ' - 'state: %(current_state)s.' % - { - 'expected_state': expected_state, - 'time': self.build_timeout, - 'id': rule_id, - 'current_state': state, - }) - raise exceptions.TimeoutException(message) - - def delete_snapshot_access_rule(self, snapshot_id, rule_id): - body = { - "deny_access": { - "access_id": rule_id, - } - } - resp, body = self.post("snapshots/%s/action" % snapshot_id, - json.dumps(body), version=LATEST_MICROVERSION) - self.expected_success(202, resp.status) - return self._parse_resp(body) - - def wait_for_snapshot_access_rule_deletion(self, snapshot_id, rule_id): - rule = self.get_snapshot_access_rule(snapshot_id, rule_id) - start = int(time.time()) - - while rule is not None: - time.sleep(self.build_interval) - - rule = self.get_snapshot_access_rule(snapshot_id, rule_id) - - if rule is None: - return - if int(time.time()) - start >= self.build_timeout: - message = ('The snapshot access rule %(id)s failed to delete ' - 'within the required time (%(time)ss).' % - { - 'time': self.build_timeout, - 'id': rule_id, - }) - raise exceptions.TimeoutException(message) - - def get_snapshot_export_location(self, snapshot_id, export_location_uuid, - version=LATEST_MICROVERSION): - resp, body = self.get( - "snapshots/%(snapshot_id)s/export-locations/%(el_uuid)s" % { - "snapshot_id": snapshot_id, "el_uuid": export_location_uuid}, - version=version) - self.expected_success(200, resp.status) - return self._parse_resp(body) - - def list_snapshot_export_locations( - self, snapshot_id, version=LATEST_MICROVERSION): - resp, body = self.get( - "snapshots/%s/export-locations" % snapshot_id, version=version) - self.expected_success(200, resp.status) - return self._parse_resp(body) - -############### - - def get_message(self, message_id, version=LATEST_MICROVERSION): - """Show details for a single message.""" - url = 'messages/%s' % message_id - resp, body = self.get(url, version=version) - self.expected_success(200, resp.status) - return self._parse_resp(body) - - def list_messages(self, params=None, version=LATEST_MICROVERSION): - """List all messages.""" - url = 'messages' - url += '?%s' % urlparse.urlencode(params) if params else '' - resp, body = self.get(url, version=version) - self.expected_success(200, resp.status) - return self._parse_resp(body) - - def delete_message(self, message_id, version=LATEST_MICROVERSION): - """Delete a single message.""" - url = 'messages/%s' % message_id - resp, body = self.delete(url, version=version) - self.expected_success(204, resp.status) - return self._parse_resp(body) - - def wait_for_message(self, resource_id): - """Waits until a message for a resource with given id exists""" - start = int(time.time()) - message = None - - while not message: - time.sleep(self.build_interval) - for msg in 
self.list_messages(): - if msg['resource_id'] == resource_id: - return msg - - if int(time.time()) - start >= self.build_timeout: - message = ('No message for resource with id %s was created in' - ' the required time (%s s).' % - (resource_id, self.build_timeout)) - raise exceptions.TimeoutException(message) diff --git a/manila_tempest_tests/share_exceptions.py b/manila_tempest_tests/share_exceptions.py deleted file mode 100644 index a309b84256..0000000000 --- a/manila_tempest_tests/share_exceptions.py +++ /dev/null @@ -1,77 +0,0 @@ -# Copyright 2014 Mirantis Inc. -# All Rights Reserved. -# -# Licensed under the Apache License, Version 2.0 (the "License"); you may -# not use this file except in compliance with the License. You may obtain -# a copy of the License at -# -# http://www.apache.org/licenses/LICENSE-2.0 -# -# Unless required by applicable law or agreed to in writing, software -# distributed under the License is distributed on an "AS IS" BASIS, WITHOUT -# WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. See the -# License for the specific language governing permissions and limitations -# under the License. - -from tempest.lib import exceptions - - -class ShareBuildErrorException(exceptions.TempestException): - message = "Share %(share_id)s failed to build and is in ERROR status" - - -class ShareInstanceBuildErrorException(exceptions.TempestException): - message = "Share instance %(id)s failed to build and is in ERROR status" - - -class ShareGroupBuildErrorException(exceptions.TempestException): - message = ("Share group %(share_group_id)s failed to build and " - "is in ERROR status") - - -class AccessRuleBuildErrorException(exceptions.TempestException): - message = "Share's rule with id %(rule_id)s is in ERROR status" - - -class SnapshotBuildErrorException(exceptions.TempestException): - message = "Snapshot %(snapshot_id)s failed to build and is in ERROR status" - - -class SnapshotInstanceBuildErrorException(exceptions.TempestException): - message = ("Snapshot instance %(id)s failed to build and is in " - "ERROR status.") - - -class ShareGroupSnapshotBuildErrorException(exceptions.TempestException): - message = ("Share Group Snapshot %(share_group_snapshot_id)s failed " - "to build and is in ERROR status") - - -class ShareProtocolNotSpecified(exceptions.TempestException): - message = "Share can not be created, share protocol is not specified" - - -class ShareNetworkNotSpecified(exceptions.TempestException): - message = "Share can not be created, share network not specified" - - -class NoAvailableNetwork(exceptions.TempestException): - message = "No available network for service VM" - - -class InvalidResource(exceptions.TempestException): - message = "Provided invalid resource: %(message)s" - - -class ShareMigrationException(exceptions.TempestException): - message = ("Share %(share_id)s failed to migrate from " - "host %(src)s to host %(dest)s.") - - -class ResourceReleaseFailed(exceptions.TempestException): - message = "Failed to release resource '%(res_type)s' with id '%(res_id)s'." 
- - -class ShareReplicationTypeException(exceptions.TempestException): - message = ("Option backend_replication_type is set to incorrect value: " - "%(replication_type)s") diff --git a/manila_tempest_tests/tests/__init__.py b/manila_tempest_tests/tests/__init__.py deleted file mode 100644 index e69de29bb2..0000000000 diff --git a/manila_tempest_tests/tests/api/__init__.py b/manila_tempest_tests/tests/api/__init__.py deleted file mode 100644 index e69de29bb2..0000000000 diff --git a/manila_tempest_tests/tests/api/admin/__init__.py b/manila_tempest_tests/tests/api/admin/__init__.py deleted file mode 100644 index e69de29bb2..0000000000 diff --git a/manila_tempest_tests/tests/api/admin/test_admin_actions.py b/manila_tempest_tests/tests/api/admin/test_admin_actions.py deleted file mode 100644 index 3df0ae4788..0000000000 --- a/manila_tempest_tests/tests/api/admin/test_admin_actions.py +++ /dev/null @@ -1,128 +0,0 @@ -# Copyright 2014 Mirantis Inc. -# All Rights Reserved. -# -# Licensed under the Apache License, Version 2.0 (the "License"); you may -# not use this file except in compliance with the License. You may obtain -# a copy of the License at -# -# http://www.apache.org/licenses/LICENSE-2.0 -# -# Unless required by applicable law or agreed to in writing, software -# distributed under the License is distributed on an "AS IS" BASIS, WITHOUT -# WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. See the -# License for the specific language governing permissions and limitations -# under the License. - -from tempest import config -import testtools -from testtools import testcase as tc - -from manila_tempest_tests.tests.api import base - - -CONF = config.CONF - - -class AdminActionsTest(base.BaseSharesAdminTest): - - @classmethod - def resource_setup(cls): - super(AdminActionsTest, cls).resource_setup() - cls.states = ["error", "available"] - cls.task_states = ["migration_starting", "data_copying_in_progress", - "migration_success", None] - cls.bad_status = "error_deleting" - cls.sh = cls.create_share() - cls.sh_instance = ( - cls.shares_v2_client.get_instances_of_share(cls.sh["id"])[0] - ) - if CONF.share.run_snapshot_tests: - cls.sn = cls.create_snapshot_wait_for_active(cls.sh["id"]) - - @tc.attr(base.TAG_POSITIVE, base.TAG_API_WITH_BACKEND) - def test_reset_share_state(self): - for status in self.states: - self.shares_v2_client.reset_state(self.sh["id"], status=status) - self.shares_v2_client.wait_for_share_status(self.sh["id"], status) - - @tc.attr(base.TAG_POSITIVE, base.TAG_API_WITH_BACKEND) - def test_reset_share_instance_state(self): - id = self.sh_instance["id"] - for status in self.states: - self.shares_v2_client.reset_state( - id, s_type="share_instances", status=status) - self.shares_v2_client.wait_for_share_instance_status(id, status) - - @tc.attr(base.TAG_POSITIVE, base.TAG_API_WITH_BACKEND) - @testtools.skipUnless(CONF.share.run_snapshot_tests, - "Snapshot tests are disabled.") - def test_reset_snapshot_state_to_error(self): - for status in self.states: - self.shares_v2_client.reset_state( - self.sn["id"], s_type="snapshots", status=status) - self.shares_v2_client.wait_for_snapshot_status( - self.sn["id"], status) - - @tc.attr(base.TAG_POSITIVE, base.TAG_API_WITH_BACKEND) - def test_force_delete_share(self): - share = self.create_share() - - # Change status from 'available' to 'error_deleting' - self.shares_v2_client.reset_state(share["id"], status=self.bad_status) - - # Check that status was changed - check_status = self.shares_v2_client.get_share(share["id"]) - 
self.assertEqual(self.bad_status, check_status["status"]) - - # Share with status 'error_deleting' should be deleted - self.shares_v2_client.force_delete(share["id"]) - self.shares_v2_client.wait_for_resource_deletion(share_id=share["id"]) - - @tc.attr(base.TAG_POSITIVE, base.TAG_API_WITH_BACKEND) - def test_force_delete_share_instance(self): - share = self.create_share(cleanup_in_class=False) - instances = self.shares_v2_client.get_instances_of_share(share["id"]) - # Check that instance was created - self.assertEqual(1, len(instances)) - - instance = instances[0] - - # Change status from 'available' to 'error_deleting' - self.shares_v2_client.reset_state( - instance["id"], s_type="share_instances", status=self.bad_status) - - # Check that status was changed - check_status = self.shares_v2_client.get_share_instance(instance["id"]) - self.assertEqual(self.bad_status, check_status["status"]) - - # Share with status 'error_deleting' should be deleted - self.shares_v2_client.force_delete( - instance["id"], s_type="share_instances") - self.shares_v2_client.wait_for_resource_deletion( - share_instance_id=instance["id"]) - - @tc.attr(base.TAG_POSITIVE, base.TAG_API_WITH_BACKEND) - @testtools.skipUnless(CONF.share.run_snapshot_tests, - "Snapshot tests are disabled.") - def test_force_delete_snapshot(self): - sn = self.create_snapshot_wait_for_active(self.sh["id"]) - - # Change status from 'available' to 'error_deleting' - self.shares_v2_client.reset_state( - sn["id"], s_type="snapshots", status=self.bad_status) - - # Check that status was changed - check_status = self.shares_v2_client.get_snapshot(sn["id"]) - self.assertEqual(self.bad_status, check_status["status"]) - - # Snapshot with status 'error_deleting' should be deleted - self.shares_v2_client.force_delete(sn["id"], s_type="snapshots") - self.shares_v2_client.wait_for_resource_deletion(snapshot_id=sn["id"]) - - @tc.attr(base.TAG_POSITIVE, base.TAG_API_WITH_BACKEND) - @base.skip_if_microversion_lt("2.22") - def test_reset_share_task_state(self): - for task_state in self.task_states: - self.shares_v2_client.reset_task_state(self.sh["id"], task_state) - self.shares_v2_client.wait_for_share_status( - self.sh["id"], task_state, 'task_state') diff --git a/manila_tempest_tests/tests/api/admin/test_admin_actions_negative.py b/manila_tempest_tests/tests/api/admin/test_admin_actions_negative.py deleted file mode 100644 index ea05327d1b..0000000000 --- a/manila_tempest_tests/tests/api/admin/test_admin_actions_negative.py +++ /dev/null @@ -1,210 +0,0 @@ -# Copyright 2014 Mirantis Inc. -# All Rights Reserved. -# -# Licensed under the Apache License, Version 2.0 (the "License"); you may -# not use this file except in compliance with the License. You may obtain -# a copy of the License at -# -# http://www.apache.org/licenses/LICENSE-2.0 -# -# Unless required by applicable law or agreed to in writing, software -# distributed under the License is distributed on an "AS IS" BASIS, WITHOUT -# WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. See the -# License for the specific language governing permissions and limitations -# under the License. 
- -import ddt -from tempest import config -from tempest.lib import exceptions as lib_exc -import testtools -from testtools import testcase as tc - -from manila_tempest_tests.tests.api import base - -CONF = config.CONF - - -class AdminActionsNegativeTest(base.BaseSharesMixedTest): - - @classmethod - def resource_setup(cls): - super(AdminActionsNegativeTest, cls).resource_setup() - cls.admin_client = cls.admin_shares_v2_client - cls.member_client = cls.shares_v2_client - cls.sh = cls.create_share(client=cls.admin_client) - cls.sh_instance = ( - cls.admin_client.get_instances_of_share(cls.sh["id"])[0] - ) - if CONF.share.run_snapshot_tests: - cls.sn = cls.create_snapshot_wait_for_active( - cls.sh["id"], client=cls.admin_client) - - @tc.attr(base.TAG_NEGATIVE, base.TAG_API_WITH_BACKEND) - def test_reset_share_state_to_unacceptable_state(self): - self.assertRaises(lib_exc.BadRequest, - self.admin_client.reset_state, - self.sh["id"], status="fake") - - @tc.attr(base.TAG_NEGATIVE, base.TAG_API_WITH_BACKEND) - def test_reset_share_instance_state_to_unacceptable_state(self): - self.assertRaises( - lib_exc.BadRequest, - self.admin_client.reset_state, - self.sh_instance["id"], - s_type="share_instances", - status="fake" - ) - - @tc.attr(base.TAG_NEGATIVE, base.TAG_API_WITH_BACKEND) - @testtools.skipUnless(CONF.share.run_snapshot_tests, - "Snapshot tests are disabled.") - def test_reset_snapshot_state_to_unacceptable_state(self): - self.assertRaises(lib_exc.BadRequest, - self.admin_client.reset_state, - self.sn["id"], s_type="snapshots", status="fake") - - @tc.attr(base.TAG_NEGATIVE, base.TAG_API_WITH_BACKEND) - def test_try_reset_share_state_with_member(self): - # Even if member from another tenant, it should be unauthorized - self.assertRaises(lib_exc.Forbidden, - self.member_client.reset_state, - self.sh["id"]) - - @tc.attr(base.TAG_NEGATIVE, base.TAG_API_WITH_BACKEND) - def test_try_reset_share_instance_state_with_member(self): - # Even if member from another tenant, it should be unauthorized - self.assertRaises(lib_exc.Forbidden, - self.member_client.reset_state, - self.sh_instance["id"], s_type="share_instances") - - @tc.attr(base.TAG_NEGATIVE, base.TAG_API_WITH_BACKEND) - @testtools.skipUnless(CONF.share.run_snapshot_tests, - "Snapshot tests are disabled.") - def test_try_reset_snapshot_state_with_member(self): - # Even if member from another tenant, it should be unauthorized - self.assertRaises(lib_exc.Forbidden, - self.member_client.reset_state, - self.sn["id"], s_type="snapshots") - - @tc.attr(base.TAG_NEGATIVE, base.TAG_API_WITH_BACKEND) - def test_try_force_delete_share_with_member(self): - # If a non-admin tries to do force_delete, it should be unauthorized - self.assertRaises(lib_exc.Forbidden, - self.member_client.force_delete, - self.sh["id"]) - - @tc.attr(base.TAG_NEGATIVE, base.TAG_API_WITH_BACKEND) - def test_try_force_delete_share_instance_with_member(self): - # If a non-admin tries to do force_delete, it should be unauthorized - self.assertRaises(lib_exc.Forbidden, - self.member_client.force_delete, - self.sh_instance["id"], s_type="share_instances") - - @tc.attr(base.TAG_NEGATIVE, base.TAG_API_WITH_BACKEND) - @testtools.skipUnless(CONF.share.run_snapshot_tests, - "Snapshot tests are disabled.") - def test_try_force_delete_snapshot_with_member(self): - # If a non-admin tries to do force_delete, it should be unauthorized - self.assertRaises(lib_exc.Forbidden, - self.member_client.force_delete, - self.sn["id"], s_type="snapshots") - - @tc.attr(base.TAG_NEGATIVE, 
base.TAG_API_WITH_BACKEND) - def test_try_get_share_instance_with_member(self): - # If a non-admin tries to get instance, it should be unauthorized - self.assertRaises(lib_exc.Forbidden, - self.member_client.get_share_instance, - self.sh_instance["id"]) - - @tc.attr(base.TAG_NEGATIVE, base.TAG_API_WITH_BACKEND) - def test_try_get_instances_of_share_with_member(self): - # If a non-admin tries to list instances of given share, it should be - # unauthorized - self.assertRaises(lib_exc.Forbidden, - self.member_client.get_instances_of_share, - self.sh['id']) - - @tc.attr(base.TAG_NEGATIVE, base.TAG_API_WITH_BACKEND) - @base.skip_if_microversion_lt("2.22") - def test_reset_task_state_invalid_state(self): - self.assertRaises( - lib_exc.BadRequest, self.admin_client.reset_task_state, - self.sh['id'], 'fake_state') - - -@ddt.ddt -class AdminActionsAPIOnlyNegativeTest(base.BaseSharesMixedTest): - - @classmethod - def resource_setup(cls): - super(AdminActionsAPIOnlyNegativeTest, cls).resource_setup() - cls.admin_client = cls.admin_shares_v2_client - cls.member_client = cls.shares_v2_client - - @tc.attr(base.TAG_NEGATIVE, base.TAG_API) - def test_try_list_share_instance_with_member(self): - # If a non-admin tries to list instances, it should be unauthorized - self.assertRaises(lib_exc.Forbidden, - self.member_client.list_share_instances) - - @tc.attr(base.TAG_NEGATIVE, base.TAG_API) - @base.skip_if_microversion_lt("2.22") - def test_reset_task_state_share_not_found(self): - self.assertRaises( - lib_exc.NotFound, self.admin_client.reset_task_state, - 'fake_share', 'migration_error') - - @tc.attr(base.TAG_NEGATIVE, base.TAG_API) - @testtools.skipUnless(CONF.share.run_snapshot_tests, - "Snapshot tests are disabled.") - def test_force_delete_nonexistent_snapshot(self): - self.assertRaises(lib_exc.NotFound, - self.admin_client.force_delete, - "fake", - s_type="snapshots") - - @tc.attr(base.TAG_NEGATIVE, base.TAG_API) - def test_force_delete_nonexistent_share(self): - self.assertRaises(lib_exc.NotFound, - self.admin_client.force_delete, "fake") - - @tc.attr(base.TAG_NEGATIVE, base.TAG_API) - def test_force_delete_nonexistent_share_instance(self): - self.assertRaises(lib_exc.NotFound, - self.admin_client.force_delete, - "fake", - s_type="share_instances") - - @tc.attr(base.TAG_NEGATIVE, base.TAG_API) - def test_reset_nonexistent_share_state(self): - self.assertRaises(lib_exc.NotFound, - self.admin_client.reset_state, "fake") - - @tc.attr(base.TAG_NEGATIVE, base.TAG_API) - def test_reset_nonexistent_share_instance_state(self): - self.assertRaises(lib_exc.NotFound, self.admin_client.reset_state, - "fake", s_type="share_instances") - - @tc.attr(base.TAG_NEGATIVE, base.TAG_API) - @testtools.skipUnless(CONF.share.run_snapshot_tests, - "Snapshot tests are disabled.") - def test_reset_nonexistent_snapshot_state(self): - self.assertRaises(lib_exc.NotFound, self.admin_client.reset_state, - "fake", s_type="snapshots") - - @tc.attr(base.TAG_NEGATIVE, base.TAG_API) - @ddt.data('migrate_share', 'migration_complete', 'reset_task_state', - 'migration_get_progress', 'migration_cancel') - def test_migration_API_invalid_microversion(self, method_name): - if method_name == 'migrate_share': - self.assertRaises( - lib_exc.NotFound, getattr(self.shares_v2_client, method_name), - 'fake_share', 'fake_host', version='2.21') - elif method_name == 'reset_task_state': - self.assertRaises( - lib_exc.NotFound, getattr(self.shares_v2_client, method_name), - 'fake_share', 'fake_task_state', version='2.21') - else: - self.assertRaises( - 
lib_exc.NotFound, getattr(self.shares_v2_client, method_name), - 'fake_share', version='2.21') diff --git a/manila_tempest_tests/tests/api/admin/test_export_locations.py b/manila_tempest_tests/tests/api/admin/test_export_locations.py deleted file mode 100644 index 381717833c..0000000000 --- a/manila_tempest_tests/tests/api/admin/test_export_locations.py +++ /dev/null @@ -1,201 +0,0 @@ -# Copyright 2015 Mirantis Inc. -# All Rights Reserved. -# -# Licensed under the Apache License, Version 2.0 (the "License"); you may -# not use this file except in compliance with the License. You may obtain -# a copy of the License at -# -# http://www.apache.org/licenses/LICENSE-2.0 -# -# Unless required by applicable law or agreed to in writing, software -# distributed under the License is distributed on an "AS IS" BASIS, WITHOUT -# WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. See the -# License for the specific language governing permissions and limitations -# under the License. - -import ddt -from oslo_utils import timeutils -from oslo_utils import uuidutils -import six -from tempest import config -from testtools import testcase as tc - -from manila_tempest_tests.tests.api import base -from manila_tempest_tests import utils - -CONF = config.CONF -LATEST_MICROVERSION = CONF.share.max_api_microversion - - -@base.skip_if_microversion_not_supported("2.9") -@ddt.ddt -class ExportLocationsTest(base.BaseSharesMixedTest): - - @classmethod - def resource_setup(cls): - super(ExportLocationsTest, cls).resource_setup() - cls.admin_client = cls.admin_shares_v2_client - cls.member_client = cls.shares_v2_client - cls.share = cls.create_share(client=cls.admin_client) - cls.share = cls.admin_client.get_share(cls.share['id']) - cls.share_instances = cls.admin_client.get_instances_of_share( - cls.share['id']) - - def _verify_export_location_structure( - self, export_locations, role='admin', version=LATEST_MICROVERSION, - format='summary'): - - # Determine which keys to expect based on role, version and format - summary_keys = ['id', 'path'] - if utils.is_microversion_ge(version, '2.14'): - summary_keys += ['preferred'] - - admin_summary_keys = summary_keys + [ - 'share_instance_id', 'is_admin_only'] - - detail_keys = summary_keys + ['created_at', 'updated_at'] - - admin_detail_keys = admin_summary_keys + ['created_at', 'updated_at'] - - if format == 'summary': - if role == 'admin': - expected_keys = admin_summary_keys - else: - expected_keys = summary_keys - else: - if role == 'admin': - expected_keys = admin_detail_keys - else: - expected_keys = detail_keys - - if not isinstance(export_locations, (list, tuple, set)): - export_locations = (export_locations, ) - - for export_location in export_locations: - - # Check that the correct keys are present - self.assertEqual(len(expected_keys), len(export_location)) - for key in expected_keys: - self.assertIn(key, export_location) - - # Check the format of ever-present summary keys - self.assertTrue(uuidutils.is_uuid_like(export_location['id'])) - self.assertIsInstance(export_location['path'], - six.string_types) - - if utils.is_microversion_ge(version, '2.14'): - self.assertIn(export_location['preferred'], (True, False)) - - if role == 'admin': - self.assertIn(export_location['is_admin_only'], (True, False)) - self.assertTrue(uuidutils.is_uuid_like( - export_location['share_instance_id'])) - - # Check the format of the detail keys - if format == 'detail': - for time in (export_location['created_at'], - export_location['updated_at']): - # If var 'time' has 
incorrect value then ValueError - # exception is expected to be raised. So, just try parse - # it making assertion that it has proper date value. - timeutils.parse_strtime(time) - - @tc.attr(base.TAG_POSITIVE, base.TAG_API_WITH_BACKEND) - @utils.skip_if_microversion_not_supported('2.13') - def test_list_share_export_locations(self): - export_locations = self.admin_client.list_share_export_locations( - self.share['id'], version='2.13') - - self._verify_export_location_structure(export_locations, - version='2.13') - - @tc.attr(base.TAG_POSITIVE, base.TAG_API_WITH_BACKEND) - @utils.skip_if_microversion_not_supported('2.14') - def test_list_share_export_locations_with_preferred_flag(self): - export_locations = self.admin_client.list_share_export_locations( - self.share['id'], version='2.14') - - self._verify_export_location_structure(export_locations, - version='2.14') - - @tc.attr(base.TAG_POSITIVE, base.TAG_API_WITH_BACKEND) - def test_get_share_export_location(self): - export_locations = self.admin_client.list_share_export_locations( - self.share['id']) - - for export_location in export_locations: - el = self.admin_client.get_share_export_location( - self.share['id'], export_location['id']) - self._verify_export_location_structure(el, format='detail') - - @tc.attr(base.TAG_POSITIVE, base.TAG_API_WITH_BACKEND) - def test_list_share_export_locations_by_member(self): - export_locations = self.member_client.list_share_export_locations( - self.share['id']) - - self._verify_export_location_structure(export_locations, role='member') - - @tc.attr(base.TAG_POSITIVE, base.TAG_API_WITH_BACKEND) - def test_get_share_export_location_by_member(self): - export_locations = self.admin_client.list_share_export_locations( - self.share['id']) - - for export_location in export_locations: - if export_location['is_admin_only']: - continue - el = self.member_client.get_share_export_location( - self.share['id'], export_location['id']) - self._verify_export_location_structure(el, role='member', - format='detail') - - @tc.attr(base.TAG_POSITIVE, base.TAG_API_WITH_BACKEND) - @utils.skip_if_microversion_not_supported('2.13') - def test_list_share_instance_export_locations(self): - for share_instance in self.share_instances: - export_locations = ( - self.admin_client.list_share_instance_export_locations( - share_instance['id'], version='2.13')) - self._verify_export_location_structure(export_locations, - version='2.13') - - @tc.attr(base.TAG_POSITIVE, base.TAG_API_WITH_BACKEND) - @utils.skip_if_microversion_not_supported('2.14') - def test_list_share_instance_export_locations_with_preferred_flag(self): - for share_instance in self.share_instances: - export_locations = ( - self.admin_client.list_share_instance_export_locations( - share_instance['id'], version='2.14')) - self._verify_export_location_structure(export_locations, - version='2.14') - - @tc.attr(base.TAG_POSITIVE, base.TAG_API_WITH_BACKEND) - def test_get_share_instance_export_location(self): - for share_instance in self.share_instances: - export_locations = ( - self.admin_client.list_share_instance_export_locations( - share_instance['id'])) - for el in export_locations: - el = self.admin_client.get_share_instance_export_location( - share_instance['id'], el['id']) - self._verify_export_location_structure(el, format='detail') - - @tc.attr(base.TAG_POSITIVE, base.TAG_API_WITH_BACKEND) - def test_share_contains_all_export_locations_of_all_share_instances(self): - share_export_locations = self.admin_client.list_share_export_locations( - self.share['id']) - 
share_instances_export_locations = [] - for share_instance in self.share_instances: - share_instance_export_locations = ( - self.admin_client.list_share_instance_export_locations( - share_instance['id'])) - share_instances_export_locations.extend( - share_instance_export_locations) - - self.assertEqual( - len(share_export_locations), - len(share_instances_export_locations) - ) - self.assertEqual( - sorted(share_export_locations, key=lambda el: el['id']), - sorted(share_instances_export_locations, key=lambda el: el['id']) - ) diff --git a/manila_tempest_tests/tests/api/admin/test_export_locations_negative.py b/manila_tempest_tests/tests/api/admin/test_export_locations_negative.py deleted file mode 100644 index 4ad4721931..0000000000 --- a/manila_tempest_tests/tests/api/admin/test_export_locations_negative.py +++ /dev/null @@ -1,97 +0,0 @@ -# Copyright 2015 Mirantis Inc. -# All Rights Reserved. -# -# Licensed under the Apache License, Version 2.0 (the "License"); you may -# not use this file except in compliance with the License. You may obtain -# a copy of the License at -# -# http://www.apache.org/licenses/LICENSE-2.0 -# -# Unless required by applicable law or agreed to in writing, software -# distributed under the License is distributed on an "AS IS" BASIS, WITHOUT -# WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. See the -# License for the specific language governing permissions and limitations -# under the License. - -from tempest import config -from tempest.lib import exceptions as lib_exc -from testtools import testcase as tc - -from manila_tempest_tests.tests.api import base - -CONF = config.CONF - - -@base.skip_if_microversion_not_supported("2.9") -class ExportLocationsNegativeTest(base.BaseSharesMixedTest): - - @classmethod - def resource_setup(cls): - super(ExportLocationsNegativeTest, cls).resource_setup() - cls.admin_client = cls.admin_shares_v2_client - cls.member_client = cls.shares_v2_client - cls.share = cls.create_share(client=cls.admin_client) - cls.share = cls.admin_client.get_share(cls.share['id']) - cls.share_instances = cls.admin_client.get_instances_of_share( - cls.share['id']) - - @tc.attr(base.TAG_NEGATIVE, base.TAG_API_WITH_BACKEND) - def test_get_inexistent_share_export_location(self): - self.assertRaises( - lib_exc.NotFound, - self.admin_client.get_share_export_location, - self.share['id'], - "fake-inexistent-share-instance-id", - ) - - @tc.attr(base.TAG_NEGATIVE, base.TAG_API_WITH_BACKEND) - def test_get_inexistent_share_instance_export_location(self): - for share_instance in self.share_instances: - self.assertRaises( - lib_exc.NotFound, - self.admin_client.get_share_instance_export_location, - share_instance['id'], - "fake-inexistent-share-instance-id", - ) - - @tc.attr(base.TAG_NEGATIVE, base.TAG_API_WITH_BACKEND) - def test_list_share_instance_export_locations_by_member(self): - for share_instance in self.share_instances: - self.assertRaises( - lib_exc.Forbidden, - self.member_client.list_share_instance_export_locations, - "fake-inexistent-share-instance-id", - ) - - @tc.attr(base.TAG_NEGATIVE, base.TAG_API_WITH_BACKEND) - def test_get_share_instance_export_location_by_member(self): - for share_instance in self.share_instances: - export_locations = ( - self.admin_client.list_share_instance_export_locations( - share_instance['id'])) - for el in export_locations: - self.assertRaises( - lib_exc.Forbidden, - self.member_client.get_share_instance_export_location, - share_instance['id'], el['id'], - ) - - 
-@base.skip_if_microversion_not_supported("2.9") -class ExportLocationsAPIOnlyNegativeTest(base.BaseSharesAdminTest): - - @tc.attr(base.TAG_NEGATIVE, base.TAG_API) - def test_get_export_locations_by_nonexistent_share(self): - self.assertRaises( - lib_exc.NotFound, - self.shares_v2_client.list_share_export_locations, - "fake-inexistent-share-id", - ) - - @tc.attr(base.TAG_NEGATIVE, base.TAG_API) - def test_get_export_locations_by_nonexistent_share_instance(self): - self.assertRaises( - lib_exc.NotFound, - self.shares_v2_client.list_share_instance_export_locations, - "fake-inexistent-share-instance-id", - ) diff --git a/manila_tempest_tests/tests/api/admin/test_migration.py b/manila_tempest_tests/tests/api/admin/test_migration.py deleted file mode 100644 index b32490486c..0000000000 --- a/manila_tempest_tests/tests/api/admin/test_migration.py +++ /dev/null @@ -1,634 +0,0 @@ -# Copyright 2015 Hitachi Data Systems. -# All Rights Reserved. -# -# Licensed under the Apache License, Version 2.0 (the "License"); you may -# not use this file except in compliance with the License. You may obtain -# a copy of the License at -# -# http://www.apache.org/licenses/LICENSE-2.0 -# -# Unless required by applicable law or agreed to in writing, software -# distributed under the License is distributed on an "AS IS" BASIS, WITHOUT -# WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. See the -# License for the specific language governing permissions and limitations -# under the License. - - -import ddt -from tempest import config -from tempest.lib.common.utils import data_utils -import testtools -from testtools import testcase as tc - -from manila_tempest_tests.common import constants -from manila_tempest_tests.tests.api import base -from manila_tempest_tests import utils - -CONF = config.CONF - - -class MigrationBase(base.BaseSharesAdminTest): - """Base test class for Share Migration. - - Tests share migration in multi-backend environment. - - This class covers: - 1) Driver-assisted migration: force_host_assisted_migration, nondisruptive, - writable and preserve-metadata are False. - 2) Host-assisted migration: force_host_assisted_migration is True, - nondisruptive, writable, preserve-metadata and preserve-snapshots are - False. - 3) 2-phase migration of both Host-assisted and Driver-assisted. - 4) Cancelling migration past first phase. - 5) Changing driver modes through migration. - - No need to test with writable, preserve-metadata and non-disruptive as - True, values are supplied to the driver which decides what to do. Test - should be positive, so not being writable, not preserving metadata and - being disruptive is less restrictive for drivers, which would abort if they - cannot handle them. - - Drivers that implement driver-assisted migration should enable the - configuration flag to be tested. - """ - - protocol = None - - @classmethod - def resource_setup(cls): - super(MigrationBase, cls).resource_setup() - if cls.protocol not in CONF.share.enable_protocols: - message = "%s tests are disabled." 
% cls.protocol - raise cls.skipException(message) - if not (CONF.share.run_host_assisted_migration_tests or - CONF.share.run_driver_assisted_migration_tests): - raise cls.skipException("Share migration tests are disabled.") - cls.pools = cls.shares_v2_client.list_pools(detail=True)['pools'] - - if len(cls.pools) < 2: - raise cls.skipException("At least two different pool entries are " - "needed to run share migration tests.") - - cls.new_type = cls.create_share_type( - name=data_utils.rand_name('new_share_type_for_migration'), - cleanup_in_class=True, - extra_specs=utils.get_configured_extra_specs()) - - cls.new_type_opposite = cls.create_share_type( - name=data_utils.rand_name('new_share_type_for_migration_opposite'), - cleanup_in_class=True, - extra_specs=utils.get_configured_extra_specs( - variation='opposite_driver_modes')) - - def _setup_migration(self, share, opposite=False): - - if opposite: - dest_type = self.new_type_opposite['share_type'] - else: - dest_type = self.new_type['share_type'] - - dest_pool = utils.choose_matching_backend(share, self.pools, dest_type) - - if opposite: - if not dest_pool: - raise self.skipException( - "This test requires two pools enabled with different " - "driver modes.") - else: - self.assertIsNotNone(dest_pool) - self.assertIsNotNone(dest_pool.get('name')) - - old_exports = self.shares_v2_client.list_share_export_locations( - share['id']) - self.assertNotEmpty(old_exports) - old_exports = [x['path'] for x in old_exports - if x['is_admin_only'] is False] - self.assertNotEmpty(old_exports) - - self.shares_v2_client.create_access_rule( - share['id'], access_to="50.50.50.50", access_level="rw") - - self.shares_v2_client.wait_for_share_status( - share['id'], constants.RULE_STATE_ACTIVE, - status_attr='access_rules_status') - - self.shares_v2_client.create_access_rule( - share['id'], access_to="51.51.51.51", access_level="ro") - - self.shares_v2_client.wait_for_share_status( - share['id'], constants.RULE_STATE_ACTIVE, - status_attr='access_rules_status') - - dest_pool = dest_pool['name'] - share = self.shares_v2_client.get_share(share['id']) - - return share, dest_pool - - def _validate_migration_successful(self, dest_pool, share, status_to_wait, - version=CONF.share.max_api_microversion, - complete=True, share_network_id=None, - share_type_id=None): - - statuses = ((status_to_wait,) - if not isinstance(status_to_wait, (tuple, list, set)) - else status_to_wait) - - new_exports = self.shares_v2_client.list_share_export_locations( - share['id'], version=version) - self.assertNotEmpty(new_exports) - new_exports = [x['path'] for x in new_exports if - x['is_admin_only'] is False] - self.assertNotEmpty(new_exports) - - self.assertIn(share['task_state'], statuses) - if share_network_id: - self.assertEqual(share_network_id, share['share_network_id']) - if share_type_id: - self.assertEqual(share_type_id, share['share_type']) - - # Share migrated - if complete: - self.assertEqual(dest_pool, share['host']) - - rules = self.shares_v2_client.list_access_rules(share['id']) - expected_rules = [{ - 'state': constants.RULE_STATE_ACTIVE, - 'access_to': '50.50.50.50', - 'access_type': 'ip', - 'access_level': 'rw', - }, { - 'state': constants.RULE_STATE_ACTIVE, - 'access_to': '51.51.51.51', - 'access_type': 'ip', - 'access_level': 'ro', - }] - filtered_rules = [{'state': rule['state'], - 'access_to': rule['access_to'], - 'access_level': rule['access_level'], - 'access_type': rule['access_type']} - for rule in rules] - - for r in expected_rules: - self.assertIn(r, 
filtered_rules) - self.assertEqual(len(expected_rules), len(filtered_rules)) - - # Share not migrated yet - else: - self.assertNotEqual(dest_pool, share['host']) - - def _check_migration_enabled(self, force_host_assisted): - - if force_host_assisted: - if not CONF.share.run_host_assisted_migration_tests: - raise self.skipException( - "Host-assisted migration tests are disabled.") - else: - if not CONF.share.run_driver_assisted_migration_tests: - raise self.skipException( - "Driver-assisted migration tests are disabled.") - - def _create_secondary_share_network(self, old_share_network_id): - - old_share_network = self.shares_v2_client.get_share_network( - old_share_network_id) - - new_share_network = self.create_share_network( - cleanup_in_class=True, - neutron_net_id=old_share_network['neutron_net_id'], - neutron_subnet_id=old_share_network['neutron_subnet_id']) - - return new_share_network['id'] - - def _test_resize_post_migration(self, force_host_assisted, resize): - self._check_migration_enabled(force_host_assisted) - new_size = CONF.share.share_size + 1 - share = self.create_share(self.protocol, size=new_size) - share = self.shares_v2_client.get_share(share['id']) - - share, dest_pool = self._setup_migration(share) - - task_state, new_share_network_id, new_share_type_id = ( - self._get_migration_data(share, force_host_assisted)) - - share = self.migrate_share( - share['id'], dest_pool, - force_host_assisted_migration=force_host_assisted, - wait_for_status=task_state, new_share_type_id=new_share_type_id, - new_share_network_id=new_share_network_id) - - share = self.migration_complete(share['id'], dest_pool) - if resize == 'extend': - new_size = CONF.share.share_size + 2 - self.shares_v2_client.extend_share(share['id'], new_size) - self.shares_v2_client.wait_for_share_status( - share['id'], constants.STATUS_AVAILABLE) - share = self.shares_v2_client.get_share(share["id"]) - self.assertEqual(new_size, int(share["size"])) - else: - new_size = CONF.share.share_size - self.shares_v2_client.shrink_share(share['id'], new_size) - self.shares_v2_client.wait_for_share_status( - share['id'], constants.STATUS_AVAILABLE) - share = self.shares_v2_client.get_share(share["id"]) - self.assertEqual(new_size, int(share["size"])) - - self._cleanup_share(share) - - def _get_migration_data(self, share, force_host_assisted=False): - task_state = (constants.TASK_STATE_DATA_COPYING_COMPLETED - if force_host_assisted - else constants.TASK_STATE_MIGRATION_DRIVER_PHASE1_DONE) - - old_share_network_id = share['share_network_id'] - - if CONF.share.multitenancy_enabled: - new_share_network_id = self._create_secondary_share_network( - old_share_network_id) - - else: - new_share_network_id = None - - new_share_type_id = self.new_type['share_type']['id'] - return task_state, new_share_network_id, new_share_type_id - - def _validate_snapshot(self, share, snapshot1, snapshot2): - snapshot_list = self.shares_v2_client.list_snapshots_for_share( - share['id']) - msg = "Share %s has no snapshot." 
% share['id'] - # Verify that snapshot list is not empty - self.assertNotEmpty(snapshot_list, msg) - snapshot_id_list = [snap['id'] for snap in snapshot_list] - - # verify that after migration original snapshots are retained - self.assertIn(snapshot1['id'], snapshot_id_list) - self.assertIn(snapshot2['id'], snapshot_id_list) - # Verify that a share can be created from a snapshot after migration - snapshot1_share = self.create_share( - self.protocol, size=share['size'], snapshot_id=snapshot1['id'], - share_network_id=share['share_network_id']) - self.assertEqual(snapshot1['id'], snapshot1_share['snapshot_id']) - self._cleanup_share(share) - - def _validate_share_migration_with_different_snapshot_capability_type( - self, force_host_assisted, snapshot_capable): - - self._check_migration_enabled(force_host_assisted) - ss_type, no_ss_type = self._create_share_type_for_snapshot_capability() - - if snapshot_capable: - share_type = ss_type['share_type'] - share_type_id = no_ss_type['share_type']['id'] - new_share_type_id = ss_type['share_type']['id'] - else: - share_type = no_ss_type['share_type'] - share_type_id = ss_type['share_type']['id'] - new_share_type_id = no_ss_type['share_type']['id'] - - share = self.create_share( - self.protocol, share_type_id=share_type_id) - share = self.shares_v2_client.get_share(share['id']) - - if snapshot_capable: - self.assertEqual(False, share['snapshot_support']) - else: - # Verify that share has snapshot support capability - self.assertTrue(share['snapshot_support']) - - dest_pool = utils.choose_matching_backend(share, self.pools, - share_type) - task_state, new_share_network_id, __ = ( - self._get_migration_data(share, force_host_assisted)) - share = self.migrate_share( - share['id'], dest_pool['name'], - force_host_assisted_migration=force_host_assisted, - wait_for_status=task_state, - new_share_type_id=new_share_type_id, - new_share_network_id=new_share_network_id) - share = self.migration_complete(share['id'], dest_pool) - - if snapshot_capable: - # Verify that migrated share does have snapshot support capability - self.assertTrue(share['snapshot_support']) - else: - # Verify that migrated share don't have snapshot support capability - self.assertEqual(False, share['snapshot_support']) - - self._cleanup_share(share) - - def _create_share_type_for_snapshot_capability(self): - # Share type with snapshot support - st_name = data_utils.rand_name( - 'snapshot_capable_share_type_for_migration') - extra_specs = self.add_extra_specs_to_dict({"snapshot_support": True}) - ss_type = self.create_share_type(st_name, extra_specs=extra_specs) - - # New share type with no snapshot support capability - # to which a share will be migrated - new_st_name = data_utils.rand_name( - 'snapshot_noncapable_share_type_for_migration') - extra_specs = { - "driver_handles_share_servers": CONF.share.multitenancy_enabled - } - no_ss_type = self.create_share_type(new_st_name, - extra_specs=extra_specs) - return ss_type, no_ss_type - - def _cleanup_share(self, share): - resource = {"type": "share", "id": share["id"], - "client": self.shares_v2_client} - # NOTE(Yogi1): Share needs to be cleaned up explicitly at the end of - # test otherwise, newly created share_network will not get cleaned up. 
- self.method_resources.insert(0, resource) - - -@ddt.ddt -class MigrationCancelNFSTest(MigrationBase): - protocol = "nfs" - - @tc.attr(base.TAG_POSITIVE, base.TAG_BACKEND) - @base.skip_if_microversion_lt("2.29") - @ddt.data(True, False) - def test_migration_cancel(self, force_host_assisted): - self._check_migration_enabled(force_host_assisted) - - share = self.create_share(self.protocol) - share = self.shares_v2_client.get_share(share['id']) - share, dest_pool = self._setup_migration(share) - task_state = (constants.TASK_STATE_DATA_COPYING_COMPLETED - if force_host_assisted - else constants.TASK_STATE_MIGRATION_DRIVER_PHASE1_DONE) - - share = self.migrate_share( - share['id'], dest_pool, wait_for_status=task_state, - force_host_assisted_migration=force_host_assisted) - - self._validate_migration_successful( - dest_pool, share, task_state, complete=False) - - progress = self.shares_v2_client.migration_get_progress(share['id']) - - self.assertEqual(task_state, progress['task_state']) - self.assertEqual(100, progress['total_progress']) - - share = self.migration_cancel(share['id'], dest_pool) - progress = self.shares_v2_client.migration_get_progress(share['id']) - - self.assertEqual( - constants.TASK_STATE_MIGRATION_CANCELLED, progress['task_state']) - self.assertEqual(100, progress['total_progress']) - - self._validate_migration_successful( - dest_pool, share, constants.TASK_STATE_MIGRATION_CANCELLED, - complete=False) - - @tc.attr(base.TAG_POSITIVE, base.TAG_BACKEND) - @base.skip_if_microversion_lt("2.29") - @testtools.skipUnless( - CONF.share.run_snapshot_tests, 'Snapshot tests are disabled.') - @testtools.skipUnless( - CONF.share.run_driver_assisted_migration_tests, - 'Driver-assisted migration tests are disabled.') - @testtools.skipUnless( - CONF.share.run_migration_with_preserve_snapshots_tests, - 'Migration with preserve snapshots tests are disabled.') - def test_migration_cancel_share_with_snapshot(self): - share = self.create_share(self.protocol) - share = self.shares_v2_client.get_share(share['id']) - - share, dest_pool = self._setup_migration(share) - snapshot1 = self.create_snapshot_wait_for_active(share['id']) - snapshot2 = self.create_snapshot_wait_for_active(share['id']) - - task_state, new_share_network_id, new_share_type_id = ( - self._get_migration_data(share)) - - share = self.migrate_share( - share['id'], dest_pool, - wait_for_status=task_state, new_share_type_id=new_share_type_id, - new_share_network_id=new_share_network_id, preserve_snapshots=True) - - share = self.migration_cancel(share['id'], dest_pool) - self._validate_snapshot(share, snapshot1, snapshot2) - - -@ddt.ddt -class MigrationOppositeDriverModesNFSTest(MigrationBase): - protocol = "nfs" - - @tc.attr(base.TAG_POSITIVE, base.TAG_BACKEND) - @base.skip_if_microversion_lt("2.29") - @ddt.data(True, False) - def test_migration_opposite_driver_modes(self, force_host_assisted): - self._check_migration_enabled(force_host_assisted) - - share = self.create_share(self.protocol) - share = self.shares_v2_client.get_share(share['id']) - share, dest_pool = self._setup_migration(share, opposite=True) - - if not CONF.share.multitenancy_enabled: - # If currently configured is DHSS=False, - # then we need it for DHSS=True - new_share_network_id = self.provide_share_network( - self.shares_v2_client, - self.networks_client, - isolated_creds_client=None, - ignore_multitenancy_config=True, - ) - else: - # If currently configured is DHSS=True, - # then we must pass None for DHSS=False - new_share_network_id = None - - 
old_share_network_id = share['share_network_id'] - old_share_type_id = share['share_type'] - new_share_type_id = self.new_type_opposite['share_type']['id'] - - task_state = (constants.TASK_STATE_DATA_COPYING_COMPLETED - if force_host_assisted - else constants.TASK_STATE_MIGRATION_DRIVER_PHASE1_DONE) - - share = self.migrate_share( - share['id'], dest_pool, - force_host_assisted_migration=force_host_assisted, - wait_for_status=task_state, new_share_type_id=new_share_type_id, - new_share_network_id=new_share_network_id) - - self._validate_migration_successful( - dest_pool, share, task_state, complete=False, - share_network_id=old_share_network_id, - share_type_id=old_share_type_id) - - progress = self.shares_v2_client.migration_get_progress(share['id']) - - self.assertEqual(task_state, progress['task_state']) - self.assertEqual(100, progress['total_progress']) - - share = self.migration_complete(share['id'], dest_pool) - - progress = self.shares_v2_client.migration_get_progress(share['id']) - - self.assertEqual( - constants.TASK_STATE_MIGRATION_SUCCESS, progress['task_state']) - self.assertEqual(100, progress['total_progress']) - - self._validate_migration_successful( - dest_pool, share, constants.TASK_STATE_MIGRATION_SUCCESS, - complete=True, share_network_id=new_share_network_id, - share_type_id=new_share_type_id) - - -@ddt.ddt -class MigrationTwoPhaseNFSTest(MigrationBase): - protocol = "nfs" - - @tc.attr(base.TAG_POSITIVE, base.TAG_BACKEND) - @base.skip_if_microversion_lt("2.29") - @ddt.data(True, False) - def test_migration_2phase(self, force_host_assisted): - self._check_migration_enabled(force_host_assisted) - - share = self.create_share(self.protocol) - share = self.shares_v2_client.get_share(share['id']) - share, dest_pool = self._setup_migration(share) - - old_share_network_id = share['share_network_id'] - old_share_type_id = share['share_type'] - task_state, new_share_network_id, new_share_type_id = ( - self._get_migration_data(share, force_host_assisted)) - - share = self.migrate_share( - share['id'], dest_pool, - force_host_assisted_migration=force_host_assisted, - wait_for_status=task_state, new_share_type_id=new_share_type_id, - new_share_network_id=new_share_network_id) - - self._validate_migration_successful( - dest_pool, share, task_state, complete=False, - share_network_id=old_share_network_id, - share_type_id=old_share_type_id) - - progress = self.shares_v2_client.migration_get_progress(share['id']) - - self.assertEqual(task_state, progress['task_state']) - self.assertEqual(100, progress['total_progress']) - - share = self.migration_complete(share['id'], dest_pool) - - progress = self.shares_v2_client.migration_get_progress(share['id']) - - self.assertEqual( - constants.TASK_STATE_MIGRATION_SUCCESS, progress['task_state']) - self.assertEqual(100, progress['total_progress']) - - self._validate_migration_successful( - dest_pool, share, constants.TASK_STATE_MIGRATION_SUCCESS, - complete=True, share_network_id=new_share_network_id, - share_type_id=new_share_type_id) - self._cleanup_share(share) - - -@ddt.ddt -class MigrationWithShareExtendingNFSTest(MigrationBase): - protocol = "nfs" - - @tc.attr(base.TAG_POSITIVE, base.TAG_BACKEND) - @base.skip_if_microversion_lt("2.29") - @testtools.skipUnless( - CONF.share.run_extend_tests, 'Extend share tests are disabled.') - @ddt.data(True, False) - def test_extend_on_migrated_share(self, force_host_assisted): - self._test_resize_post_migration(force_host_assisted, resize='extend') - - -@ddt.ddt -class 
MigrationWithShareShrinkingNFSTest(MigrationBase): - protocol = "nfs" - - @tc.attr(base.TAG_POSITIVE, base.TAG_BACKEND) - @base.skip_if_microversion_lt("2.29") - @testtools.skipUnless( - CONF.share.run_shrink_tests, 'Shrink share tests are disabled.') - @ddt.data(True, False) - def test_shrink_on_migrated_share(self, force_host_assisted): - self._test_resize_post_migration(force_host_assisted, resize='shrink') - - -@ddt.ddt -class MigrationOfShareWithSnapshotNFSTest(MigrationBase): - protocol = "nfs" - - @tc.attr(base.TAG_POSITIVE, base.TAG_BACKEND) - @base.skip_if_microversion_lt("2.29") - @testtools.skipUnless( - CONF.share.run_snapshot_tests, 'Snapshot tests are disabled.') - @testtools.skipUnless( - CONF.share.run_driver_assisted_migration_tests, - 'Driver-assisted migration tests are disabled.') - @testtools.skipUnless( - CONF.share.run_migration_with_preserve_snapshots_tests, - 'Migration with preserve snapshots tests are disabled.') - def test_migrating_share_with_snapshot(self): - ss_type, __ = self._create_share_type_for_snapshot_capability() - - share = self.create_share(self.protocol, cleanup_in_class=False) - share = self.shares_v2_client.get_share(share['id']) - - share, dest_pool = self._setup_migration(share) - snapshot1 = self.create_snapshot_wait_for_active( - share['id'], cleanup_in_class=False) - snapshot2 = self.create_snapshot_wait_for_active( - share['id'], cleanup_in_class=False) - - task_state, new_share_network_id, __ = self._get_migration_data(share) - - share = self.migrate_share( - share['id'], dest_pool, - wait_for_status=task_state, - new_share_type_id=ss_type['share_type']['id'], - new_share_network_id=new_share_network_id, preserve_snapshots=True) - - share = self.migration_complete(share['id'], dest_pool) - - self._validate_snapshot(share, snapshot1, snapshot2) - - -@ddt.ddt -class MigrationWithDifferentSnapshotSupportNFSTest(MigrationBase): - protocol = "nfs" - - @tc.attr(base.TAG_POSITIVE, base.TAG_BACKEND) - @base.skip_if_microversion_lt("2.29") - @testtools.skipUnless(CONF.share.run_snapshot_tests, - 'Snapshot tests are disabled.') - @ddt.data(True, False) - def test_migrate_share_to_snapshot_capability_share_type( - self, force_host_assisted): - # Verify that share with no snapshot support type can be migrated - # to new share type which supports the snapshot - self._validate_share_migration_with_different_snapshot_capability_type( - force_host_assisted, True) - - @tc.attr(base.TAG_POSITIVE, base.TAG_BACKEND) - @base.skip_if_microversion_lt("2.29") - @testtools.skipUnless(CONF.share.run_snapshot_tests, - 'Snapshot tests are disabled.') - @ddt.data(True, False) - def test_migrate_share_to_no_snapshot_capability_share_type( - self, force_host_assisted): - # Verify that share with snapshot support type can be migrated - # to new share type which doesn't support the snapshot - self._validate_share_migration_with_different_snapshot_capability_type( - force_host_assisted, False) - - -# NOTE(u_glide): this function is required to exclude MigrationBase from -# executed test cases. -# See: https://docs.python.org/2/library/unittest.html#load-tests-protocol -# for details. 
-def load_tests(loader, tests, _): - result = [] - for test_case in tests: - if not test_case._tests or type(test_case._tests[0]) is MigrationBase: - continue - result.append(test_case) - return loader.suiteClass(result) diff --git a/manila_tempest_tests/tests/api/admin/test_migration_negative.py b/manila_tempest_tests/tests/api/admin/test_migration_negative.py deleted file mode 100644 index 87ded6380a..0000000000 --- a/manila_tempest_tests/tests/api/admin/test_migration_negative.py +++ /dev/null @@ -1,326 +0,0 @@ -# Copyright 2015 Hitachi Data Systems. -# All Rights Reserved. -# -# Licensed under the Apache License, Version 2.0 (the "License"); you may -# not use this file except in compliance with the License. You may obtain -# a copy of the License at -# -# http://www.apache.org/licenses/LICENSE-2.0 -# -# Unless required by applicable law or agreed to in writing, software -# distributed under the License is distributed on an "AS IS" BASIS, WITHOUT -# WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. See the -# License for the specific language governing permissions and limitations -# under the License. - - -import ddt -from tempest import config -from tempest.lib.common.utils import data_utils -from tempest.lib import exceptions as lib_exc -import testtools -from testtools import testcase as tc - -from manila_tempest_tests.common import constants -from manila_tempest_tests import share_exceptions -from manila_tempest_tests.tests.api import base -from manila_tempest_tests import utils - -CONF = config.CONF - - -@ddt.ddt -class MigrationNegativeTest(base.BaseSharesAdminTest): - """Tests Share Migration. - - Tests share migration in multi-backend environment. - """ - - protocol = "nfs" - - @classmethod - def resource_setup(cls): - super(MigrationNegativeTest, cls).resource_setup() - if cls.protocol not in CONF.share.enable_protocols: - message = "%s tests are disabled." 
% cls.protocol - raise cls.skipException(message) - if not (CONF.share.run_host_assisted_migration_tests or - CONF.share.run_driver_assisted_migration_tests): - raise cls.skipException("Share migration tests are disabled.") - - pools = cls.shares_client.list_pools(detail=True)['pools'] - - if len(pools) < 2: - raise cls.skipException("At least two different pool entries " - "are needed to run share migration tests.") - - cls.share = cls.create_share(cls.protocol, - size=CONF.share.share_size+1) - cls.share = cls.shares_client.get_share(cls.share['id']) - - cls.default_type = cls.shares_v2_client.list_share_types( - default=True)['share_type'] - - dest_pool = utils.choose_matching_backend( - cls.share, pools, cls.default_type) - - if not dest_pool or dest_pool.get('name') is None: - raise share_exceptions.ShareMigrationException( - "No valid pool entries to run share migration tests.") - - cls.dest_pool = dest_pool['name'] - - cls.new_type_invalid = cls.create_share_type( - name=data_utils.rand_name( - 'new_invalid_share_type_for_migration'), - cleanup_in_class=True, - extra_specs=utils.get_configured_extra_specs(variation='invalid')) - - @tc.attr(base.TAG_NEGATIVE, base.TAG_API_WITH_BACKEND) - @base.skip_if_microversion_lt("2.22") - def test_migration_cancel_invalid(self): - self.assertRaises( - lib_exc.BadRequest, self.shares_v2_client.migration_cancel, - self.share['id']) - - @tc.attr(base.TAG_NEGATIVE, base.TAG_API_WITH_BACKEND) - @base.skip_if_microversion_lt("2.22") - def test_migration_get_progress_None(self): - self.shares_v2_client.reset_task_state(self.share["id"], None) - self.shares_v2_client.wait_for_share_status( - self.share["id"], None, 'task_state') - self.assertRaises( - lib_exc.BadRequest, self.shares_v2_client.migration_get_progress, - self.share['id']) - - @tc.attr(base.TAG_NEGATIVE, base.TAG_API_WITH_BACKEND) - @base.skip_if_microversion_lt("2.22") - def test_migration_complete_invalid(self): - self.assertRaises( - lib_exc.BadRequest, self.shares_v2_client.migration_complete, - self.share['id']) - - @tc.attr(base.TAG_NEGATIVE, base.TAG_API_WITH_BACKEND) - @base.skip_if_microversion_lt("2.22") - def test_migration_cancel_not_found(self): - self.assertRaises( - lib_exc.NotFound, self.shares_v2_client.migration_cancel, - 'invalid_share_id') - - @tc.attr(base.TAG_NEGATIVE, base.TAG_API_WITH_BACKEND) - @base.skip_if_microversion_lt("2.22") - def test_migration_get_progress_not_found(self): - self.assertRaises( - lib_exc.NotFound, self.shares_v2_client.migration_get_progress, - 'invalid_share_id') - - @tc.attr(base.TAG_NEGATIVE, base.TAG_API_WITH_BACKEND) - @base.skip_if_microversion_lt("2.22") - def test_migration_complete_not_found(self): - self.assertRaises( - lib_exc.NotFound, self.shares_v2_client.migration_complete, - 'invalid_share_id') - - @tc.attr(base.TAG_NEGATIVE, base.TAG_API_WITH_BACKEND) - @base.skip_if_microversion_lt("2.29") - @testtools.skipUnless(CONF.share.run_snapshot_tests, - "Snapshot tests are disabled.") - def test_migrate_share_with_snapshot(self): - snap = self.create_snapshot_wait_for_active(self.share['id']) - self.assertRaises( - lib_exc.Conflict, self.shares_v2_client.migrate_share, - self.share['id'], self.dest_pool, - force_host_assisted_migration=True) - self.shares_v2_client.delete_snapshot(snap['id']) - self.shares_v2_client.wait_for_resource_deletion(snapshot_id=snap[ - "id"]) - - @tc.attr(base.TAG_NEGATIVE, base.TAG_API_WITH_BACKEND) - @base.skip_if_microversion_lt("2.29") - @ddt.data(True, False) - def test_migrate_share_same_host(self, 
specified): - new_share_type_id = None - new_share_network_id = None - if specified: - new_share_type_id = self.default_type['id'] - new_share_network_id = self.share['share_network_id'] - self.migrate_share( - self.share['id'], self.share['host'], - wait_for_status=constants.TASK_STATE_MIGRATION_SUCCESS, - new_share_type_id=new_share_type_id, - new_share_network_id=new_share_network_id) - # NOTE(ganso): No need to assert, it is already waiting for correct - # status (migration_success). - - @tc.attr(base.TAG_NEGATIVE, base.TAG_API_WITH_BACKEND) - @base.skip_if_microversion_lt("2.29") - def test_migrate_share_host_invalid(self): - self.assertRaises( - lib_exc.NotFound, self.shares_v2_client.migrate_share, - self.share['id'], 'invalid_host') - - @tc.attr(base.TAG_NEGATIVE, base.TAG_API_WITH_BACKEND) - @base.skip_if_microversion_lt("2.29") - @ddt.data({'writable': False, 'preserve_metadata': False, - 'preserve_snapshots': False, 'nondisruptive': True}, - {'writable': False, 'preserve_metadata': False, - 'preserve_snapshots': True, 'nondisruptive': False}, - {'writable': False, 'preserve_metadata': True, - 'preserve_snapshots': False, 'nondisruptive': False}, - {'writable': True, 'preserve_metadata': False, - 'preserve_snapshots': False, 'nondisruptive': False}) - @ddt.unpack - def test_migrate_share_host_assisted_not_allowed_API( - self, writable, preserve_metadata, preserve_snapshots, - nondisruptive): - self.assertRaises( - lib_exc.BadRequest, self.shares_v2_client.migrate_share, - self.share['id'], self.dest_pool, - force_host_assisted_migration=True, writable=writable, - preserve_metadata=preserve_metadata, nondisruptive=nondisruptive, - preserve_snapshots=preserve_snapshots) - - @tc.attr(base.TAG_NEGATIVE, base.TAG_API_WITH_BACKEND) - @base.skip_if_microversion_lt("2.29") - def test_migrate_share_change_type_no_valid_host(self): - if not CONF.share.multitenancy_enabled: - new_share_network_id = self.create_share_network( - neutron_net_id='fake_net_id', - neutron_subnet_id='fake_subnet_id')['id'] - else: - new_share_network_id = None - - self.shares_v2_client.migrate_share( - self.share['id'], self.dest_pool, - new_share_type_id=self.new_type_invalid['share_type']['id'], - new_share_network_id=new_share_network_id) - self.shares_v2_client.wait_for_migration_status( - self.share['id'], self.dest_pool, - constants.TASK_STATE_MIGRATION_ERROR) - - @tc.attr(base.TAG_NEGATIVE, base.TAG_API_WITH_BACKEND) - @base.skip_if_microversion_lt("2.29") - def test_migrate_share_not_found(self): - self.assertRaises( - lib_exc.NotFound, self.shares_v2_client.migrate_share, - 'invalid_share_id', self.dest_pool) - - @tc.attr(base.TAG_NEGATIVE, base.TAG_API_WITH_BACKEND) - @base.skip_if_microversion_lt("2.29") - def test_migrate_share_not_available(self): - self.shares_client.reset_state(self.share['id'], - constants.STATUS_ERROR) - self.shares_client.wait_for_share_status(self.share['id'], - constants.STATUS_ERROR) - self.assertRaises( - lib_exc.BadRequest, self.shares_v2_client.migrate_share, - self.share['id'], self.dest_pool) - self.shares_client.reset_state(self.share['id'], - constants.STATUS_AVAILABLE) - self.shares_client.wait_for_share_status(self.share['id'], - constants.STATUS_AVAILABLE) - - @tc.attr(base.TAG_NEGATIVE, base.TAG_API_WITH_BACKEND) - @base.skip_if_microversion_lt("2.29") - def test_migrate_share_invalid_share_network(self): - self.assertRaises( - lib_exc.BadRequest, self.shares_v2_client.migrate_share, - self.share['id'], self.dest_pool, - new_share_network_id='invalid_net_id') - - 
@tc.attr(base.TAG_NEGATIVE, base.TAG_API_WITH_BACKEND) - @base.skip_if_microversion_lt("2.29") - def test_migrate_share_invalid_share_type(self): - self.assertRaises( - lib_exc.BadRequest, self.shares_v2_client.migrate_share, - self.share['id'], self.dest_pool, - new_share_type_id='invalid_type_id') - - @tc.attr(base.TAG_NEGATIVE, base.TAG_API_WITH_BACKEND) - @base.skip_if_microversion_lt("2.29") - def test_migrate_share_opposite_type_share_network_invalid(self): - - extra_specs = utils.get_configured_extra_specs( - variation='opposite_driver_modes') - - new_type_opposite = self.create_share_type( - name=data_utils.rand_name('share_type_migration_negative'), - extra_specs=extra_specs) - - new_share_network_id = None - - if CONF.share.multitenancy_enabled: - - new_share_network_id = self.create_share_network( - neutron_net_id='fake_net_id', - neutron_subnet_id='fake_subnet_id')['id'] - - self.assertRaises( - lib_exc.BadRequest, self.shares_v2_client.migrate_share, - self.share['id'], self.dest_pool, - new_share_type_id=new_type_opposite['share_type']['id'], - new_share_network_id=new_share_network_id) - - @testtools.skipUnless(CONF.share.run_driver_assisted_migration_tests, - "Driver-assisted migration tests are disabled.") - @tc.attr(base.TAG_NEGATIVE, base.TAG_API_WITH_BACKEND) - @base.skip_if_microversion_lt("2.29") - def test_create_snapshot_during_share_migration(self): - self._test_share_actions_during_share_migration('create_snapshot', []) - - @tc.attr(base.TAG_NEGATIVE, base.TAG_API_WITH_BACKEND) - @base.skip_if_microversion_lt("2.29") - @ddt.data(('extend_share', [CONF.share.share_size + 2]), - ('shrink_share', [CONF.share.share_size])) - @ddt.unpack - def test_share_resize_during_share_migration(self, method_name, *args): - self._test_share_actions_during_share_migration(method_name, *args) - - def skip_if_tests_are_disabled(self, method_name): - property_to_evaluate = { - 'extend_share': CONF.share.run_extend_tests, - 'shrink_share': CONF.share.run_shrink_tests, - 'create_snapshot': CONF.share.run_snapshot_tests, - } - if not property_to_evaluate[method_name]: - raise self.skipException(method_name + 'tests are disabled.') - - @tc.attr(base.TAG_NEGATIVE, base.TAG_API_WITH_BACKEND) - @base.skip_if_microversion_lt("2.29") - def test_add_access_rule_during_migration(self): - access_type = "ip" - access_to = "50.50.50.50" - self.shares_v2_client.reset_state(self.share['id'], - constants.STATUS_MIGRATING) - self.shares_v2_client.reset_task_state( - self.share['id'], - constants.TASK_STATE_MIGRATION_DRIVER_PHASE1_DONE) - self.assertRaises( - lib_exc.BadRequest, - self.shares_v2_client.create_access_rule, - self.share['id'], access_type, access_to) - # Revert the migration state by cancelling the migration - self.shares_v2_client.reset_state(self.share['id'], - constants.STATUS_AVAILABLE) - self.shares_v2_client.reset_task_state( - self.share['id'], - constants.TASK_STATE_MIGRATION_CANCELLED) - - def _test_share_actions_during_share_migration(self, method_name, *args): - self.skip_if_tests_are_disabled(method_name) - # Verify various share operations during share migration - self.shares_v2_client.reset_state(self.share['id'], - constants.STATUS_MIGRATING) - self.shares_v2_client.reset_task_state( - self.share['id'], - constants.TASK_STATE_MIGRATION_DRIVER_PHASE1_DONE) - - self.assertRaises( - lib_exc.BadRequest, getattr(self.shares_v2_client, method_name), - self.share['id'], *args) - # Revert the migration state by cancelling the migration - 
self.shares_v2_client.reset_state(self.share['id'], - constants.STATUS_AVAILABLE) - self.shares_v2_client.reset_task_state( - self.share['id'], - constants.TASK_STATE_MIGRATION_CANCELLED) diff --git a/manila_tempest_tests/tests/api/admin/test_multi_backend.py b/manila_tempest_tests/tests/api/admin/test_multi_backend.py deleted file mode 100644 index 79c9c19bcb..0000000000 --- a/manila_tempest_tests/tests/api/admin/test_multi_backend.py +++ /dev/null @@ -1,91 +0,0 @@ -# Copyright 2014 Mirantis Inc. -# All Rights Reserved. -# -# Licensed under the Apache License, Version 2.0 (the "License"); you may -# not use this file except in compliance with the License. You may obtain -# a copy of the License at -# -# http://www.apache.org/licenses/LICENSE-2.0 -# -# Unless required by applicable law or agreed to in writing, software -# distributed under the License is distributed on an "AS IS" BASIS, WITHOUT -# WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. See the -# License for the specific language governing permissions and limitations -# under the License. - -from tempest import config -from tempest.lib.common.utils import data_utils -from testtools import testcase as tc - -from manila_tempest_tests.tests.api import base - - -CONF = config.CONF - - -class ShareMultiBackendTest(base.BaseSharesAdminTest): - - @classmethod - def resource_setup(cls): - super(ShareMultiBackendTest, cls).resource_setup() - if not CONF.share.multi_backend: - raise cls.skipException("Manila multi-backend tests are disabled.") - elif len(CONF.share.backend_names) < 2: - raise cls.skipException("For running multi-backend tests required" - " two names in config. Skipping.") - elif any(not name for name in CONF.share.backend_names): - raise cls.skipException("Share backend names can not be empty. 
" - "Skipping.") - cls.sts = [] - cls.shares = [] - share_data_list = [] - - # Create share types - for i in [0, 1]: - st_name = data_utils.rand_name("share-type-%s" % str(i)) - extra_specs = { - "share_backend_name": CONF.share.backend_names[i], - } - st = cls.create_share_type( - name=st_name, - extra_specs=cls.add_extra_specs_to_dict(extra_specs)) - cls.sts.append(st["share_type"]) - st_id = st["share_type"]["id"] - share_data_list.append({"kwargs": {"share_type_id": st_id}}) - - # Create shares using precreated share types - cls.shares = cls.create_shares(share_data_list) - - @tc.attr(base.TAG_POSITIVE, base.TAG_API_WITH_BACKEND) - def test_share_backend_name_reporting(self): - # Share's 'host' should be like "hostname@backend_name" - for share in self.shares: - get = self.shares_client.get_share(share['id']) - self.assertEqual(2, len(get["host"].split("@"))) - - @tc.attr(base.TAG_POSITIVE, base.TAG_API_WITH_BACKEND) - def test_share_share_type(self): - # Share type should be the same as provided with share creation - for i in [0, 1]: - get = self.shares_v2_client.get_share(self.shares[i]['id'], - version="2.5") - self.assertEqual(self.sts[i]["name"], get["share_type"]) - - @tc.attr(base.TAG_POSITIVE, base.TAG_API_WITH_BACKEND) - def test_share_share_type_v_2_6(self): - # Share type should be the same as provided with share creation - for i in [0, 1]: - get = self.shares_v2_client.get_share(self.shares[i]['id'], - version="2.6") - self.assertEqual(self.sts[i]["id"], get["share_type"]) - self.assertEqual(self.sts[i]["name"], get["share_type_name"]) - - @tc.attr(base.TAG_POSITIVE, base.TAG_API_WITH_BACKEND) - def test_share_backend_name_distinction(self): - # Different share backends should have different host records - if CONF.share.backend_names[0] == CONF.share.backend_names[1]: - raise self.skipException("Share backends " - "configured with same name. Skipping.") - get1 = self.shares_client.get_share(self.shares[0]['id']) - get2 = self.shares_client.get_share(self.shares[1]['id']) - self.assertNotEqual(get1["host"], get2["host"]) diff --git a/manila_tempest_tests/tests/api/admin/test_quotas.py b/manila_tempest_tests/tests/api/admin/test_quotas.py deleted file mode 100644 index d94582860f..0000000000 --- a/manila_tempest_tests/tests/api/admin/test_quotas.py +++ /dev/null @@ -1,759 +0,0 @@ -# Copyright 2014 Mirantis Inc. -# All Rights Reserved. -# -# Licensed under the Apache License, Version 2.0 (the "License"); you may -# not use this file except in compliance with the License. You may obtain -# a copy of the License at -# -# http://www.apache.org/licenses/LICENSE-2.0 -# -# Unless required by applicable law or agreed to in writing, software -# distributed under the License is distributed on an "AS IS" BASIS, WITHOUT -# WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. See the -# License for the specific language governing permissions and limitations -# under the License. - -import ddt -from tempest import config -from tempest.lib.common.utils import data_utils -from tempest.lib import exceptions as lib_exc -import testtools -from testtools import testcase as tc - -from manila_tempest_tests.tests.api import base -from manila_tempest_tests import utils - -CONF = config.CONF -PRE_SHARE_GROUPS_MICROVERSION = "2.39" -SHARE_GROUPS_MICROVERSION = "2.40" - - -@ddt.ddt -class SharesAdminQuotasTest(base.BaseSharesAdminTest): - - @classmethod - def resource_setup(cls): - if not CONF.share.run_quota_tests: - msg = "Quota tests are disabled." 
- raise cls.skipException(msg) - super(SharesAdminQuotasTest, cls).resource_setup() - cls.user_id = cls.shares_v2_client.user_id - cls.tenant_id = cls.shares_v2_client.tenant_id - - @tc.attr(base.TAG_POSITIVE, base.TAG_API) - def test_default_quotas(self): - quotas = self.shares_v2_client.default_quotas(self.tenant_id) - self.assertGreater(int(quotas["gigabytes"]), -2) - self.assertGreater(int(quotas["snapshot_gigabytes"]), -2) - self.assertGreater(int(quotas["shares"]), -2) - self.assertGreater(int(quotas["snapshots"]), -2) - self.assertGreater(int(quotas["share_networks"]), -2) - if utils.is_microversion_supported(SHARE_GROUPS_MICROVERSION): - self.assertGreater(int(quotas["share_groups"]), -2) - self.assertGreater(int(quotas["share_group_snapshots"]), -2) - - @tc.attr(base.TAG_POSITIVE, base.TAG_API) - def test_show_quotas(self): - quotas = self.shares_v2_client.show_quotas(self.tenant_id) - self.assertGreater(int(quotas["gigabytes"]), -2) - self.assertGreater(int(quotas["snapshot_gigabytes"]), -2) - self.assertGreater(int(quotas["shares"]), -2) - self.assertGreater(int(quotas["snapshots"]), -2) - self.assertGreater(int(quotas["share_networks"]), -2) - if utils.is_microversion_supported(SHARE_GROUPS_MICROVERSION): - self.assertGreater(int(quotas["share_groups"]), -2) - self.assertGreater(int(quotas["share_group_snapshots"]), -2) - - @tc.attr(base.TAG_POSITIVE, base.TAG_API) - def test_show_quotas_for_user(self): - quotas = self.shares_v2_client.show_quotas( - self.tenant_id, self.user_id) - self.assertGreater(int(quotas["gigabytes"]), -2) - self.assertGreater(int(quotas["snapshot_gigabytes"]), -2) - self.assertGreater(int(quotas["shares"]), -2) - self.assertGreater(int(quotas["snapshots"]), -2) - self.assertGreater(int(quotas["share_networks"]), -2) - if utils.is_microversion_supported(SHARE_GROUPS_MICROVERSION): - self.assertGreater(int(quotas["share_groups"]), -2) - self.assertGreater(int(quotas["share_group_snapshots"]), -2) - - @tc.attr(base.TAG_POSITIVE, base.TAG_API) - @base.skip_if_microversion_not_supported(PRE_SHARE_GROUPS_MICROVERSION) - def test_show_sg_quotas_using_too_old_microversion(self): - quotas = self.shares_v2_client.show_quotas( - self.tenant_id, version=PRE_SHARE_GROUPS_MICROVERSION) - - for key in ('share_groups', 'share_group_snapshots'): - self.assertNotIn(key, quotas) - - @tc.attr(base.TAG_POSITIVE, base.TAG_API) - @base.skip_if_microversion_not_supported(PRE_SHARE_GROUPS_MICROVERSION) - def test_show_sg_quotas_for_user_using_too_old_microversion(self): - quotas = self.shares_v2_client.show_quotas( - self.tenant_id, self.user_id, - version=PRE_SHARE_GROUPS_MICROVERSION) - - for key in ('share_groups', 'share_group_snapshots'): - self.assertNotIn(key, quotas) - - @ddt.data( - ('id', True), - ('name', False), - ) - @ddt.unpack - @tc.attr(base.TAG_POSITIVE, base.TAG_API) - @base.skip_if_microversion_lt("2.39") - def test_show_share_type_quotas(self, share_type_key, is_st_public): - # Create share type - share_type = self.create_share_type( - data_utils.rand_name("tempest-manila"), - is_public=is_st_public, - cleanup_in_class=False, - extra_specs=self.add_extra_specs_to_dict(), - ) - if 'share_type' in share_type: - share_type = share_type['share_type'] - - # Get current project quotas - p_quotas = self.shares_v2_client.show_quotas(self.tenant_id) - - # Get current quotas - st_quotas = self.shares_v2_client.show_quotas( - self.tenant_id, share_type=share_type[share_type_key]) - - # Share type quotas have values equal to project's - for key in ('shares', 
'gigabytes', 'snapshots', 'snapshot_gigabytes'): - self.assertEqual(st_quotas[key], p_quotas[key]) - - # Verify that we do not have share groups related quotas - # for share types. - for key in ('share_groups', 'share_group_snapshots'): - self.assertNotIn(key, st_quotas) - - -@ddt.ddt -class SharesAdminQuotasUpdateTest(base.BaseSharesAdminTest): - - force_tenant_isolation = True - - @classmethod - def resource_setup(cls): - if not CONF.share.run_quota_tests: - msg = "Quota tests are disabled." - raise cls.skipException(msg) - super(SharesAdminQuotasUpdateTest, cls).resource_setup() - - def setUp(self): - super(self.__class__, self).setUp() - self.client = self.get_client_with_isolated_creds(client_version='2') - self.tenant_id = self.client.tenant_id - self.user_id = self.client.user_id - - @tc.attr(base.TAG_POSITIVE, base.TAG_API) - def test_update_tenant_quota_shares(self): - # get current quotas - quotas = self.client.show_quotas(self.tenant_id) - new_quota = int(quotas["shares"]) + 2 - - # set new quota for shares - updated = self.client.update_quotas(self.tenant_id, shares=new_quota) - self.assertEqual(new_quota, int(updated["shares"])) - - @ddt.data( - "share_groups", - "share_group_snapshots", - ) - @tc.attr(base.TAG_POSITIVE, base.TAG_API) - @testtools.skipUnless( - CONF.share.run_share_group_tests, 'Share Group tests disabled.') - @utils.skip_if_microversion_not_supported(SHARE_GROUPS_MICROVERSION) - def test_update_tenant_quota_share_groups(self, quota_key): - # Get current quotas - quotas = self.client.show_quotas(self.tenant_id) - new_quota = int(quotas[quota_key]) + 2 - - # Set new quota - updated = self.client.update_quotas( - self.tenant_id, **{quota_key: new_quota}) - self.assertEqual(new_quota, int(updated[quota_key])) - - @tc.attr(base.TAG_POSITIVE, base.TAG_API) - def test_update_user_quota_shares(self): - # get current quotas - quotas = self.client.show_quotas(self.tenant_id, self.user_id) - new_quota = int(quotas["shares"]) - 1 - - # set new quota for shares - updated = self.client.update_quotas( - self.tenant_id, self.user_id, shares=new_quota) - self.assertEqual(new_quota, int(updated["shares"])) - - @ddt.data( - "share_groups", - "share_group_snapshots", - ) - @tc.attr(base.TAG_POSITIVE, base.TAG_API) - @testtools.skipUnless( - CONF.share.run_share_group_tests, 'Share Group tests disabled.') - @utils.skip_if_microversion_not_supported(SHARE_GROUPS_MICROVERSION) - def test_update_user_quota_share_groups(self, quota_key): - # Get current quotas - quotas = self.client.show_quotas(self.tenant_id, self.user_id) - new_quota = int(quotas[quota_key]) - 1 - - # Set new quota - updated = self.client.update_quotas( - self.tenant_id, self.user_id, **{quota_key: new_quota}) - self.assertEqual(new_quota, int(updated[quota_key])) - - def _create_share_type(self): - share_type = self.create_share_type( - data_utils.rand_name("tempest-manila"), - cleanup_in_class=False, - client=self.shares_v2_client, - extra_specs=self.add_extra_specs_to_dict(), - ) - if 'share_type' in share_type: - share_type = share_type['share_type'] - return share_type - - @ddt.data( - ('id', True), - ('name', False), - ) - @ddt.unpack - @tc.attr(base.TAG_POSITIVE, base.TAG_API) - @base.skip_if_microversion_lt("2.39") - def test_update_share_type_quota(self, share_type_key, is_st_public): - share_type = self._create_share_type() - - # Get current quotas - quotas = self.client.show_quotas( - self.tenant_id, share_type=share_type[share_type_key]) - - # Update quotas - for q in ('shares', 'gigabytes', 
'snapshots', 'snapshot_gigabytes'): - new_quota = int(quotas[q]) - 1 - - # Set new quota - updated = self.client.update_quotas( - self.tenant_id, share_type=share_type[share_type_key], - **{q: new_quota}) - self.assertEqual(new_quota, int(updated[q])) - - current_quotas = self.client.show_quotas( - self.tenant_id, share_type=share_type[share_type_key]) - - for q in ('shares', 'gigabytes', 'snapshots', 'snapshot_gigabytes'): - self.assertEqual(int(quotas[q]) - 1, current_quotas[q]) - - @tc.attr(base.TAG_POSITIVE, base.TAG_API) - @base.skip_if_microversion_lt("2.39") - def test_update_share_type_quota_in_two_projects(self): - """Regression test for bug/1722707""" - share_type = self._create_share_type() - client1 = self.get_client_with_isolated_creds(client_version='2') - client2 = self.get_client_with_isolated_creds(client_version='2') - - for client in (client1, client2): - # Update quotas - for q in ('shares', 'gigabytes', 'snapshots', - 'snapshot_gigabytes'): - # Set new quota - updated = client.update_quotas( - client.tenant_id, share_type=share_type['id'], **{q: 0}) - self.assertEqual(0, int(updated[q])) - - current_quotas = client.show_quotas( - client.tenant_id, share_type=share_type['id']) - - for q in ('shares', 'gigabytes', 'snapshots', - 'snapshot_gigabytes'): - self.assertEqual(0, int(current_quotas[q])) - - @tc.attr(base.TAG_POSITIVE, base.TAG_API) - def test_update_tenant_quota_snapshots(self): - # get current quotas - quotas = self.client.show_quotas(self.tenant_id) - new_quota = int(quotas["snapshots"]) + 2 - - # set new quota for snapshots - updated = self.client.update_quotas( - self.tenant_id, snapshots=new_quota) - self.assertEqual(new_quota, int(updated["snapshots"])) - - @tc.attr(base.TAG_POSITIVE, base.TAG_API) - def test_update_user_quota_snapshots(self): - # get current quotas - quotas = self.client.show_quotas(self.tenant_id, self.user_id) - new_quota = int(quotas["snapshots"]) - 1 - - # set new quota for snapshots - updated = self.client.update_quotas( - self.tenant_id, self.user_id, snapshots=new_quota) - self.assertEqual(new_quota, int(updated["snapshots"])) - - @tc.attr(base.TAG_POSITIVE, base.TAG_API) - def test_update_tenant_quota_gigabytes(self): - # get current quotas - custom = self.client.show_quotas(self.tenant_id) - - # make quotas for update - gigabytes = int(custom["gigabytes"]) + 2 - - # set new quota for shares - updated = self.client.update_quotas( - self.tenant_id, gigabytes=gigabytes) - self.assertEqual(gigabytes, int(updated["gigabytes"])) - - @tc.attr(base.TAG_POSITIVE, base.TAG_API) - def test_update_tenant_quota_snapshot_gigabytes(self): - # get current quotas - custom = self.client.show_quotas(self.tenant_id) - - # make quotas for update - snapshot_gigabytes = int(custom["snapshot_gigabytes"]) + 2 - - # set new quota for shares - updated = self.client.update_quotas( - self.tenant_id, - snapshot_gigabytes=snapshot_gigabytes) - self.assertEqual(snapshot_gigabytes, - int(updated["snapshot_gigabytes"])) - - @tc.attr(base.TAG_POSITIVE, base.TAG_API) - def test_update_user_quota_gigabytes(self): - # get current quotas - custom = self.client.show_quotas(self.tenant_id, self.user_id) - - # make quotas for update - gigabytes = int(custom["gigabytes"]) - 1 - - # set new quota for shares - updated = self.client.update_quotas( - self.tenant_id, self.user_id, gigabytes=gigabytes) - self.assertEqual(gigabytes, int(updated["gigabytes"])) - - @tc.attr(base.TAG_POSITIVE, base.TAG_API) - def test_update_user_quota_snapshot_gigabytes(self): - # get current 
quotas - custom = self.client.show_quotas(self.tenant_id, self.user_id) - - # make quotas for update - snapshot_gigabytes = int(custom["snapshot_gigabytes"]) - 1 - - # set new quota for shares - updated = self.client.update_quotas( - self.tenant_id, self.user_id, - snapshot_gigabytes=snapshot_gigabytes) - self.assertEqual(snapshot_gigabytes, - int(updated["snapshot_gigabytes"])) - - @tc.attr(base.TAG_POSITIVE, base.TAG_API) - def test_update_tenant_quota_share_networks(self): - # get current quotas - quotas = self.client.show_quotas(self.tenant_id) - new_quota = int(quotas["share_networks"]) + 2 - - # set new quota for share-networks - updated = self.client.update_quotas( - self.tenant_id, share_networks=new_quota) - self.assertEqual(new_quota, int(updated["share_networks"])) - - @tc.attr(base.TAG_POSITIVE, base.TAG_API) - def test_update_user_quota_share_networks(self): - # get current quotas - quotas = self.client.show_quotas( - self.tenant_id, self.user_id) - new_quota = int(quotas["share_networks"]) - 1 - - # set new quota for share-networks - updated = self.client.update_quotas( - self.tenant_id, self.user_id, - share_networks=new_quota) - self.assertEqual(new_quota, int(updated["share_networks"])) - - @tc.attr(base.TAG_POSITIVE, base.TAG_API) - def test_reset_tenant_quotas(self): - # Get default_quotas - default = self.client.default_quotas(self.tenant_id) - - # Get current quotas - custom = self.client.show_quotas(self.tenant_id) - - # Make quotas for update - data = { - "shares": int(custom["shares"]) + 2, - "snapshots": int(custom["snapshots"]) + 2, - "gigabytes": int(custom["gigabytes"]) + 2, - "snapshot_gigabytes": int(custom["snapshot_gigabytes"]) + 2, - "share_networks": int(custom["share_networks"]) + 2, - } - if (utils.is_microversion_supported(SHARE_GROUPS_MICROVERSION) and - CONF.share.run_share_group_tests): - data["share_groups"] = int(custom["share_groups"]) + 2 - data["share_group_snapshots"] = ( - int(custom["share_group_snapshots"]) + 2) - - # set new quota - updated = self.client.update_quotas(self.tenant_id, **data) - self.assertEqual(data["shares"], int(updated["shares"])) - self.assertEqual(data["snapshots"], int(updated["snapshots"])) - self.assertEqual(data["gigabytes"], int(updated["gigabytes"])) - self.assertEqual( - data["snapshot_gigabytes"], int(updated["snapshot_gigabytes"])) - self.assertEqual( - data["share_networks"], int(updated["share_networks"])) - if (utils.is_microversion_supported(SHARE_GROUPS_MICROVERSION) and - CONF.share.run_share_group_tests): - self.assertEqual( - data["share_groups"], int(updated["share_groups"])) - self.assertEqual( - data["share_group_snapshots"], - int(updated["share_group_snapshots"])) - - # Reset customized quotas - self.client.reset_quotas(self.tenant_id) - - # Verify quotas - reseted = self.client.show_quotas(self.tenant_id) - self.assertEqual(int(default["shares"]), int(reseted["shares"])) - self.assertEqual(int(default["snapshots"]), int(reseted["snapshots"])) - self.assertEqual(int(default["gigabytes"]), int(reseted["gigabytes"])) - self.assertEqual( - int(default["snapshot_gigabytes"]), - int(reseted["snapshot_gigabytes"])) - self.assertEqual( - int(default["share_networks"]), int(reseted["share_networks"])) - if (utils.is_microversion_supported(SHARE_GROUPS_MICROVERSION) and - CONF.share.run_share_group_tests): - self.assertEqual( - int(default["share_groups"]), int(reseted["share_groups"])) - self.assertEqual( - int(default["share_group_snapshots"]), - int(reseted["share_group_snapshots"])) - - @ddt.data( - 
('id', True), - ('name', False), - ) - @ddt.unpack - @tc.attr(base.TAG_POSITIVE, base.TAG_API) - @base.skip_if_microversion_lt("2.39") - def test_reset_share_type_quotas(self, share_type_key, is_st_public): - share_type = self._create_share_type() - - # get default_quotas - default_quotas = self.client.default_quotas(self.tenant_id) - - # set new quota for project - updated_p_quota = self.client.update_quotas( - self.tenant_id, - shares=int(default_quotas['shares']) + 5, - snapshots=int(default_quotas['snapshots']) + 5, - gigabytes=int(default_quotas['gigabytes']) + 5, - snapshot_gigabytes=int(default_quotas['snapshot_gigabytes']) + 5) - - # set new quota for project - self.client.update_quotas( - self.tenant_id, - share_type=share_type[share_type_key], - shares=int(default_quotas['shares']) + 3, - snapshots=int(default_quotas['snapshots']) + 3, - gigabytes=int(default_quotas['gigabytes']) + 3, - snapshot_gigabytes=int(default_quotas['snapshot_gigabytes']) + 3) - - # reset share type quotas - self.client.reset_quotas( - self.tenant_id, share_type=share_type[share_type_key]) - - # verify quotas - current_p_quota = self.client.show_quotas(self.tenant_id) - current_st_quota = self.client.show_quotas( - self.tenant_id, share_type=share_type[share_type_key]) - for key in ('shares', 'snapshots', 'gigabytes', 'snapshot_gigabytes'): - self.assertEqual(updated_p_quota[key], current_p_quota[key]) - - # Default share type quotas are current project quotas - self.assertNotEqual(default_quotas[key], current_st_quota[key]) - self.assertEqual(current_p_quota[key], current_st_quota[key]) - - @tc.attr(base.TAG_POSITIVE, base.TAG_API) - def test_unlimited_quota_for_shares(self): - self.client.update_quotas(self.tenant_id, shares=-1) - - quotas = self.client.show_quotas(self.tenant_id) - - self.assertEqual(-1, quotas.get('shares')) - - @tc.attr(base.TAG_POSITIVE, base.TAG_API) - def test_unlimited_user_quota_for_shares(self): - self.client.update_quotas( - self.tenant_id, self.user_id, shares=-1) - - quotas = self.client.show_quotas(self.tenant_id, self.user_id) - - self.assertEqual(-1, quotas.get('shares')) - - @tc.attr(base.TAG_POSITIVE, base.TAG_API) - def test_unlimited_quota_for_snapshots(self): - self.client.update_quotas(self.tenant_id, snapshots=-1) - - quotas = self.client.show_quotas(self.tenant_id) - - self.assertEqual(-1, quotas.get('snapshots')) - - @tc.attr(base.TAG_POSITIVE, base.TAG_API) - def test_unlimited_user_quota_for_snapshots(self): - self.client.update_quotas( - self.tenant_id, self.user_id, snapshots=-1) - - quotas = self.client.show_quotas(self.tenant_id, self.user_id) - - self.assertEqual(-1, quotas.get('snapshots')) - - @tc.attr(base.TAG_POSITIVE, base.TAG_API) - def test_unlimited_quota_for_gigabytes(self): - self.client.update_quotas(self.tenant_id, gigabytes=-1) - - quotas = self.client.show_quotas(self.tenant_id) - - self.assertEqual(-1, quotas.get('gigabytes')) - - @tc.attr(base.TAG_POSITIVE, base.TAG_API) - def test_unlimited_quota_for_snapshot_gigabytes(self): - self.client.update_quotas( - self.tenant_id, snapshot_gigabytes=-1) - - quotas = self.client.show_quotas(self.tenant_id) - - self.assertEqual(-1, quotas.get('snapshot_gigabytes')) - - @tc.attr(base.TAG_POSITIVE, base.TAG_API) - def test_unlimited_user_quota_for_gigabytes(self): - self.client.update_quotas( - self.tenant_id, self.user_id, gigabytes=-1) - - quotas = self.client.show_quotas(self.tenant_id, self.user_id) - - self.assertEqual(-1, quotas.get('gigabytes')) - - @tc.attr(base.TAG_POSITIVE, base.TAG_API) - 
def test_unlimited_user_quota_for_snapshot_gigabytes(self): - self.client.update_quotas( - self.tenant_id, self.user_id, snapshot_gigabytes=-1) - - quotas = self.client.show_quotas(self.tenant_id, self.user_id) - - self.assertEqual(-1, quotas.get('snapshot_gigabytes')) - - @tc.attr(base.TAG_POSITIVE, base.TAG_API) - def test_unlimited_quota_for_share_networks(self): - self.client.update_quotas(self.tenant_id, share_networks=-1) - - quotas = self.client.show_quotas(self.tenant_id) - - self.assertEqual(-1, quotas.get('share_networks')) - - @tc.attr(base.TAG_POSITIVE, base.TAG_API) - def test_unlimited_user_quota_for_share_networks(self): - self.client.update_quotas( - self.tenant_id, self.user_id, share_networks=-1) - - quotas = self.client.show_quotas(self.tenant_id, self.user_id) - - self.assertEqual(-1, quotas.get('share_networks')) - - @tc.attr(base.TAG_POSITIVE, base.TAG_API) - @testtools.skipUnless( - CONF.share.run_share_group_tests, 'Share Group tests disabled.') - @utils.skip_if_microversion_not_supported(SHARE_GROUPS_MICROVERSION) - def test_unlimited_quota_for_share_groups(self): - self.client.update_quotas(self.tenant_id, share_groups=-1) - - quotas = self.client.show_quotas(self.tenant_id) - - self.assertEqual(-1, quotas.get('share_groups')) - - @tc.attr(base.TAG_POSITIVE, base.TAG_API) - @testtools.skipUnless( - CONF.share.run_share_group_tests, 'Share Group tests disabled.') - @utils.skip_if_microversion_not_supported(SHARE_GROUPS_MICROVERSION) - def test_unlimited_user_quota_for_share_group_snapshots(self): - self.client.update_quotas( - self.tenant_id, self.user_id, share_group_snapshots=-1) - - quotas = self.client.show_quotas(self.tenant_id, self.user_id) - - self.assertEqual(-1, quotas.get('share_group_snapshots')) - - @ddt.data(11, -1) - @tc.attr(base.TAG_NEGATIVE, base.TAG_API) - def test_update_user_quotas_bigger_than_project_quota(self, user_quota): - self.client.update_quotas(self.tenant_id, shares=10) - self.client.update_quotas( - self.tenant_id, user_id=self.user_id, force=True, - shares=user_quota) - - @ddt.data(11, -1) - @tc.attr(base.TAG_NEGATIVE, base.TAG_API) - @base.skip_if_microversion_lt("2.39") - def test_update_share_type_quotas_bigger_than_project_quota(self, st_q): - share_type = self._create_share_type() - self.client.update_quotas(self.tenant_id, shares=10) - - self.client.update_quotas( - self.tenant_id, share_type=share_type['name'], force=True, - shares=st_q) - - @tc.attr(base.TAG_NEGATIVE, base.TAG_API) - @base.skip_if_microversion_lt("2.39") - def test_set_share_type_quota_bigger_than_users_quota(self): - share_type = self._create_share_type() - self.client.update_quotas(self.tenant_id, force=False, shares=13) - self.client.update_quotas( - self.tenant_id, user_id=self.user_id, force=False, shares=11) - - # Share type quota does not depend on user's quota, so we should be - # able to update it. 
- self.client.update_quotas( - self.tenant_id, share_type=share_type['name'], force=False, - shares=12) - - @tc.attr(base.TAG_POSITIVE, base.TAG_API_WITH_BACKEND) - @base.skip_if_microversion_lt("2.39") - def test_quotas_usages(self): - # Create share types - st_1, st_2 = (self._create_share_type() for i in (1, 2)) - - # Set quotas for project, user and both share types - self.client.update_quotas(self.tenant_id, shares=3, gigabytes=10) - self.client.update_quotas( - self.tenant_id, user_id=self.user_id, shares=2, gigabytes=7) - for st in (st_1['id'], st_2['name']): - self.client.update_quotas( - self.tenant_id, share_type=st, shares=2, gigabytes=4) - - # Create share, 4Gb, st1 - ok - share_1 = self.create_share( - size=4, share_type_id=st_1['id'], client=self.client, - cleanup_in_class=False) - - # Try create shares twice, failing on user and share type quotas - for size, st_id in ((3, st_1['id']), (4, st_2['id'])): - self.assertRaises( - lib_exc.OverLimit, - self.create_share, - size=size, share_type_id=st_id, client=self.client, - cleanup_in_class=False) - - # Create share, 3Gb, st2 - ok - share_2 = self.create_share( - size=3, share_type_id=st_2['id'], client=self.client, - cleanup_in_class=False) - - # Check quota usages - for g_l, g_use, s_l, s_use, kwargs in ( - (10, 7, 3, 2, {}), - (7, 7, 2, 2, {'user_id': self.user_id}), - (4, 4, 2, 1, {'share_type': st_1['id']}), - (4, 3, 2, 1, {'share_type': st_2['name']})): - quotas = self.client.detail_quotas( - tenant_id=self.tenant_id, **kwargs) - self.assertEqual(0, quotas['gigabytes']['reserved']) - self.assertEqual(g_l, quotas['gigabytes']['limit']) - self.assertEqual(g_use, quotas['gigabytes']['in_use']) - self.assertEqual(0, quotas['shares']['reserved']) - self.assertEqual(s_l, quotas['shares']['limit']) - self.assertEqual(s_use, quotas['shares']['in_use']) - - # Delete shares and then check usages - for share_id in (share_1['id'], share_2['id']): - self.client.delete_share(share_id) - self.client.wait_for_resource_deletion(share_id=share_id) - for kwargs in ({}, {'share_type': st_1['name']}, - {'user_id': self.user_id}, {'share_type': st_2['id']}): - quotas = self.client.detail_quotas( - tenant_id=self.tenant_id, **kwargs) - for key in ('shares', 'gigabytes'): - self.assertEqual(0, quotas[key]['reserved']) - self.assertEqual(0, quotas[key]['in_use']) - - def _check_sg_usages(self, quotas, in_use, limit): - """Helper method for 'test_share_group_quotas_usages' test.""" - self.assertEqual(0, int(quotas['share_groups']['reserved'])) - self.assertEqual(in_use, int(quotas['share_groups']['in_use'])) - self.assertEqual(limit, int(quotas['share_groups']['limit'])) - - def _check_sgs_usages(self, quotas, in_use): - """Helper method for 'test_share_group_quotas_usages' test.""" - self.assertEqual(0, int(quotas['share_group_snapshots']['reserved'])) - self.assertEqual( - in_use, int(quotas['share_group_snapshots']['in_use'])) - self.assertEqual(1, int(quotas['share_group_snapshots']['limit'])) - - def _check_usages(self, sg_in_use, sgs_in_use): - """Helper method for 'test_share_group_quotas_usages' test.""" - p_quotas = self.client.detail_quotas(tenant_id=self.tenant_id) - u_quotas = self.client.detail_quotas( - tenant_id=self.tenant_id, user_id=self.user_id) - self._check_sg_usages(p_quotas, sg_in_use, 3) - self._check_sg_usages(u_quotas, sg_in_use, 2) - self._check_sgs_usages(p_quotas, sgs_in_use) - self._check_sgs_usages(u_quotas, sgs_in_use) - - @tc.attr(base.TAG_POSITIVE, base.TAG_API_WITH_BACKEND) - @testtools.skipUnless( - 
CONF.share.run_share_group_tests, 'Share Group tests disabled.') - @base.skip_if_microversion_lt(SHARE_GROUPS_MICROVERSION) - def test_share_group_quotas_usages(self): - # Set quotas for project (3 SG, 1 SGS) and user (2 SG, 1 SGS) - self.client.update_quotas( - self.tenant_id, share_groups=3, share_group_snapshots=1) - self.client.update_quotas( - self.tenant_id, user_id=self.user_id, - share_groups=2, share_group_snapshots=1) - - # Check usages, they should be 0s - self._check_usages(0, 0) - - # Create SG1 and check usages - share_group1 = self.create_share_group( - cleanup_in_class=False, client=self.client) - self._check_usages(1, 0) - - # Create SGS1 and check usages - sg_snapshot = self.create_share_group_snapshot_wait_for_active( - share_group1['id'], cleanup_in_class=False, client=self.client) - self._check_usages(1, 1) - - # Create SG2 from SGS1 and check usages - share_group2 = self.create_share_group( - cleanup_in_class=False, client=self.client, - source_share_group_snapshot_id=sg_snapshot['id']) - self._check_usages(2, 1) - - # Try create SGS2, fail, then check usages - self.assertRaises( - lib_exc.OverLimit, - self.create_share_group, - client=self.client, cleanup_in_class=False) - self._check_usages(2, 1) - - # Delete SG2 and check usages - self.client.delete_share_group(share_group2['id']) - self.client.wait_for_resource_deletion( - share_group_id=share_group2['id']) - self._check_usages(1, 1) - - # Delete SGS1 and check usages - self.client.delete_share_group_snapshot(sg_snapshot['id']) - self.client.wait_for_resource_deletion( - share_group_snapshot_id=sg_snapshot['id']) - self._check_usages(1, 0) - - # Delete SG1 and check usages - self.client.delete_share_group(share_group1['id']) - self.client.wait_for_resource_deletion( - share_group_id=share_group1['id']) - self._check_usages(0, 0) diff --git a/manila_tempest_tests/tests/api/admin/test_quotas_negative.py b/manila_tempest_tests/tests/api/admin/test_quotas_negative.py deleted file mode 100644 index 91b2507eb6..0000000000 --- a/manila_tempest_tests/tests/api/admin/test_quotas_negative.py +++ /dev/null @@ -1,357 +0,0 @@ -# Copyright 2014 Mirantis Inc. -# All Rights Reserved. -# -# Licensed under the Apache License, Version 2.0 (the "License"); you may -# not use this file except in compliance with the License. You may obtain -# a copy of the License at -# -# http://www.apache.org/licenses/LICENSE-2.0 -# -# Unless required by applicable law or agreed to in writing, software -# distributed under the License is distributed on an "AS IS" BASIS, WITHOUT -# WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. See the -# License for the specific language governing permissions and limitations -# under the License. - -import ddt -from tempest import config -from tempest.lib.common.utils import data_utils -from tempest.lib import exceptions as lib_exc -import testtools -from testtools import testcase as tc - -from manila_tempest_tests.tests.api import base -from manila_tempest_tests import utils - -CONF = config.CONF -PRE_SHARE_GROUPS_MICROVERSION = "2.39" -SHARE_GROUPS_MICROVERSION = "2.40" - - -@ddt.ddt -class SharesAdminQuotasNegativeTest(base.BaseSharesAdminTest): - - force_tenant_isolation = True - - @classmethod - def resource_setup(cls): - if not CONF.share.run_quota_tests: - msg = "Quota tests are disabled." 
- raise cls.skipException(msg) - super(SharesAdminQuotasNegativeTest, cls).resource_setup() - cls.user_id = cls.shares_client.user_id - cls.tenant_id = cls.shares_client.tenant_id - - @tc.attr(base.TAG_NEGATIVE, base.TAG_API) - def test_get_quotas_with_empty_tenant_id(self): - self.assertRaises(lib_exc.NotFound, - self.shares_client.show_quotas, "") - - @tc.attr(base.TAG_NEGATIVE, base.TAG_API) - def test_reset_quotas_with_empty_tenant_id(self): - client = self.get_client_with_isolated_creds() - self.assertRaises(lib_exc.NotFound, - client.reset_quotas, "") - - @ddt.data( - {"shares": -2}, - {"snapshots": -2}, - {"gigabytes": -2}, - {"snapshot_gigabytes": -2}, - {"share_networks": -2}, - ) - @tc.attr(base.TAG_NEGATIVE, base.TAG_API) - def test_update_quota_with_wrong_data(self, kwargs): - # -1 is acceptable value as unlimited - client = self.get_client_with_isolated_creds() - self.assertRaises( - lib_exc.BadRequest, - client.update_quotas, client.tenant_id, **kwargs) - - @ddt.data( - {"share_groups": -2}, - {"share_group_snapshots": -2}, - ) - @tc.attr(base.TAG_NEGATIVE, base.TAG_API) - @testtools.skipUnless( - CONF.share.run_share_group_tests, 'Share Group tests disabled.') - @utils.skip_if_microversion_not_supported(SHARE_GROUPS_MICROVERSION) - def test_update_sg_quota_with_wrong_data(self, kwargs): - # -1 is acceptable value as unlimited - client = self.get_client_with_isolated_creds(client_version='2') - self.assertRaises( - lib_exc.BadRequest, - client.update_quotas, client.tenant_id, **kwargs) - - @tc.attr(base.TAG_NEGATIVE, base.TAG_API) - def test_create_share_with_size_bigger_than_quota(self): - quotas = self.shares_client.show_quotas( - self.shares_client.tenant_id) - overquota = int(quotas['gigabytes']) + 2 - - # try schedule share with size, bigger than gigabytes quota - self.assertRaises(lib_exc.OverLimit, - self.create_share, - size=overquota) - - @tc.attr(base.TAG_NEGATIVE, base.TAG_API) - @testtools.skipUnless( - CONF.share.run_share_group_tests, 'Share Group tests disabled.') - @utils.skip_if_microversion_not_supported(SHARE_GROUPS_MICROVERSION) - def test_create_share_group_with_exceeding_quota_limit(self): - client = self.get_client_with_isolated_creds(client_version='2') - client.update_quotas(client.tenant_id, share_groups=0) - - # Try schedule share group creation - self.assertRaises( - lib_exc.OverLimit, - self.create_share_group, - client=client, - cleanup_in_class=False) - - @tc.attr(base.TAG_NEGATIVE, base.TAG_API) - def test_try_set_user_quota_shares_bigger_than_tenant_quota(self): - client = self.get_client_with_isolated_creds() - - # get current quotas for tenant - tenant_quotas = client.show_quotas(client.tenant_id) - - # try set user quota for shares bigger than tenant quota - bigger_value = int(tenant_quotas["shares"]) + 2 - self.assertRaises(lib_exc.BadRequest, - client.update_quotas, - client.tenant_id, - client.user_id, - force=False, - shares=bigger_value) - - @tc.attr(base.TAG_NEGATIVE, base.TAG_API) - def test_try_set_user_quota_snaps_bigger_than_tenant_quota(self): - client = self.get_client_with_isolated_creds() - - # get current quotas for tenant - tenant_quotas = client.show_quotas(client.tenant_id) - - # try set user quota for snapshots bigger than tenant quota - bigger_value = int(tenant_quotas["snapshots"]) + 2 - self.assertRaises(lib_exc.BadRequest, - client.update_quotas, - client.tenant_id, - client.user_id, - force=False, - snapshots=bigger_value) - - @tc.attr(base.TAG_NEGATIVE, base.TAG_API) - def 
test_try_set_user_quota_gigabytes_bigger_than_tenant_quota(self): - client = self.get_client_with_isolated_creds() - - # get current quotas for tenant - tenant_quotas = client.show_quotas(client.tenant_id) - - # try set user quota for gigabytes bigger than tenant quota - bigger_value = int(tenant_quotas["gigabytes"]) + 2 - self.assertRaises(lib_exc.BadRequest, - client.update_quotas, - client.tenant_id, - client.user_id, - force=False, - gigabytes=bigger_value) - - @tc.attr(base.TAG_NEGATIVE, base.TAG_API) - def test_try_set_user_quota_snap_gigabytes_bigger_than_tenant_quota(self): - client = self.get_client_with_isolated_creds() - - # get current quotas for tenant - tenant_quotas = client.show_quotas(client.tenant_id) - - # try set user quota for snapshot gigabytes bigger than tenant quota - bigger_value = int(tenant_quotas["snapshot_gigabytes"]) + 2 - self.assertRaises(lib_exc.BadRequest, - client.update_quotas, - client.tenant_id, - client.user_id, - force=False, - snapshot_gigabytes=bigger_value) - - @tc.attr(base.TAG_NEGATIVE, base.TAG_API) - def test_try_set_user_quota_share_networks_bigger_than_tenant_quota(self): - client = self.get_client_with_isolated_creds() - - # get current quotas for tenant - tenant_quotas = client.show_quotas(client.tenant_id) - - # try set user quota for share_networks bigger than tenant quota - bigger_value = int(tenant_quotas["share_networks"]) + 2 - self.assertRaises(lib_exc.BadRequest, - client.update_quotas, - client.tenant_id, - client.user_id, - force=False, - share_networks=bigger_value) - - @ddt.data( - ('quota-sets', '2.0', 'show_quotas'), - ('quota-sets', '2.0', 'default_quotas'), - ('quota-sets', '2.0', 'reset_quotas'), - ('quota-sets', '2.0', 'update_quotas'), - ('quota-sets', '2.6', 'show_quotas'), - ('quota-sets', '2.6', 'default_quotas'), - ('quota-sets', '2.6', 'reset_quotas'), - ('quota-sets', '2.6', 'update_quotas'), - ('os-quota-sets', '2.7', 'show_quotas'), - ('os-quota-sets', '2.7', 'default_quotas'), - ('os-quota-sets', '2.7', 'reset_quotas'), - ('os-quota-sets', '2.7', 'update_quotas'), - ) - @ddt.unpack - @tc.attr(base.TAG_NEGATIVE, base.TAG_API) - @base.skip_if_microversion_not_supported("2.7") - def test_show_quotas_with_wrong_versions(self, url, version, method_name): - self.assertRaises( - lib_exc.NotFound, - getattr(self.shares_v2_client, method_name), - self.shares_v2_client.tenant_id, - version=version, url=url, - ) - - @tc.attr(base.TAG_NEGATIVE, base.TAG_API) - def test_show_quota_detail_with_wrong_versions(self): - version = '2.24' - url = 'quota-sets' - - self.assertRaises( - lib_exc.NotFound, - self.shares_v2_client.detail_quotas, - self.shares_v2_client.tenant_id, - version=version, url=url, - ) - - @ddt.data('show', 'reset', 'update') - @tc.attr(base.TAG_NEGATIVE, base.TAG_API) - @base.skip_if_microversion_lt("2.39") - def test_share_type_quotas_using_nonexistent_share_type(self, op): - client = self.get_client_with_isolated_creds(client_version='2') - - kwargs = {"share_type": "fake_nonexistent_share_type"} - if op == 'update': - tenant_quotas = client.show_quotas(client.tenant_id) - kwargs['shares'] = tenant_quotas['shares'] - - self.assertRaises( - lib_exc.NotFound, - getattr(client, op + '_quotas'), - client.tenant_id, - **kwargs) - - def _create_share_type(self): - share_type = self.create_share_type( - data_utils.rand_name("tempest-manila"), - cleanup_in_class=False, - client=self.shares_v2_client, - extra_specs=self.add_extra_specs_to_dict(), - ) - if 'share_type' in share_type: - share_type = 
share_type['share_type'] - return share_type - - @ddt.data('id', 'name') - @tc.attr(base.TAG_NEGATIVE, base.TAG_API) - @base.skip_if_microversion_lt("2.39") - def test_try_update_share_type_quota_for_share_networks(self, key): - client = self.get_client_with_isolated_creds(client_version='2') - share_type = self._create_share_type() - tenant_quotas = client.show_quotas(client.tenant_id) - - # Try to set 'share_networks' quota for share type - self.assertRaises( - lib_exc.BadRequest, - client.update_quotas, - client.tenant_id, - share_type=share_type[key], - share_networks=int(tenant_quotas["share_networks"]), - ) - - @ddt.data('share_groups', 'share_group_snapshots') - @tc.attr(base.TAG_NEGATIVE, base.TAG_API) - @base.skip_if_microversion_lt(SHARE_GROUPS_MICROVERSION) - def test_try_update_share_type_quota_for_share_groups(self, quota_name): - client = self.get_client_with_isolated_creds(client_version='2') - share_type = self._create_share_type() - tenant_quotas = client.show_quotas(client.tenant_id) - - self.assertRaises( - lib_exc.BadRequest, - client.update_quotas, - client.tenant_id, - share_type=share_type["name"], - **{quota_name: int(tenant_quotas[quota_name])} - ) - - @ddt.data('share_groups', 'share_group_snapshots') - @tc.attr(base.TAG_NEGATIVE, base.TAG_API) - @base.skip_if_microversion_not_supported(PRE_SHARE_GROUPS_MICROVERSION) - @base.skip_if_microversion_not_supported(SHARE_GROUPS_MICROVERSION) - def test_share_group_quotas_using_too_old_microversion(self, quota_key): - client = self.get_client_with_isolated_creds(client_version='2') - tenant_quotas = client.show_quotas( - client.tenant_id, version=SHARE_GROUPS_MICROVERSION) - kwargs = { - "version": PRE_SHARE_GROUPS_MICROVERSION, - quota_key: tenant_quotas[quota_key], - } - - self.assertRaises( - lib_exc.BadRequest, - client.update_quotas, - client.tenant_id, - **kwargs) - - @ddt.data('show', 'reset', 'update') - @tc.attr(base.TAG_NEGATIVE, base.TAG_API) - @base.skip_if_microversion_lt("2.38") - def test_share_type_quotas_using_too_old_microversion(self, op): - client = self.get_client_with_isolated_creds(client_version='2') - share_type = self._create_share_type() - kwargs = {"version": "2.38", "share_type": share_type["name"]} - if op == 'update': - tenant_quotas = client.show_quotas(client.tenant_id) - kwargs['shares'] = tenant_quotas['shares'] - - self.assertRaises( - lib_exc.BadRequest, - getattr(client, op + '_quotas'), - client.tenant_id, - **kwargs) - - @ddt.data('show', 'reset', 'update') - @tc.attr(base.TAG_NEGATIVE, base.TAG_API) - @base.skip_if_microversion_lt("2.39") - def test_quotas_providing_share_type_and_user_id(self, op): - client = self.get_client_with_isolated_creds(client_version='2') - share_type = self._create_share_type() - kwargs = {"share_type": share_type["name"], "user_id": client.user_id} - if op == 'update': - tenant_quotas = client.show_quotas(client.tenant_id) - kwargs['shares'] = tenant_quotas['shares'] - - self.assertRaises( - lib_exc.BadRequest, - getattr(client, op + '_quotas'), - client.tenant_id, - **kwargs) - - @ddt.data(11, -1) - @tc.attr(base.TAG_NEGATIVE, base.TAG_API) - @base.skip_if_microversion_lt("2.39") - def test_update_share_type_quotas_bigger_than_project_quota(self, st_q): - client = self.get_client_with_isolated_creds(client_version='2') - share_type = self._create_share_type() - client.update_quotas(client.tenant_id, shares=10) - - self.assertRaises( - lib_exc.BadRequest, - client.update_quotas, - client.tenant_id, - share_type=share_type['name'], - force=False, - 
shares=st_q) diff --git a/manila_tempest_tests/tests/api/admin/test_replication.py b/manila_tempest_tests/tests/api/admin/test_replication.py deleted file mode 100644 index e41c92b340..0000000000 --- a/manila_tempest_tests/tests/api/admin/test_replication.py +++ /dev/null @@ -1,183 +0,0 @@ -# Copyright 2015 Yogesh Kshirsagar -# All Rights Reserved. -# -# Licensed under the Apache License, Version 2.0 (the "License"); you may -# not use this file except in compliance with the License. You may obtain -# a copy of the License at -# -# http://www.apache.org/licenses/LICENSE-2.0 -# -# Unless required by applicable law or agreed to in writing, software -# distributed under the License is distributed on an "AS IS" BASIS, WITHOUT -# WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. See the -# License for the specific language governing permissions and limitations -# under the License. - -from tempest import config -from tempest.lib.common.utils import data_utils -import testtools -from testtools import testcase as tc - -from manila_tempest_tests.common import constants -from manila_tempest_tests import share_exceptions -from manila_tempest_tests.tests.api import base - -CONF = config.CONF -_MIN_SUPPORTED_MICROVERSION = '2.11' - - -@testtools.skipUnless(CONF.share.run_replication_tests, - 'Replication tests are disabled.') -@base.skip_if_microversion_lt(_MIN_SUPPORTED_MICROVERSION) -class ReplicationAdminTest(base.BaseSharesMixedTest): - - @classmethod - def resource_setup(cls): - super(ReplicationAdminTest, cls).resource_setup() - # Create share_type - name = data_utils.rand_name(constants.TEMPEST_MANILA_PREFIX) - cls.admin_client = cls.admin_shares_v2_client - cls.member_client = cls.shares_v2_client - cls.replication_type = CONF.share.backend_replication_type - - if cls.replication_type not in constants.REPLICATION_TYPE_CHOICES: - raise share_exceptions.ShareReplicationTypeException( - replication_type=cls.replication_type - ) - cls.zones = cls.get_availability_zones(client=cls.admin_client) - cls.share_zone = cls.zones[0] - cls.replica_zone = cls.zones[-1] - - cls.extra_specs = cls.add_extra_specs_to_dict( - {"replication_type": cls.replication_type}) - share_type = cls.create_share_type( - name, - extra_specs=cls.extra_specs, - client=cls.admin_client) - cls.share_type = share_type["share_type"] - # Create share with above share_type - cls.share = cls.create_share(share_type_id=cls.share_type["id"], - availability_zone=cls.share_zone, - client=cls.admin_client) - cls.replica = cls.admin_client.list_share_replicas( - share_id=cls.share['id'])[0] - - @staticmethod - def _filter_share_replica_list(replica_list, r_state): - # Iterate through replica list to filter based on replica_state - return [replica['id'] for replica in replica_list - if replica['replica_state'] == r_state] - - @tc.attr(base.TAG_POSITIVE, base.TAG_BACKEND) - def test_promote_out_of_sync_share_replica(self): - """Test promote 'out_of_sync' share replica to active state.""" - if (self.replication_type - not in constants.REPLICATION_PROMOTION_CHOICES): - msg = "Option backend_replication_type should be one of (%s)!" 
- raise self.skipException( - msg % ','.join(constants.REPLICATION_PROMOTION_CHOICES)) - share = self.create_share( - share_type_id=self.share_type['id'], client=self.admin_client) - original_replica = self.admin_client.list_share_replicas( - share_id=share['id'])[0] - - # NOTE(Yogi1): Cleanup needs to be disabled for replica that is - # being promoted since it will become the 'primary'/'active' replica. - replica = self.create_share_replica( - share["id"], self.replica_zone, cleanup=False, - client=self.admin_client) - # Wait for replica state to update after creation - self.admin_client.wait_for_share_replica_status( - replica['id'], constants.REPLICATION_STATE_IN_SYNC, - status_attr='replica_state') - - # List replicas - replica_list = self.admin_client.list_share_replicas( - share_id=share['id']) - - # Check if there is only 1 'active' replica before promotion. - active_replicas = self._filter_share_replica_list( - replica_list, constants.REPLICATION_STATE_ACTIVE) - self.assertEqual(1, len(active_replicas)) - - # Set replica_state to 'out_of_sync' - self.admin_client.reset_share_replica_state( - replica['id'], constants.REPLICATION_STATE_OUT_OF_SYNC) - self.admin_client.wait_for_share_replica_status( - replica['id'], constants.REPLICATION_STATE_OUT_OF_SYNC, - status_attr='replica_state') - - # Promote 'out_of_sync' replica to 'active' state. - self.promote_share_replica(replica['id'], self.admin_client) - # Original replica will need to be cleaned up before the promoted - # replica can be deleted. - self.addCleanup(self.delete_share_replica, original_replica['id']) - - # Check if there is still only 1 'active' replica after promotion. - replica_list = self.admin_client.list_share_replicas( - share_id=self.share["id"]) - new_active_replicas = self._filter_share_replica_list( - replica_list, constants.REPLICATION_STATE_ACTIVE) - self.assertEqual(1, len(new_active_replicas)) - - @tc.attr(base.TAG_POSITIVE, base.TAG_BACKEND) - def test_force_delete_share_replica(self): - """Test force deleting a replica that is in 'error_deleting' status.""" - replica = self.create_share_replica(self.share['id'], - self.replica_zone, - cleanup_in_class=False, - client=self.admin_client) - self.admin_client.reset_share_replica_status( - replica['id'], constants.STATUS_ERROR_DELETING) - self.admin_client.wait_for_share_replica_status( - replica['id'], constants.STATUS_ERROR_DELETING) - self.admin_client.force_delete_share_replica(replica['id']) - self.admin_client.wait_for_resource_deletion(replica_id=replica['id']) - - @tc.attr(base.TAG_POSITIVE, base.TAG_BACKEND) - def test_reset_share_replica_status(self): - """Test resetting a replica's 'status' attribute.""" - replica = self.create_share_replica(self.share['id'], - self.replica_zone, - cleanup_in_class=False, - client=self.admin_client) - self.admin_client.reset_share_replica_status(replica['id'], - constants.STATUS_ERROR) - self.admin_client.wait_for_share_replica_status( - replica['id'], constants.STATUS_ERROR) - - @tc.attr(base.TAG_POSITIVE, base.TAG_BACKEND) - def test_reset_share_replica_state(self): - """Test resetting a replica's 'replica_state' attribute.""" - replica = self.create_share_replica(self.share['id'], - self.replica_zone, - cleanup_in_class=False, - client=self.admin_client) - self.admin_client.reset_share_replica_state(replica['id'], - constants.STATUS_ERROR) - self.admin_client.wait_for_share_replica_status( - replica['id'], constants.STATUS_ERROR, status_attr='replica_state') - - @tc.attr(base.TAG_POSITIVE, base.TAG_BACKEND) - def 
test_resync_share_replica(self): - """Test resyncing a replica.""" - replica = self.create_share_replica(self.share['id'], - self.replica_zone, - cleanup_in_class=False, - client=self.admin_client) - self.admin_client.wait_for_share_replica_status( - replica['id'], constants.REPLICATION_STATE_IN_SYNC, - status_attr='replica_state') - - # Set replica_state to 'out_of_sync'. - self.admin_client.reset_share_replica_state( - replica['id'], constants.REPLICATION_STATE_OUT_OF_SYNC) - self.admin_client.wait_for_share_replica_status( - replica['id'], constants.REPLICATION_STATE_OUT_OF_SYNC, - status_attr='replica_state') - - # Attempt resync - self.admin_client.resync_share_replica(replica['id']) - self.admin_client.wait_for_share_replica_status( - replica['id'], constants.REPLICATION_STATE_IN_SYNC, - status_attr='replica_state') diff --git a/manila_tempest_tests/tests/api/admin/test_replication_actions.py b/manila_tempest_tests/tests/api/admin/test_replication_actions.py deleted file mode 100644 index 5b5653341f..0000000000 --- a/manila_tempest_tests/tests/api/admin/test_replication_actions.py +++ /dev/null @@ -1,179 +0,0 @@ -# Licensed under the Apache License, Version 2.0 (the "License"); you may -# not use this file except in compliance with the License. You may obtain -# a copy of the License at -# -# http://www.apache.org/licenses/LICENSE-2.0 -# -# Unless required by applicable law or agreed to in writing, software -# distributed under the License is distributed on an "AS IS" BASIS, WITHOUT -# WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. See the -# License for the specific language governing permissions and limitations -# under the License. - -from tempest import config -from tempest.lib.common.utils import data_utils -from tempest.lib import exceptions as lib_exc -import testtools -from testtools import testcase as tc - -from manila_tempest_tests.common import constants -from manila_tempest_tests import share_exceptions -from manila_tempest_tests.tests.api import base - -CONF = config.CONF -_MIN_SUPPORTED_MICROVERSION = '2.11' - - -@testtools.skipUnless(CONF.share.run_replication_tests, - 'Replication tests are disabled.') -@testtools.skipIf( - CONF.share.multitenancy_enabled, - "Only for driver_handles_share_servers = False driver mode.") -@base.skip_if_microversion_lt(_MIN_SUPPORTED_MICROVERSION) -class ReplicationAdminTest(base.BaseSharesMixedTest): - - @classmethod - def resource_setup(cls): - super(ReplicationAdminTest, cls).resource_setup() - # Create share_type - name = data_utils.rand_name(constants.TEMPEST_MANILA_PREFIX) - cls.admin_client = cls.admin_shares_v2_client - cls.member_client = cls.shares_v2_client - cls.replication_type = CONF.share.backend_replication_type - - if cls.replication_type not in constants.REPLICATION_TYPE_CHOICES: - raise share_exceptions.ShareReplicationTypeException( - replication_type=cls.replication_type - ) - cls.zones = cls.get_availability_zones(client=cls.admin_client) - cls.share_zone = cls.zones[0] - cls.replica_zone = cls.zones[-1] - - cls.extra_specs = cls.add_extra_specs_to_dict( - {"replication_type": cls.replication_type}) - share_type = cls.create_share_type( - name, - cleanup_in_class=True, - extra_specs=cls.extra_specs, - client=cls.admin_client) - cls.share_type = share_type["share_type"] - # Create share with above share_type - cls.share = cls.create_share(size=CONF.share.share_size+1, - share_type_id=cls.share_type["id"], - availability_zone=cls.share_zone, - client=cls.admin_client) - cls.replica = 
cls.admin_client.list_share_replicas( - share_id=cls.share['id'])[0] - - @tc.attr(base.TAG_POSITIVE, base.TAG_API_WITH_BACKEND) - @testtools.skipUnless(CONF.share.run_extend_tests, - 'Extend share tests are disabled.') - def test_extend_replicated_share(self): - # Test extend share - new_size = self.share["size"] + 1 - self.admin_client.extend_share(self.share["id"], new_size) - self.admin_client.wait_for_share_status(self.share["id"], - "available") - share = self.admin_client.get_share(self.share["id"]) - self.assertEqual(new_size, int(share["size"])) - - @tc.attr(base.TAG_POSITIVE, base.TAG_API_WITH_BACKEND) - @testtools.skipUnless(CONF.share.run_shrink_tests, - 'Shrink share tests are disabled.') - def test_shrink_replicated_share(self): - share = self.admin_client.get_share(self.share["id"]) - new_size = self.share["size"] - 1 - self.admin_client.shrink_share(self.share["id"], new_size) - self.admin_client.wait_for_share_status(share["id"], "available") - shrink_share = self.admin_client.get_share(self.share["id"]) - self.assertEqual(new_size, int(shrink_share["size"])) - - @tc.attr(base.TAG_POSITIVE, base.TAG_BACKEND) - @testtools.skipUnless(CONF.share.run_manage_unmanage_tests, - 'Manage/Unmanage Tests are disabled.') - def test_manage_share_for_replication_type(self): - """Manage a share with replication share type.""" - # Create a share and unmanage it - share = self.create_share(size=2, - share_type_id=self.share_type["id"], - availability_zone=self.share_zone, - cleanup_in_class=True, - client=self.admin_client) - share = self.admin_client.get_share(share["id"]) - export_locations = self.admin_client.list_share_export_locations( - share["id"]) - export_path = export_locations[0]['path'] - - self.admin_client.unmanage_share(share['id']) - self.admin_client.wait_for_resource_deletion(share_id=share['id']) - - # Manage the previously unmanaged share - managed_share = self.admin_client.manage_share( - share['host'], share['share_proto'], - export_path, self.share_type['id']) - self.admin_client.wait_for_share_status( - managed_share['id'], 'available') - - # Add managed share to cleanup queue - self.method_resources.insert( - 0, {'type': 'share', 'id': managed_share['id'], - 'client': self.admin_client}) - - # Make sure a replica can be added to newly managed share - self.create_share_replica(managed_share['id'], self.replica_zone, - cleanup=True, client=self.admin_client) - - @tc.attr(base.TAG_NEGATIVE, base.TAG_API_WITH_BACKEND) - @testtools.skipUnless(CONF.share.run_manage_unmanage_tests, - 'Manage/Unmanage Tests are disabled.') - def test_unmanage_replicated_share_with_replica(self): - """Try to unmanage a share having replica.""" - # Create a share replica before unmanaging the share - self.create_share_replica(self.share["id"], self.replica_zone, - cleanup=True, client=self.admin_client) - self.assertRaises( - lib_exc.Conflict, - self.admin_client.unmanage_share, - share_id=self.share['id']) - - @tc.attr(base.TAG_POSITIVE, base.TAG_BACKEND) - @testtools.skipUnless(CONF.share.run_manage_unmanage_tests, - 'Manage/Unmanage Tests are disabled.') - def test_unmanage_replicated_share_with_no_replica(self): - """Unmanage a replication type share that does not have replica.""" - share = self.create_share(size=2, - share_type_id=self.share_type["id"], - availability_zone=self.share_zone, - client=self.admin_client) - self.admin_client.unmanage_share(share['id']) - self.admin_client.wait_for_resource_deletion(share_id=share['id']) - - @tc.attr(base.TAG_NEGATIVE, 
base.TAG_API_WITH_BACKEND) - @testtools.skipUnless(CONF.share.run_manage_unmanage_snapshot_tests, - 'Manage/Unmanage Snapshot Tests are disabled.') - def test_manage_replicated_share_snapshot(self): - """Try to manage a snapshot of the replicated share.""" - # Create a share replica before managing the snapshot - self.create_share_replica(self.share["id"], self.replica_zone, - cleanup=True, client=self.admin_client) - self.assertRaises( - lib_exc.Conflict, - self.admin_client.manage_snapshot, - share_id=self.share['id'], - provider_location="127.0.0.1:/fake_provider_location/" - "manila_share_9dc61f49_fbc8_48d7_9337_2f9593d9") - - @tc.attr(base.TAG_NEGATIVE, base.TAG_API_WITH_BACKEND) - @testtools.skipUnless(CONF.share.run_manage_unmanage_snapshot_tests, - 'Manage/Unmanage Snapshot Tests are disabled.') - def test_unmanage_replicated_share_snapshot(self): - """Try to unmanage a snapshot of the replicated share with replica.""" - # Create a share replica before unmanaging the snapshot - self.create_share_replica(self.share["id"], self.replica_zone, - cleanup=True, client=self.admin_client) - snapshot = self.create_snapshot_wait_for_active( - self.share["id"], client=self.admin_client) - self.assertRaises( - lib_exc.Conflict, - self.admin_client.unmanage_snapshot, - snapshot_id=snapshot['id']) diff --git a/manila_tempest_tests/tests/api/admin/test_scheduler_stats.py b/manila_tempest_tests/tests/api/admin/test_scheduler_stats.py deleted file mode 100644 index aa298288ad..0000000000 --- a/manila_tempest_tests/tests/api/admin/test_scheduler_stats.py +++ /dev/null @@ -1,204 +0,0 @@ -# Copyright (c) 2015 Clinton Knight. All rights reserved. -# -# Licensed under the Apache License, Version 2.0 (the "License"); you may -# not use this file except in compliance with the License. You may obtain -# a copy of the License at -# -# http://www.apache.org/licenses/LICENSE-2.0 -# -# Unless required by applicable law or agreed to in writing, software -# distributed under the License is distributed on an "AS IS" BASIS, WITHOUT -# WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. See the -# License for the specific language governing permissions and limitations -# under the License. 
- -import ddt -from tempest import config -from tempest.lib.common.utils import data_utils -from testtools import testcase as tc - -from manila_tempest_tests.tests.api import base - -CONF = config.CONF - - -@ddt.ddt -class SchedulerStatsAdminTest(base.BaseSharesAdminTest): - - @classmethod - def _create_share_type(cls, negative=False): - name = data_utils.rand_name("unique_st_name") - extra_specs = None - - if negative: - extra_specs = { - 'share_backend_name': data_utils.rand_name("fake_name"), - } - - extra_specs = cls.add_extra_specs_to_dict(extra_specs=extra_specs) - return cls.create_share_type( - name, extra_specs=extra_specs, - client=cls.admin_client) - - @classmethod - def resource_setup(cls): - super(SchedulerStatsAdminTest, cls).resource_setup() - cls.admin_client = cls.shares_v2_client - - @tc.attr(base.TAG_POSITIVE, base.TAG_API) - def test_pool_list(self): - - # List pools - pool_response = self.shares_client.list_pools() - pool_list = pool_response.get('pools') - self.assertIsNotNone(pool_list, 'No pools returned from pools API') - self.assertNotEmpty(pool_list) - pool = pool_list[0] - required_keys = {'name', 'host', 'backend', 'pool'} - actual_keys = set(pool.keys()) - self.assertTrue(actual_keys.issuperset(required_keys)) - - @tc.attr(base.TAG_POSITIVE, base.TAG_API) - def test_pool_list_with_filters(self): - - # List pools - pool_response = self.shares_client.list_pools() - pool_list = pool_response.get('pools') - - # Ensure we got at least one pool - self.assertIsNotNone(pool_list, 'No pools returned from pools API') - self.assertNotEmpty(pool_list) - pool = pool_list[0] - - # Build search opts from data and get pools again with filter - search_opts = { - 'host': self._wrap_regex_for_exact_match(pool.get('host')), - 'backend': self._wrap_regex_for_exact_match(pool.get('backend')), - 'pool': self._wrap_regex_for_exact_match(pool.get('pool')), - } - pool_response = self.shares_client.list_pools( - search_opts=search_opts) - filtered_pool_list = pool_response.get('pools') - - # Ensure we got exactly one pool matching the first one from above - self.assertEqual(1, len(filtered_pool_list)) - - # Match the key values, not the timestamp. 
- for k, v in search_opts.items(): - self.assertEqual(v[1:-1], filtered_pool_list[0][k]) - - @tc.attr(base.TAG_POSITIVE, base.TAG_API) - def test_pool_list_with_filters_negative(self): - - # Build search opts for a non-existent pool - search_opts = { - 'host': 'foo', - 'backend': 'bar', - 'pool': 'shark', - } - pool_response = self.shares_client.list_pools( - search_opts=search_opts) - pool_list = pool_response.get('pools') - - # Ensure we got no pools - self.assertEmpty(pool_list) - - @tc.attr(base.TAG_POSITIVE, base.TAG_API) - def test_pool_list_detail(self): - - # List pools - pool_response = self.shares_client.list_pools(detail=True) - pool_list = pool_response.get('pools') - self.assertIsNotNone(pool_list, 'No pools returned from pools API') - self.assertNotEmpty(pool_list) - pool = pool_list[0] - required_keys = {'name', 'host', 'backend', 'pool', 'capabilities'} - actual_keys = set(pool.keys()) - self.assertTrue(actual_keys.issuperset(required_keys)) - - @tc.attr(base.TAG_POSITIVE, base.TAG_API) - def test_pool_list_detail_with_filters(self): - - # List pools - pool_response = self.shares_client.list_pools(detail=True) - pool_list = pool_response.get('pools') - - # Ensure we got at least one pool - self.assertIsNotNone(pool_list, 'No pools returned from pools API') - self.assertNotEmpty(pool_list) - pool = pool_list[0] - - # Build search opts from data and get pools again with filter - search_opts = { - 'host': self._wrap_regex_for_exact_match(pool.get('host')), - 'backend': self._wrap_regex_for_exact_match(pool.get('backend')), - 'pool': self._wrap_regex_for_exact_match(pool.get('pool')), - } - pool_response = self.shares_client.list_pools( - detail=True, search_opts=search_opts) - filtered_pool_list = pool_response.get('pools') - - # Ensure we got exactly one pool matching the first one from above - self.assertEqual(1, len(filtered_pool_list)) - - # Match the key values, not the timestamp. 
- for k, v in search_opts.items(): - self.assertEqual(v[1:-1], filtered_pool_list[0][k]) - - @tc.attr(base.TAG_NEGATIVE, base.TAG_API) - def test_pool_list_detail_with_filters_negative(self): - - # Build search opts for a non-existent pool - search_opts = { - 'host': 'foo', - 'backend': 'bar', - 'pool': 'shark', - } - pool_response = self.shares_client.list_pools( - detail=True, search_opts=search_opts) - pool_list = pool_response.get('pools') - - # Ensure we got no pools - self.assertEmpty(pool_list) - - @tc.attr(base.TAG_POSITIVE, base.TAG_API_WITH_BACKEND) - @base.skip_if_microversion_not_supported("2.23") - @ddt.data((True, "name"), (True, "id"), (False, "name"), (False, "id")) - @ddt.unpack - def test_pool_list_with_share_type_filter_with_detail( - self, detail, share_type_key): - st = self._create_share_type() - search_opts = {"share_type": st["share_type"][share_type_key]} - kwargs = {'search_opts': search_opts} - - if detail: - kwargs.update({'detail': True}) - - pools = self.admin_client.list_pools(**kwargs)['pools'] - - self.assertIsNotNone(pools, 'No pools returned from pools API') - self.assertNotEmpty(pools) - for pool in pools: - pool_keys = list(pool.keys()) - self.assertIn("name", pool_keys) - self.assertIn("host", pool_keys) - self.assertIn("backend", pool_keys) - self.assertIn("pool", pool_keys) - self.assertIs(detail, "capabilities" in pool_keys) - - @tc.attr(base.TAG_NEGATIVE, base.TAG_API_WITH_BACKEND) - @base.skip_if_microversion_not_supported("2.23") - @ddt.data((True, "name"), (True, "id"), (False, "name"), (False, "id")) - @ddt.unpack - def test_pool_list_with_share_type_filter_with_detail_negative( - self, detail, share_type_key): - st_negative = self._create_share_type(negative=True) - search_opts = {"share_type": st_negative["share_type"][share_type_key]} - - pools = self.admin_client.list_pools( - detail=detail, search_opts=search_opts)['pools'] - - self.assertEmpty(pools) - - def _wrap_regex_for_exact_match(self, regex): - return '^%s$' % regex diff --git a/manila_tempest_tests/tests/api/admin/test_security_services.py b/manila_tempest_tests/tests/api/admin/test_security_services.py deleted file mode 100644 index 3b29e2655b..0000000000 --- a/manila_tempest_tests/tests/api/admin/test_security_services.py +++ /dev/null @@ -1,64 +0,0 @@ -# Copyright 2014 Mirantis Inc. -# All Rights Reserved. -# -# Licensed under the Apache License, Version 2.0 (the "License"); you may -# not use this file except in compliance with the License. You may obtain -# a copy of the License at -# -# http://www.apache.org/licenses/LICENSE-2.0 -# -# Unless required by applicable law or agreed to in writing, software -# distributed under the License is distributed on an "AS IS" BASIS, WITHOUT -# WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. See the -# License for the specific language governing permissions and limitations -# under the License. 
- -from testtools import testcase as tc - -from manila_tempest_tests.tests.api import base -from manila_tempest_tests.tests.api import test_security_services - - -class SecurityServiceAdminTest( - base.BaseSharesAdminTest, - test_security_services.SecurityServiceListMixin): - - def setUp(self): - super(SecurityServiceAdminTest, self).setUp() - ss_ldap_data = { - 'name': 'ss_ldap', - 'dns_ip': '1.1.1.1', - 'server': 'fake_server_1', - 'domain': 'fake_domain_1', - 'user': 'fake_user', - 'password': 'pass', - } - ss_kerberos_data = { - 'name': 'ss_kerberos', - 'dns_ip': '2.2.2.2', - 'server': 'fake_server_2', - 'domain': 'fake_domain_2', - 'user': 'test_user', - 'password': 'word', - } - self.ss_ldap = self.create_security_service('ldap', **ss_ldap_data) - self.ss_kerberos = self.create_security_service( - 'kerberos', - **ss_kerberos_data) - - @tc.attr(base.TAG_POSITIVE, base.TAG_API) - def test_list_security_services_all_tenants(self): - listed = self.shares_client.list_security_services( - params={'all_tenants': 1}) - self.assertTrue(any(self.ss_ldap['id'] == ss['id'] for ss in listed)) - self.assertTrue(any(self.ss_kerberos['id'] == ss['id'] - for ss in listed)) - - keys = ["name", "id", "status", "type", ] - [self.assertIn(key, s_s.keys()) for s_s in listed for key in keys] - - @tc.attr(base.TAG_NEGATIVE, base.TAG_API) - def test_list_security_services_invalid_filters(self): - listed = self.shares_client.list_security_services( - params={'fake_opt': 'some_value'}) - self.assertEqual(0, len(listed)) diff --git a/manila_tempest_tests/tests/api/admin/test_services.py b/manila_tempest_tests/tests/api/admin/test_services.py deleted file mode 100644 index bda4bb42af..0000000000 --- a/manila_tempest_tests/tests/api/admin/test_services.py +++ /dev/null @@ -1,105 +0,0 @@ -# Copyright 2014 Mirantis Inc. -# All Rights Reserved. -# -# Licensed under the Apache License, Version 2.0 (the "License"); you may -# not use this file except in compliance with the License. You may obtain -# a copy of the License at -# -# http://www.apache.org/licenses/LICENSE-2.0 -# -# Unless required by applicable law or agreed to in writing, software -# distributed under the License is distributed on an "AS IS" BASIS, WITHOUT -# WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. See the -# License for the specific language governing permissions and limitations -# under the License. 
- -import ddt -from testtools import testcase as tc - -from manila_tempest_tests.tests.api import base - - -@ddt.ddt -class ServicesAdminTest(base.BaseSharesAdminTest): - - def setUp(self): - super(ServicesAdminTest, self).setUp() - self.services = self.shares_client.list_services() - - @tc.attr(base.TAG_POSITIVE, base.TAG_API) - @ddt.data('shares_client', 'shares_v2_client') - def test_list_services(self, client_name): - services = getattr(self, client_name).list_services() - self.assertNotEqual(0, len(services)) - - for service in services: - self.assertIsNotNone(service['id']) - - @tc.attr(base.TAG_POSITIVE, base.TAG_API) - @ddt.data('shares_client', 'shares_v2_client') - def test_get_services_by_host_name(self, client_name): - host = self.services[0]["host"] - params = {"host": host} - services = getattr(self, client_name).list_services(params) - self.assertNotEqual(0, len(services)) - for service in services: - self.assertEqual(host, service["host"]) - - @tc.attr(base.TAG_POSITIVE, base.TAG_API) - @ddt.data('shares_client', 'shares_v2_client') - def test_get_services_by_binary_name(self, client_name): - binary = self.services[0]["binary"] - params = {"binary": binary, } - services = getattr(self, client_name).list_services(params) - self.assertNotEqual(0, len(services)) - for service in services: - self.assertEqual(binary, service["binary"]) - - @tc.attr(base.TAG_POSITIVE, base.TAG_API) - @ddt.data('shares_client', 'shares_v2_client') - def test_get_services_by_availability_zone(self, client_name): - zone = self.services[0]["zone"] - params = {"zone": zone, } - services = getattr(self, client_name).list_services(params) - self.assertNotEqual(0, len(services)) - for service in services: - self.assertEqual(zone, service["zone"]) - - @tc.attr(base.TAG_POSITIVE, base.TAG_API) - @ddt.data('shares_client', 'shares_v2_client') - def test_get_services_by_status(self, client_name): - status = self.services[0]["status"] - params = {"status": status, } - services = getattr(self, client_name).list_services(params) - self.assertNotEqual(0, len(services)) - for service in services: - self.assertEqual(status, service["status"]) - - @tc.attr(base.TAG_POSITIVE, base.TAG_API) - @ddt.data('shares_client', 'shares_v2_client') - def test_get_services_by_state(self, client_name): - state = self.services[0]["state"] - params = {"state": state, } - services = getattr(self, client_name).list_services(params) - self.assertNotEqual(0, len(services)) - for service in services: - self.assertEqual(state, service["state"]) - - @tc.attr(base.TAG_POSITIVE, base.TAG_API) - @ddt.data('shares_client', 'shares_v2_client') - def test_get_services_by_all_filters(self, client_name): - params = { - "host": self.services[0]["host"], - "binary": self.services[0]["binary"], - "zone": self.services[0]["zone"], - "status": self.services[0]["status"], - "state": self.services[0]["state"], - } - services = getattr(self, client_name).list_services(params) - self.assertNotEqual(0, len(services)) - for service in services: - self.assertEqual(params["host"], service["host"]) - self.assertEqual(params["binary"], service["binary"]) - self.assertEqual(params["zone"], service["zone"]) - self.assertEqual(params["status"], service["status"]) - self.assertEqual(params["state"], service["state"]) diff --git a/manila_tempest_tests/tests/api/admin/test_services_negative.py b/manila_tempest_tests/tests/api/admin/test_services_negative.py deleted file mode 100644 index 04ff803edf..0000000000 --- 
a/manila_tempest_tests/tests/api/admin/test_services_negative.py +++ /dev/null @@ -1,100 +0,0 @@ -# Copyright 2014 Mirantis Inc. -# All Rights Reserved. -# -# Licensed under the Apache License, Version 2.0 (the "License"); you may -# not use this file except in compliance with the License. You may obtain -# a copy of the License at -# -# http://www.apache.org/licenses/LICENSE-2.0 -# -# Unless required by applicable law or agreed to in writing, software -# distributed under the License is distributed on an "AS IS" BASIS, WITHOUT -# WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. See the -# License for the specific language governing permissions and limitations -# under the License. - -import ddt -from tempest.lib import exceptions as lib_exc -from testtools import testcase as tc - -from manila_tempest_tests.tests.api import base - - -@ddt.ddt -class ServicesAdminNegativeTest(base.BaseSharesMixedTest): - - @classmethod - def resource_setup(cls): - super(ServicesAdminNegativeTest, cls).resource_setup() - cls.admin_client = cls.admin_shares_v2_client - cls.member_client = cls.shares_v2_client - - @tc.attr(base.TAG_NEGATIVE, base.TAG_API) - def test_list_services_with_non_admin_user(self): - self.assertRaises(lib_exc.Forbidden, - self.member_client.list_services) - - @tc.attr(base.TAG_NEGATIVE, base.TAG_API) - def test_get_service_by_invalid_params(self): - # All services are expected if send the request with invalid parameter - services = self.admin_client.list_services() - params = {'fake_param': 'fake_param_value'} - services_fake = self.admin_client.list_services(params) - self.assertEqual(len(services), len(services_fake)) - - # "update_at" field could be updated before second request, - # so do not take it in account. - for service in services + services_fake: - service["updated_at"] = "removed_possible_difference" - - msg = ('Unexpected service list. Expected %s, got %s.' 
% - (services, services_fake)) - self.assertEqual(sorted(services, key=lambda service: service['id']), - sorted(services_fake, - key=lambda service: service['id']), - msg) - - @tc.attr(base.TAG_NEGATIVE, base.TAG_API) - def test_get_service_by_invalid_host(self): - params = {'host': 'fake_host'} - services_fake = self.admin_client.list_services(params) - self.assertEqual(0, len(services_fake)) - - @tc.attr(base.TAG_NEGATIVE, base.TAG_API) - def test_get_service_by_invalid_binary(self): - params = {'binary': 'fake_binary'} - services_fake = self.admin_client.list_services(params) - self.assertEqual(0, len(services_fake)) - - @tc.attr(base.TAG_NEGATIVE, base.TAG_API) - def test_get_service_by_invalid_zone(self): - params = {'zone': 'fake_zone'} - services_fake = self.admin_client.list_services(params) - self.assertEqual(0, len(services_fake)) - - @tc.attr(base.TAG_NEGATIVE, base.TAG_API) - def test_get_service_by_invalid_status(self): - params = {'status': 'fake_status'} - services_fake = self.admin_client.list_services(params) - self.assertEqual(0, len(services_fake)) - - @tc.attr(base.TAG_NEGATIVE, base.TAG_API) - def test_get_service_by_invalid_state(self): - params = {'state': 'fake_state'} - services_fake = self.admin_client.list_services(params) - self.assertEqual(0, len(services_fake)) - - @tc.attr(base.TAG_NEGATIVE, base.TAG_API) - @ddt.data( - ('os-services', '2.7'), - ('services', '2.6'), - ('services', '2.0'), - ) - @ddt.unpack - @base.skip_if_microversion_not_supported("2.7") - def test_list_services_with_wrong_versions(self, url, version): - self.assertRaises( - lib_exc.NotFound, - self.admin_client.list_services, - version=version, url=url, - ) diff --git a/manila_tempest_tests/tests/api/admin/test_share_group_types.py b/manila_tempest_tests/tests/api/admin/test_share_group_types.py deleted file mode 100644 index c0c9a97220..0000000000 --- a/manila_tempest_tests/tests/api/admin/test_share_group_types.py +++ /dev/null @@ -1,245 +0,0 @@ -# Copyright 2016 Andrew Kerr -# All Rights Reserved. -# -# Licensed under the Apache License, Version 2.0 (the "License"); you may -# not use this file except in compliance with the License. You may obtain -# a copy of the License at -# -# http://www.apache.org/licenses/LICENSE-2.0 -# -# Unless required by applicable law or agreed to in writing, software -# distributed under the License is distributed on an "AS IS" BASIS, WITHOUT -# WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. See the -# License for the specific language governing permissions and limitations -# under the License. 
- -import ddt -from tempest import config -from tempest.lib.common.utils import data_utils -import testtools -from testtools import testcase as tc - -from manila_tempest_tests.common import constants -from manila_tempest_tests.tests.api import base - -CONF = config.CONF - - -@testtools.skipUnless( - CONF.share.run_share_group_tests, 'Share Group tests disabled.') -@base.skip_if_microversion_lt(constants.MIN_SHARE_GROUP_MICROVERSION) -@ddt.ddt -class ShareGroupTypesTest(base.BaseSharesAdminTest): - - @classmethod - def resource_setup(cls): - super(ShareGroupTypesTest, cls).resource_setup() - - # Create 2 share_types - name = data_utils.rand_name("tempest-manila") - extra_specs = cls.add_extra_specs_to_dict() - share_type = cls.create_share_type(name, extra_specs=extra_specs) - cls.share_type = share_type['share_type'] - - name = data_utils.rand_name("tempest-manila") - share_type = cls.create_share_type(name, extra_specs=extra_specs) - cls.share_type2 = share_type['share_type'] - - @tc.attr(base.TAG_POSITIVE, base.TAG_API) - @ddt.data('id', 'name') - def test_create_get_delete_share_group_type_min(self, st_key): - name = data_utils.rand_name("tempest-manila") - - # Create share group type - sg_type_c = self.create_share_group_type( - name=name, - share_types=self.share_type[st_key], - cleanup_in_class=False, - version=constants.MIN_SHARE_GROUP_MICROVERSION) - - self.assertEqual( - [self.share_type['id']], - sg_type_c['share_types'], - 'Share type not applied correctly.') - - # Read share group type - sg_type_r = self.shares_v2_client.get_share_group_type(sg_type_c['id']) - keys = set(sg_type_r.keys()) - self.assertTrue( - constants.SHARE_GROUP_TYPE_REQUIRED_KEYS.issubset(keys), - 'At least one expected key missing from share group type ' - 'response. Expected %s, got %s.' 
% ( - constants.SHARE_GROUP_TYPE_REQUIRED_KEYS, keys)) - self.assertEqual(sg_type_c['name'], sg_type_r['name']) - - # Delete share group type - self.shares_v2_client.delete_share_group_type( - sg_type_r['id'], version=constants.MIN_SHARE_GROUP_MICROVERSION) - self.shares_v2_client.wait_for_resource_deletion( - share_group_type_id=sg_type_r['id']) - - @tc.attr(base.TAG_POSITIVE, base.TAG_API) - @ddt.data('id', 'name') - def test_create_share_group_type_multiple_share_types_min(self, st_key): - name = data_utils.rand_name("tempest-manila") - - sg_type = self.create_share_group_type( - name=name, - share_types=[self.share_type[st_key], self.share_type2[st_key]], - cleanup_in_class=False, - version=constants.MIN_SHARE_GROUP_MICROVERSION) - - self.assertEqual( - {self.share_type['id'], self.share_type2['id']}, - set(sg_type['share_types']), - 'Share types not applied correctly.') - - @tc.attr(base.TAG_POSITIVE, base.TAG_API) - def test_create_share_group_type_with_one_spec_min(self): - name = data_utils.rand_name("tempest-manila") - group_specs = {'key': 'value'} - - sg_type = self.create_share_group_type( - name=name, - share_types=self.share_type['id'], - group_specs=group_specs, - cleanup_in_class=False, - version=constants.MIN_SHARE_GROUP_MICROVERSION) - - self.assertDictMatch(group_specs, sg_type['group_specs']) - - @tc.attr(base.TAG_POSITIVE, base.TAG_API) - def test_create_share_group_type_with_multiple_specs_min(self): - name = data_utils.rand_name("tempest-manila") - group_specs = {'key1': 'value1', 'key2': 'value2'} - - sg_type = self.create_share_group_type( - name=name, - share_types=self.share_type['id'], - group_specs=group_specs, - cleanup_in_class=False, - version=constants.MIN_SHARE_GROUP_MICROVERSION) - - self.assertDictMatch(group_specs, sg_type['group_specs']) - - @tc.attr(base.TAG_POSITIVE, base.TAG_API) - def test_update_single_share_group_type_spec_min(self): - name = data_utils.rand_name("tempest-manila") - group_specs = {'key1': 'value1', 'key2': 'value2'} - - sg_type = self.create_share_group_type( - name=name, - share_types=self.share_type['id'], - group_specs=group_specs, - cleanup_in_class=False, - version=constants.MIN_SHARE_GROUP_MICROVERSION) - - self.assertDictMatch(group_specs, sg_type['group_specs']) - - group_specs = {'key1': 'value1', 'key2': 'value2'} - - self.shares_v2_client.update_share_group_type_spec( - sg_type['id'], 'key1', 'value3') - sg_type = self.shares_v2_client.get_share_group_type(sg_type['id']) - - self.assertIn('key1', sg_type['group_specs']) - self.assertIn('key2', sg_type['group_specs']) - self.assertEqual('value3', sg_type['group_specs']['key1']) - self.assertEqual(group_specs['key2'], sg_type['group_specs']['key2']) - - @tc.attr(base.TAG_POSITIVE, base.TAG_API) - def test_update_all_share_group_type_specs_min(self): - name = data_utils.rand_name("tempest-manila") - group_specs = {'key1': 'value1', 'key2': 'value2'} - - sg_type = self.create_share_group_type( - name=name, - share_types=self.share_type['id'], - group_specs=group_specs, - cleanup_in_class=False, - version=constants.MIN_SHARE_GROUP_MICROVERSION) - - self.assertDictMatch(group_specs, sg_type['group_specs']) - - group_specs = {'key1': 'value3', 'key2': 'value4'} - - self.shares_v2_client.update_share_group_type_specs( - sg_type['id'], group_specs) - sg_type = self.shares_v2_client.get_share_group_type(sg_type['id']) - - for k, v in group_specs.items(): - self.assertIn(k, sg_type['group_specs']) - self.assertEqual(v, sg_type['group_specs'][k]) - - @tc.attr(base.TAG_POSITIVE, 
base.TAG_API) - def test_delete_single_share_group_type_spec_min(self): - name = data_utils.rand_name("tempest-manila") - group_specs = {'key1': 'value1', 'key2': 'value2'} - - sg_type = self.create_share_group_type( - name=name, - share_types=self.share_type['id'], - group_specs=group_specs, - cleanup_in_class=False, - version=constants.MIN_SHARE_GROUP_MICROVERSION) - - self.assertDictMatch(group_specs, sg_type['group_specs']) - - key_to_delete = 'key1' - group_specs.pop(key_to_delete) - - self.shares_v2_client.delete_share_group_type_spec( - sg_type['id'], key_to_delete) - sg_type = self.shares_v2_client.get_share_group_type( - sg_type['id']) - - self.assertDictMatch(group_specs, sg_type['group_specs']) - - @tc.attr(base.TAG_POSITIVE, base.TAG_API) - def test_private_share_group_type_access(self): - name = data_utils.rand_name("tempest-manila") - group_specs = {"key1": "value1", "key2": "value2"} - project_id = self.shares_v2_client.tenant_id - - # Create private share group type - sgt_create = self.create_share_group_type( - name=name, - share_types=[self.share_type['id']], - is_public=False, - group_specs=group_specs, - ) - self.assertEqual(name, sgt_create['name']) - sgt_id = sgt_create["id"] - - # It should not be listed without access - sgt_list = self.shares_v2_client.list_share_group_types() - self.assertFalse(any(sgt_id == sgt["id"] for sgt in sgt_list)) - - # List projects that have access for share group type - none expected - access = self.shares_v2_client.list_access_to_share_group_type(sgt_id) - self.assertEmpty(access) - - # Add project access to share group type - access = self.shares_v2_client.add_access_to_share_group_type( - sgt_id, project_id) - - # Now it should be listed - sgt_list = self.shares_v2_client.list_share_group_types() - self.assertTrue(any(sgt_id == sgt["id"] for sgt in sgt_list)) - - # List projects that have access for share group type - one expected - access = self.shares_v2_client.list_access_to_share_group_type(sgt_id) - expected = [{'share_group_type_id': sgt_id, 'project_id': project_id}] - self.assertEqual(expected, access) - - # Remove project access from share group type - access = self.shares_v2_client.remove_access_from_share_group_type( - sgt_id, project_id) - - # It should not be listed without access - sgt_list = self.shares_v2_client.list_share_group_types() - self.assertFalse(any(sgt_id == sgt["id"] for sgt in sgt_list)) - - # List projects that have access for share group type - none expected - access = self.shares_v2_client.list_access_to_share_group_type(sgt_id) - self.assertEmpty(access) diff --git a/manila_tempest_tests/tests/api/admin/test_share_group_types_negative.py b/manila_tempest_tests/tests/api/admin/test_share_group_types_negative.py deleted file mode 100644 index 6dfe20456b..0000000000 --- a/manila_tempest_tests/tests/api/admin/test_share_group_types_negative.py +++ /dev/null @@ -1,146 +0,0 @@ -# Licensed under the Apache License, Version 2.0 (the "License"); you may -# not use this file except in compliance with the License. You may obtain -# a copy of the License at -# -# http://www.apache.org/licenses/LICENSE-2.0 -# -# Unless required by applicable law or agreed to in writing, software -# distributed under the License is distributed on an "AS IS" BASIS, WITHOUT -# WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. See the -# License for the specific language governing permissions and limitations -# under the License. 
- -from tempest import config -from tempest.lib.common.utils import data_utils -from tempest.lib import exceptions as lib_exc -import testtools -from testtools import testcase as tc - -from manila_tempest_tests.common import constants -from manila_tempest_tests.tests.api import base - -CONF = config.CONF - - -@testtools.skipUnless( - CONF.share.run_share_group_tests, 'Share Group tests disabled.') -@base.skip_if_microversion_lt(constants.MIN_SHARE_GROUP_MICROVERSION) -class ShareGroupTypesAdminNegativeTest(base.BaseSharesMixedTest): - - @classmethod - def resource_setup(cls): - super(ShareGroupTypesAdminNegativeTest, cls).resource_setup() - cls.share_type = cls.create_share_type( - data_utils.rand_name("unique_st_name"), - extra_specs=cls.add_extra_specs_to_dict({"key": "value"}), - client=cls.admin_shares_v2_client) - cls.share_group_type = cls.create_share_group_type( - data_utils.rand_name("unique_sgt_name"), - share_types=[cls.share_type['share_type']['id']], - client=cls.admin_shares_v2_client) - - @tc.attr(base.TAG_NEGATIVE, base.TAG_API) - def test_create_share_group_type_without_name(self): - self.assertRaises( - lib_exc.BadRequest, - self.admin_shares_v2_client.create_share_group_type, - name=None, - share_types=data_utils.rand_name("fake")) - - @tc.attr(base.TAG_NEGATIVE, base.TAG_API) - def test_create_share_group_type_with_nonexistent_share_type(self): - self.assertRaises( - lib_exc.NotFound, - self.admin_shares_v2_client.create_share_group_type, - name=data_utils.rand_name("sgt_name_should_have_not_been_created"), - share_types=data_utils.rand_name("fake")) - - @tc.attr(base.TAG_NEGATIVE, base.TAG_API) - def test_create_share_group_type_with_empty_name(self): - self.assertRaises( - lib_exc.BadRequest, - self.create_share_group_type, '', - client=self.admin_shares_v2_client) - - @tc.attr(base.TAG_NEGATIVE, base.TAG_API) - def test_create_share_group_type_with_too_big_name(self): - self.assertRaises( - lib_exc.BadRequest, - self.create_share_group_type, - "x" * 256, client=self.admin_shares_v2_client) - - @tc.attr(base.TAG_NEGATIVE, base.TAG_API) - def test_create_share_group_type_with_wrong_value_for_group_specs(self): - self.assertRaises( - lib_exc.BadRequest, - self.admin_shares_v2_client.create_share_group_type, - name=data_utils.rand_name("tempest_manila"), - share_types=[self.share_type['share_type']['id']], - group_specs="expecting_error_code_400") - - @tc.attr(base.TAG_NEGATIVE, base.TAG_API) - def test_get_share_group_type_using_nonexistent_id(self): - self.assertRaises( - lib_exc.NotFound, - self.admin_shares_v2_client.get_share_group_type, - data_utils.rand_name("fake")) - - @tc.attr(base.TAG_NEGATIVE, base.TAG_API) - def test_try_delete_share_group_type_using_nonexistent_id(self): - self.assertRaises( - lib_exc.NotFound, - self.admin_shares_v2_client.delete_share_group_type, - data_utils.rand_name("fake")) - - @tc.attr(base.TAG_NEGATIVE, base.TAG_API) - def test_try_create_duplicate_of_share_group_type(self): - unique_name = data_utils.rand_name("unique_sgt_name") - list_of_ids = set() - for step in (1, 2): - sg_type = self.create_share_group_type( - unique_name, - share_types=[self.share_type['share_type']['id']], - client=self.admin_shares_v2_client, - cleanup_in_class=False) - self.assertRaises( - lib_exc.Conflict, - self.create_share_group_type, - unique_name, - share_types=[self.share_type['share_type']['id']], - client=self.admin_shares_v2_client) - list_of_ids.add(sg_type['id']) - self.assertEqual(unique_name, sg_type['name']) - 
self.admin_shares_v2_client.delete_share_group_type(sg_type['id']) - self.assertEqual(2, len(list_of_ids)) - - @tc.attr(base.TAG_NEGATIVE, base.TAG_API) - def test_add_project_access_to_public_share_group_type(self): - self.assertRaises( - lib_exc.Conflict, - self.admin_shares_v2_client.add_access_to_share_group_type, - self.share_group_type["id"], - self.admin_shares_v2_client.tenant_id) - - @tc.attr(base.TAG_NEGATIVE, base.TAG_API) - def test_remove_project_access_from_public_share_group_type(self): - self.assertRaises( - lib_exc.Conflict, - self.admin_shares_v2_client.remove_access_from_share_group_type, - self.share_group_type["id"], - self.admin_shares_v2_client.tenant_id) - - @tc.attr(base.TAG_NEGATIVE, base.TAG_API) - def test_add_project_access_to_nonexistent_share_group_type(self): - self.assertRaises( - lib_exc.NotFound, - self.admin_shares_v2_client.add_access_to_share_group_type, - data_utils.rand_name("fake"), - self.admin_shares_v2_client.tenant_id) - - @tc.attr(base.TAG_NEGATIVE, base.TAG_API) - def test_remove_project_access_from_nonexistent_share_group_type(self): - self.assertRaises( - lib_exc.NotFound, - self.admin_shares_v2_client.remove_access_from_share_group_type, - data_utils.rand_name("fake"), - self.admin_shares_v2_client.tenant_id) diff --git a/manila_tempest_tests/tests/api/admin/test_share_groups.py b/manila_tempest_tests/tests/api/admin/test_share_groups.py deleted file mode 100644 index dc1f2aea9c..0000000000 --- a/manila_tempest_tests/tests/api/admin/test_share_groups.py +++ /dev/null @@ -1,191 +0,0 @@ -# Copyright 2016 Andrew Kerr -# All Rights Reserved. -# -# Licensed under the Apache License, Version 2.0 (the "License"); you may -# not use this file except in compliance with the License. You may obtain -# a copy of the License at -# -# http://www.apache.org/licenses/LICENSE-2.0 -# -# Unless required by applicable law or agreed to in writing, software -# distributed under the License is distributed on an "AS IS" BASIS, WITHOUT -# WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. See the -# License for the specific language governing permissions and limitations -# under the License. 
- -from tempest import config -from tempest.lib.common.utils import data_utils -import testtools -from testtools import testcase as tc - -from manila_tempest_tests.common import constants -from manila_tempest_tests.tests.api import base - -CONF = config.CONF - - -@testtools.skipUnless( - CONF.share.run_share_group_tests, 'Share Group tests disabled.') -@base.skip_if_microversion_lt(constants.MIN_SHARE_GROUP_MICROVERSION) -class ShareGroupsTest(base.BaseSharesAdminTest): - - @classmethod - def resource_setup(cls): - super(ShareGroupsTest, cls).resource_setup() - # Create 2 share_types - name = data_utils.rand_name("tempest-manila") - extra_specs = cls.add_extra_specs_to_dict() - share_type = cls.create_share_type(name, extra_specs=extra_specs) - cls.share_type = share_type['share_type'] - - name = data_utils.rand_name("tempest-manila") - share_type = cls.create_share_type(name, extra_specs=extra_specs) - cls.share_type2 = share_type['share_type'] - - cls.sg_type = cls.create_share_group_type( - name=name, - share_types=[cls.share_type['id'], cls.share_type2['id']], - cleanup_in_class=True, - version=constants.MIN_SHARE_GROUP_MICROVERSION) - - @tc.attr(base.TAG_POSITIVE, base.TAG_API_WITH_BACKEND) - def test_create_share_group_with_single_share_type_min(self): - share_group = self.create_share_group( - share_group_type_id=self.sg_type['id'], - cleanup_in_class=False, - share_type_ids=[self.share_type['id']], - version=constants.MIN_SHARE_GROUP_MICROVERSION) - - keys = set(share_group.keys()) - self.assertTrue( - constants.SHARE_GROUP_DETAIL_REQUIRED_KEYS.issubset(keys), - 'At least one expected element missing from share group ' - 'response. Expected %(expected)s, got %(actual)s.' % { - "expected": constants.SHARE_GROUP_DETAIL_REQUIRED_KEYS, - "actual": keys}) - - actual_sg_type = share_group['share_group_type_id'] - expected_sg_type = self.sg_type['id'] - self.assertEqual( - expected_sg_type, actual_sg_type, - 'Incorrect share group type applied to share group ' - '%s. Expected %s, got %s' % ( - share_group['id'], expected_sg_type, actual_sg_type)) - - actual_share_types = share_group['share_types'] - expected_share_types = [self.share_type['id']] - self.assertEqual( - sorted(expected_share_types), - sorted(actual_share_types), - 'Incorrect share types applied to share group %s. ' - 'Expected %s, got %s' % ( - share_group['id'], expected_share_types, actual_share_types)) - - @tc.attr(base.TAG_POSITIVE, base.TAG_API_WITH_BACKEND) - def test_create_share_group_with_multiple_share_types_min(self): - share_group = self.create_share_group( - share_group_type_id=self.sg_type['id'], - cleanup_in_class=False, - share_type_ids=[self.share_type['id'], self.share_type2['id']], - version=constants.MIN_SHARE_GROUP_MICROVERSION) - - keys = set(share_group.keys()) - self.assertTrue( - constants.SHARE_GROUP_DETAIL_REQUIRED_KEYS.issubset(keys), - 'At least one expected element missing from share group ' - 'response. Expected %(expected)s, got %(actual)s.' % { - "expected": constants.SHARE_GROUP_DETAIL_REQUIRED_KEYS, - "actual": keys}) - - actual_sg_type = share_group['share_group_type_id'] - expected_sg_type = self.sg_type['id'] - self.assertEqual( - expected_sg_type, actual_sg_type, - 'Incorrect share group type applied to share group %s. 
' - 'Expected %s, got %s' % ( - share_group['id'], expected_sg_type, actual_sg_type)) - - actual_share_types = share_group['share_types'] - expected_share_types = [self.share_type['id'], self.share_type2['id']] - self.assertEqual( - sorted(expected_share_types), - sorted(actual_share_types), - 'Incorrect share types applied to share group %s. ' - 'Expected %s, got %s' % ( - share_group['id'], expected_share_types, actual_share_types)) - - @tc.attr(base.TAG_POSITIVE, base.TAG_API_WITH_BACKEND) - def test_default_share_group_type_applied(self): - default_type = self.shares_v2_client.get_default_share_group_type() - default_share_types = default_type['share_types'] - - share_group = self.create_share_group( - cleanup_in_class=False, - share_type_ids=default_share_types, - version=constants.MIN_SHARE_GROUP_MICROVERSION) - - keys = set(share_group.keys()) - self.assertTrue( - constants.SHARE_GROUP_DETAIL_REQUIRED_KEYS.issubset(keys), - 'At least one expected element missing from share group ' - 'response. Expected %(expected)s, got %(actual)s.' % { - "expected": constants.SHARE_GROUP_DETAIL_REQUIRED_KEYS, - "actual": keys}) - - actual_sg_type = share_group['share_group_type_id'] - expected_sg_type = default_type['id'] - self.assertEqual( - expected_sg_type, actual_sg_type, - 'Incorrect share group type applied to share group %s. ' - 'Expected %s, got %s' % ( - share_group['id'], expected_sg_type, actual_sg_type)) - - @testtools.skipUnless( - CONF.share.multitenancy_enabled, "Only for multitenancy.") - @tc.attr(base.TAG_POSITIVE, base.TAG_API_WITH_BACKEND) - def test_create_sg_from_snapshot_verify_share_server_information_min(self): - # Create a share group - orig_sg = self.create_share_group( - share_group_type_id=self.sg_type['id'], - cleanup_in_class=False, - share_type_ids=[self.share_type['id']], - version=constants.MIN_SHARE_GROUP_MICROVERSION) - - # Get latest share group information - orig_sg = self.shares_v2_client.get_share_group( - orig_sg['id'], version=constants.MIN_SHARE_GROUP_MICROVERSION) - - # Assert share server information - self.assertIsNotNone(orig_sg['share_network_id']) - self.assertIsNotNone(orig_sg['share_server_id']) - - sg_snapshot = self.create_share_group_snapshot_wait_for_active( - orig_sg['id'], cleanup_in_class=False, - version=constants.MIN_SHARE_GROUP_MICROVERSION) - new_sg = self.create_share_group( - share_group_type_id=self.sg_type['id'], - cleanup_in_class=False, - version=constants.MIN_SHARE_GROUP_MICROVERSION, - source_share_group_snapshot_id=sg_snapshot['id']) - - # Assert share server information - self.assertEqual( - orig_sg['share_network_id'], new_sg['share_network_id']) - self.assertEqual( - orig_sg['share_server_id'], new_sg['share_server_id']) - - @tc.attr(base.TAG_POSITIVE, base.TAG_API_WITH_BACKEND) - def test_create_sg_with_sg_type_but_without_any_group_specs(self): - # Create share group type not specifying any group specs - sg_type = self.create_share_group_type( - name=data_utils.rand_name("tempest-manila"), - share_types=[self.share_type['id']], - group_specs={}, - cleanup_in_class=False) - - # Create share group, it should be created always, because we do not - # restrict choice anyhow. 
- self.create_share_group( - share_type_ids=[self.share_type['id']], - share_group_type_id=sg_type['id'], - cleanup_in_class=False) diff --git a/manila_tempest_tests/tests/api/admin/test_share_groups_negative.py b/manila_tempest_tests/tests/api/admin/test_share_groups_negative.py deleted file mode 100644 index 979e6a43a3..0000000000 --- a/manila_tempest_tests/tests/api/admin/test_share_groups_negative.py +++ /dev/null @@ -1,56 +0,0 @@ -# Copyright 2017 Mirantis Inc. -# All Rights Reserved. -# -# Licensed under the Apache License, Version 2.0 (the "License"); you may -# not use this file except in compliance with the License. You may obtain -# a copy of the License at -# -# http://www.apache.org/licenses/LICENSE-2.0 -# -# Unless required by applicable law or agreed to in writing, software -# distributed under the License is distributed on an "AS IS" BASIS, WITHOUT -# WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. See the -# License for the specific language governing permissions and limitations -# under the License. - -from tempest import config -from tempest.lib.common.utils import data_utils -import testtools -from testtools import testcase as tc - -from manila_tempest_tests.common import constants -from manila_tempest_tests import share_exceptions -from manila_tempest_tests.tests.api import base - -CONF = config.CONF - - -@testtools.skipUnless( - CONF.share.run_share_group_tests, 'Share Group tests disabled.') -@base.skip_if_microversion_lt(constants.MIN_SHARE_GROUP_MICROVERSION) -class ShareGroupsNegativeTest(base.BaseSharesAdminTest): - - @tc.attr(base.TAG_NEGATIVE, base.TAG_API_WITH_BACKEND) - def test_create_share_group_with_wrong_consistent_snapshot_spec(self): - # Create valid share type for share group type - name = data_utils.rand_name("tempest-manila") - extra_specs = self.add_extra_specs_to_dict() - st = self.create_share_type(name, extra_specs=extra_specs) - share_type = st['share_type'] if 'share_type' in st else st - - # Create share group type with wrong value for - # 'consistent_snapshot_support' capability, we always expect - # NoValidHostFound using this SG type. - sg_type = self.create_share_group_type( - name=name, - share_types=[share_type['id']], - group_specs={"consistent_snapshot_support": "fake"}, - cleanup_in_class=False) - - # Try create share group - self.assertRaises( - share_exceptions.ShareGroupBuildErrorException, - self.create_share_group, - share_type_ids=[share_type['id']], - share_group_type_id=sg_type['id'], - cleanup_in_class=False) diff --git a/manila_tempest_tests/tests/api/admin/test_share_instances.py b/manila_tempest_tests/tests/api/admin/test_share_instances.py deleted file mode 100644 index 479850cbab..0000000000 --- a/manila_tempest_tests/tests/api/admin/test_share_instances.py +++ /dev/null @@ -1,114 +0,0 @@ -# Copyright 2015 Andrew Kerr -# All Rights Reserved. -# -# Licensed under the Apache License, Version 2.0 (the "License"); you may -# not use this file except in compliance with the License. You may obtain -# a copy of the License at -# -# http://www.apache.org/licenses/LICENSE-2.0 -# -# Unless required by applicable law or agreed to in writing, software -# distributed under the License is distributed on an "AS IS" BASIS, WITHOUT -# WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. See the -# License for the specific language governing permissions and limitations -# under the License. 
- -import ddt -from testtools import testcase as tc - -from manila_tempest_tests.tests.api import base -from manila_tempest_tests import utils - - -@ddt.ddt -class ShareInstancesTest(base.BaseSharesAdminTest): - - @classmethod - def resource_setup(cls): - super(ShareInstancesTest, cls).resource_setup() - cls.share = cls.create_share() - - @tc.attr(base.TAG_POSITIVE, base.TAG_API_WITH_BACKEND) - def test_get_instances_of_share_v2_3(self): - """Test that we get only the 1 share instance back for the share.""" - share_instances = self.shares_v2_client.get_instances_of_share( - self.share['id'], version='2.3' - ) - - self.assertEqual(1, len(share_instances), - 'Too many share instances found; expected 1, ' - 'found %s' % len(share_instances)) - - si = share_instances[0] - self.assertEqual(self.share['id'], si['share_id'], - 'Share instance %s has incorrect share id value; ' - 'expected %s, got %s.' % (si['id'], - self.share['id'], - si['share_id'])) - - @tc.attr(base.TAG_POSITIVE, base.TAG_API_WITH_BACKEND) - def test_list_share_instances_v2_3(self): - """Test that we list the share instance back.""" - share_instances = self.shares_v2_client.list_share_instances( - version='2.3' - ) - - share_ids = [si['share_id'] for si in share_instances] - - msg = 'Share instance for share %s was not found.' % self.share['id'] - self.assertIn(self.share['id'], share_ids, msg) - - @tc.attr(base.TAG_POSITIVE, base.TAG_API_WITH_BACKEND) - @ddt.data('2.3', '2.9', '2.10', '2.30') - def test_get_share_instance(self, version): - """Test that we get the proper keys back for the instance.""" - share_instances = self.shares_v2_client.get_instances_of_share( - self.share['id'], version=version, - ) - - si = self.shares_v2_client.get_share_instance( - share_instances[0]['id'], version=version) - - expected_keys = [ - 'host', 'share_id', 'id', 'share_network_id', 'status', - 'availability_zone', 'share_server_id', 'created_at', - ] - if utils.is_microversion_lt(version, '2.9'): - expected_keys.extend(["export_location", "export_locations"]) - if utils.is_microversion_ge(version, '2.10'): - expected_keys.append("access_rules_status") - if utils.is_microversion_ge(version, '2.11'): - expected_keys.append("replica_state") - if utils.is_microversion_ge(version, '2.22'): - expected_keys.append("share_type_id") - if utils.is_microversion_ge(version, '2.30'): - expected_keys.append("cast_rules_to_readonly") - expected_keys = sorted(expected_keys) - actual_keys = sorted(si.keys()) - self.assertEqual(expected_keys, actual_keys, - 'Share instance %s returned incorrect keys; ' - 'expected %s, got %s.' 
% ( - si['id'], expected_keys, actual_keys)) - - @ddt.data('path', 'id') - @tc.attr(base.TAG_POSITIVE, base.TAG_API_WITH_BACKEND) - @base.skip_if_microversion_lt("2.35") - def test_list_share_instances_with_export_location_path_and_id( - self, export_location_type): - share_instances_except = ( - self.shares_v2_client.get_instances_of_share( - self.share['id'])) - export_locations = ( - self.shares_v2_client.list_share_instance_export_locations( - share_instances_except[0]['id'])) - - filters = { - 'export_location_' + export_location_type: - export_locations[0][export_location_type], - } - share_instances = self.shares_v2_client.list_share_instances( - params=filters) - - self.assertEqual(1, len(share_instances)) - self.assertEqual(share_instances_except[0]['id'], - share_instances[0]['id']) diff --git a/manila_tempest_tests/tests/api/admin/test_share_instances_negative.py b/manila_tempest_tests/tests/api/admin/test_share_instances_negative.py deleted file mode 100644 index b52a8a1b3f..0000000000 --- a/manila_tempest_tests/tests/api/admin/test_share_instances_negative.py +++ /dev/null @@ -1,54 +0,0 @@ -# Licensed under the Apache License, Version 2.0 (the "License"); you may -# not use this file except in compliance with the License. You may obtain -# a copy of the License at -# -# http://www.apache.org/licenses/LICENSE-2.0 -# -# Unless required by applicable law or agreed to in writing, software -# distributed under the License is distributed on an "AS IS" BASIS, WITHOUT -# WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. See the -# License for the specific language governing permissions and limitations -# under the License. - -import ddt -from testtools import testcase as tc - -from manila_tempest_tests.tests.api import base - - -@ddt.ddt -class ShareInstancesNegativeTest(base.BaseSharesAdminTest): - - @classmethod - def resource_setup(cls): - super(ShareInstancesNegativeTest, cls).resource_setup() - cls.share = cls.create_share() - - @tc.attr(base.TAG_NEGATIVE, base.TAG_API_WITH_BACKEND) - @base.skip_if_microversion_not_supported("2.34") - @ddt.data('path', 'id') - def test_list_share_instances_with_export_location_and_invalid_version( - self, export_location_type): - # In API versions 2 else False - no_lock_required = kwargs.get( - "isolated_creds_client", with_isolated_creds) - if no_lock_required: - # Usage of not reusable network. No need in lock. - return f(self, *args, **kwargs) - - # Use lock assuming reusage of common network. 
- @lockutils.synchronized("manila_network_lock", external=True) - def source_func(self, *args, **kwargs): - return f(self, *args, **kwargs) - - return source_func(self, *args, **kwargs) - - return wrapped_func - - -skip_if_microversion_not_supported = utils.skip_if_microversion_not_supported -skip_if_microversion_lt = utils.skip_if_microversion_lt - - -class BaseSharesTest(test.BaseTestCase): - """Base test case class for all Manila API tests.""" - - credentials = ('primary', ) - force_tenant_isolation = False - protocols = ["nfs", "cifs", "glusterfs", "hdfs", "cephfs", "maprfs"] - - # Will be cleaned up in resource_cleanup - class_resources = [] - - # Will be cleaned up in tearDown method - method_resources = [] - - # Will be cleaned up in resource_cleanup - class_isolated_creds = [] - - # Will be cleaned up in tearDown method - method_isolated_creds = [] - - # NOTE(andreaf) Override the client manager class to be used, so that - # a stable class is used, which includes plugin registered services as well - client_manager = clients.Clients - - def skip_if_microversion_not_supported(self, microversion): - if not utils.is_microversion_supported(microversion): - raise self.skipException( - "Microversion '%s' is not supported." % microversion) - - def skip_if_microversion_lt(self, microversion): - if utils.is_microversion_lt(CONF.share.max_api_microversion, - microversion): - raise self.skipException( - "Microversion must be greater than or equal to '%s'." % - microversion) - - @classmethod - def _get_dynamic_creds(cls, name, network_resources=None): - identity_version = CONF.identity.auth_version - if identity_version == 'v3': - identity_uri = CONF.identity.uri_v3 - identity_admin_endpoint_type = CONF.identity.v3_endpoint_type - elif identity_version == 'v2': - identity_uri = CONF.identity.uri - identity_admin_endpoint_type = CONF.identity.v2_admin_endpoint_type - - return dynamic_creds.DynamicCredentialProvider( - identity_version=identity_version, - name=name, - network_resources=network_resources, - credentials_domain=CONF.auth.default_credentials_domain_name, - admin_role=CONF.identity.admin_role, - admin_creds=common_creds.get_configured_admin_credentials(), - identity_admin_domain_scope=CONF.identity.admin_domain_scope, - identity_admin_role=CONF.identity.admin_role, - extra_roles=None, - neutron_available=CONF.service_available.neutron, - create_networks=( - CONF.share.create_networks_when_multitenancy_enabled), - project_network_cidr=CONF.network.project_network_cidr, - project_network_mask_bits=CONF.network.project_network_mask_bits, - public_network_id=CONF.network.public_network_id, - resource_prefix='tempest', - identity_admin_endpoint_type=identity_admin_endpoint_type, - identity_uri=identity_uri) - - @classmethod - def get_client_with_isolated_creds(cls, - name=None, - type_of_creds="admin", - cleanup_in_class=False, - client_version='1'): - """Creates isolated creds. - - :param name: name, will be used for naming ic and related stuff - :param type_of_creds: admin, alt or primary - :param cleanup_in_class: defines place where to delete - :returns: SharesClient -- shares client with isolated creds. - :returns: To client added dict attr 'creds' with - :returns: key elements 'tenant' and 'user'. 
- """ - if name is None: - # Get name of test method - name = inspect.stack()[1][3] - if len(name) > 32: - name = name[0:32] - - # Choose type of isolated creds - ic = cls._get_dynamic_creds(name) - if "admin" in type_of_creds: - creds = ic.get_admin_creds().credentials - elif "alt" in type_of_creds: - creds = ic.get_alt_creds().credentials - else: - creds = ic.get_credentials(type_of_creds).credentials - ic.type_of_creds = type_of_creds - - # create client with isolated creds - os = clients.Clients(creds) - if client_version == '1': - client = os.share_v1.SharesClient() - elif client_version == '2': - client = os.share_v2.SharesV2Client() - - # Set place where will be deleted isolated creds - ic_res = { - "method": ic.clear_creds, - "deleted": False, - } - if cleanup_in_class: - cls.class_isolated_creds.insert(0, ic_res) - else: - cls.method_isolated_creds.insert(0, ic_res) - - # Provide share network - if CONF.share.multitenancy_enabled: - if (not CONF.service_available.neutron and - CONF.share.create_networks_when_multitenancy_enabled): - raise cls.skipException("Neutron support is required") - nc = os.network.NetworksClient() - share_network_id = cls.provide_share_network(client, nc, ic) - client.share_network_id = share_network_id - resource = { - "type": "share_network", - "id": client.share_network_id, - "client": client, - } - if cleanup_in_class: - cls.class_resources.insert(0, resource) - else: - cls.method_resources.insert(0, resource) - return client - - @classmethod - def skip_checks(cls): - super(BaseSharesTest, cls).skip_checks() - if not CONF.service_available.manila: - raise cls.skipException("Manila support is required") - - @classmethod - def verify_nonempty(cls, *args): - if not all(args): - msg = "Missing API credentials in configuration." 
- raise cls.skipException(msg) - - @classmethod - def setup_clients(cls): - super(BaseSharesTest, cls).setup_clients() - os = getattr(cls, 'os_%s' % cls.credentials[0]) - # Initialise share clients for test credentials - cls.shares_client = os.share_v1.SharesClient() - cls.shares_v2_client = os.share_v2.SharesV2Client() - # Initialise network clients for test credentials - if CONF.service_available.neutron: - cls.networks_client = os.network.NetworksClient() - cls.subnets_client = os.network.SubnetsClient() - else: - cls.networks_client = None - cls.subnets_client = None - - if CONF.identity.auth_version == 'v3': - project_id = os.auth_provider.auth_data[1]['project']['id'] - else: - project_id = os.auth_provider.auth_data[1]['token']['tenant']['id'] - cls.tenant_id = project_id - cls.user_id = os.auth_provider.auth_data[1]['user']['id'] - - if CONF.share.multitenancy_enabled: - if (not CONF.service_available.neutron and - CONF.share.create_networks_when_multitenancy_enabled): - raise cls.skipException("Neutron support is required") - share_network_id = cls.provide_share_network( - cls.shares_v2_client, cls.networks_client) - cls.shares_client.share_network_id = share_network_id - cls.shares_v2_client.share_network_id = share_network_id - - @classmethod - def resource_setup(cls): - if not (any(p in CONF.share.enable_protocols - for p in cls.protocols) and - CONF.service_available.manila): - skip_msg = "Manila is disabled" - raise cls.skipException(skip_msg) - super(BaseSharesTest, cls).resource_setup() - - def setUp(self): - super(BaseSharesTest, self).setUp() - self.addCleanup(self.clear_isolated_creds) - self.addCleanup(self.clear_resources) - verify_test_has_appropriate_tags(self) - - @classmethod - def resource_cleanup(cls): - cls.clear_resources(cls.class_resources) - cls.clear_isolated_creds(cls.class_isolated_creds) - super(BaseSharesTest, cls).resource_cleanup() - - @classmethod - @network_synchronized - def provide_share_network(cls, shares_client, networks_client, - isolated_creds_client=None, - ignore_multitenancy_config=False): - """Used for finding/creating share network for multitenant driver. - - This method creates/gets entity share-network for one tenant. This - share-network will be used for creation of service vm. - - :param shares_client: shares client, which requires share-network - :param networks_client: network client from same tenant as shares - :param isolated_creds_client: DynamicCredentialProvider instance - If provided, then its networking will be used if needed. - If not provided, then common network will be used if needed. - :param ignore_multitenancy_config: provide a share network regardless - of 'multitenancy_enabled' configuration value. 
- :returns: str -- share network id for shares_client tenant - :returns: None -- if single-tenant driver used - """ - - sc = shares_client - search_word = "reusable" - sn_name = "autogenerated_by_tempest_%s" % search_word - - if (not ignore_multitenancy_config and - not CONF.share.multitenancy_enabled): - # Assumed usage of a single-tenant driver - share_network_id = None - else: - if sc.share_network_id: - # Share-network already exists, use it - share_network_id = sc.share_network_id - elif not CONF.share.create_networks_when_multitenancy_enabled: - share_network_id = None - - # Try get suitable share-network - share_networks = sc.list_share_networks_with_detail() - for sn in share_networks: - if (sn["neutron_net_id"] is None and - sn["neutron_subnet_id"] is None and - sn["name"] and search_word in sn["name"]): - share_network_id = sn["id"] - break - - # Create new share-network if one was not found - if share_network_id is None: - sn_desc = "This share-network was created by tempest" - sn = sc.create_share_network(name=sn_name, - description=sn_desc) - share_network_id = sn["id"] - else: - net_id = subnet_id = share_network_id = None - - if not isolated_creds_client: - # Search for networks, created in previous runs - service_net_name = "share-service" - networks = networks_client.list_networks() - if "networks" in networks.keys(): - networks = networks["networks"] - for network in networks: - if (service_net_name in network["name"] and - sc.tenant_id == network['tenant_id']): - net_id = network["id"] - if len(network["subnets"]) > 0: - subnet_id = network["subnets"][0] - break - - # Create suitable network - if net_id is None or subnet_id is None: - ic = cls._get_dynamic_creds(service_net_name) - net_data = ic._create_network_resources(sc.tenant_id) - network, subnet, router = net_data - net_id = network["id"] - subnet_id = subnet["id"] - - # Try get suitable share-network - share_networks = sc.list_share_networks_with_detail() - for sn in share_networks: - if (net_id == sn["neutron_net_id"] and - subnet_id == sn["neutron_subnet_id"] and - sn["name"] and search_word in sn["name"]): - share_network_id = sn["id"] - break - else: - sn_name = "autogenerated_by_tempest_for_isolated_creds" - # Use precreated network and subnet from isolated creds - net_id = isolated_creds_client.get_credentials( - isolated_creds_client.type_of_creds).network['id'] - subnet_id = isolated_creds_client.get_credentials( - isolated_creds_client.type_of_creds).subnet['id'] - - # Create suitable share-network - if share_network_id is None: - sn_desc = "This share-network was created by tempest" - sn = sc.create_share_network(name=sn_name, - description=sn_desc, - neutron_net_id=net_id, - neutron_subnet_id=subnet_id) - share_network_id = sn["id"] - - return share_network_id - - @classmethod - def _create_share(cls, share_protocol=None, size=None, name=None, - snapshot_id=None, description=None, metadata=None, - share_network_id=None, share_type_id=None, - share_group_id=None, client=None, - cleanup_in_class=True, is_public=False, **kwargs): - client = client or cls.shares_v2_client - description = description or "Tempest's share" - share_network_id = (share_network_id or - CONF.share.share_network_id or - client.share_network_id or None) - metadata = metadata or {} - size = size or CONF.share.share_size - kwargs.update({ - 'share_protocol': share_protocol, - 'size': size, - 'name': name, - 'snapshot_id': snapshot_id, - 'description': description, - 'metadata': metadata, - 'share_network_id': share_network_id, - 
'share_type_id': share_type_id, - 'is_public': is_public, - }) - if share_group_id: - kwargs['share_group_id'] = share_group_id - - share = client.create_share(**kwargs) - resource = {"type": "share", "id": share["id"], "client": client, - "share_group_id": share_group_id} - cleanup_list = (cls.class_resources if cleanup_in_class else - cls.method_resources) - cleanup_list.insert(0, resource) - return share - - @classmethod - def migrate_share( - cls, share_id, dest_host, wait_for_status, client=None, - force_host_assisted_migration=False, writable=False, - nondisruptive=False, preserve_metadata=False, - preserve_snapshots=False, new_share_network_id=None, - new_share_type_id=None, **kwargs): - client = client or cls.shares_v2_client - client.migrate_share( - share_id, dest_host, - force_host_assisted_migration=force_host_assisted_migration, - writable=writable, preserve_metadata=preserve_metadata, - nondisruptive=nondisruptive, preserve_snapshots=preserve_snapshots, - new_share_network_id=new_share_network_id, - new_share_type_id=new_share_type_id, **kwargs) - share = client.wait_for_migration_status( - share_id, dest_host, wait_for_status, **kwargs) - return share - - @classmethod - def migration_complete(cls, share_id, dest_host, client=None, **kwargs): - client = client or cls.shares_v2_client - client.migration_complete(share_id, **kwargs) - share = client.wait_for_migration_status( - share_id, dest_host, 'migration_success', **kwargs) - return share - - @classmethod - def migration_cancel(cls, share_id, dest_host, client=None, **kwargs): - client = client or cls.shares_v2_client - client.migration_cancel(share_id, **kwargs) - share = client.wait_for_migration_status( - share_id, dest_host, 'migration_cancelled', **kwargs) - return share - - @classmethod - def create_share(cls, *args, **kwargs): - """Create one share and wait for available state. Retry if allowed.""" - result = cls.create_shares([{"args": args, "kwargs": kwargs}]) - return result[0] - - @classmethod - def create_shares(cls, share_data_list): - """Creates several shares in parallel with retries. - - Use this method when you want to create more than one share at same - time. Especially if config option 'share.share_creation_retry_number' - has value more than zero (0). - All shares will be expected to have 'available' status with or without - recreation else error will be raised. - - :param share_data_list: list -- list of dictionaries with 'args' and - 'kwargs' for '_create_share' method of this base class. - example of data: - share_data_list=[{'args': ['quuz'], 'kwargs': {'foo': 'bar'}}}] - :returns: list -- list of shares created using provided data. - """ - - for d in share_data_list: - if not isinstance(d, dict): - raise exceptions.TempestException( - "Expected 'dict', got '%s'" % type(d)) - if "args" not in d: - d["args"] = [] - if "kwargs" not in d: - d["kwargs"] = {} - if len(d) > 2: - raise exceptions.TempestException( - "Expected only 'args' and 'kwargs' keys. 
" - "Provided %s" % list(d)) - - data = [] - for d in share_data_list: - client = d["kwargs"].pop("client", cls.shares_v2_client) - wait_for_status = d["kwargs"].pop("wait_for_status", True) - local_d = { - "args": d["args"], - "kwargs": copy.deepcopy(d["kwargs"]), - } - local_d["kwargs"]["client"] = client - local_d["share"] = cls._create_share( - *local_d["args"], **local_d["kwargs"]) - local_d["cnt"] = 0 - local_d["available"] = False - local_d["wait_for_status"] = wait_for_status - data.append(local_d) - - while not all(d["available"] for d in data): - for d in data: - if not d["wait_for_status"]: - d["available"] = True - if d["available"]: - continue - client = d["kwargs"]["client"] - share_id = d["share"]["id"] - try: - client.wait_for_share_status(share_id, "available") - d["available"] = True - except (share_exceptions.ShareBuildErrorException, - exceptions.TimeoutException) as e: - if CONF.share.share_creation_retry_number > d["cnt"]: - d["cnt"] += 1 - msg = ("Share '%s' failed to be built. " - "Trying create another." % share_id) - LOG.error(msg) - LOG.error(e) - cg_id = d["kwargs"].get("consistency_group_id") - if cg_id: - # NOTE(vponomaryov): delete errored share - # immediately in case share is part of CG. - client.delete_share( - share_id, - params={"consistency_group_id": cg_id}) - client.wait_for_resource_deletion( - share_id=share_id) - d["share"] = cls._create_share( - *d["args"], **d["kwargs"]) - else: - raise - - return [d["share"] for d in data] - - @classmethod - def create_share_group(cls, client=None, cleanup_in_class=True, - share_network_id=None, **kwargs): - client = client or cls.shares_v2_client - if kwargs.get('source_share_group_snapshot_id') is None: - kwargs['share_network_id'] = (share_network_id or - client.share_network_id or None) - share_group = client.create_share_group(**kwargs) - resource = { - "type": "share_group", - "id": share_group["id"], - "client": client, - } - if cleanup_in_class: - cls.class_resources.insert(0, resource) - else: - cls.method_resources.insert(0, resource) - - if kwargs.get('source_share_group_snapshot_id'): - new_share_group_shares = client.list_shares( - detailed=True, - params={'share_group_id': share_group['id']}, - experimental=True) - - for share in new_share_group_shares: - resource = {"type": "share", - "id": share["id"], - "client": client, - "share_group_id": share.get("share_group_id")} - if cleanup_in_class: - cls.class_resources.insert(0, resource) - else: - cls.method_resources.insert(0, resource) - - client.wait_for_share_group_status(share_group['id'], 'available') - return share_group - - @classmethod - def create_share_group_type(cls, name=None, share_types=(), is_public=None, - group_specs=None, client=None, - cleanup_in_class=True, **kwargs): - client = client or cls.shares_v2_client - if (group_specs is None and - CONF.share.capability_sg_consistent_snapshot_support): - group_specs = { - 'consistent_snapshot_support': ( - CONF.share.capability_sg_consistent_snapshot_support), - } - share_group_type = client.create_share_group_type( - name=name, - share_types=share_types, - is_public=is_public, - group_specs=group_specs, - **kwargs) - resource = { - "type": "share_group_type", - "id": share_group_type["id"], - "client": client, - } - if cleanup_in_class: - cls.class_resources.insert(0, resource) - else: - cls.method_resources.insert(0, resource) - return share_group_type - - @classmethod - def create_snapshot_wait_for_active(cls, share_id, name=None, - description=None, force=False, - client=None, 
cleanup_in_class=True): - if client is None: - client = cls.shares_v2_client - if description is None: - description = "Tempest's snapshot" - snapshot = client.create_snapshot(share_id, name, description, force) - resource = { - "type": "snapshot", - "id": snapshot["id"], - "client": client, - } - if cleanup_in_class: - cls.class_resources.insert(0, resource) - else: - cls.method_resources.insert(0, resource) - client.wait_for_snapshot_status(snapshot["id"], "available") - return snapshot - - @classmethod - def create_share_group_snapshot_wait_for_active( - cls, share_group_id, name=None, description=None, client=None, - cleanup_in_class=True, **kwargs): - client = client or cls.shares_v2_client - if description is None: - description = "Tempest's share group snapshot" - sg_snapshot = client.create_share_group_snapshot( - share_group_id, name=name, description=description, **kwargs) - resource = { - "type": "share_group_snapshot", - "id": sg_snapshot["id"], - "client": client, - } - if cleanup_in_class: - cls.class_resources.insert(0, resource) - else: - cls.method_resources.insert(0, resource) - client.wait_for_share_group_snapshot_status( - sg_snapshot["id"], "available") - return sg_snapshot - - @classmethod - def get_availability_zones(cls, client=None): - """List the availability zones for "manila-share" services - - that are currently in "up" state. - """ - client = client or cls.shares_v2_client - cls.services = client.list_services() - zones = [service['zone'] for service in cls.services if - service['binary'] == "manila-share" and - service['state'] == 'up'] - return zones - - def get_pools_for_replication_domain(self): - # Get the list of pools for the replication domain - pools = self.admin_client.list_pools(detail=True)['pools'] - instance_host = self.admin_client.get_share( - self.shares[0]['id'])['host'] - host_pool = [p for p in pools if p['name'] == instance_host][0] - rep_domain = host_pool['capabilities']['replication_domain'] - pools_in_rep_domain = [p for p in pools if p['capabilities'][ - 'replication_domain'] == rep_domain] - return rep_domain, pools_in_rep_domain - - @classmethod - def create_share_replica(cls, share_id, availability_zone, client=None, - cleanup_in_class=False, cleanup=True): - client = client or cls.shares_v2_client - replica = client.create_share_replica(share_id, availability_zone) - resource = { - "type": "share_replica", - "id": replica["id"], - "client": client, - "share_id": share_id, - } - # NOTE(Yogi1): Cleanup needs to be disabled during promotion tests. - if cleanup: - if cleanup_in_class: - cls.class_resources.insert(0, resource) - else: - cls.method_resources.insert(0, resource) - client.wait_for_share_replica_status( - replica["id"], constants.STATUS_AVAILABLE) - return replica - - @classmethod - def delete_share_replica(cls, replica_id, client=None): - client = client or cls.shares_v2_client - try: - client.delete_share_replica(replica_id) - client.wait_for_resource_deletion(replica_id=replica_id) - except exceptions.NotFound: - pass - - @classmethod - def promote_share_replica(cls, replica_id, client=None): - client = client or cls.shares_v2_client - replica = client.promote_share_replica(replica_id) - client.wait_for_share_replica_status( - replica["id"], - constants.REPLICATION_STATE_ACTIVE, - status_attr="replica_state") - return replica - - def _get_access_rule_data_from_config(self): - """Get the first available access type/to combination from config. 
- - This method opportunistically picks the first configured protocol - to create the share. Do not use this method in tests where you need - to test depth and breadth in the access types and access recipients. - """ - protocol = self.shares_v2_client.share_protocol - - if protocol in CONF.share.enable_ip_rules_for_protocols: - access_type = "ip" - access_to = utils.rand_ip() - elif protocol in CONF.share.enable_user_rules_for_protocols: - access_type = "user" - access_to = CONF.share.username_for_user_rules - elif protocol in CONF.share.enable_cert_rules_for_protocols: - access_type = "cert" - access_to = "client3.com" - elif protocol in CONF.share.enable_cephx_rules_for_protocols: - access_type = "cephx" - access_to = "eve" - else: - message = "Unrecognized protocol and access rules configuration." - raise self.skipException(message) - - return access_type, access_to - - @classmethod - def create_share_network(cls, client=None, - cleanup_in_class=False, **kwargs): - if client is None: - client = cls.shares_client - share_network = client.create_share_network(**kwargs) - resource = { - "type": "share_network", - "id": share_network["id"], - "client": client, - } - if cleanup_in_class: - cls.class_resources.insert(0, resource) - else: - cls.method_resources.insert(0, resource) - return share_network - - @classmethod - def create_security_service(cls, ss_type="ldap", client=None, - cleanup_in_class=False, **kwargs): - if client is None: - client = cls.shares_client - security_service = client.create_security_service(ss_type, **kwargs) - resource = { - "type": "security_service", - "id": security_service["id"], - "client": client, - } - if cleanup_in_class: - cls.class_resources.insert(0, resource) - else: - cls.method_resources.insert(0, resource) - return security_service - - @classmethod - def create_share_type(cls, name, is_public=True, client=None, - cleanup_in_class=True, **kwargs): - if client is None: - client = cls.shares_v2_client - share_type = client.create_share_type(name, is_public, **kwargs) - resource = { - "type": "share_type", - "id": share_type["share_type"]["id"], - "client": client, - } - if cleanup_in_class: - cls.class_resources.insert(0, resource) - else: - cls.method_resources.insert(0, resource) - return share_type - - @staticmethod - def add_extra_specs_to_dict(extra_specs=None): - """Add any required extra-specs to share type dictionary""" - dhss = six.text_type(CONF.share.multitenancy_enabled) - snapshot_support = six.text_type( - CONF.share.capability_snapshot_support) - create_from_snapshot_support = six.text_type( - CONF.share.capability_create_share_from_snapshot_support) - - extra_specs_dict = { - "driver_handles_share_servers": dhss, - } - - optional = { - "snapshot_support": snapshot_support, - "create_share_from_snapshot_support": create_from_snapshot_support, - } - # NOTE(gouthamr): In micro-versions < 2.24, snapshot_support is a - # required extra-spec - extra_specs_dict.update(optional) - - if extra_specs: - extra_specs_dict.update(extra_specs) - - return extra_specs_dict - - @classmethod - def clear_isolated_creds(cls, creds=None): - if creds is None: - creds = cls.method_isolated_creds - for ic in creds: - if "deleted" not in ic.keys(): - ic["deleted"] = False - if not ic["deleted"]: - with handle_cleanup_exceptions(): - ic["method"]() - ic["deleted"] = True - - @classmethod - def clear_share_replicas(cls, share_id, client=None): - client = client or cls.shares_v2_client - share_replicas = client.list_share_replicas( - share_id=share_id) - - for 
replica in share_replicas: - try: - cls.delete_share_replica(replica['id']) - except exceptions.BadRequest: - # Ignore the exception due to deletion of last active replica - pass - - @classmethod - def clear_resources(cls, resources=None): - """Deletes resources, that were created in test suites. - - This method tries to remove resources from resource list, - if it is not found, assumed it was deleted in test itself. - It is expected, that all resources were added as LIFO - due to restriction of deletion resources, that is in the chain. - - :param resources: dict with keys 'type','id','client' and 'deleted' - """ - if resources is None: - resources = cls.method_resources - for res in resources: - if "deleted" not in res.keys(): - res["deleted"] = False - if "client" not in res.keys(): - res["client"] = cls.shares_client - if not(res["deleted"]): - res_id = res['id'] - client = res["client"] - with handle_cleanup_exceptions(): - if res["type"] is "share": - cls.clear_share_replicas(res_id) - share_group_id = res.get('share_group_id') - if share_group_id: - params = {'share_group_id': share_group_id} - client.delete_share(res_id, params=params) - else: - client.delete_share(res_id) - client.wait_for_resource_deletion(share_id=res_id) - elif res["type"] is "snapshot": - client.delete_snapshot(res_id) - client.wait_for_resource_deletion(snapshot_id=res_id) - elif (res["type"] is "share_network" and - res_id != CONF.share.share_network_id): - client.delete_share_network(res_id) - client.wait_for_resource_deletion(sn_id=res_id) - elif res["type"] is "security_service": - client.delete_security_service(res_id) - client.wait_for_resource_deletion(ss_id=res_id) - elif res["type"] is "share_type": - client.delete_share_type(res_id) - client.wait_for_resource_deletion(st_id=res_id) - elif res["type"] is "share_group": - client.delete_share_group(res_id) - client.wait_for_resource_deletion( - share_group_id=res_id) - elif res["type"] is "share_group_type": - client.delete_share_group_type(res_id) - client.wait_for_resource_deletion( - share_group_type_id=res_id) - elif res["type"] is "share_group_snapshot": - client.delete_share_group_snapshot(res_id) - client.wait_for_resource_deletion( - share_group_snapshot_id=res_id) - elif res["type"] is "share_replica": - client.delete_share_replica(res_id) - client.wait_for_resource_deletion(replica_id=res_id) - else: - LOG.warning("Provided unsupported resource type for " - "cleanup '%s'. Skipping.", res["type"]) - res["deleted"] = True - - @classmethod - def generate_share_network_data(self): - data = { - "name": data_utils.rand_name("sn-name"), - "description": data_utils.rand_name("sn-desc"), - "neutron_net_id": data_utils.rand_name("net-id"), - "neutron_subnet_id": data_utils.rand_name("subnet-id"), - } - return data - - @classmethod - def generate_security_service_data(self): - data = { - "name": data_utils.rand_name("ss-name"), - "description": data_utils.rand_name("ss-desc"), - "dns_ip": utils.rand_ip(), - "server": utils.rand_ip(), - "domain": data_utils.rand_name("ss-domain"), - "user": data_utils.rand_name("ss-user"), - "password": data_utils.rand_name("ss-password"), - } - return data - - # Useful assertions - def assertDictMatch(self, d1, d2, approx_equal=False, tolerance=0.001): - """Assert two dicts are equivalent. - - This is a 'deep' match in the sense that it handles nested - dictionaries appropriately. - - NOTE: - - If you don't care (or don't know) a given value, you can specify - the string DONTCARE as the value. 
This will cause that dict-item - to be skipped. - - """ - def raise_assertion(msg): - d1str = str(d1) - d2str = str(d2) - base_msg = ('Dictionaries do not match. %(msg)s d1: %(d1str)s ' - 'd2: %(d2str)s' % - {"msg": msg, "d1str": d1str, "d2str": d2str}) - raise AssertionError(base_msg) - - d1keys = set(d1.keys()) - d2keys = set(d2.keys()) - if d1keys != d2keys: - d1only = d1keys - d2keys - d2only = d2keys - d1keys - raise_assertion('Keys in d1 and not d2: %(d1only)s. ' - 'Keys in d2 and not d1: %(d2only)s' % - {"d1only": d1only, "d2only": d2only}) - - for key in d1keys: - d1value = d1[key] - d2value = d2[key] - try: - error = abs(float(d1value) - float(d2value)) - within_tolerance = error <= tolerance - except (ValueError, TypeError): - # If both values aren't convertible to float, just ignore - # ValueError if arg is a str, TypeError if it's something else - # (like None) - within_tolerance = False - - if hasattr(d1value, 'keys') and hasattr(d2value, 'keys'): - self.assertDictMatch(d1value, d2value) - elif 'DONTCARE' in (d1value, d2value): - continue - elif approx_equal and within_tolerance: - continue - elif d1value != d2value: - raise_assertion("d1['%(key)s']=%(d1value)s != " - "d2['%(key)s']=%(d2value)s" % - { - "key": key, - "d1value": d1value, - "d2value": d2value - }) - - def create_user_message(self): - """Trigger a 'no valid host' situation to generate a message.""" - extra_specs = { - 'vendor_name': 'foobar', - 'driver_handles_share_servers': CONF.share.multitenancy_enabled, - } - share_type_name = data_utils.rand_name("share-type") - - bogus_type = self.create_share_type( - name=share_type_name, - extra_specs=extra_specs)['share_type'] - - params = {'share_type_id': bogus_type['id'], - 'share_network_id': self.shares_v2_client.share_network_id} - share = self.shares_v2_client.create_share(**params) - self.addCleanup(self.shares_v2_client.delete_share, share['id']) - self.shares_v2_client.wait_for_share_status(share['id'], "error") - return self.shares_v2_client.wait_for_message(share['id']) - - -class BaseSharesAltTest(BaseSharesTest): - """Base test case class for all Shares Alt API tests.""" - credentials = ('alt', ) - - -class BaseSharesAdminTest(BaseSharesTest): - """Base test case class for all Shares Admin API tests.""" - credentials = ('admin', ) - - -class BaseSharesMixedTest(BaseSharesTest): - """Base test case class for all Shares API tests with all user roles.""" - credentials = ('primary', 'alt', 'admin') - - @classmethod - def setup_clients(cls): - super(BaseSharesMixedTest, cls).setup_clients() - # Initialise share clients - cls.admin_shares_client = cls.os_admin.share_v1.SharesClient() - cls.admin_shares_v2_client = cls.os_admin.share_v2.SharesV2Client() - cls.alt_shares_client = cls.os_alt.share_v1.SharesClient() - cls.alt_shares_v2_client = cls.os_alt.share_v2.SharesV2Client() - # Initialise network clients - cls.os_admin.networks_client = cls.os_admin.network.NetworksClient() - cls.os_alt.networks_client = cls.os_alt.network.NetworksClient() - - if CONF.share.multitenancy_enabled: - admin_share_network_id = cls.provide_share_network( - cls.admin_shares_v2_client, cls.os_admin.networks_client) - cls.admin_shares_client.share_network_id = admin_share_network_id - cls.admin_shares_v2_client.share_network_id = ( - admin_share_network_id) - - alt_share_network_id = cls.provide_share_network( - cls.alt_shares_v2_client, cls.os_alt.networks_client) - cls.alt_shares_client.share_network_id = alt_share_network_id - cls.alt_shares_v2_client.share_network_id = 
alt_share_network_id diff --git a/manila_tempest_tests/tests/api/test_availability_zones.py b/manila_tempest_tests/tests/api/test_availability_zones.py deleted file mode 100644 index 0d870369a9..0000000000 --- a/manila_tempest_tests/tests/api/test_availability_zones.py +++ /dev/null @@ -1,51 +0,0 @@ -# Copyright 2015 mirantis Inc. -# All Rights Reserved. -# -# Licensed under the Apache License, Version 2.0 (the "License"); you may -# not use this file except in compliance with the License. You may obtain -# a copy of the License at -# -# http://www.apache.org/licenses/LICENSE-2.0 -# -# Unless required by applicable law or agreed to in writing, software -# distributed under the License is distributed on an "AS IS" BASIS, WITHOUT -# WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. See the -# License for the specific language governing permissions and limitations -# under the License. - -from testtools import testcase as tc - -from manila_tempest_tests.tests.api import base - - -class AvailabilityZonesTest(base.BaseSharesTest): - - def _list_availability_zones_assertions(self, availability_zones): - self.assertGreater(len(availability_zones), 0) - keys = ("created_at", "updated_at", "name", "id") - for az in availability_zones: - self.assertEqual(len(keys), len(az)) - for key in keys: - self.assertIn(key, az) - - @tc.attr(base.TAG_POSITIVE, base.TAG_API) - def test_list_availability_zones_legacy_url_api_v1(self): - # NOTE(vponomaryov): remove this test with removal of availability zone - # extension url support. - azs = self.shares_client.list_availability_zones() - self._list_availability_zones_assertions(azs) - - @tc.attr(base.TAG_POSITIVE, base.TAG_API) - @base.skip_if_microversion_not_supported("2.6") - def test_list_availability_zones_legacy_url_api_v2(self): - # NOTE(vponomaryov): remove this test with removal of availability zone - # extension url support. - azs = self.shares_v2_client.list_availability_zones( - url='os-availability-zone', version='2.6') - self._list_availability_zones_assertions(azs) - - @tc.attr(base.TAG_POSITIVE, base.TAG_API) - @base.skip_if_microversion_not_supported("2.7") - def test_list_availability_zones(self): - azs = self.shares_v2_client.list_availability_zones(version='2.7') - self._list_availability_zones_assertions(azs) diff --git a/manila_tempest_tests/tests/api/test_availability_zones_negative.py b/manila_tempest_tests/tests/api/test_availability_zones_negative.py deleted file mode 100644 index 477e69c26b..0000000000 --- a/manila_tempest_tests/tests/api/test_availability_zones_negative.py +++ /dev/null @@ -1,43 +0,0 @@ -# Copyright 2015 Mirantis Inc. -# All Rights Reserved. -# -# Licensed under the Apache License, Version 2.0 (the "License"); you may -# not use this file except in compliance with the License. You may obtain -# a copy of the License at -# -# http://www.apache.org/licenses/LICENSE-2.0 -# -# Unless required by applicable law or agreed to in writing, software -# distributed under the License is distributed on an "AS IS" BASIS, WITHOUT -# WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. See the -# License for the specific language governing permissions and limitations -# under the License. 
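Editorial sketch: the base-class helpers removed above (create_share, the LIFO class_resources/method_resources lists, clear_resources) are easiest to see in use. The snippet below is illustrative only, assuming the manila_tempest_tests.tests.api.base module path stays the same after the move to openstack/manila-tempest-plugin; the class name ExampleMetadataTest is invented for the example and exists in neither repository.

    from testtools import testcase as tc

    from manila_tempest_tests.tests.api import base


    class ExampleMetadataTest(base.BaseSharesTest):
        """Illustrative consumer of the base-class helpers deleted above."""

        @classmethod
        def resource_setup(cls):
            super(ExampleMetadataTest, cls).resource_setup()
            # cleanup_in_class=True (the default) registers the share in
            # cls.class_resources, so resource_cleanup() deletes it once
            # the whole class has finished.
            cls.share = cls.create_share()

        @tc.attr(base.TAG_POSITIVE, base.TAG_API_WITH_BACKEND)
        def test_metadata_on_creation(self):
            # cleanup_in_class=False registers the share in
            # cls.method_resources, cleared per test by the
            # addCleanup(self.clear_resources) call made in setUp().
            share = self.create_share(metadata={"k": "v"},
                                      cleanup_in_class=False)
            self.assertEqual({"k": "v"},
                             self.shares_client.get_metadata(share["id"]))

Both helpers insert the new resource at position 0 of the relevant list, so cleanup runs in reverse creation order, as the clear_resources docstring above requires.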
- -from tempest.lib import exceptions as lib_exc -from testtools import testcase as tc - -from manila_tempest_tests.tests.api import base - - -@base.skip_if_microversion_not_supported("2.7") -class AvailabilityZonesNegativeTest(base.BaseSharesTest): - - @tc.attr(base.TAG_NEGATIVE, base.TAG_API) - def test_list_availability_zones_api_not_found_with_legacy_url(self): - # NOTE(vponomaryov): remove this test with removal of availability zone - # extension url support. - self.assertRaises( - lib_exc.NotFound, - self.shares_v2_client.list_availability_zones, - url='os-availability-zone', - version='2.7', - ) - - @tc.attr(base.TAG_NEGATIVE, base.TAG_API) - def test_list_availability_zones_api_not_found(self): - self.assertRaises( - lib_exc.NotFound, - self.shares_v2_client.list_availability_zones, - url='availability-zones', - version='2.6', - ) diff --git a/manila_tempest_tests/tests/api/test_extensions.py b/manila_tempest_tests/tests/api/test_extensions.py deleted file mode 100644 index d60b86150e..0000000000 --- a/manila_tempest_tests/tests/api/test_extensions.py +++ /dev/null @@ -1,31 +0,0 @@ -# Copyright 2014 mirantis Inc. -# All Rights Reserved. -# -# Licensed under the Apache License, Version 2.0 (the "License"); you may -# not use this file except in compliance with the License. You may obtain -# a copy of the License at -# -# http://www.apache.org/licenses/LICENSE-2.0 -# -# Unless required by applicable law or agreed to in writing, software -# distributed under the License is distributed on an "AS IS" BASIS, WITHOUT -# WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. See the -# License for the specific language governing permissions and limitations -# under the License. - -from testtools import testcase as tc - -from manila_tempest_tests.tests.api import base - - -class ExtensionsTest(base.BaseSharesTest): - - @tc.attr(base.TAG_POSITIVE, base.TAG_API) - def test_extensions(self): - - # get extensions - extensions = self.shares_client.list_extensions() - - # verify response - keys = ["alias", "updated", "name", "description"] - [self.assertIn(key, ext.keys()) for ext in extensions for key in keys] diff --git a/manila_tempest_tests/tests/api/test_limits.py b/manila_tempest_tests/tests/api/test_limits.py deleted file mode 100644 index 5f1f33d139..0000000000 --- a/manila_tempest_tests/tests/api/test_limits.py +++ /dev/null @@ -1,64 +0,0 @@ -# Copyright 2014 Mirantis Inc. -# All Rights Reserved. -# -# Licensed under the Apache License, Version 2.0 (the "License"); you may -# not use this file except in compliance with the License. You may obtain -# a copy of the License at -# -# http://www.apache.org/licenses/LICENSE-2.0 -# -# Unless required by applicable law or agreed to in writing, software -# distributed under the License is distributed on an "AS IS" BASIS, WITHOUT -# WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. See the -# License for the specific language governing permissions and limitations -# under the License. 
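Editorial sketch: ExtensionsTest above (and ShareLimitsTest below) assert key presence with a list comprehension evaluated purely for its side effects. The equivalent explicit loop reads more conventionally; the snippet uses only identifiers that appear in the deleted code, and the class name is illustrative.

    from testtools import testcase as tc

    from manila_tempest_tests.tests.api import base


    class ExampleExtensionsTest(base.BaseSharesTest):  # illustrative name

        @tc.attr(base.TAG_POSITIVE, base.TAG_API)
        def test_extensions_explicit_loop(self):
            extensions = self.shares_client.list_extensions()
            # Same assertions as
            #   [self.assertIn(key, ext.keys()) for ext in extensions
            #    for key in keys]
            # spelled as a plain nested loop, without the throwaway list.
            for ext in extensions:
                for key in ("alias", "updated", "name", "description"):
                    self.assertIn(key, ext)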
- -from testtools import testcase as tc - -from manila_tempest_tests.tests.api import base - - -class ShareLimitsTest(base.BaseSharesTest): - - @tc.attr(base.TAG_POSITIVE, base.TAG_API) - def test_limits_keys(self): - - # list limits - limits = self.shares_client.get_limits() - - # verify response - keys = ["rate", "absolute"] - [self.assertIn(key, limits.keys()) for key in keys] - - abs_keys = [ - "maxTotalShareGigabytes", - "maxTotalShares", - "maxTotalShareSnapshots", - "maxTotalShareNetworks", - "maxTotalSnapshotGigabytes", - "totalSharesUsed", - "totalShareSnapshotsUsed", - "totalShareNetworksUsed", - "totalShareGigabytesUsed", - "totalSnapshotGigabytesUsed", - ] - [self.assertIn(key, limits["absolute"].keys()) for key in abs_keys] - - @tc.attr(base.TAG_POSITIVE, base.TAG_API) - def test_limits_values(self): - - # list limits - limits = self.shares_client.get_limits() - - # verify integer values for absolute limits - abs_l = limits["absolute"] - self.assertGreater(int(abs_l["maxTotalShareGigabytes"]), -2) - self.assertGreater(int(abs_l["maxTotalShares"]), -2) - self.assertGreater(int(abs_l["maxTotalShareSnapshots"]), -2) - self.assertGreater(int(abs_l["maxTotalShareNetworks"]), -2) - self.assertGreater(int(abs_l["maxTotalSnapshotGigabytes"]), -2) - self.assertGreater(int(abs_l["totalSharesUsed"]), -2) - self.assertGreater(int(abs_l["totalShareSnapshotsUsed"]), -2) - self.assertGreater(int(abs_l["totalShareNetworksUsed"]), -2) - self.assertGreater(int(abs_l["totalShareGigabytesUsed"]), -2) - self.assertGreater(int(abs_l["totalSnapshotGigabytesUsed"]), -2) diff --git a/manila_tempest_tests/tests/api/test_metadata.py b/manila_tempest_tests/tests/api/test_metadata.py deleted file mode 100644 index a2a92cfbda..0000000000 --- a/manila_tempest_tests/tests/api/test_metadata.py +++ /dev/null @@ -1,163 +0,0 @@ -# Copyright 2014 Mirantis Inc. -# All Rights Reserved. -# -# Licensed under the Apache License, Version 2.0 (the "License"); you may -# not use this file except in compliance with the License. You may obtain -# a copy of the License at -# -# http://www.apache.org/licenses/LICENSE-2.0 -# -# Unless required by applicable law or agreed to in writing, software -# distributed under the License is distributed on an "AS IS" BASIS, WITHOUT -# WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. See the -# License for the specific language governing permissions and limitations -# under the License. 
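Editorial sketch: the ShareLimitsTest value checks above use assertGreater(int(x), -2), i.e. they accept any integer greater than or equal to -1, where -1 is the conventional "unlimited" marker for an absolute limit. A small illustrative helper (not part of the plugin API) that turns the same response into remaining share headroom:

    def remaining_shares(limits):
        """Return how many more shares fit under the absolute limit.

        `limits` is the response of shares_client.get_limits(); returns
        None when the limit is -1 (unlimited).
        """
        absolute = limits["absolute"]
        max_shares = int(absolute["maxTotalShares"])
        if max_shares == -1:
            return None  # unlimited
        return max_shares - int(absolute["totalSharesUsed"])

    # usage inside a test:
    #   remaining = remaining_shares(self.shares_client.get_limits())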
- -from testtools import testcase as tc - -from manila_tempest_tests.tests.api import base - - -class SharesMetadataTest(base.BaseSharesTest): - - @classmethod - def resource_setup(cls): - super(SharesMetadataTest, cls).resource_setup() - cls.share = cls.create_share() - - @tc.attr(base.TAG_POSITIVE, base.TAG_API_WITH_BACKEND) - def test_set_metadata_in_share_creation(self): - - md = {u"key1": u"value1", u"key2": u"value2", } - - # create share with metadata - share = self.create_share(metadata=md, cleanup_in_class=False) - - # get metadata of share - metadata = self.shares_client.get_metadata(share["id"]) - - # verify metadata - self.assertEqual(md, metadata) - - @tc.attr(base.TAG_POSITIVE, base.TAG_API_WITH_BACKEND) - def test_set_get_delete_metadata(self): - - md = {u"key3": u"value3", u"key4": u"value4", } - - # create share - share = self.create_share(cleanup_in_class=False) - - # set metadata - self.shares_client.set_metadata(share["id"], md) - - # read metadata - get_md = self.shares_client.get_metadata(share["id"]) - - # verify metadata - self.assertEqual(md, get_md) - - # delete metadata - for key in md.keys(): - self.shares_client.delete_metadata(share["id"], key) - - # verify deletion of metadata - get_metadata = self.shares_client.get_metadata(share["id"]) - self.assertEqual({}, get_metadata) - - @tc.attr(base.TAG_POSITIVE, base.TAG_API_WITH_BACKEND) - def test_set_and_update_metadata_by_key(self): - - md1 = {u"key5": u"value5", u"key6": u"value6", } - md2 = {u"key7": u"value7", u"key8": u"value8", } - - # create share - share = self.create_share(cleanup_in_class=False) - - # set metadata - self.shares_client.set_metadata(share["id"], md1) - - # update metadata - self.shares_client.update_all_metadata(share["id"], md2) - - # get metadata - get_md = self.shares_client.get_metadata(share["id"]) - - # verify metadata - self.assertEqual(md2, get_md) - - @tc.attr(base.TAG_POSITIVE, base.TAG_API_WITH_BACKEND) - def test_set_metadata_min_size_key(self): - data = {"k": "value"} - - self.shares_client.set_metadata(self.share["id"], data) - - body_get = self.shares_client.get_metadata(self.share["id"]) - self.assertEqual(data['k'], body_get.get('k')) - - @tc.attr(base.TAG_POSITIVE, base.TAG_API_WITH_BACKEND) - def test_set_metadata_max_size_key(self): - max_key = "k" * 255 - data = {max_key: "value"} - - self.shares_client.set_metadata(self.share["id"], data) - - body_get = self.shares_client.get_metadata(self.share["id"]) - self.assertIn(max_key, body_get) - self.assertEqual(data[max_key], body_get.get(max_key)) - - @tc.attr(base.TAG_POSITIVE, base.TAG_API_WITH_BACKEND) - def test_set_metadata_min_size_value(self): - data = {"key": "v"} - - self.shares_client.set_metadata(self.share["id"], data) - - body_get = self.shares_client.get_metadata(self.share["id"]) - self.assertEqual(data['key'], body_get['key']) - - @tc.attr(base.TAG_POSITIVE, base.TAG_API_WITH_BACKEND) - def test_set_metadata_max_size_value(self): - max_value = "v" * 1023 - data = {"key": max_value} - - self.shares_client.set_metadata(self.share["id"], data) - - body_get = self.shares_client.get_metadata(self.share["id"]) - self.assertEqual(data['key'], body_get['key']) - - @tc.attr(base.TAG_POSITIVE, base.TAG_API_WITH_BACKEND) - def test_upd_metadata_min_size_key(self): - data = {"k": "value"} - - self.shares_client.update_all_metadata(self.share["id"], data) - - body_get = self.shares_client.get_metadata(self.share["id"]) - self.assertEqual(data, body_get) - - @tc.attr(base.TAG_POSITIVE, base.TAG_API_WITH_BACKEND) - def 
test_upd_metadata_max_size_key(self): - max_key = "k" * 255 - data = {max_key: "value"} - - self.shares_client.update_all_metadata(self.share["id"], data) - - body_get = self.shares_client.get_metadata(self.share["id"]) - self.assertEqual(data, body_get) - - @tc.attr(base.TAG_POSITIVE, base.TAG_API_WITH_BACKEND) - def test_upd_metadata_min_size_value(self): - data = {"key": "v"} - - self.shares_client.update_all_metadata(self.share["id"], data) - - body_get = self.shares_client.get_metadata(self.share["id"]) - self.assertEqual(data, body_get) - - @tc.attr(base.TAG_POSITIVE, base.TAG_API_WITH_BACKEND) - def test_upd_metadata_max_size_value(self): - max_value = "v" * 1023 - data = {"key": max_value} - - self.shares_client.update_all_metadata(self.share["id"], data) - - body_get = self.shares_client.get_metadata(self.share["id"]) - self.assertEqual(data, body_get) diff --git a/manila_tempest_tests/tests/api/test_metadata_negative.py b/manila_tempest_tests/tests/api/test_metadata_negative.py deleted file mode 100644 index a4ad2608fc..0000000000 --- a/manila_tempest_tests/tests/api/test_metadata_negative.py +++ /dev/null @@ -1,100 +0,0 @@ -# Copyright 2014 Mirantis Inc. -# All Rights Reserved. -# -# Licensed under the Apache License, Version 2.0 (the "License"); you may -# not use this file except in compliance with the License. You may obtain -# a copy of the License at -# -# http://www.apache.org/licenses/LICENSE-2.0 -# -# Unless required by applicable law or agreed to in writing, software -# distributed under the License is distributed on an "AS IS" BASIS, WITHOUT -# WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. See the -# License for the specific language governing permissions and limitations -# under the License. - -import ddt -from tempest.lib import exceptions as lib_exc -from testtools import testcase as tc - -from manila_tempest_tests.tests.api import base - - -@ddt.ddt -class SharesMetadataAPIOnlyNegativeTest(base.BaseSharesTest): - - @tc.attr(base.TAG_NEGATIVE, base.TAG_API) - @ddt.data(True, False) - def test_try_set_metadata_to_unexisting_share(self, is_v2_client): - md = {u"key1": u"value1", u"key2": u"value2", } - client = self.shares_v2_client if is_v2_client else self.shares_client - self.assertRaises(lib_exc.NotFound, - client.set_metadata, - "wrong_share_id", md) - - @tc.attr(base.TAG_NEGATIVE, base.TAG_API) - @ddt.data(True, False) - def test_try_update_all_metadata_for_unexisting_share(self, is_v2_client): - md = {u"key1": u"value1", u"key2": u"value2", } - client = self.shares_v2_client if is_v2_client else self.shares_client - self.assertRaises(lib_exc.NotFound, - client.update_all_metadata, - "wrong_share_id", md) - - -class SharesMetadataNegativeTest(base.BaseSharesTest): - - @classmethod - def resource_setup(cls): - super(SharesMetadataNegativeTest, cls).resource_setup() - cls.share = cls.create_share() - - @tc.attr(base.TAG_NEGATIVE, base.TAG_API_WITH_BACKEND) - def test_try_set_metadata_with_empty_key(self): - self.assertRaises(lib_exc.BadRequest, - self.shares_client.set_metadata, - self.share["id"], {"": "value"}) - - @tc.attr(base.TAG_NEGATIVE, base.TAG_API_WITH_BACKEND) - def test_try_upd_metadata_with_empty_key(self): - self.assertRaises(lib_exc.BadRequest, - self.shares_client.update_all_metadata, - self.share["id"], {"": "value"}) - - @tc.attr(base.TAG_NEGATIVE, base.TAG_API_WITH_BACKEND) - def test_try_set_metadata_with_too_big_key(self): - too_big_key = "x" * 256 - md = {too_big_key: "value"} - self.assertRaises(lib_exc.BadRequest, - 
self.shares_client.set_metadata, - self.share["id"], md) - - @tc.attr(base.TAG_NEGATIVE, base.TAG_API_WITH_BACKEND) - def test_try_upd_metadata_with_too_big_key(self): - too_big_key = "x" * 256 - md = {too_big_key: "value"} - self.assertRaises(lib_exc.BadRequest, - self.shares_client.update_all_metadata, - self.share["id"], md) - - @tc.attr(base.TAG_NEGATIVE, base.TAG_API_WITH_BACKEND) - def test_try_set_metadata_with_too_big_value(self): - too_big_value = "x" * 1024 - md = {"key": too_big_value} - self.assertRaises(lib_exc.BadRequest, - self.shares_client.set_metadata, - self.share["id"], md) - - @tc.attr(base.TAG_NEGATIVE, base.TAG_API_WITH_BACKEND) - def test_try_upd_metadata_with_too_big_value(self): - too_big_value = "x" * 1024 - md = {"key": too_big_value} - self.assertRaises(lib_exc.BadRequest, - self.shares_client.update_all_metadata, - self.share["id"], md) - - @tc.attr(base.TAG_NEGATIVE, base.TAG_API_WITH_BACKEND) - def test_try_delete_unexisting_metadata(self): - self.assertRaises(lib_exc.NotFound, - self.shares_client.delete_metadata, - self.share["id"], "wrong_key") diff --git a/manila_tempest_tests/tests/api/test_microversions.py b/manila_tempest_tests/tests/api/test_microversions.py deleted file mode 100644 index ce259ac093..0000000000 --- a/manila_tempest_tests/tests/api/test_microversions.py +++ /dev/null @@ -1,172 +0,0 @@ -# Copyright 2015 Goutham Pacha Ravi -# Copyright 2015 Clinton Knight -# All Rights Reserved. -# -# Licensed under the Apache License, Version 2.0 (the "License"); you may -# not use this file except in compliance with the License. You may obtain -# a copy of the License at -# -# http://www.apache.org/licenses/LICENSE-2.0 -# -# Unless required by applicable law or agreed to in writing, software -# distributed under the License is distributed on an "AS IS" BASIS, WITHOUT -# WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. See the -# License for the specific language governing permissions and limitations -# under the License. - -from tempest import config -from testtools import testcase as tc - -from manila_tempest_tests.tests.api import base - -CONF = config.CONF - -API_MICROVERSIONS_HEADER_LOWER = 'x-openstack-manila-api-version' -API_MICROVERSIONS_HEADER = 'X-OpenStack-Manila-API-Version' -_MIN_API_VERSION = CONF.share.min_api_microversion -_MAX_API_VERSION = CONF.share.max_api_microversion - - -class MicroversionsTest(base.BaseSharesTest): - """Request and validate REST API Microversions. - - Sends HTTP GET requests to the version API to validate microversions. 
- """ - - @tc.attr(base.TAG_POSITIVE, base.TAG_API) - def test_microversions_root_version(self): - - resp, resp_body = self.shares_v2_client.send_microversion_request() - - self.assertEqual(300, resp.status) - - version_list = resp_body['versions'] - ids = [v['id'] for v in version_list] - self.assertEqual({'v1.0', 'v2.0'}, set(ids)) - - self.assertNotIn(API_MICROVERSIONS_HEADER_LOWER, resp) - self.assertNotIn('vary', resp) - - v1 = [v for v in version_list if v['id'] == 'v1.0'][0] - self.assertEqual('', v1.get('min_version')) - self.assertEqual('', v1.get('version')) - - v2 = [v for v in version_list if v['id'] == 'v2.0'][0] - self.assertEqual(_MIN_API_VERSION, v2.get('min_version')) - self.assertEqual(_MAX_API_VERSION, v2.get('version')) - - @tc.attr(base.TAG_POSITIVE, base.TAG_API) - def test_microversions_v1_no_version(self): - - resp, resp_body = self.shares_v2_client.send_microversion_request( - script_name='v1') - - self.assertEqual(200, resp.status) - - version_list = resp_body['versions'] - ids = [v['id'] for v in version_list] - self.assertEqual({'v1.0'}, set(ids)) - - self.assertEqual('1.0', resp.get(API_MICROVERSIONS_HEADER_LOWER)) - self.assertEqual(API_MICROVERSIONS_HEADER, resp.get('vary')) - self.assertEqual('', version_list[0].get('min_version')) - self.assertEqual('', version_list[0].get('version')) - - @tc.attr(base.TAG_POSITIVE, base.TAG_API) - def test_microversions_v1_with_version(self): - - resp, resp_body = self.shares_v2_client.send_microversion_request( - script_name='v1', version='5.0') - - self.assertEqual(200, resp.status) - - version_list = resp_body['versions'] - ids = [v['id'] for v in version_list] - self.assertEqual({'v1.0'}, set(ids)) - - self.assertEqual('1.0', resp.get(API_MICROVERSIONS_HEADER_LOWER)) - self.assertEqual(API_MICROVERSIONS_HEADER, resp.get('vary')) - self.assertEqual('', version_list[0].get('min_version')) - self.assertEqual('', version_list[0].get('version')) - - @tc.attr(base.TAG_POSITIVE, base.TAG_API) - def test_microversions_v2_no_version(self): - - resp, resp_body = self.shares_v2_client.send_microversion_request( - script_name='v2') - - self.assertEqual(200, resp.status) - - version_list = resp_body['versions'] - ids = [v['id'] for v in version_list] - self.assertEqual({'v2.0'}, set(ids)) - - self.assertEqual(_MIN_API_VERSION, - resp.get(API_MICROVERSIONS_HEADER_LOWER)) - self.assertEqual(API_MICROVERSIONS_HEADER, resp.get('vary')) - self.assertEqual(_MIN_API_VERSION, version_list[0].get('min_version')) - self.assertEqual(_MAX_API_VERSION, version_list[0].get('version')) - - @tc.attr(base.TAG_POSITIVE, base.TAG_API) - def test_microversions_v2_min_version(self): - - resp, resp_body = self.shares_v2_client.send_microversion_request( - script_name='v2', version=_MIN_API_VERSION) - - self.assertEqual(200, resp.status) - - version_list = resp_body['versions'] - ids = [v['id'] for v in version_list] - self.assertEqual({'v2.0'}, set(ids)) - - self.assertEqual(_MIN_API_VERSION, - resp.get(API_MICROVERSIONS_HEADER_LOWER)) - self.assertEqual(API_MICROVERSIONS_HEADER, resp.get('vary')) - self.assertEqual(_MIN_API_VERSION, version_list[0].get('min_version')) - self.assertEqual(_MAX_API_VERSION, version_list[0].get('version')) - - @tc.attr(base.TAG_POSITIVE, base.TAG_API) - def test_microversions_v2_max_version(self): - - resp, resp_body = self.shares_v2_client.send_microversion_request( - script_name='v2', version=_MAX_API_VERSION) - - self.assertEqual(200, resp.status) - - version_list = resp_body['versions'] - ids = [v['id'] for v in 
version_list] - self.assertEqual({'v2.0'}, set(ids)) - - self.assertEqual(_MAX_API_VERSION, - resp.get(API_MICROVERSIONS_HEADER_LOWER)) - self.assertEqual(API_MICROVERSIONS_HEADER, resp.get('vary')) - self.assertEqual(_MIN_API_VERSION, version_list[0].get('min_version')) - self.assertEqual(_MAX_API_VERSION, version_list[0].get('version')) - - @tc.attr(base.TAG_POSITIVE, base.TAG_API) - def test_microversions_v2_invalid_version(self): - - resp, _ = self.shares_v2_client.send_microversion_request( - script_name='v2', version='1.2.1') - - self.assertEqual(400, resp.status) - - @tc.attr(base.TAG_POSITIVE, base.TAG_API) - def test_microversions_v2_unacceptable_version(self): - - # First get max version from the server - resp, resp_body = self.shares_v2_client.send_microversion_request( - script_name='v2') - - self.assertEqual(200, resp.status) - - version_list = resp_body['versions'] - latest_version = version_list[0].get('version') - major, minor = [int(ver) for ver in latest_version.split(".")] - next_version = ('%s.%s' % (major + 1, minor + 1)) - - # Request a version that is too high - resp, _ = self.shares_v2_client.send_microversion_request( - script_name='v2', version=next_version) - - self.assertEqual(406, resp.status) diff --git a/manila_tempest_tests/tests/api/test_quotas.py b/manila_tempest_tests/tests/api/test_quotas.py deleted file mode 100644 index 83c776f5e2..0000000000 --- a/manila_tempest_tests/tests/api/test_quotas.py +++ /dev/null @@ -1,88 +0,0 @@ -# Copyright 2014 Mirantis Inc. -# All Rights Reserved. -# -# Licensed under the Apache License, Version 2.0 (the "License"); you may -# not use this file except in compliance with the License. You may obtain -# a copy of the License at -# -# http://www.apache.org/licenses/LICENSE-2.0 -# -# Unless required by applicable law or agreed to in writing, software -# distributed under the License is distributed on an "AS IS" BASIS, WITHOUT -# WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. See the -# License for the specific language governing permissions and limitations -# under the License. - -import ddt -import itertools -from tempest import config -from testtools import testcase as tc - -from manila_tempest_tests.tests.api import base - -CONF = config.CONF - - -@ddt.ddt -class SharesQuotasTest(base.BaseSharesTest): - - @classmethod - def resource_setup(cls): - if not CONF.share.run_quota_tests: - msg = "Quota tests are disabled." 
- raise cls.skipException(msg) - super(SharesQuotasTest, cls).resource_setup() - cls.user_id = cls.shares_v2_client.user_id or cls.user_id - cls.tenant_id = cls.shares_v2_client.tenant_id or cls.tenant_id - - @tc.attr(base.TAG_POSITIVE, base.TAG_API) - @ddt.data('shares_client', 'shares_v2_client') - def test_default_quotas(self, client_name): - quotas = getattr(self, client_name).default_quotas(self.tenant_id) - self.assertGreater(int(quotas["gigabytes"]), -2) - self.assertGreater(int(quotas["snapshot_gigabytes"]), -2) - self.assertGreater(int(quotas["shares"]), -2) - self.assertGreater(int(quotas["snapshots"]), -2) - self.assertGreater(int(quotas["share_networks"]), -2) - - @tc.attr(base.TAG_POSITIVE, base.TAG_API) - @ddt.data('shares_client', 'shares_v2_client') - def test_show_quotas(self, client_name): - quotas = getattr(self, client_name).show_quotas(self.tenant_id) - self.assertGreater(int(quotas["gigabytes"]), -2) - self.assertGreater(int(quotas["snapshot_gigabytes"]), -2) - self.assertGreater(int(quotas["shares"]), -2) - self.assertGreater(int(quotas["snapshots"]), -2) - self.assertGreater(int(quotas["share_networks"]), -2) - - @tc.attr(base.TAG_POSITIVE, base.TAG_API) - @ddt.data('shares_client', 'shares_v2_client') - def test_show_quotas_for_user(self, client_name): - quotas = getattr(self, client_name).show_quotas( - self.tenant_id, self.user_id) - self.assertGreater(int(quotas["gigabytes"]), -2) - self.assertGreater(int(quotas["snapshot_gigabytes"]), -2) - self.assertGreater(int(quotas["shares"]), -2) - self.assertGreater(int(quotas["snapshots"]), -2) - self.assertGreater(int(quotas["share_networks"]), -2) - - @ddt.data( - *itertools.product(set(("2.25", CONF.share.max_api_microversion)), - (True, False)) - ) - @ddt.unpack - @tc.attr(base.TAG_POSITIVE, base.TAG_API) - @base.skip_if_microversion_not_supported("2.25") - def test_show_quotas_detail(self, microversion, with_user): - quota_args = {"tenant_id": self.tenant_id, "version": microversion, } - if with_user: - quota_args.update({"user_id": self.user_id}) - quotas = self.shares_v2_client.detail_quotas(**quota_args) - quota_keys = list(quotas.keys()) - for outer in ('gigabytes', 'snapshot_gigabytes', 'shares', - 'snapshots', 'share_networks'): - self.assertIn(outer, quota_keys) - outer_keys = list(quotas[outer].keys()) - for inner in ('in_use', 'limit', 'reserved'): - self.assertIn(inner, outer_keys) - self.assertGreater(int(quotas[outer][inner]), -2) diff --git a/manila_tempest_tests/tests/api/test_quotas_negative.py b/manila_tempest_tests/tests/api/test_quotas_negative.py deleted file mode 100644 index 2e850fb728..0000000000 --- a/manila_tempest_tests/tests/api/test_quotas_negative.py +++ /dev/null @@ -1,60 +0,0 @@ -# Copyright 2014 Mirantis Inc. -# All Rights Reserved. -# -# Licensed under the Apache License, Version 2.0 (the "License"); you may -# not use this file except in compliance with the License. You may obtain -# a copy of the License at -# -# http://www.apache.org/licenses/LICENSE-2.0 -# -# Unless required by applicable law or agreed to in writing, software -# distributed under the License is distributed on an "AS IS" BASIS, WITHOUT -# WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. See the -# License for the specific language governing permissions and limitations -# under the License. 
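Editorial sketch: test_show_quotas_detail above verifies that, from microversion 2.25 on, each quota key nests 'in_use', 'limit' and 'reserved'. The helper below (illustrative, not part of the plugin) computes per-resource headroom from that structure, again treating -1 as unlimited.

    def quota_headroom(detailed_quotas):
        """Map each quota resource to its remaining headroom (None=unlimited)."""
        headroom = {}
        for resource in ('gigabytes', 'snapshot_gigabytes', 'shares',
                         'snapshots', 'share_networks'):
            entry = detailed_quotas[resource]
            limit = int(entry['limit'])
            if limit == -1:
                headroom[resource] = None  # unlimited
            else:
                headroom[resource] = (limit - int(entry['in_use'])
                                      - int(entry['reserved']))
        return headroom

    # usage inside a test:
    #   quotas = self.shares_v2_client.detail_quotas(
    #       tenant_id=self.tenant_id, version="2.25")
    #   headroom = quota_headroom(quotas)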
- -import ddt -from tempest import config -from tempest.lib import exceptions as lib_exc -from testtools import testcase as tc - -from manila_tempest_tests.tests.api import base - -CONF = config.CONF - - -@ddt.ddt -class SharesQuotasNegativeTest(base.BaseSharesTest): - - @classmethod - def resource_setup(cls): - if not CONF.share.run_quota_tests: - msg = "Quota tests are disabled." - raise cls.skipException(msg) - super(SharesQuotasNegativeTest, cls).resource_setup() - - @tc.attr(base.TAG_NEGATIVE, base.TAG_API) - def test_get_quotas_with_empty_tenant_id(self): - self.assertRaises(lib_exc.NotFound, - self.shares_v2_client.show_quotas, "") - - @tc.attr(base.TAG_NEGATIVE, base.TAG_API) - def test_try_reset_quotas_with_user(self): - self.assertRaises(lib_exc.Forbidden, - self.shares_v2_client.reset_quotas, - self.shares_v2_client.tenant_id) - - @tc.attr(base.TAG_NEGATIVE, base.TAG_API) - def test_try_update_quotas_with_user(self): - self.assertRaises(lib_exc.Forbidden, - self.shares_v2_client.update_quotas, - self.shares_v2_client.tenant_id, - shares=9) - - @ddt.data("2.6", "2.7", "2.24") - @tc.attr(base.TAG_NEGATIVE, base.TAG_API) - def test_get_quotas_detail_with_wrong_version(self, microversion): - self.assertRaises(lib_exc.NotFound, - self.shares_v2_client.detail_quotas, - self.shares_v2_client.tenant_id, - version=microversion) diff --git a/manila_tempest_tests/tests/api/test_replication.py b/manila_tempest_tests/tests/api/test_replication.py deleted file mode 100644 index 589f47cfb3..0000000000 --- a/manila_tempest_tests/tests/api/test_replication.py +++ /dev/null @@ -1,406 +0,0 @@ -# Copyright 2015 Yogesh Kshirsagar -# All Rights Reserved. -# -# Licensed under the Apache License, Version 2.0 (the "License"); you may -# not use this file except in compliance with the License. You may obtain -# a copy of the License at -# -# http://www.apache.org/licenses/LICENSE-2.0 -# -# Unless required by applicable law or agreed to in writing, software -# distributed under the License is distributed on an "AS IS" BASIS, WITHOUT -# WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. See the -# License for the specific language governing permissions and limitations -# under the License. 
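Editorial sketch: the quota tests above rely on ddt to expand one test body into several generated cases; @ddt.data produces one test per datum and @ddt.unpack splits a tuple datum into separate arguments, which is how the itertools.product(...) call yields (microversion, with_user) pairs. The compact example below reuses only decorators that appear in the deleted code; the class name is illustrative and the run_quota_tests skip-guard from the deleted class is omitted for brevity.

    import itertools

    import ddt
    from tempest import config
    from testtools import testcase as tc

    from manila_tempest_tests.tests.api import base

    CONF = config.CONF


    @ddt.ddt
    class ExampleQuotasTest(base.BaseSharesTest):  # illustrative name

        @tc.attr(base.TAG_POSITIVE, base.TAG_API)
        @ddt.data('shares_client', 'shares_v2_client')
        def test_show_quotas(self, client_name):
            # Generated twice, once per client attribute name.
            quotas = getattr(self, client_name).show_quotas(self.tenant_id)
            self.assertGreater(int(quotas["shares"]), -2)

        @ddt.data(*itertools.product(
            set(("2.25", CONF.share.max_api_microversion)), (True, False)))
        @ddt.unpack
        @tc.attr(base.TAG_POSITIVE, base.TAG_API)
        @base.skip_if_microversion_not_supported("2.25")
        def test_detail_quotas(self, microversion, with_user):
            # Generated once per (microversion, with_user) combination.
            kwargs = {"tenant_id": self.tenant_id, "version": microversion}
            if with_user:
                kwargs["user_id"] = self.user_id
            self.shares_v2_client.detail_quotas(**kwargs)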
- -from tempest import config -from tempest.lib.common.utils import data_utils -import testtools -from testtools import testcase as tc - -from manila_tempest_tests.common import constants -from manila_tempest_tests import share_exceptions -from manila_tempest_tests.tests.api import base - -CONF = config.CONF -_MIN_SUPPORTED_MICROVERSION = '2.11' -SUMMARY_KEYS = ['share_id', 'id', 'replica_state', 'status'] -DETAIL_KEYS = SUMMARY_KEYS + ['availability_zone', 'updated_at', - 'share_network_id', 'created_at'] - - -@testtools.skipUnless(CONF.share.run_replication_tests, - 'Replication tests are disabled.') -@base.skip_if_microversion_lt(_MIN_SUPPORTED_MICROVERSION) -class ReplicationTest(base.BaseSharesMixedTest): - - @classmethod - def resource_setup(cls): - super(ReplicationTest, cls).resource_setup() - # Create share_type - name = data_utils.rand_name(constants.TEMPEST_MANILA_PREFIX) - cls.admin_client = cls.admin_shares_v2_client - cls.replication_type = CONF.share.backend_replication_type - - if cls.replication_type not in constants.REPLICATION_TYPE_CHOICES: - raise share_exceptions.ShareReplicationTypeException( - replication_type=cls.replication_type - ) - cls.zones = cls.get_availability_zones(client=cls.admin_client) - cls.share_zone = cls.zones[0] - cls.replica_zone = cls.zones[-1] - - cls.extra_specs = cls.add_extra_specs_to_dict( - {"replication_type": cls.replication_type}) - share_type = cls.create_share_type( - name, - extra_specs=cls.extra_specs, - client=cls.admin_client) - cls.share_type = share_type["share_type"] - # Create share with above share_type - cls.creation_data = {'kwargs': { - 'share_type_id': cls.share_type['id'], - 'availability_zone': cls.share_zone, - }} - - # Data for creating shares in parallel - data = [cls.creation_data, cls.creation_data] - cls.shares = cls.create_shares(data) - cls.shares = [cls.shares_v2_client.get_share(s['id']) for s in - cls.shares] - cls.instance_id1 = cls._get_instance(cls.shares[0]) - cls.instance_id2 = cls._get_instance(cls.shares[1]) - - @classmethod - def _get_instance(cls, share): - share_instances = cls.admin_client.get_instances_of_share(share["id"]) - return share_instances[0]["id"] - - def _verify_create_replica(self): - # Create the replica - share_replica = self.create_share_replica(self.shares[0]["id"], - self.replica_zone, - cleanup_in_class=False) - share_replicas = self.shares_v2_client.list_share_replicas( - share_id=self.shares[0]["id"]) - # Ensure replica is created successfully. - replica_ids = [replica["id"] for replica in share_replicas] - self.assertIn(share_replica["id"], replica_ids) - return share_replica - - def _verify_active_replica_count(self, share_id): - # List replicas - replica_list = self.shares_v2_client.list_share_replicas( - share_id=share_id) - - # Check if there is only 1 'active' replica before promotion. - active_replicas = self._filter_replica_list( - replica_list, constants.REPLICATION_STATE_ACTIVE) - self.assertEqual(1, len(active_replicas)) - - def _filter_replica_list(self, replica_list, r_state): - # Iterate through replica list to filter based on replica_state - return [replica for replica in replica_list - if replica['replica_state'] == r_state] - - def _verify_in_sync_replica_promotion(self, share, original_replica): - # Verify that 'in-sync' replica has been promoted successfully - - # NOTE(Yogi1): Cleanup needs to be disabled for replica that is - # being promoted since it will become the 'primary'/'active' replica. 
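# Editorial note (not part of the deleted file): the replica created just
# below is deliberately passed cleanup=False because, once promoted, it
# becomes the share's 'active' replica and is removed along with the share
# itself; only the demoted original replica needs the explicit
# addCleanup(self.delete_share_replica, ...) registered a few lines further
# down.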
- replica = self.create_share_replica(share["id"], self.replica_zone, - cleanup=False) - # Wait for replica state to update after creation - self.shares_v2_client.wait_for_share_replica_status( - replica['id'], constants.REPLICATION_STATE_IN_SYNC, - status_attr='replica_state') - # Promote the first in_sync replica to active state - promoted_replica = self.promote_share_replica(replica['id']) - # Delete the demoted replica so promoted replica can be cleaned - # during the cleanup of the share. - self.addCleanup(self.delete_share_replica, original_replica['id']) - self._verify_active_replica_count(share["id"]) - # Verify the replica_state for promoted replica - promoted_replica = self.shares_v2_client.get_share_replica( - promoted_replica["id"]) - self.assertEqual(constants.REPLICATION_STATE_ACTIVE, - promoted_replica["replica_state"]) - - def _check_skip_promotion_tests(self): - # Check if the replication type is right for replica promotion tests - if (self.replication_type - not in constants.REPLICATION_PROMOTION_CHOICES): - msg = "Option backend_replication_type should be one of (%s)!" - raise self.skipException( - msg % ','.join(constants.REPLICATION_PROMOTION_CHOICES)) - - @tc.attr(base.TAG_POSITIVE, base.TAG_BACKEND) - def test_add_delete_share_replica(self): - # Create the replica - share_replica = self._verify_create_replica() - - # Delete the replica - self.delete_share_replica(share_replica["id"]) - - @tc.attr(base.TAG_POSITIVE, base.TAG_BACKEND) - def test_add_access_rule_create_replica_delete_rule(self): - # Add access rule to the share - access_type, access_to = self._get_access_rule_data_from_config() - rule = self.shares_v2_client.create_access_rule( - self.shares[0]["id"], access_type, access_to, 'ro') - self.shares_v2_client.wait_for_access_rule_status( - self.shares[0]["id"], rule["id"], constants.RULE_STATE_ACTIVE) - - # Create the replica - self._verify_create_replica() - - # Verify access_rules_status transitions to 'active' state. - self.shares_v2_client.wait_for_share_status( - self.shares[0]["id"], constants.RULE_STATE_ACTIVE, - status_attr='access_rules_status') - - # Delete rule and wait for deletion - self.shares_v2_client.delete_access_rule(self.shares[0]["id"], - rule["id"]) - self.shares_v2_client.wait_for_resource_deletion( - rule_id=rule["id"], share_id=self.shares[0]['id']) - - @tc.attr(base.TAG_POSITIVE, base.TAG_BACKEND) - def test_create_replica_add_access_rule_delete_replica(self): - access_type, access_to = self._get_access_rule_data_from_config() - # Create the replica - share_replica = self._verify_create_replica() - - # Add access rule - self.shares_v2_client.create_access_rule( - self.shares[0]["id"], access_type, access_to, 'ro') - - self.shares_v2_client.wait_for_share_status( - self.shares[0]["id"], constants.RULE_STATE_ACTIVE, - status_attr='access_rules_status') - - # Delete the replica - self.delete_share_replica(share_replica["id"]) - - @tc.attr(base.TAG_POSITIVE, base.TAG_BACKEND) - @testtools.skipUnless(CONF.share.run_multiple_share_replicas_tests, - 'Multiple share replicas tests are disabled.') - def test_add_multiple_share_replicas(self): - rep_domain, pools = self.get_pools_for_replication_domain() - if len(pools) < 3: - msg = ("Replication domain %(domain)s has only %(count)s pools. " - "Need at least 3 pools to run this test." 
% - {"domain": rep_domain, "count": len(pools)}) - raise self.skipException(msg) - # Add the replicas - share_replica1 = self.create_share_replica(self.shares[0]["id"], - self.replica_zone, - cleanup_in_class=False) - share_replica2 = self.create_share_replica(self.shares[0]["id"], - self.replica_zone, - cleanup_in_class=False) - self.shares_v2_client.get_share_replica(share_replica2['id']) - - share_replicas = self.admin_client.list_share_replicas( - share_id=self.shares[0]["id"]) - replica_host_set = {r['host'] for r in share_replicas} - - # Assert that replicas are created on different pools. - msg = "More than one replica is created on the same pool." - self.assertEqual(3, len(replica_host_set), msg) - # Verify replicas are in the replica list - replica_ids = [replica["id"] for replica in share_replicas] - self.assertIn(share_replica1["id"], replica_ids) - self.assertIn(share_replica2["id"], replica_ids) - - @tc.attr(base.TAG_POSITIVE, base.TAG_BACKEND) - def test_promote_in_sync_share_replica(self): - # Test promote 'in_sync' share_replica to 'active' state - self._check_skip_promotion_tests() - share = self.create_shares([self.creation_data])[0] - original_replica = self.shares_v2_client.list_share_replicas( - share["id"])[0] - self._verify_in_sync_replica_promotion(share, original_replica) - - @tc.attr(base.TAG_POSITIVE, base.TAG_BACKEND) - def test_add_rule_promote_share_replica_verify_rule(self): - # Verify the access rule stays intact after share replica promotion - self._check_skip_promotion_tests() - - share = self.create_shares([self.creation_data])[0] - # Add access rule - access_type, access_to = self._get_access_rule_data_from_config() - rule = self.shares_v2_client.create_access_rule( - share["id"], access_type, access_to, 'ro') - self.shares_v2_client.wait_for_access_rule_status( - share["id"], rule["id"], constants.RULE_STATE_ACTIVE) - - original_replica = self.shares_v2_client.list_share_replicas( - share["id"])[0] - self._verify_in_sync_replica_promotion(share, original_replica) - - # verify rule's values - rules_list = self.shares_v2_client.list_access_rules(share["id"]) - self.assertEqual(1, len(rules_list)) - self.assertEqual(access_type, rules_list[0]["access_type"]) - self.assertEqual(access_to, rules_list[0]["access_to"]) - self.assertEqual('ro', rules_list[0]["access_level"]) - - @tc.attr(base.TAG_POSITIVE, base.TAG_BACKEND) - def test_promote_and_promote_back(self): - # Test promote back and forth between 2 share replicas - self._check_skip_promotion_tests() - - # Create a new share - share = self.create_shares([self.creation_data])[0] - - # Discover the original replica - initial_replicas = self.shares_v2_client.list_share_replicas( - share_id=share['id']) - self.assertEqual(1, len(initial_replicas), - '%s replicas initially created for share %s' % - (len(initial_replicas), share['id'])) - original_replica = initial_replicas[0] - - # Create a new replica - new_replica = self.create_share_replica(share["id"], - self.replica_zone, - cleanup_in_class=False) - self.shares_v2_client.wait_for_share_replica_status( - new_replica['id'], constants.REPLICATION_STATE_IN_SYNC, - status_attr='replica_state') - - # Promote the new replica to active and verify the replica states - self.promote_share_replica(new_replica['id']) - self._verify_active_replica_count(share["id"]) - self.shares_v2_client.wait_for_share_replica_status( - original_replica['id'], constants.REPLICATION_STATE_IN_SYNC, - status_attr='replica_state') - - # Promote the original replica back to active - 
self.promote_share_replica(original_replica['id']) - self._verify_active_replica_count(share["id"]) - self.shares_v2_client.wait_for_share_replica_status( - new_replica['id'], constants.REPLICATION_STATE_IN_SYNC, - status_attr='replica_state') - - @tc.attr(base.TAG_POSITIVE, base.TAG_API_WITH_BACKEND) - def test_active_replication_state(self): - # Verify the replica_state of first instance is set to active. - replica = self.shares_v2_client.get_share_replica(self.instance_id1) - self.assertEqual( - constants.REPLICATION_STATE_ACTIVE, replica['replica_state']) - - -@testtools.skipUnless(CONF.share.run_replication_tests, - 'Replication tests are disabled.') -@base.skip_if_microversion_lt(_MIN_SUPPORTED_MICROVERSION) -class ReplicationActionsTest(base.BaseSharesMixedTest): - - @classmethod - def resource_setup(cls): - super(ReplicationActionsTest, cls).resource_setup() - # Create share_type - name = data_utils.rand_name(constants.TEMPEST_MANILA_PREFIX) - cls.admin_client = cls.admin_shares_v2_client - cls.replication_type = CONF.share.backend_replication_type - - if cls.replication_type not in constants.REPLICATION_TYPE_CHOICES: - raise share_exceptions.ShareReplicationTypeException( - replication_type=cls.replication_type - ) - cls.zones = cls.get_availability_zones(client=cls.admin_client) - cls.share_zone = cls.zones[0] - cls.replica_zone = cls.zones[-1] - - cls.extra_specs = cls.add_extra_specs_to_dict( - {"replication_type": cls.replication_type}) - share_type = cls.create_share_type( - name, - extra_specs=cls.extra_specs, - client=cls.admin_client) - cls.share_type = share_type["share_type"] - # Create share with above share_type - cls.creation_data = {'kwargs': { - 'share_type_id': cls.share_type['id'], - 'availability_zone': cls.share_zone, - }} - - # Data for creating shares in parallel - data = [cls.creation_data, cls.creation_data] - cls.shares = cls.create_shares(data) - cls.shares = [cls.shares_v2_client.get_share(s['id']) for s in - cls.shares] - cls.instance_id1 = cls._get_instance(cls.shares[0]) - cls.instance_id2 = cls._get_instance(cls.shares[1]) - - # Create replicas to 2 shares - cls.replica1 = cls.create_share_replica(cls.shares[0]["id"], - cls.replica_zone, - cleanup_in_class=True) - cls.replica2 = cls.create_share_replica(cls.shares[1]["id"], - cls.replica_zone, - cleanup_in_class=True) - - @classmethod - def _get_instance(cls, share): - share_instances = cls.admin_client.get_instances_of_share(share["id"]) - return share_instances[0]["id"] - - def _validate_replica_list(self, replica_list, detail=True): - # Verify keys - if detail: - keys = DETAIL_KEYS - else: - keys = SUMMARY_KEYS - for replica in replica_list: - self.assertEqual(sorted(keys), sorted(replica.keys())) - # Check for duplicates - replica_id_list = [sr["id"] for sr in replica_list - if sr["id"] == replica["id"]] - msg = "Replica %s appears %s times in replica list." % ( - replica['id'], len(replica_id_list)) - self.assertEqual(1, len(replica_id_list), msg) - - @tc.attr(base.TAG_POSITIVE, base.TAG_API_WITH_BACKEND) - def test_show_share_replica(self): - replica = self.shares_v2_client.get_share_replica(self.replica1["id"]) - - actual_keys = sorted(list(replica.keys())) - detail_keys = sorted(DETAIL_KEYS) - self.assertEqual(detail_keys, actual_keys, - 'Share Replica %s has incorrect keys; ' - 'expected %s, got %s.' 
% (replica["id"], - detail_keys, actual_keys)) - - @tc.attr(base.TAG_POSITIVE, base.TAG_API_WITH_BACKEND) - def test_detail_list_share_replicas_for_share(self): - # List replicas for share - replica_list = self.shares_v2_client.list_share_replicas( - share_id=self.shares[0]["id"]) - replica_ids_list = [rep['id'] for rep in replica_list] - self.assertIn(self.replica1['id'], replica_ids_list, - 'Replica %s was not returned in the list of replicas: %s' - % (self.replica1['id'], replica_list)) - # Verify keys - self._validate_replica_list(replica_list) - - @tc.attr(base.TAG_POSITIVE, base.TAG_API_WITH_BACKEND) - def test_detail_list_share_replicas_for_all_shares(self): - # List replicas for all available shares - replica_list = self.shares_v2_client.list_share_replicas() - replica_ids_list = [rep['id'] for rep in replica_list] - for replica in [self.replica1, self.replica2]: - self.assertIn(replica['id'], replica_ids_list, - 'Replica %s was not returned in the list of ' - 'replicas: %s' % (replica['id'], replica_list)) - # Verify keys - self._validate_replica_list(replica_list) - - @tc.attr(base.TAG_POSITIVE, base.TAG_API_WITH_BACKEND) - def test_summary_list_share_replicas_for_all_shares(self): - # List replicas - replica_list = self.shares_v2_client.list_share_replicas_summary() - - # Verify keys - self._validate_replica_list(replica_list, detail=False) diff --git a/manila_tempest_tests/tests/api/test_replication_negative.py b/manila_tempest_tests/tests/api/test_replication_negative.py deleted file mode 100644 index 21a5ef24cd..0000000000 --- a/manila_tempest_tests/tests/api/test_replication_negative.py +++ /dev/null @@ -1,215 +0,0 @@ -# Copyright 2015 Yogesh Kshirsagar -# All Rights Reserved. -# -# Licensed under the Apache License, Version 2.0 (the "License"); you may -# not use this file except in compliance with the License. You may obtain -# a copy of the License at -# -# http://www.apache.org/licenses/LICENSE-2.0 -# -# Unless required by applicable law or agreed to in writing, software -# distributed under the License is distributed on an "AS IS" BASIS, WITHOUT -# WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. See the -# License for the specific language governing permissions and limitations -# under the License. 
- -from tempest import config -from tempest.lib.common.utils import data_utils -from tempest.lib import exceptions as lib_exc -import testtools -from testtools import testcase as tc - -from manila_tempest_tests.common import constants -from manila_tempest_tests import share_exceptions -from manila_tempest_tests.tests.api import base - -CONF = config.CONF -_MIN_SUPPORTED_MICROVERSION = '2.11' - - -@testtools.skipUnless(CONF.share.run_replication_tests, - 'Replication tests are disabled.') -@base.skip_if_microversion_lt(_MIN_SUPPORTED_MICROVERSION) -class ReplicationNegativeTest(base.BaseSharesMixedTest): - - @classmethod - def resource_setup(cls): - super(ReplicationNegativeTest, cls).resource_setup() - # Create share_type - name = data_utils.rand_name(constants.TEMPEST_MANILA_PREFIX) - cls.admin_client = cls.admin_shares_v2_client - cls.replication_type = CONF.share.backend_replication_type - - if cls.replication_type not in constants.REPLICATION_TYPE_CHOICES: - raise share_exceptions.ShareReplicationTypeException( - replication_type=cls.replication_type - ) - cls.zones = cls.get_availability_zones(client=cls.admin_client) - cls.share_zone = cls.zones[0] - cls.replica_zone = cls.zones[-1] - - cls.extra_specs = cls.add_extra_specs_to_dict( - {"replication_type": cls.replication_type}) - share_type = cls.create_share_type( - name, - extra_specs=cls.extra_specs, - client=cls.admin_client) - cls.share_type = share_type["share_type"] - # Create share with above share_type - cls.share1, cls.instance_id1 = cls._create_share_get_instance() - - @classmethod - def _create_share_get_instance(cls): - share = cls.create_share(share_type_id=cls.share_type["id"], - availability_zone=cls.share_zone,) - share_instances = cls.admin_client.get_instances_of_share( - share["id"], version=_MIN_SUPPORTED_MICROVERSION - ) - instance_id = share_instances[0]["id"] - return share, instance_id - - def _is_replication_type_promotable(self): - if (self.replication_type - not in constants.REPLICATION_PROMOTION_CHOICES): - msg = "Option backend_replication_type should be one of (%s)!" 
- raise self.skipException( - msg % ','.join(constants.REPLICATION_PROMOTION_CHOICES)) - - @tc.attr(base.TAG_NEGATIVE, base.TAG_API_WITH_BACKEND) - def test_try_add_replica_to_share_with_no_replication_share_type(self): - # Create share without replication type - share_type = self.create_share_type( - data_utils.rand_name(constants.TEMPEST_MANILA_PREFIX), - extra_specs=self.add_extra_specs_to_dict(), - client=self.admin_client)["share_type"] - share = self.create_share(share_type_id=share_type["id"]) - self.assertRaises(lib_exc.BadRequest, - self.create_share_replica, - share['id'], - self.replica_zone) - - @tc.attr(base.TAG_NEGATIVE, base.TAG_API_WITH_BACKEND) - def test_add_replica_to_share_with_error_state(self): - # Set "error" state - self.admin_client.reset_state( - self.share1['id'], constants.STATUS_ERROR) - self.addCleanup(self.admin_client.reset_state, - self.share1['id'], - constants.STATUS_AVAILABLE) - self.assertRaises(lib_exc.BadRequest, - self.create_share_replica, - self.share1['id'], - self.replica_zone) - - @tc.attr(base.TAG_NEGATIVE, base.TAG_API_WITH_BACKEND) - def test_try_delete_last_active_replica(self): - self.assertRaises(lib_exc.BadRequest, - self.shares_v2_client.delete_share_replica, - self.instance_id1) - - @tc.attr(base.TAG_NEGATIVE, base.TAG_API_WITH_BACKEND) - def test_try_delete_share_having_replica(self): - self.create_share_replica(self.share1["id"], self.replica_zone, - cleanup_in_class=False) - self.assertRaises(lib_exc.Conflict, - self.shares_v2_client.delete_share, - self.share1["id"]) - - @tc.attr(base.TAG_NEGATIVE, base.TAG_API_WITH_BACKEND) - def test_promote_out_of_sync_share_replica(self): - # Test promoting an out_of_sync share_replica to active state - self._is_replication_type_promotable() - share, instance_id = self._create_share_get_instance() - replica = self.create_share_replica(share["id"], self.replica_zone, - cleanup_in_class=False) - # Set replica state to out of sync - self.admin_client.reset_share_replica_state( - replica['id'], constants.REPLICATION_STATE_OUT_OF_SYNC) - self.shares_v2_client.wait_for_share_replica_status( - replica['id'], constants.REPLICATION_STATE_OUT_OF_SYNC, - status_attr='replica_state') - # Try promoting the first out_of_sync replica to active state - self.assertRaises(lib_exc.Forbidden, - self.shares_v2_client.promote_share_replica, - replica['id']) - - @tc.attr(base.TAG_NEGATIVE, base.TAG_API_WITH_BACKEND) - def test_promote_active_share_replica(self): - # Test promote active share_replica - self._is_replication_type_promotable() - - # Try promoting the active replica - self.shares_v2_client.promote_share_replica(self.instance_id1, - expected_status=200) - - @tc.attr(base.TAG_NEGATIVE, base.TAG_API_WITH_BACKEND) - def test_promote_share_replica_for_writable_share_type(self): - # Test promote active share_replica for writable share - if self.replication_type != "writable": - raise self.skipException("Option backend_replication_type " - "should be writable!") - share, instance_id = self._create_share_get_instance() - replica = self.create_share_replica(share["id"], self.replica_zone, - cleanup_in_class=False) - # By default, 'writable' replica is expected to be in active state - self.shares_v2_client.wait_for_share_replica_status( - replica["id"], constants.REPLICATION_STATE_ACTIVE, - status_attr='replica_state') - - # Try promoting the replica - self.shares_v2_client.promote_share_replica(replica['id']) - - @tc.attr(base.TAG_NEGATIVE, base.TAG_API_WITH_BACKEND) - def 
test_add_access_rule_share_replica_error_status(self): - access_type, access_to = self._get_access_rule_data_from_config() - # Create the replica - share_replica = self.create_share_replica(self.share1["id"], - self.replica_zone, - cleanup_in_class=False) - # Reset the replica status to error - self.admin_client.reset_share_replica_status( - share_replica['id'], constants.STATUS_ERROR) - - # Verify access rule cannot be added - self.assertRaises(lib_exc.BadRequest, - self.admin_client.create_access_rule, - self.share1["id"], access_type, access_to, 'ro') - - @testtools.skipUnless(CONF.share.run_host_assisted_migration_tests or - CONF.share.run_driver_assisted_migration_tests, - "Share migration tests are disabled.") - @tc.attr(base.TAG_NEGATIVE, base.TAG_API_WITH_BACKEND) - @base.skip_if_microversion_lt("2.29") - def test_migration_of_replicated_share(self): - pools = self.admin_client.list_pools(detail=True)['pools'] - hosts = [p['name'] for p in pools] - self.create_share_replica(self.share1["id"], self.replica_zone, - cleanup_in_class=False) - share_host = self.admin_client.get_share(self.share1['id'])['host'] - - for host in hosts: - if host != share_host: - dest_host = host - break - - self.assertRaises( - lib_exc.Conflict, self.admin_client.migrate_share, - self.share1['id'], dest_host) - - -@testtools.skipUnless(CONF.share.run_replication_tests, - 'Replication tests are disabled.') -@base.skip_if_microversion_lt(_MIN_SUPPORTED_MICROVERSION) -class ReplicationAPIOnlyNegativeTest(base.BaseSharesTest): - - @tc.attr(base.TAG_NEGATIVE, base.TAG_API) - def test_get_replica_by_nonexistent_id(self): - self.assertRaises(lib_exc.NotFound, - self.shares_v2_client.get_share_replica, - data_utils.rand_uuid()) - - @tc.attr(base.TAG_NEGATIVE, base.TAG_API) - def test_try_delete_replica_by_nonexistent_id(self): - self.assertRaises(lib_exc.NotFound, - self.shares_v2_client.delete_share_replica, - data_utils.rand_uuid()) diff --git a/manila_tempest_tests/tests/api/test_replication_snapshots.py b/manila_tempest_tests/tests/api/test_replication_snapshots.py deleted file mode 100644 index 331437cadb..0000000000 --- a/manila_tempest_tests/tests/api/test_replication_snapshots.py +++ /dev/null @@ -1,218 +0,0 @@ -# Copyright 2016 Yogesh Kshirsagar -# All Rights Reserved. -# -# Licensed under the Apache License, Version 2.0 (the "License"); you may -# not use this file except in compliance with the License. You may obtain -# a copy of the License at -# -# http://www.apache.org/licenses/LICENSE-2.0 -# -# Unless required by applicable law or agreed to in writing, software -# distributed under the License is distributed on an "AS IS" BASIS, WITHOUT -# WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. See the -# License for the specific language governing permissions and limitations -# under the License. 
- -from tempest import config -from tempest.lib.common.utils import data_utils -import testtools -from testtools import testcase as tc - -from manila_tempest_tests.common import constants -from manila_tempest_tests import share_exceptions -from manila_tempest_tests.tests.api import base - -CONF = config.CONF -_MIN_SUPPORTED_MICROVERSION = '2.11' - - -@testtools.skipUnless(CONF.share.run_replication_tests, - 'Replication tests are disabled.') -@testtools.skipUnless(CONF.share.run_snapshot_tests, - 'Snapshot tests disabled.') -@base.skip_if_microversion_lt(_MIN_SUPPORTED_MICROVERSION) -class ReplicationSnapshotTest(base.BaseSharesMixedTest): - - @classmethod - def resource_setup(cls): - super(ReplicationSnapshotTest, cls).resource_setup() - # Create share_type - name = data_utils.rand_name(constants.TEMPEST_MANILA_PREFIX) - cls.admin_client = cls.admin_shares_v2_client - cls.replication_type = CONF.share.backend_replication_type - - if cls.replication_type not in constants.REPLICATION_TYPE_CHOICES: - raise share_exceptions.ShareReplicationTypeException( - replication_type=cls.replication_type - ) - cls.zones = cls.get_availability_zones(client=cls.admin_client) - cls.share_zone = cls.zones[0] - cls.replica_zone = cls.zones[-1] - - cls.extra_specs = cls.add_extra_specs_to_dict( - {"replication_type": cls.replication_type}) - share_type = cls.create_share_type( - name, - extra_specs=cls.extra_specs, - client=cls.admin_client) - cls.share_type = share_type["share_type"] - # Create share with above share_type - cls.creation_data = {'kwargs': { - 'share_type_id': cls.share_type['id'], - 'availability_zone': cls.share_zone, - }} - - @tc.attr(base.TAG_POSITIVE, base.TAG_BACKEND) - def test_snapshot_after_share_replica(self): - """Test the snapshot for replicated share. - - Create replica first and then create a snapshot. - Verify that the snapshot is properly created under replica by - creating a share from that snapshot. - """ - share = self.create_share(share_type_id=self.share_type['id'], - availability_zone=self.share_zone) - original_replica = self.shares_v2_client.list_share_replicas( - share["id"])[0] - - share_replica = self.create_share_replica(share["id"], - self.replica_zone, - cleanup=False) - self.addCleanup(self.delete_share_replica, original_replica['id']) - self.shares_v2_client.wait_for_share_replica_status( - share_replica['id'], constants.REPLICATION_STATE_IN_SYNC, - status_attr='replica_state') - - snapshot = self.create_snapshot_wait_for_active(share["id"]) - self.promote_share_replica(share_replica['id']) - self.delete_share_replica(original_replica['id']) - - snapshot = self.shares_v2_client.get_snapshot(snapshot['id']) - self.assertEqual(constants.STATUS_AVAILABLE, snapshot['status']) - - if CONF.share.capability_create_share_from_snapshot_support: - self.create_share(snapshot_id=snapshot['id']) - - @tc.attr(base.TAG_POSITIVE, base.TAG_BACKEND) - def test_snapshot_before_share_replica(self): - """Test the snapshot for replicated share. - - Create snapshot before creating share replica for the same - share. - Verify snapshot by creating share from the snapshot. 
- """ - share = self.create_share(share_type_id=self.share_type['id'], - availability_zone=self.share_zone) - snapshot = self.create_snapshot_wait_for_active(share["id"]) - - original_replica = self.shares_v2_client.list_share_replicas( - share["id"])[0] - share_replica = self.create_share_replica(share["id"], - self.replica_zone, - cleanup=False) - self.addCleanup(self.delete_share_replica, original_replica['id']) - self.shares_v2_client.wait_for_share_replica_status( - share_replica['id'], constants.REPLICATION_STATE_IN_SYNC, - status_attr='replica_state') - - # Wait for snapshot1 to become available - self.shares_v2_client.wait_for_snapshot_status( - snapshot['id'], "available") - - self.promote_share_replica(share_replica['id']) - self.delete_share_replica(original_replica['id']) - - snapshot = self.shares_v2_client.get_snapshot(snapshot['id']) - self.assertEqual(constants.STATUS_AVAILABLE, snapshot['status']) - - if CONF.share.capability_create_share_from_snapshot_support: - self.create_share(snapshot_id=snapshot['id']) - - @tc.attr(base.TAG_POSITIVE, base.TAG_BACKEND) - def test_snapshot_before_and_after_share_replica(self): - """Test the snapshot for replicated share. - - Verify that snapshot can be created before and after share replica - being created. - Verify snapshots by creating share from the snapshots. - """ - share = self.create_share(share_type_id=self.share_type['id'], - availability_zone=self.share_zone) - snapshot1 = self.create_snapshot_wait_for_active(share["id"]) - - original_replica = self.shares_v2_client.list_share_replicas( - share["id"])[0] - - share_replica = self.create_share_replica(share["id"], - self.replica_zone, - cleanup=False) - self.addCleanup(self.delete_share_replica, original_replica['id']) - self.shares_v2_client.wait_for_share_replica_status( - share_replica['id'], constants.REPLICATION_STATE_IN_SYNC, - status_attr='replica_state') - - snapshot2 = self.create_snapshot_wait_for_active(share["id"]) - - # Wait for snapshot1 to become available - self.shares_v2_client.wait_for_snapshot_status( - snapshot1['id'], "available") - - self.promote_share_replica(share_replica['id']) - # Remove the original active replica to ensure that snapshot is - # still being created successfully. - self.delete_share_replica(original_replica['id']) - - snapshot1 = self.shares_v2_client.get_snapshot(snapshot1['id']) - self.assertEqual(constants.STATUS_AVAILABLE, snapshot1['status']) - - snapshot2 = self.shares_v2_client.get_snapshot(snapshot2['id']) - self.assertEqual(constants.STATUS_AVAILABLE, snapshot2['status']) - - if CONF.share.capability_create_share_from_snapshot_support: - self.create_share(snapshot_id=snapshot1['id']) - self.create_share(snapshot_id=snapshot2['id']) - - @tc.attr(base.TAG_POSITIVE, base.TAG_BACKEND) - def test_delete_snapshot_after_adding_replica(self): - """Verify the snapshot delete. - - Ensure that deleting the original snapshot also deletes the - snapshot from replica. 
- """ - - share = self.create_share(share_type_id=self.share_type['id'], - availability_zone=self.share_zone) - share_replica = self.create_share_replica(share["id"], - self.replica_zone) - self.shares_v2_client.wait_for_share_replica_status( - share_replica['id'], constants.REPLICATION_STATE_IN_SYNC, - status_attr='replica_state') - snapshot = self.create_snapshot_wait_for_active(share["id"]) - self.shares_v2_client.delete_snapshot(snapshot['id']) - self.shares_v2_client.wait_for_resource_deletion( - snapshot_id=snapshot["id"]) - - @tc.attr(base.TAG_POSITIVE, base.TAG_BACKEND) - @testtools.skipUnless( - CONF.share.capability_create_share_from_snapshot_support, - "Create share from snapshot tests are disabled.") - def test_create_replica_from_snapshot_share(self): - """Test replica for a share that was created from snapshot.""" - - share = self.create_share(share_type_id=self.share_type['id'], - availability_zone=self.share_zone) - orig_snapshot = self.create_snapshot_wait_for_active(share["id"]) - snap_share = self.create_share(snapshot_id=orig_snapshot['id']) - original_replica = self.shares_v2_client.list_share_replicas( - snap_share["id"])[0] - share_replica = self.create_share_replica(snap_share["id"], - self.replica_zone, - cleanup=False) - self.addCleanup(self.delete_share_replica, original_replica['id']) - self.shares_v2_client.wait_for_share_replica_status( - share_replica['id'], constants.REPLICATION_STATE_IN_SYNC, - status_attr='replica_state') - self.promote_share_replica(share_replica['id']) - # Delete the demoted replica so promoted replica can be cleaned - # during the cleanup - self.delete_share_replica(original_replica['id']) diff --git a/manila_tempest_tests/tests/api/test_revert_to_snapshot.py b/manila_tempest_tests/tests/api/test_revert_to_snapshot.py deleted file mode 100644 index 156a396c62..0000000000 --- a/manila_tempest_tests/tests/api/test_revert_to_snapshot.py +++ /dev/null @@ -1,161 +0,0 @@ -# Copyright 2016 Andrew Kerr -# All Rights Reserved. -# -# Licensed under the Apache License, Version 2.0 (the "License"); you may -# not use this file except in compliance with the License. You may obtain -# a copy of the License at -# -# http://www.apache.org/licenses/LICENSE-2.0 -# -# Unless required by applicable law or agreed to in writing, software -# distributed under the License is distributed on an "AS IS" BASIS, WITHOUT -# WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. See the -# License for the specific language governing permissions and limitations -# under the License. - -import ddt - -from tempest import config -from tempest.lib.common.utils import data_utils -from testtools import testcase as tc - -from manila_tempest_tests.common import constants -from manila_tempest_tests import share_exceptions -from manila_tempest_tests.tests.api import base - -CONF = config.CONF - - -@base.skip_if_microversion_not_supported( - constants.REVERT_TO_SNAPSHOT_MICROVERSION) -@ddt.ddt -class RevertToSnapshotTest(base.BaseSharesMixedTest): - - @classmethod - def skip_checks(cls): - super(RevertToSnapshotTest, cls).skip_checks() - if not CONF.share.run_revert_to_snapshot_tests: - msg = "Revert to snapshot tests are disabled." - raise cls.skipException(msg) - if not CONF.share.capability_snapshot_support: - msg = "Snapshot support is disabled." - raise cls.skipException(msg) - if not CONF.share.run_snapshot_tests: - msg = "Snapshot tests are disabled." 
- raise cls.skipException(msg) - - @classmethod - def resource_setup(cls): - super(RevertToSnapshotTest, cls).resource_setup() - cls.admin_client = cls.admin_shares_v2_client - pools = cls.admin_client.list_pools(detail=True)['pools'] - revert_support = [ - pool['capabilities'][constants.REVERT_TO_SNAPSHOT_SUPPORT] - for pool in pools] - if not any(revert_support): - msg = "Revert to snapshot not supported." - raise cls.skipException(msg) - - cls.share_type_name = data_utils.rand_name("share-type") - extra_specs = {constants.REVERT_TO_SNAPSHOT_SUPPORT: True} - cls.revert_enabled_extra_specs = cls.add_extra_specs_to_dict( - extra_specs=extra_specs) - - cls.share_type = cls.create_share_type( - cls.share_type_name, - extra_specs=cls.revert_enabled_extra_specs, - client=cls.admin_client) - - cls.st_id = cls.share_type['share_type']['id'] - - cls.share = cls.create_share(share_type_id=cls.st_id) - - if CONF.share.run_replication_tests: - # Create replicated share type - cls.replicated_share_type_name = data_utils.rand_name("share-type") - cls.replication_type = CONF.share.backend_replication_type - if cls.replication_type not in constants.REPLICATION_TYPE_CHOICES: - raise share_exceptions.ShareReplicationTypeException( - replication_type=cls.replication_type - ) - cls.zones = cls.get_availability_zones(client=cls.admin_client) - cls.share_zone = cls.zones[0] - cls.replica_zone = cls.zones[-1] - - extra_specs = cls.add_extra_specs_to_dict({ - "replication_type": cls.replication_type, - constants.REVERT_TO_SNAPSHOT_SUPPORT: True, - }) - share_type = cls.create_share_type( - cls.replicated_share_type_name, - extra_specs=extra_specs, - client=cls.admin_client) - cls.replicated_share_type = share_type["share_type"] - - @tc.attr(base.TAG_POSITIVE, base.TAG_BACKEND) - @ddt.data( - *{constants.REVERT_TO_SNAPSHOT_MICROVERSION, - CONF.share.max_api_microversion} - ) - def test_revert_to_latest_snapshot(self, version): - snapshot = self.create_snapshot_wait_for_active(self.share['id'], - cleanup_in_class=False) - self.shares_v2_client.revert_to_snapshot( - self.share['id'], - snapshot['id'], - version=version) - self.shares_v2_client.wait_for_share_status(self.share['id'], - constants.STATUS_AVAILABLE) - - @tc.attr(base.TAG_POSITIVE, base.TAG_BACKEND) - @ddt.data( - *{constants.REVERT_TO_SNAPSHOT_MICROVERSION, - CONF.share.max_api_microversion} - ) - def test_revert_to_previous_snapshot(self, version): - snapshot1 = self.create_snapshot_wait_for_active( - self.share['id'], cleanup_in_class=False) - snapshot2 = self.create_snapshot_wait_for_active( - self.share['id'], cleanup_in_class=False) - - self.shares_v2_client.delete_snapshot(snapshot2['id']) - self.shares_v2_client.wait_for_resource_deletion( - snapshot_id=snapshot2['id']) - - self.shares_v2_client.revert_to_snapshot(self.share['id'], - snapshot1['id'], - version=version) - self.shares_v2_client.wait_for_share_status(self.share['id'], - constants.STATUS_AVAILABLE) - - @tc.attr(base.TAG_POSITIVE, base.TAG_BACKEND) - @tc.skipUnless(CONF.share.run_replication_tests, - 'Replication tests are disabled.') - @ddt.data( - *{constants.REVERT_TO_SNAPSHOT_MICROVERSION, - CONF.share.max_api_microversion} - ) - def test_revert_to_replicated_snapshot(self, version): - """Test reverting to a replicated snapshot.""" - share = self.create_share( - share_type_id=self.replicated_share_type['id'], - availability_zone=self.share_zone - ) - - share_replica = self.create_share_replica(share["id"], - self.replica_zone) - 
self.shares_v2_client.wait_for_share_replica_status( - share_replica['id'], constants.REPLICATION_STATE_IN_SYNC, - status_attr='replica_state') - - snapshot = self.create_snapshot_wait_for_active(share["id"]) - - self.shares_v2_client.revert_to_snapshot( - share['id'], - snapshot['id'], - version=version) - self.shares_v2_client.wait_for_share_status(share['id'], - constants.STATUS_AVAILABLE) - self.shares_v2_client.wait_for_share_replica_status( - share_replica['id'], constants.REPLICATION_STATE_IN_SYNC, - status_attr='replica_state') diff --git a/manila_tempest_tests/tests/api/test_revert_to_snapshot_negative.py b/manila_tempest_tests/tests/api/test_revert_to_snapshot_negative.py deleted file mode 100644 index 505a614243..0000000000 --- a/manila_tempest_tests/tests/api/test_revert_to_snapshot_negative.py +++ /dev/null @@ -1,159 +0,0 @@ -# Copyright 2016 Andrew Kerr -# All Rights Reserved. -# -# Licensed under the Apache License, Version 2.0 (the "License"); you may -# not use this file except in compliance with the License. You may obtain -# a copy of the License at -# -# http://www.apache.org/licenses/LICENSE-2.0 -# -# Unless required by applicable law or agreed to in writing, software -# distributed under the License is distributed on an "AS IS" BASIS, WITHOUT -# WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. See the -# License for the specific language governing permissions and limitations -# under the License. - -import ddt - -from tempest import config -from tempest.lib.common.utils import data_utils -from tempest.lib import exceptions -from testtools import testcase as tc - -from manila_tempest_tests.common import constants -from manila_tempest_tests.tests.api import base - -CONF = config.CONF - - -@base.skip_if_microversion_not_supported( - constants.REVERT_TO_SNAPSHOT_MICROVERSION) -@ddt.ddt -class RevertToSnapshotNegativeTest(base.BaseSharesMixedTest): - - @classmethod - def skip_checks(cls): - super(RevertToSnapshotNegativeTest, cls).skip_checks() - if not CONF.share.run_revert_to_snapshot_tests: - msg = "Revert to snapshot tests are disabled." - raise cls.skipException(msg) - if not CONF.share.capability_snapshot_support: - msg = "Snapshot support is disabled." - raise cls.skipException(msg) - if not CONF.share.run_snapshot_tests: - msg = "Snapshot tests are disabled." - raise cls.skipException(msg) - - @classmethod - def resource_setup(cls): - super(RevertToSnapshotNegativeTest, cls).resource_setup() - cls.admin_client = cls.admin_shares_v2_client - pools = cls.admin_client.list_pools(detail=True)['pools'] - revert_support = [ - pool['capabilities'][constants.REVERT_TO_SNAPSHOT_SUPPORT] - for pool in pools] - if not any(revert_support): - msg = "Revert to snapshot not supported." 
- raise cls.skipException(msg) - - cls.share_type_name = data_utils.rand_name("share-type") - extra_specs = {constants.REVERT_TO_SNAPSHOT_SUPPORT: True} - cls.revert_enabled_extra_specs = cls.add_extra_specs_to_dict( - extra_specs=extra_specs) - - cls.share_type = cls.create_share_type( - cls.share_type_name, - extra_specs=cls.revert_enabled_extra_specs, - client=cls.admin_client) - - cls.st_id = cls.share_type['share_type']['id'] - - cls.share = cls.create_share(share_type_id=cls.st_id) - cls.share2 = cls.create_share(share_type_id=cls.st_id) - - @tc.attr(base.TAG_NEGATIVE, base.TAG_API_WITH_BACKEND) - @ddt.data( - *{constants.REVERT_TO_SNAPSHOT_MICROVERSION, - CONF.share.max_api_microversion} - ) - def test_revert_to_second_latest_snapshot(self, version): - snapshot1 = self.create_snapshot_wait_for_active( - self.share['id'], cleanup_in_class=False) - self.create_snapshot_wait_for_active(self.share['id'], - cleanup_in_class=False) - - self.assertRaises(exceptions.Conflict, - self.shares_v2_client.revert_to_snapshot, - self.share['id'], - snapshot1['id'], - version=version) - - @tc.attr(base.TAG_NEGATIVE, base.TAG_API_WITH_BACKEND) - @ddt.data( - *{constants.REVERT_TO_SNAPSHOT_MICROVERSION, - CONF.share.max_api_microversion} - ) - def test_revert_to_error_snapshot(self, version): - snapshot = self.create_snapshot_wait_for_active(self.share['id'], - cleanup_in_class=False) - - self.admin_client.reset_state(snapshot['id'], - status=constants.STATUS_ERROR, - s_type='snapshots') - - self.assertRaises(exceptions.Conflict, - self.shares_v2_client.revert_to_snapshot, - self.share['id'], - snapshot['id'], - version=version) - - @tc.attr(base.TAG_NEGATIVE, base.TAG_API_WITH_BACKEND) - @ddt.data( - *{constants.REVERT_TO_SNAPSHOT_MICROVERSION, - CONF.share.max_api_microversion} - ) - def test_revert_error_share_to_snapshot(self, version): - snapshot = self.create_snapshot_wait_for_active(self.share['id'], - cleanup_in_class=False) - - self.admin_client.reset_state(self.share['id'], - status=constants.STATUS_ERROR, - s_type='shares') - - self.addCleanup(self.admin_client.reset_state, - self.share['id'], - status=constants.STATUS_AVAILABLE, - s_type='shares') - - self.assertRaises(exceptions.Conflict, - self.shares_v2_client.revert_to_snapshot, - self.share['id'], - snapshot['id'], - version=version) - - @tc.attr(base.TAG_NEGATIVE, base.TAG_API_WITH_BACKEND) - @ddt.data( - *{constants.REVERT_TO_SNAPSHOT_MICROVERSION, - CONF.share.max_api_microversion} - ) - def test_revert_to_missing_snapshot(self, version): - self.assertRaises(exceptions.BadRequest, - self.shares_v2_client.revert_to_snapshot, - self.share['id'], - self.share['id'], - version=version) - - @tc.attr(base.TAG_NEGATIVE, base.TAG_API_WITH_BACKEND) - @ddt.data( - *{constants.REVERT_TO_SNAPSHOT_MICROVERSION, - CONF.share.max_api_microversion} - ) - def test_revert_to_invalid_snapshot(self, version): - snapshot = self.create_snapshot_wait_for_active( - self.share['id'], cleanup_in_class=False) - - self.assertRaises(exceptions.BadRequest, - self.shares_v2_client.revert_to_snapshot, - self.share2['id'], - snapshot['id'], - version=version) diff --git a/manila_tempest_tests/tests/api/test_rules.py b/manila_tempest_tests/tests/api/test_rules.py deleted file mode 100644 index 0b44a5c8e9..0000000000 --- a/manila_tempest_tests/tests/api/test_rules.py +++ /dev/null @@ -1,631 +0,0 @@ -# Copyright 2014 Mirantis Inc. -# All Rights Reserved. 
-# -# Licensed under the Apache License, Version 2.0 (the "License"); you may -# not use this file except in compliance with the License. You may obtain -# a copy of the License at -# -# http://www.apache.org/licenses/LICENSE-2.0 -# -# Unless required by applicable law or agreed to in writing, software -# distributed under the License is distributed on an "AS IS" BASIS, WITHOUT -# WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. See the -# License for the specific language governing permissions and limitations -# under the License. - - -import ddt -import itertools -from tempest import config -from tempest.lib import exceptions as lib_exc -import testtools -from testtools import testcase as tc - -from manila_tempest_tests.tests.api import base -from manila_tempest_tests import utils - -CONF = config.CONF -LATEST_MICROVERSION = CONF.share.max_api_microversion - - -def _create_delete_ro_access_rule(self, version): - """Common test case for usage in test suites with different decorators. - - :param self: instance of test class - """ - - if utils.is_microversion_eq(version, '1.0'): - rule = self.shares_client.create_access_rule( - self.share["id"], self.access_type, self.access_to, 'ro') - else: - rule = self.shares_v2_client.create_access_rule( - self.share["id"], self.access_type, self.access_to, 'ro', - version=version) - - self.assertEqual('ro', rule['access_level']) - for key in ('deleted', 'deleted_at', 'instance_mappings'): - self.assertNotIn(key, rule.keys()) - - # rules must start out in 'new' until 2.28 & 'queued_to_apply' after 2.28 - if utils.is_microversion_le(version, "2.27"): - self.assertEqual("new", rule['state']) - else: - self.assertEqual("queued_to_apply", rule['state']) - - if utils.is_microversion_le(version, '2.9'): - self.shares_client.wait_for_access_rule_status( - self.share["id"], rule["id"], "active") - else: - self.shares_v2_client.wait_for_share_status( - self.share["id"], "active", status_attr='access_rules_status', - version=version) - # If the 'access_rules_status' transitions to 'active', - # rule state must too - rules = self.shares_v2_client.list_access_rules(self.share['id']) - rule = [r for r in rules if r['id'] == rule['id']][0] - self.assertEqual("active", rule['state']) - - if utils.is_microversion_eq(version, '1.0'): - self.shares_client.delete_access_rule(self.share["id"], rule["id"]) - self.shares_client.wait_for_resource_deletion( - rule_id=rule["id"], share_id=self.share['id']) - else: - self.shares_v2_client.delete_access_rule( - self.share["id"], rule["id"], version=version) - self.shares_v2_client.wait_for_resource_deletion( - rule_id=rule["id"], share_id=self.share['id'], version=version) - - -@ddt.ddt -class ShareIpRulesForNFSTest(base.BaseSharesTest): - protocol = "nfs" - - @classmethod - def resource_setup(cls): - super(ShareIpRulesForNFSTest, cls).resource_setup() - if (cls.protocol not in CONF.share.enable_protocols or - cls.protocol not in CONF.share.enable_ip_rules_for_protocols): - msg = "IP rule tests for %s protocol are disabled" % cls.protocol - raise cls.skipException(msg) - cls.share = cls.create_share(cls.protocol) - cls.access_type = "ip" - cls.access_to = "2.2.2.2" - - @tc.attr(base.TAG_POSITIVE, base.TAG_BACKEND) - @ddt.data(*itertools.chain( - itertools.product({'1.0', '2.9', '2.37', LATEST_MICROVERSION}, {4}), - itertools.product({'2.38', LATEST_MICROVERSION}, {6}) - )) - @ddt.unpack - def test_create_delete_access_rules_with_one_ip(self, version, - ip_version): - - if ip_version == 4: - access_to = 
utils.rand_ip() - else: - access_to = utils.rand_ipv6_ip() - # create rule - if utils.is_microversion_eq(version, '1.0'): - rule = self.shares_client.create_access_rule( - self.share["id"], self.access_type, access_to) - else: - rule = self.shares_v2_client.create_access_rule( - self.share["id"], self.access_type, access_to, - version=version) - - self.assertEqual('rw', rule['access_level']) - for key in ('deleted', 'deleted_at', 'instance_mappings'): - self.assertNotIn(key, rule.keys()) - - # rules must start out in 'new' until 2.28 & 'queued_to_apply' after - if utils.is_microversion_le(version, "2.27"): - self.assertEqual("new", rule['state']) - else: - self.assertEqual("queued_to_apply", rule['state']) - - if utils.is_microversion_eq(version, '1.0'): - self.shares_client.wait_for_access_rule_status( - self.share["id"], rule["id"], "active") - elif utils.is_microversion_eq(version, '2.9'): - self.shares_v2_client.wait_for_access_rule_status( - self.share["id"], rule["id"], "active") - else: - self.shares_v2_client.wait_for_share_status( - self.share["id"], "active", status_attr='access_rules_status', - version=version) - - # delete rule and wait for deletion - if utils.is_microversion_eq(version, '1.0'): - self.shares_client.delete_access_rule(self.share["id"], rule["id"]) - self.shares_client.wait_for_resource_deletion( - rule_id=rule["id"], share_id=self.share['id']) - else: - self.shares_v2_client.delete_access_rule( - self.share["id"], rule["id"], version=version) - self.shares_v2_client.wait_for_resource_deletion( - rule_id=rule["id"], share_id=self.share['id'], version=version) - - @tc.attr(base.TAG_POSITIVE, base.TAG_BACKEND) - @ddt.data(*itertools.chain( - itertools.product({'1.0', '2.9', '2.37', LATEST_MICROVERSION}, {4}), - itertools.product({'2.38', LATEST_MICROVERSION}, {6}) - )) - @ddt.unpack - def test_create_delete_access_rule_with_cidr(self, version, ip_version): - if ip_version == 4: - access_to = utils.rand_ip(network=True) - else: - access_to = utils.rand_ipv6_ip(network=True) - # create rule - if utils.is_microversion_eq(version, '1.0'): - rule = self.shares_client.create_access_rule( - self.share["id"], self.access_type, access_to) - else: - rule = self.shares_v2_client.create_access_rule( - self.share["id"], self.access_type, access_to, - version=version) - - for key in ('deleted', 'deleted_at', 'instance_mappings'): - self.assertNotIn(key, rule.keys()) - self.assertEqual('rw', rule['access_level']) - - # rules must start out in 'new' until 2.28 & 'queued_to_apply' after - if utils.is_microversion_le(version, "2.27"): - self.assertEqual("new", rule['state']) - else: - self.assertEqual("queued_to_apply", rule['state']) - - if utils.is_microversion_eq(version, '1.0'): - self.shares_client.wait_for_access_rule_status( - self.share["id"], rule["id"], "active") - elif utils.is_microversion_eq(version, '2.9'): - self.shares_v2_client.wait_for_access_rule_status( - self.share["id"], rule["id"], "active") - else: - self.shares_v2_client.wait_for_share_status( - self.share["id"], "active", status_attr='access_rules_status', - version=version) - - # delete rule and wait for deletion - if utils.is_microversion_eq(version, '1.0'): - self.shares_client.delete_access_rule(self.share["id"], rule["id"]) - self.shares_client.wait_for_resource_deletion( - rule_id=rule["id"], share_id=self.share['id']) - else: - self.shares_v2_client.delete_access_rule( - self.share["id"], rule["id"], version=version) - self.shares_v2_client.wait_for_resource_deletion( - rule_id=rule["id"], 
share_id=self.share['id'], version=version) - - @tc.attr(base.TAG_POSITIVE, base.TAG_BACKEND) - @testtools.skipIf( - "nfs" not in CONF.share.enable_ro_access_level_for_protocols, - "RO access rule tests are disabled for NFS protocol.") - @ddt.data(*set(['1.0', '2.9', '2.27', '2.28', LATEST_MICROVERSION])) - def test_create_delete_ro_access_rule(self, client_name): - _create_delete_ro_access_rule(self, client_name) - - -@ddt.ddt -class ShareIpRulesForCIFSTest(ShareIpRulesForNFSTest): - protocol = "cifs" - - @tc.attr(base.TAG_POSITIVE, base.TAG_BACKEND) - @testtools.skipIf( - "cifs" not in CONF.share.enable_ro_access_level_for_protocols, - "RO access rule tests are disabled for CIFS protocol.") - @ddt.data(*set(['1.0', '2.9', '2.27', '2.28', LATEST_MICROVERSION])) - def test_create_delete_ro_access_rule(self, version): - _create_delete_ro_access_rule(self, version) - - -@ddt.ddt -class ShareUserRulesForNFSTest(base.BaseSharesTest): - protocol = "nfs" - - @classmethod - def resource_setup(cls): - super(ShareUserRulesForNFSTest, cls).resource_setup() - if (cls.protocol not in CONF.share.enable_protocols or - cls.protocol not in - CONF.share.enable_user_rules_for_protocols): - msg = "USER rule tests for %s protocol are disabled" % cls.protocol - raise cls.skipException(msg) - cls.share = cls.create_share(cls.protocol) - cls.access_type = "user" - cls.access_to = CONF.share.username_for_user_rules - - @tc.attr(base.TAG_POSITIVE, base.TAG_BACKEND) - @ddt.data(*set(['1.0', '2.9', '2.27', '2.28', LATEST_MICROVERSION])) - def test_create_delete_user_rule(self, version): - - # create rule - if utils.is_microversion_eq(version, '1.0'): - rule = self.shares_client.create_access_rule( - self.share["id"], self.access_type, self.access_to) - else: - rule = self.shares_v2_client.create_access_rule( - self.share["id"], self.access_type, self.access_to, - version=version) - - self.assertEqual('rw', rule['access_level']) - for key in ('deleted', 'deleted_at', 'instance_mappings'): - self.assertNotIn(key, rule.keys()) - - # rules must start out in 'new' until 2.28 & 'queued_to_apply' after - if utils.is_microversion_le(version, "2.27"): - self.assertEqual("new", rule['state']) - else: - self.assertEqual("queued_to_apply", rule['state']) - - if utils.is_microversion_eq(version, '1.0'): - self.shares_client.wait_for_access_rule_status( - self.share["id"], rule["id"], "active") - elif utils.is_microversion_eq(version, '2.9'): - self.shares_v2_client.wait_for_access_rule_status( - self.share["id"], rule["id"], "active") - else: - self.shares_v2_client.wait_for_share_status( - self.share["id"], "active", status_attr='access_rules_status', - version=version) - - # delete rule and wait for deletion - if utils.is_microversion_eq(version, '1.0'): - self.shares_client.delete_access_rule(self.share["id"], rule["id"]) - self.shares_client.wait_for_resource_deletion( - rule_id=rule["id"], share_id=self.share['id']) - else: - self.shares_v2_client.delete_access_rule( - self.share["id"], rule["id"], version=version) - self.shares_v2_client.wait_for_resource_deletion( - rule_id=rule["id"], share_id=self.share['id'], version=version) - - @tc.attr(base.TAG_POSITIVE, base.TAG_BACKEND) - @testtools.skipIf( - "nfs" not in CONF.share.enable_ro_access_level_for_protocols, - "RO access rule tests are disabled for NFS protocol.") - @ddt.data(*set(['1.0', '2.9', '2.27', '2.28', LATEST_MICROVERSION])) - def test_create_delete_ro_access_rule(self, version): - _create_delete_ro_access_rule(self, version) - - -@ddt.ddt -class 
ShareUserRulesForCIFSTest(ShareUserRulesForNFSTest): - protocol = "cifs" - - @tc.attr(base.TAG_POSITIVE, base.TAG_BACKEND) - @testtools.skipIf( - "cifs" not in CONF.share.enable_ro_access_level_for_protocols, - "RO access rule tests are disabled for CIFS protocol.") - @ddt.data(*set(['1.0', '2.9', '2.27', '2.28', LATEST_MICROVERSION])) - def test_create_delete_ro_access_rule(self, version): - _create_delete_ro_access_rule(self, version) - - -@ddt.ddt -class ShareCertRulesForGLUSTERFSTest(base.BaseSharesTest): - protocol = "glusterfs" - - @classmethod - def resource_setup(cls): - super(ShareCertRulesForGLUSTERFSTest, cls).resource_setup() - if (cls.protocol not in CONF.share.enable_protocols or - cls.protocol not in - CONF.share.enable_cert_rules_for_protocols): - msg = "Cert rule tests for %s protocol are disabled" % cls.protocol - raise cls.skipException(msg) - cls.share = cls.create_share(cls.protocol) - cls.access_type = "cert" - # Provide access to a client identified by a common name (CN) of the - # certificate that it possesses. - cls.access_to = "client1.com" - - @tc.attr(base.TAG_POSITIVE, base.TAG_BACKEND) - @ddt.data(*set(['1.0', '2.9', '2.27', '2.28', LATEST_MICROVERSION])) - def test_create_delete_cert_rule(self, version): - - # create rule - if utils.is_microversion_eq(version, '1.0'): - rule = self.shares_client.create_access_rule( - self.share["id"], self.access_type, self.access_to) - else: - rule = self.shares_v2_client.create_access_rule( - self.share["id"], self.access_type, self.access_to, - version=version) - - self.assertEqual('rw', rule['access_level']) - for key in ('deleted', 'deleted_at', 'instance_mappings'): - self.assertNotIn(key, rule.keys()) - - # rules must start out in 'new' until 2.28 & 'queued_to_apply' after - if utils.is_microversion_le(version, "2.27"): - self.assertEqual("new", rule['state']) - else: - self.assertEqual("queued_to_apply", rule['state']) - - if utils.is_microversion_eq(version, '1.0'): - self.shares_client.wait_for_access_rule_status( - self.share["id"], rule["id"], "active") - elif utils.is_microversion_eq(version, '2.9'): - self.shares_v2_client.wait_for_access_rule_status( - self.share["id"], rule["id"], "active") - else: - self.shares_v2_client.wait_for_share_status( - self.share["id"], "active", status_attr='access_rules_status', - version=version) - - # delete rule - if utils.is_microversion_eq(version, '1.0'): - self.shares_client.delete_access_rule(self.share["id"], rule["id"]) - self.shares_client.wait_for_resource_deletion( - rule_id=rule["id"], share_id=self.share['id']) - else: - self.shares_v2_client.delete_access_rule( - self.share["id"], rule["id"], version=version) - self.shares_v2_client.wait_for_resource_deletion( - rule_id=rule["id"], share_id=self.share['id'], version=version) - - @tc.attr(base.TAG_POSITIVE, base.TAG_BACKEND) - @testtools.skipIf( - "glusterfs" not in CONF.share.enable_ro_access_level_for_protocols, - "RO access rule tests are disabled for GLUSTERFS protocol.") - @ddt.data(*set(['1.0', '2.9', '2.27', '2.28', LATEST_MICROVERSION])) - def test_create_delete_cert_ro_access_rule(self, version): - if utils.is_microversion_eq(version, '1.0'): - rule = self.shares_client.create_access_rule( - self.share["id"], 'cert', 'client2.com', 'ro') - else: - rule = self.shares_v2_client.create_access_rule( - self.share["id"], 'cert', 'client2.com', 'ro', - version=version) - - self.assertEqual('ro', rule['access_level']) - for key in ('deleted', 'deleted_at', 'instance_mappings'): - self.assertNotIn(key, rule.keys()) 
- - # rules must start out in 'new' until 2.28 & 'queued_to_apply' after - if utils.is_microversion_le(version, "2.27"): - self.assertEqual("new", rule['state']) - else: - self.assertEqual("queued_to_apply", rule['state']) - - if utils.is_microversion_eq(version, '1.0'): - self.shares_client.wait_for_access_rule_status( - self.share["id"], rule["id"], "active") - elif utils.is_microversion_eq(version, '2.9'): - self.shares_v2_client.wait_for_access_rule_status( - self.share["id"], rule["id"], "active") - else: - self.shares_v2_client.wait_for_share_status( - self.share["id"], "active", status_attr='access_rules_status', - version=version) - - if utils.is_microversion_eq(version, '1.0'): - self.shares_client.delete_access_rule(self.share["id"], rule["id"]) - self.shares_client.wait_for_resource_deletion( - rule_id=rule["id"], share_id=self.share['id']) - else: - self.shares_v2_client.delete_access_rule( - self.share["id"], rule["id"], version=version) - self.shares_v2_client.wait_for_resource_deletion( - rule_id=rule["id"], share_id=self.share['id'], version=version) - - -@ddt.ddt -class ShareCephxRulesForCephFSTest(base.BaseSharesTest): - protocol = "cephfs" - - @classmethod - def resource_setup(cls): - super(ShareCephxRulesForCephFSTest, cls).resource_setup() - if (cls.protocol not in CONF.share.enable_protocols or - cls.protocol not in - CONF.share.enable_cephx_rules_for_protocols): - msg = ("Cephx rule tests for %s protocol are disabled." % - cls.protocol) - raise cls.skipException(msg) - cls.share = cls.create_share(cls.protocol) - cls.access_type = "cephx" - # Provide access to a client identified by a cephx auth id. - cls.access_to = "bob" - - @tc.attr(base.TAG_POSITIVE, base.TAG_BACKEND) - @ddt.data(*itertools.product( - set(['2.13', '2.27', '2.28', LATEST_MICROVERSION]), - ("alice", "alice_bob", "alice bob"), - ('rw', 'ro'))) - @ddt.unpack - def test_create_delete_cephx_rule(self, version, access_to, access_level): - rule = self.shares_v2_client.create_access_rule( - self.share["id"], self.access_type, access_to, version=version, - access_level=access_level) - - self.assertEqual(access_level, rule['access_level']) - for key in ('deleted', 'deleted_at', 'instance_mappings'): - self.assertNotIn(key, rule.keys()) - self.shares_v2_client.wait_for_access_rule_status( - self.share["id"], rule["id"], "active") - - self.shares_v2_client.delete_access_rule( - self.share["id"], rule["id"], version=version) - self.shares_v2_client.wait_for_resource_deletion( - rule_id=rule["id"], share_id=self.share['id']) - - -@ddt.ddt -class ShareRulesTest(base.BaseSharesTest): - - @classmethod - def resource_setup(cls): - super(ShareRulesTest, cls).resource_setup() - if not (any(p in CONF.share.enable_ip_rules_for_protocols - for p in cls.protocols) or - any(p in CONF.share.enable_user_rules_for_protocols - for p in cls.protocols) or - any(p in CONF.share.enable_cert_rules_for_protocols - for p in cls.protocols) or - any(p in CONF.share.enable_cephx_rules_for_protocols - for p in cls.protocols)): - cls.message = "Rule tests are disabled" - raise cls.skipException(cls.message) - if CONF.share.enable_ip_rules_for_protocols: - cls.protocol = CONF.share.enable_ip_rules_for_protocols[0] - cls.access_type = "ip" - cls.access_to = "8.8.8.8" - elif CONF.share.enable_user_rules_for_protocols: - cls.protocol = CONF.share.enable_user_rules_for_protocols[0] - cls.access_type = "user" - cls.access_to = CONF.share.username_for_user_rules - elif CONF.share.enable_cert_rules_for_protocols: - cls.protocol = 
CONF.share.enable_cert_rules_for_protocols[0] - cls.access_type = "cert" - cls.access_to = "client3.com" - elif CONF.share.enable_cephx_rules_for_protocols: - cls.protocol = CONF.share.enable_cephx_rules_for_protocols[0] - cls.access_type = "cephx" - cls.access_to = "eve" - cls.shares_v2_client.share_protocol = cls.protocol - cls.share = cls.create_share() - - @tc.attr(base.TAG_POSITIVE, base.TAG_API_WITH_BACKEND) - @ddt.data(*set(['1.0', '2.9', '2.27', '2.28', LATEST_MICROVERSION])) - def test_list_access_rules(self, version): - if (utils.is_microversion_lt(version, '2.13') and - CONF.share.enable_cephx_rules_for_protocols): - msg = ("API version %s does not support cephx access type, need " - "version >= 2.13." % version) - raise self.skipException(msg) - - # create rule - if utils.is_microversion_eq(version, '1.0'): - rule = self.shares_client.create_access_rule( - self.share["id"], self.access_type, self.access_to) - else: - rule = self.shares_v2_client.create_access_rule( - self.share["id"], self.access_type, self.access_to, - version=version) - - # verify added rule keys since 2.33 when create rule - if utils.is_microversion_ge(version, '2.33'): - self.assertIn('created_at', list(rule.keys())) - self.assertIn('updated_at', list(rule.keys())) - else: - self.assertNotIn('created_at', list(rule.keys())) - self.assertNotIn('updated_at', list(rule.keys())) - - # rules must start out in 'new' until 2.28 & 'queued_to_apply' after - if utils.is_microversion_le(version, "2.27"): - self.assertEqual("new", rule['state']) - else: - self.assertEqual("queued_to_apply", rule['state']) - - if utils.is_microversion_eq(version, '1.0'): - self.shares_client.wait_for_access_rule_status( - self.share["id"], rule["id"], "active") - elif utils.is_microversion_eq(version, '2.9'): - self.shares_v2_client.wait_for_access_rule_status( - self.share["id"], rule["id"], "active") - else: - self.shares_v2_client.wait_for_share_status( - self.share["id"], "active", status_attr='access_rules_status', - version=version) - - # list rules - if utils.is_microversion_eq(version, '1.0'): - rules = self.shares_client.list_access_rules(self.share["id"]) - else: - rules = self.shares_v2_client.list_access_rules(self.share["id"], - version=version) - - # verify keys - keys = ("id", "access_type", "access_to", "access_level") - if utils.is_microversion_ge(version, '2.21'): - keys += ("access_key", ) - if utils.is_microversion_ge(version, '2.33'): - keys += ("created_at", "updated_at", ) - for key in keys: - [self.assertIn(key, r.keys()) for r in rules] - for key in ('deleted', 'deleted_at', 'instance_mappings'): - [self.assertNotIn(key, r.keys()) for r in rules] - - # verify values - self.assertEqual(self.access_type, rules[0]["access_type"]) - self.assertEqual(self.access_to, rules[0]["access_to"]) - self.assertEqual('rw', rules[0]["access_level"]) - if utils.is_microversion_ge(version, '2.21'): - if self.access_type == 'cephx': - self.assertIsNotNone(rules[0]['access_key']) - else: - self.assertIsNone(rules[0]['access_key']) - - # our share id in list and have no duplicates - gen = [r["id"] for r in rules if r["id"] in rule["id"]] - msg = "expected id lists %s times in rule list" % (len(gen)) - self.assertEqual(1, len(gen), msg) - - if utils.is_microversion_eq(version, '1.0'): - self.shares_client.delete_access_rule(self.share["id"], rule["id"]) - self.shares_client.wait_for_resource_deletion( - rule_id=rule["id"], share_id=self.share['id']) - else: - self.shares_v2_client.delete_access_rule( - self.share["id"], rule["id"], 
version=version) - self.shares_v2_client.wait_for_resource_deletion( - rule_id=rule["id"], share_id=self.share['id'], version=version) - - @tc.attr(base.TAG_POSITIVE, base.TAG_API_WITH_BACKEND) - @ddt.data(*set(['1.0', '2.9', '2.27', '2.28', LATEST_MICROVERSION])) - def test_access_rules_deleted_if_share_deleted(self, version): - if (utils.is_microversion_lt(version, '2.13') and - CONF.share.enable_cephx_rules_for_protocols): - msg = ("API version %s does not support cephx access type, need " - "version >= 2.13." % version) - raise self.skipException(msg) - - # create share - share = self.create_share() - - # create rule - if utils.is_microversion_eq(version, '1.0'): - rule = self.shares_client.create_access_rule( - share["id"], self.access_type, self.access_to) - else: - rule = self.shares_v2_client.create_access_rule( - share["id"], self.access_type, self.access_to, - version=version) - - # rules must start out in 'new' until 2.28 & 'queued_to_apply' after - if utils.is_microversion_le(version, "2.27"): - self.assertEqual("new", rule['state']) - else: - self.assertEqual("queued_to_apply", rule['state']) - - if utils.is_microversion_eq(version, '1.0'): - self.shares_client.wait_for_access_rule_status( - share["id"], rule["id"], "active") - elif utils.is_microversion_eq(version, '2.9'): - self.shares_v2_client.wait_for_access_rule_status( - share["id"], rule["id"], "active") - else: - self.shares_v2_client.wait_for_share_status( - share["id"], "active", status_attr='access_rules_status', - version=version) - - # delete share - if utils.is_microversion_eq(version, '1.0'): - self.shares_client.delete_share(share['id']) - self.shares_client.wait_for_resource_deletion(share_id=share['id']) - else: - self.shares_v2_client.delete_share(share['id'], version=version) - self.shares_v2_client.wait_for_resource_deletion( - share_id=share['id'], version=version) - - # verify absence of rules for nonexistent share id - if utils.is_microversion_eq(version, '1.0'): - self.assertRaises(lib_exc.NotFound, - self.shares_client.list_access_rules, - share['id']) - else: - self.assertRaises(lib_exc.NotFound, - self.shares_v2_client.list_access_rules, - share['id'], version) diff --git a/manila_tempest_tests/tests/api/test_rules_negative.py b/manila_tempest_tests/tests/api/test_rules_negative.py deleted file mode 100644 index 22f232fbfb..0000000000 --- a/manila_tempest_tests/tests/api/test_rules_negative.py +++ /dev/null @@ -1,406 +0,0 @@ -# Copyright 2014 Mirantis Inc. -# All Rights Reserved. -# -# Licensed under the Apache License, Version 2.0 (the "License"); you may -# not use this file except in compliance with the License. You may obtain -# a copy of the License at -# -# http://www.apache.org/licenses/LICENSE-2.0 -# -# Unless required by applicable law or agreed to in writing, software -# distributed under the License is distributed on an "AS IS" BASIS, WITHOUT -# WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. See the -# License for the specific language governing permissions and limitations -# under the License. 
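The rule tests above all repeat one lifecycle: create an access rule, expect its state to be 'new' up to microversion 2.27 and 'queued_to_apply' from 2.28 on, wait for it to become active, then delete it and wait for cleanup. The condensed sketch below is illustrative only; it reuses the helpers visible in the deleted tests (create_share, create_access_rule, wait_for_share_status, delete_access_rule, wait_for_resource_deletion), while the class name and the fixed NFS/IP parameters are assumptions, not part of the original suite.

import ddt
from tempest import config
from testtools import testcase as tc

from manila_tempest_tests.tests.api import base
from manila_tempest_tests import utils

CONF = config.CONF
LATEST_MICROVERSION = CONF.share.max_api_microversion


@ddt.ddt
class AccessRuleLifecycleSketch(base.BaseSharesTest):
    """Illustrative only: condensed form of the removed rule tests."""
    protocol = "nfs"

    @classmethod
    def resource_setup(cls):
        super(AccessRuleLifecycleSketch, cls).resource_setup()
        cls.share = cls.create_share(cls.protocol)

    @tc.attr(base.TAG_POSITIVE, base.TAG_BACKEND)
    @ddt.data(*set(['2.27', '2.28', LATEST_MICROVERSION]))
    def test_rule_lifecycle(self, version):
        rule = self.shares_v2_client.create_access_rule(
            self.share["id"], "ip", "10.0.0.1", version=version)

        # Rules start in 'new' up to 2.27 and 'queued_to_apply' from 2.28.
        if utils.is_microversion_le(version, "2.27"):
            self.assertEqual("new", rule['state'])
        else:
            self.assertEqual("queued_to_apply", rule['state'])

        # For versions newer than 2.9 the removed tests poll the share's
        # aggregate access_rules_status instead of the per-rule state.
        self.shares_v2_client.wait_for_share_status(
            self.share["id"], "active", status_attr='access_rules_status',
            version=version)

        self.shares_v2_client.delete_access_rule(
            self.share["id"], rule["id"], version=version)
        self.shares_v2_client.wait_for_resource_deletion(
            rule_id=rule["id"], share_id=self.share['id'], version=version)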
- -import ddt -from tempest import config -from tempest.lib import exceptions as lib_exc -import testtools -from testtools import testcase as tc - -from manila_tempest_tests.common import constants -from manila_tempest_tests.tests.api import base -from manila_tempest_tests import utils - -CONF = config.CONF -LATEST_MICROVERSION = CONF.share.max_api_microversion - - -@ddt.ddt -class ShareIpRulesForNFSNegativeTest(base.BaseSharesMixedTest): - protocol = "nfs" - - @classmethod - def resource_setup(cls): - super(ShareIpRulesForNFSNegativeTest, cls).resource_setup() - cls.admin_client = cls.admin_shares_v2_client - if not (cls.protocol in CONF.share.enable_protocols and - cls.protocol in CONF.share.enable_ip_rules_for_protocols): - msg = "IP rule tests for %s protocol are disabled" % cls.protocol - raise cls.skipException(msg) - # create share - cls.share = cls.create_share(cls.protocol) - if CONF.share.run_snapshot_tests: - # create snapshot - cls.snap = cls.create_snapshot_wait_for_active(cls.share["id"]) - - @tc.attr(base.TAG_NEGATIVE, base.TAG_API_WITH_BACKEND) - @ddt.data('1.2.3.256', - '1.1.1.-', - '1.2.3.4/33', - '1.2.3.*', - '1.2.3.*/23', - '1.2.3.1|23', - '1.2.3.1/-1', - '1.2.3.1/', - 'ad80::abaa:0:c2:2/-3', - 'AD80:ABAA::|26', - '2001:DB8:2de:0:0:0:0:e13:200a', - ) - def test_create_access_rule_ip_with_wrong_target(self, ip_address): - for client_name in ['shares_client', 'shares_v2_client']: - self.assertRaises(lib_exc.BadRequest, - getattr(self, client_name).create_access_rule, - self.share["id"], "ip", ip_address) - - @tc.attr(base.TAG_NEGATIVE, base.TAG_API_WITH_BACKEND) - @ddt.data('shares_client', 'shares_v2_client') - def test_create_access_rule_with_wrong_level(self, client_name): - self.assertRaises(lib_exc.BadRequest, - getattr(self, client_name).create_access_rule, - self.share["id"], - 'ip', - '2.2.2.2', - 'su') - - @tc.attr(base.TAG_NEGATIVE, base.TAG_API_WITH_BACKEND) - @ddt.data('1.0', '2.9', LATEST_MICROVERSION) - def test_create_duplicate_of_ip_rule(self, version): - # test data - access_type = "ip" - access_to = "1.2.3.4" - - # create rule - if utils.is_microversion_eq(version, '1.0'): - rule = self.shares_client.create_access_rule( - self.share["id"], access_type, access_to) - else: - rule = self.shares_v2_client.create_access_rule( - self.share["id"], access_type, access_to, version=version) - - if utils.is_microversion_eq(version, '1.0'): - self.shares_client.wait_for_access_rule_status( - self.share["id"], rule["id"], "active") - elif utils.is_microversion_eq(version, '2.9'): - self.shares_v2_client.wait_for_access_rule_status( - self.share["id"], rule["id"], "active") - else: - self.shares_v2_client.wait_for_share_status( - self.share["id"], "active", status_attr='access_rules_status', - version=version) - - # try create duplicate of rule - if utils.is_microversion_eq(version, '1.0'): - self.assertRaises(lib_exc.BadRequest, - self.shares_client.create_access_rule, - self.share["id"], access_type, access_to) - else: - self.assertRaises(lib_exc.BadRequest, - self.shares_v2_client.create_access_rule, - self.share["id"], access_type, access_to, - version=version) - - # delete rule and wait for deletion - if utils.is_microversion_eq(version, '1.0'): - self.shares_client.delete_access_rule(self.share["id"], - rule["id"]) - self.shares_client.wait_for_resource_deletion( - rule_id=rule["id"], share_id=self.share["id"]) - else: - self.shares_v2_client.delete_access_rule(self.share["id"], - rule["id"]) - self.shares_v2_client.wait_for_resource_deletion( - 
rule_id=rule["id"], share_id=self.share["id"], version=version) - - @tc.attr(base.TAG_NEGATIVE, base.TAG_API_WITH_BACKEND) - def test_add_access_rule_on_share_with_no_host(self): - access_type, access_to = self._get_access_rule_data_from_config() - extra_specs = self.add_extra_specs_to_dict( - {"share_backend_name": 'invalid_backend'}) - share_type = self.create_share_type('invalid_backend', - extra_specs=extra_specs, - client=self.admin_client, - cleanup_in_class=False) - share_type = share_type['share_type'] - share = self.create_share(share_type_id=share_type['id'], - cleanup_in_class=False, - wait_for_status=False) - self.shares_v2_client.wait_for_share_status( - share['id'], constants.STATUS_ERROR) - self.assertRaises(lib_exc.BadRequest, - self.admin_client.create_access_rule, - share["id"], access_type, access_to) - - -@ddt.ddt -class ShareIpRulesForCIFSNegativeTest(ShareIpRulesForNFSNegativeTest): - protocol = "cifs" - - -@ddt.ddt -class ShareUserRulesForNFSNegativeTest(base.BaseSharesTest): - protocol = "nfs" - - @classmethod - def resource_setup(cls): - super(ShareUserRulesForNFSNegativeTest, cls).resource_setup() - if not (cls.protocol in CONF.share.enable_protocols and - cls.protocol in CONF.share.enable_user_rules_for_protocols): - msg = "USER rule tests for %s protocol are disabled" % cls.protocol - raise cls.skipException(msg) - # create share - cls.share = cls.create_share(cls.protocol) - if CONF.share.run_snapshot_tests: - # create snapshot - cls.snap = cls.create_snapshot_wait_for_active(cls.share["id"]) - - @tc.attr(base.TAG_NEGATIVE, base.TAG_API_WITH_BACKEND) - @ddt.data('shares_client', 'shares_v2_client') - def test_create_access_rule_user_with_wrong_input_2(self, client_name): - self.assertRaises(lib_exc.BadRequest, - getattr(self, client_name).create_access_rule, - self.share["id"], "user", - "try+") - - @tc.attr(base.TAG_NEGATIVE, base.TAG_API_WITH_BACKEND) - @ddt.data('shares_client', 'shares_v2_client') - def test_create_access_rule_user_with_empty_key(self, client_name): - self.assertRaises(lib_exc.BadRequest, - getattr(self, client_name).create_access_rule, - self.share["id"], "user", "") - - @tc.attr(base.TAG_NEGATIVE, base.TAG_API_WITH_BACKEND) - @ddt.data('shares_client', 'shares_v2_client') - def test_create_access_rule_user_with_too_little_key(self, client_name): - self.assertRaises(lib_exc.BadRequest, - getattr(self, client_name).create_access_rule, - self.share["id"], "user", "abc") - - @tc.attr(base.TAG_NEGATIVE, base.TAG_API_WITH_BACKEND) - @ddt.data('shares_client', 'shares_v2_client') - def test_create_access_rule_user_with_too_big_key(self, client_name): - self.assertRaises(lib_exc.BadRequest, - getattr(self, client_name).create_access_rule, - self.share["id"], "user", "a" * 256) - - @tc.attr(base.TAG_NEGATIVE, base.TAG_API_WITH_BACKEND) - @ddt.data('shares_client', 'shares_v2_client') - def test_create_access_rule_user_with_wrong_input_1(self, client_name): - self.assertRaises(lib_exc.BadRequest, - getattr(self, client_name).create_access_rule, - self.share["id"], "user", - "try+") - - @tc.attr(base.TAG_NEGATIVE, base.TAG_API_WITH_BACKEND) - @ddt.data('shares_client', 'shares_v2_client') - @testtools.skipUnless(CONF.share.run_snapshot_tests, - "Snapshot tests are disabled.") - def test_create_access_rule_user_to_snapshot(self, client_name): - self.assertRaises(lib_exc.NotFound, - getattr(self, client_name).create_access_rule, - self.snap["id"], - access_type="user", - access_to="fakeuser") - - @tc.attr(base.TAG_NEGATIVE, base.TAG_API_WITH_BACKEND) 
- @ddt.data('shares_client', 'shares_v2_client') - def test_create_access_rule_user_with_wrong_share_id(self, client_name): - self.assertRaises(lib_exc.NotFound, - getattr(self, client_name).create_access_rule, - "wrong_share_id", - access_type="user", - access_to="fakeuser") - - @tc.attr(base.TAG_NEGATIVE, base.TAG_API_WITH_BACKEND) - @ddt.data('shares_client', 'shares_v2_client') - def test_create_access_rule_with_wrong_level(self, client_name): - self.assertRaises(lib_exc.BadRequest, - getattr(self, client_name).create_access_rule, - self.share["id"], - 'user', - CONF.share.username_for_user_rules, - 'su') - - -@ddt.ddt -class ShareUserRulesForCIFSNegativeTest(ShareUserRulesForNFSNegativeTest): - protocol = "cifs" - - -@ddt.ddt -class ShareCertRulesForGLUSTERFSNegativeTest(base.BaseSharesTest): - protocol = "glusterfs" - - @classmethod - def resource_setup(cls): - super(ShareCertRulesForGLUSTERFSNegativeTest, cls).resource_setup() - if not (cls.protocol in CONF.share.enable_protocols and - cls.protocol in CONF.share.enable_cert_rules_for_protocols): - msg = "CERT rule tests for %s protocol are disabled" % cls.protocol - raise cls.skipException(msg) - # create share - cls.share = cls.create_share(cls.protocol) - if CONF.share.run_snapshot_tests: - # create snapshot - cls.snap = cls.create_snapshot_wait_for_active(cls.share["id"]) - - @tc.attr(base.TAG_NEGATIVE, base.TAG_API_WITH_BACKEND) - @ddt.data('shares_client', 'shares_v2_client') - def test_create_access_rule_cert_with_empty_common_name(self, client_name): - self.assertRaises(lib_exc.BadRequest, - getattr(self, client_name).create_access_rule, - self.share["id"], "cert", "") - - @tc.attr(base.TAG_NEGATIVE, base.TAG_API_WITH_BACKEND) - @ddt.data('shares_client', 'shares_v2_client') - def test_create_access_rule_cert_with_whitespace_common_name(self, - client_name): - self.assertRaises(lib_exc.BadRequest, - getattr(self, client_name).create_access_rule, - self.share["id"], "cert", " ") - - @tc.attr(base.TAG_NEGATIVE, base.TAG_API_WITH_BACKEND) - @ddt.data('shares_client', 'shares_v2_client') - def test_create_access_rule_cert_with_too_big_common_name(self, - client_name): - # common name cannot be more than 64 characters long - self.assertRaises(lib_exc.BadRequest, - getattr(self, client_name).create_access_rule, - self.share["id"], "cert", "a" * 65) - - @tc.attr(base.TAG_NEGATIVE, base.TAG_API_WITH_BACKEND) - @testtools.skipUnless(CONF.share.run_snapshot_tests, - "Snapshot tests are disabled.") - @ddt.data('shares_client', 'shares_v2_client') - def test_create_access_rule_cert_to_snapshot(self, client_name): - self.assertRaises(lib_exc.NotFound, - getattr(self, client_name).create_access_rule, - self.snap["id"], - access_type="cert", - access_to="fakeclient1.com") - - @tc.attr(base.TAG_NEGATIVE, base.TAG_API_WITH_BACKEND) - @ddt.data('shares_client', 'shares_v2_client') - def test_create_access_rule_cert_with_wrong_share_id(self, client_name): - self.assertRaises(lib_exc.NotFound, - getattr(self, client_name).create_access_rule, - "wrong_share_id", - access_type="cert", - access_to="fakeclient2.com") - - -@ddt.ddt -class ShareCephxRulesForCephFSNegativeTest(base.BaseSharesTest): - protocol = "cephfs" - - @classmethod - def resource_setup(cls): - super(ShareCephxRulesForCephFSNegativeTest, cls).resource_setup() - if not (cls.protocol in CONF.share.enable_protocols and - cls.protocol in CONF.share.enable_cephx_rules_for_protocols): - msg = ("CEPHX rule tests for %s protocol are disabled" % - cls.protocol) - raise 
cls.skipException(msg) - # create share - cls.share = cls.create_share(cls.protocol) - cls.access_type = "cephx" - cls.access_to = "david" - - @tc.attr(base.TAG_NEGATIVE, base.TAG_API_WITH_BACKEND) - @ddt.data('jane.doe', u"bj\u00F6rn") - def test_create_access_rule_cephx_with_invalid_cephx_id(self, access_to): - self.assertRaises(lib_exc.BadRequest, - self.shares_v2_client.create_access_rule, - self.share["id"], self.access_type, access_to) - - @tc.attr(base.TAG_NEGATIVE, base.TAG_API_WITH_BACKEND) - def test_create_access_rule_cephx_with_wrong_level(self): - self.assertRaises(lib_exc.BadRequest, - self.shares_v2_client.create_access_rule, - self.share["id"], self.access_type, self.access_to, - access_level="su") - - -def skip_if_cephx_access_type_not_supported_by_client(self, client): - if client == 'shares_client': - version = '1.0' - else: - version = LATEST_MICROVERSION - if (CONF.share.enable_cephx_rules_for_protocols and - utils.is_microversion_lt(version, '2.13')): - msg = ("API version %s does not support cephx access type, need " - "version >= 2.13." % version) - raise self.skipException(msg) - - -@ddt.ddt -class ShareRulesNegativeTest(base.BaseSharesTest): - # Tests independent from rule type and share protocol - - @classmethod - def resource_setup(cls): - super(ShareRulesNegativeTest, cls).resource_setup() - if not (any(p in CONF.share.enable_ip_rules_for_protocols - for p in cls.protocols) or - any(p in CONF.share.enable_user_rules_for_protocols - for p in cls.protocols) or - any(p in CONF.share.enable_cert_rules_for_protocols - for p in cls.protocols) or - any(p in CONF.share.enable_cephx_rules_for_protocols - for p in cls.protocols)): - cls.message = "Rule tests are disabled" - raise cls.skipException(cls.message) - # create share - cls.share = cls.create_share() - if CONF.share.run_snapshot_tests: - # create snapshot - cls.snap = cls.create_snapshot_wait_for_active(cls.share["id"]) - - @tc.attr(base.TAG_NEGATIVE, base.TAG_API_WITH_BACKEND) - @ddt.data('shares_client', 'shares_v2_client') - def test_delete_access_rule_with_wrong_id(self, client_name): - skip_if_cephx_access_type_not_supported_by_client(self, client_name) - self.assertRaises(lib_exc.NotFound, - getattr(self, client_name).delete_access_rule, - self.share["id"], "wrong_rule_id") - - @tc.attr(base.TAG_NEGATIVE, base.TAG_API_WITH_BACKEND) - @ddt.data('shares_client', 'shares_v2_client') - def test_create_access_rule_ip_with_wrong_type(self, client_name): - skip_if_cephx_access_type_not_supported_by_client(self, client_name) - self.assertRaises(lib_exc.BadRequest, - getattr(self, client_name).create_access_rule, - self.share["id"], "wrong_type", "1.2.3.4") - - @tc.attr(base.TAG_NEGATIVE, base.TAG_API_WITH_BACKEND) - @ddt.data('shares_client', 'shares_v2_client') - @testtools.skipUnless(CONF.share.run_snapshot_tests, - "Snapshot tests are disabled.") - def test_create_access_rule_ip_to_snapshot(self, client_name): - skip_if_cephx_access_type_not_supported_by_client(self, client_name) - self.assertRaises(lib_exc.NotFound, - getattr(self, client_name).create_access_rule, - self.snap["id"]) - - -@ddt.ddt -class ShareRulesAPIOnlyNegativeTest(base.BaseSharesTest): - - @tc.attr(base.TAG_NEGATIVE, base.TAG_API) - @ddt.data('shares_client', 'shares_v2_client') - def test_create_access_rule_ip_with_wrong_share_id(self, client_name): - skip_if_cephx_access_type_not_supported_by_client(self, client_name) - self.assertRaises(lib_exc.NotFound, - getattr(self, client_name).create_access_rule, - "wrong_share_id") diff --git 
a/manila_tempest_tests/tests/api/test_scheduler_stats_negative.py b/manila_tempest_tests/tests/api/test_scheduler_stats_negative.py deleted file mode 100644 index c7dc3f09b5..0000000000 --- a/manila_tempest_tests/tests/api/test_scheduler_stats_negative.py +++ /dev/null @@ -1,33 +0,0 @@ -# Copyright 2015 Mirantis Inc. -# All Rights Reserved. -# -# Licensed under the Apache License, Version 2.0 (the "License"); you may -# not use this file except in compliance with the License. You may obtain -# a copy of the License at -# -# http://www.apache.org/licenses/LICENSE-2.0 -# -# Unless required by applicable law or agreed to in writing, software -# distributed under the License is distributed on an "AS IS" BASIS, WITHOUT -# WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. See the -# License for the specific language governing permissions and limitations -# under the License. - -from tempest.lib import exceptions as lib_exc -from testtools import testcase as tc - -from manila_tempest_tests.tests.api import base - - -class SchedulerStatsNegativeTest(base.BaseSharesTest): - - @tc.attr(base.TAG_NEGATIVE, base.TAG_API) - def test_try_list_pools_with_user(self): - self.assertRaises(lib_exc.Forbidden, - self.shares_client.list_pools) - - @tc.attr(base.TAG_NEGATIVE, base.TAG_API) - def test_try_list_pools_detailed_with_user(self): - self.assertRaises(lib_exc.Forbidden, - self.shares_client.list_pools, - detail=True) diff --git a/manila_tempest_tests/tests/api/test_security_services.py b/manila_tempest_tests/tests/api/test_security_services.py deleted file mode 100644 index 30cf6a9293..0000000000 --- a/manila_tempest_tests/tests/api/test_security_services.py +++ /dev/null @@ -1,210 +0,0 @@ -# Copyright 2014 Mirantis Inc. -# All Rights Reserved. -# -# Licensed under the Apache License, Version 2.0 (the "License"); you may -# not use this file except in compliance with the License. You may obtain -# a copy of the License at -# -# http://www.apache.org/licenses/LICENSE-2.0 -# -# Unless required by applicable law or agreed to in writing, software -# distributed under the License is distributed on an "AS IS" BASIS, WITHOUT -# WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. See the -# License for the specific language governing permissions and limitations -# under the License. 
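A compressed view of the negative pattern used throughout the rule tests above: every malformed target or unsupported access level must fail fast with BadRequest on both the v1 and v2 clients, before any backend is involved. The data values are taken from the removed tests; the class name and the NFS protocol choice are assumptions made for the sketch.

import ddt
from tempest.lib import exceptions as lib_exc
from testtools import testcase as tc

from manila_tempest_tests.tests.api import base


@ddt.ddt
class InvalidIpRuleSketch(base.BaseSharesTest):
    """Illustrative only: condensed negative checks for IP access rules."""
    protocol = "nfs"

    @classmethod
    def resource_setup(cls):
        super(InvalidIpRuleSketch, cls).resource_setup()
        cls.share = cls.create_share(cls.protocol)

    @tc.attr(base.TAG_NEGATIVE, base.TAG_API_WITH_BACKEND)
    @ddt.data('1.2.3.256', '1.2.3.4/33', '1.2.3.*', 'AD80:ABAA::|26')
    def test_malformed_ip_is_rejected(self, ip_address):
        # The API layer validates the target, so both clients must refuse
        # the rule without creating anything.
        for client_name in ('shares_client', 'shares_v2_client'):
            self.assertRaises(lib_exc.BadRequest,
                              getattr(self, client_name).create_access_rule,
                              self.share["id"], "ip", ip_address)

    @tc.attr(base.TAG_NEGATIVE, base.TAG_API_WITH_BACKEND)
    def test_unknown_access_level_is_rejected(self):
        # 'su' is not a supported access level ('rw' and 'ro' are).
        self.assertRaises(lib_exc.BadRequest,
                          self.shares_v2_client.create_access_rule,
                          self.share["id"], 'ip', '2.2.2.2', 'su')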
- -from oslo_log import log -import six -from tempest import config -import testtools -from testtools import testcase as tc - -from manila_tempest_tests.tests.api import base - -CONF = config.CONF -LOG = log.getLogger(__name__) - - -class SecurityServiceListMixin(object): - - @tc.attr(base.TAG_POSITIVE, base.TAG_API) - def test_list_security_services(self): - listed = self.shares_client.list_security_services() - self.assertTrue(any(self.ss_ldap['id'] == ss['id'] for ss in listed)) - self.assertTrue(any(self.ss_kerberos['id'] == ss['id'] - for ss in listed)) - - # verify keys - keys = ["name", "id", "status", "type", ] - [self.assertIn(key, s_s.keys()) for s_s in listed for key in keys] - - @tc.attr(base.TAG_POSITIVE, base.TAG_API) - def test_list_security_services_with_detail(self): - listed = self.shares_client.list_security_services(detailed=True) - self.assertTrue(any(self.ss_ldap['id'] == ss['id'] for ss in listed)) - self.assertTrue(any(self.ss_kerberos['id'] == ss['id'] - for ss in listed)) - - # verify keys - keys = [ - "name", "id", "status", "description", - "domain", "server", "dns_ip", "user", "password", "type", - "created_at", "updated_at", "project_id", - ] - [self.assertIn(key, s_s.keys()) for s_s in listed for key in keys] - - @tc.attr(base.TAG_POSITIVE, base.TAG_API) - @testtools.skipIf( - not CONF.share.multitenancy_enabled, "Only for multitenancy.") - def test_list_security_services_filter_by_share_network(self): - sn = self.shares_client.get_share_network( - self.shares_client.share_network_id) - fresh_sn = [] - for i in range(2): - sn = self.create_share_network( - neutron_net_id=sn["neutron_net_id"], - neutron_subnet_id=sn["neutron_subnet_id"]) - fresh_sn.append(sn) - - self.shares_client.add_sec_service_to_share_network( - fresh_sn[0]["id"], self.ss_ldap["id"]) - self.shares_client.add_sec_service_to_share_network( - fresh_sn[1]["id"], self.ss_kerberos["id"]) - - listed = self.shares_client.list_security_services( - params={'share_network_id': fresh_sn[0]['id']}) - self.assertEqual(1, len(listed)) - self.assertEqual(self.ss_ldap['id'], listed[0]['id']) - - keys = ["name", "id", "status", "type", ] - [self.assertIn(key, s_s.keys()) for s_s in listed for key in keys] - - @tc.attr(base.TAG_POSITIVE, base.TAG_API) - def test_list_security_services_detailed_filter_by_ss_attributes(self): - search_opts = { - 'name': 'ss_ldap', - 'type': 'ldap', - 'user': 'fake_user', - 'server': 'fake_server_1', - 'dns_ip': '1.1.1.1', - 'domain': 'fake_domain_1', - } - listed = self.shares_client.list_security_services( - detailed=True, - params=search_opts) - self.assertTrue(any(self.ss_ldap['id'] == ss['id'] for ss in listed)) - for ss in listed: - self.assertTrue(all(ss[key] == value for key, value - in search_opts.items())) - - -class SecurityServicesTest(base.BaseSharesTest, - SecurityServiceListMixin): - def setUp(self): - super(SecurityServicesTest, self).setUp() - ss_ldap_data = { - 'name': 'ss_ldap', - 'dns_ip': '1.1.1.1', - 'server': 'fake_server_1', - 'domain': 'fake_domain_1', - 'user': 'fake_user', - 'password': 'pass', - } - ss_kerberos_data = { - 'name': 'ss_kerberos', - 'dns_ip': '2.2.2.2', - 'server': 'fake_server_2', - 'domain': 'fake_domain_2', - 'user': 'test_user', - 'password': 'word', - } - self.ss_ldap = self.create_security_service('ldap', **ss_ldap_data) - self.ss_kerberos = self.create_security_service( - 'kerberos', **ss_kerberos_data) - - @tc.attr(base.TAG_POSITIVE, base.TAG_API) - def test_create_delete_security_service(self): - data = 
self.generate_security_service_data() - self.service_names = ["ldap", "kerberos", "active_directory"] - for ss_name in self.service_names: - ss = self.create_security_service(ss_name, **data) - self.assertDictContainsSubset(data, ss) - self.assertEqual(ss_name, ss["type"]) - self.shares_client.delete_security_service(ss["id"]) - - @tc.attr(base.TAG_POSITIVE, base.TAG_API) - def test_get_security_service(self): - data = self.generate_security_service_data() - ss = self.create_security_service(**data) - self.assertDictContainsSubset(data, ss) - - get = self.shares_client.get_security_service(ss["id"]) - self.assertDictContainsSubset(data, get) - - @tc.attr(base.TAG_POSITIVE, base.TAG_API) - def test_update_security_service(self): - data = self.generate_security_service_data() - ss = self.create_security_service(**data) - self.assertDictContainsSubset(data, ss) - - upd_data = self.generate_security_service_data() - updated = self.shares_client.update_security_service( - ss["id"], **upd_data) - - get = self.shares_client.get_security_service(ss["id"]) - self.assertDictContainsSubset(upd_data, updated) - self.assertDictContainsSubset(upd_data, get) - - @tc.attr(base.TAG_POSITIVE, base.TAG_API_WITH_BACKEND) - @testtools.skipIf( - not CONF.share.multitenancy_enabled, "Only for multitenancy.") - def test_try_update_valid_keys_sh_server_exists(self): - ss_data = self.generate_security_service_data() - ss = self.create_security_service(**ss_data) - - sn = self.shares_client.get_share_network( - self.shares_client.share_network_id) - fresh_sn = self.create_share_network( - neutron_net_id=sn["neutron_net_id"], - neutron_subnet_id=sn["neutron_subnet_id"]) - - self.shares_client.add_sec_service_to_share_network( - fresh_sn["id"], ss["id"]) - - # Security service with fake data is used, so if we use backend driver - # that fails on wrong data, we expect error here. - # We require any share that uses our share-network. - try: - self.create_share( - share_network_id=fresh_sn["id"], cleanup_in_class=False) - except Exception as e: - # we do wait for either 'error' or 'available' status because - # it is the only available statuses for proper deletion. - LOG.warning("Caught exception. It is expected in case backend " - "fails having security-service with improper data " - "that leads to share-server creation error. 
" - "%s", six.text_type(e)) - - update_data = { - "name": "name", - "description": "new_description", - } - updated = self.shares_client.update_security_service( - ss["id"], **update_data) - self.assertDictContainsSubset(update_data, updated) - - @tc.attr(base.TAG_POSITIVE, base.TAG_API) - def test_list_security_services_filter_by_invalid_opt(self): - listed = self.shares_client.list_security_services( - params={'fake_opt': 'some_value'}) - self.assertTrue(any(self.ss_ldap['id'] == ss['id'] for ss in listed)) - self.assertTrue(any(self.ss_kerberos['id'] == ss['id'] - for ss in listed)) - - @tc.attr(base.TAG_POSITIVE, base.TAG_API) - def test_try_list_security_services_all_tenants(self): - listed = self.shares_client.list_security_services( - params={'all_tenants': 1}) - self.assertTrue(any(self.ss_ldap['id'] == ss['id'] for ss in listed)) - self.assertTrue(any(self.ss_kerberos['id'] == ss['id'] - for ss in listed)) diff --git a/manila_tempest_tests/tests/api/test_security_services_mapping.py b/manila_tempest_tests/tests/api/test_security_services_mapping.py deleted file mode 100644 index 8025bf7e24..0000000000 --- a/manila_tempest_tests/tests/api/test_security_services_mapping.py +++ /dev/null @@ -1,70 +0,0 @@ -# Copyright 2014 Mirantis Inc. -# All Rights Reserved. -# -# Licensed under the Apache License, Version 2.0 (the "License"); you may -# not use this file except in compliance with the License. You may obtain -# a copy of the License at -# -# http://www.apache.org/licenses/LICENSE-2.0 -# -# Unless required by applicable law or agreed to in writing, software -# distributed under the License is distributed on an "AS IS" BASIS, WITHOUT -# WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. See the -# License for the specific language governing permissions and limitations -# under the License. 
- -from testtools import testcase as tc - -from manila_tempest_tests.tests.api import base - - -class SecurityServicesMappingTest(base.BaseSharesTest): - - @classmethod - def resource_setup(cls): - super(SecurityServicesMappingTest, cls).resource_setup() - cls.cl = cls.shares_client - - def setUp(self): - super(SecurityServicesMappingTest, self).setUp() - - # create share network - data = self.generate_share_network_data() - - self.sn = self.create_share_network(client=self.cl, **data) - self.assertDictContainsSubset(data, self.sn) - - # create security service - data = self.generate_security_service_data() - - self.ss = self.create_security_service(client=self.cl, **data) - self.assertDictContainsSubset(data, self.ss) - - # Add security service to share network - self.cl.add_sec_service_to_share_network(self.sn["id"], self.ss["id"]) - - @tc.attr(base.TAG_POSITIVE, base.TAG_API) - def test_map_ss_to_sn_and_list(self): - - # List security services for share network - ls = self.cl.list_sec_services_for_share_network(self.sn["id"]) - self.assertEqual(1, len(ls)) - for key in ["status", "id", "name"]: - self.assertIn(self.ss[key], ls[0][key]) - - @tc.attr(base.TAG_POSITIVE, base.TAG_API) - def test_map_ss_to_sn_and_delete(self): - - # Remove security service from share network - self.cl.remove_sec_service_from_share_network( - self.sn["id"], self.ss["id"]) - - @tc.attr(base.TAG_POSITIVE, base.TAG_API) - def test_remap_ss_to_sn(self): - - # Remove security service from share network - self.cl.remove_sec_service_from_share_network( - self.sn["id"], self.ss["id"]) - - # Add security service to share network again - self.cl.add_sec_service_to_share_network(self.sn["id"], self.ss["id"]) diff --git a/manila_tempest_tests/tests/api/test_security_services_mapping_negative.py b/manila_tempest_tests/tests/api/test_security_services_mapping_negative.py deleted file mode 100644 index 4b4912d404..0000000000 --- a/manila_tempest_tests/tests/api/test_security_services_mapping_negative.py +++ /dev/null @@ -1,173 +0,0 @@ -# Copyright 2014 Mirantis Inc. -# All Rights Reserved. -# -# Licensed under the Apache License, Version 2.0 (the "License"); you may -# not use this file except in compliance with the License. You may obtain -# a copy of the License at -# -# http://www.apache.org/licenses/LICENSE-2.0 -# -# Unless required by applicable law or agreed to in writing, software -# distributed under the License is distributed on an "AS IS" BASIS, WITHOUT -# WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. See the -# License for the specific language governing permissions and limitations -# under the License. 
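The positive security-service coverage above reduces to a create/read/update/delete cycle against shares_client. The sketch below keeps only calls that appear in the removed tests (create_security_service, get_security_service, update_security_service, delete_security_service); the LDAP field values mirror the fake data used there, and the class name is an assumption.

from testtools import testcase as tc

from manila_tempest_tests.tests.api import base


class SecurityServiceCrudSketch(base.BaseSharesTest):
    """Illustrative only: minimal security-service CRUD cycle."""

    @tc.attr(base.TAG_POSITIVE, base.TAG_API)
    def test_security_service_crud(self):
        data = {
            'name': 'ss_ldap',
            'dns_ip': '1.1.1.1',
            'server': 'fake_server_1',
            'domain': 'fake_domain_1',
            'user': 'fake_user',
            'password': 'pass',
        }
        ss = self.create_security_service('ldap', **data)
        self.assertEqual('ldap', ss['type'])

        # The stored entity reports the same fields that were supplied.
        fetched = self.shares_client.get_security_service(ss['id'])
        self.assertEqual(data['name'], fetched['name'])

        updated = self.shares_client.update_security_service(
            ss['id'], name='new_name', description='new_description')
        self.assertEqual('new_name', updated['name'])

        # Explicit delete mirrors test_create_delete_security_service above.
        self.shares_client.delete_security_service(ss['id'])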
- -from oslo_log import log -import six -from tempest import config -from tempest.lib import exceptions as lib_exc -import testtools -from testtools import testcase as tc - -from manila_tempest_tests.tests.api import base - -CONF = config.CONF -LOG = log.getLogger(__name__) - - -class SecServicesMappingNegativeTest(base.BaseSharesTest): - - @classmethod - def resource_setup(cls): - super(SecServicesMappingNegativeTest, cls).resource_setup() - cls.sn = cls.create_share_network(cleanup_in_class=True) - cls.ss = cls.create_security_service(cleanup_in_class=True) - cls.cl = cls.shares_client - - @tc.attr(base.TAG_NEGATIVE, base.TAG_API) - def test_add_sec_service_twice_to_share_network(self): - self.cl.add_sec_service_to_share_network(self.sn["id"], self.ss["id"]) - self.assertRaises(lib_exc.Conflict, - self.cl.add_sec_service_to_share_network, - self.sn["id"], self.ss["id"]) - - @tc.attr(base.TAG_NEGATIVE, base.TAG_API) - def test_add_nonexistant_sec_service_to_share_network(self): - self.assertRaises(lib_exc.NotFound, - self.cl.add_sec_service_to_share_network, - self.sn["id"], "wrong_ss_id") - - @tc.attr(base.TAG_NEGATIVE, base.TAG_API) - def test_add_empty_sec_service_id_to_share_network(self): - self.assertRaises(lib_exc.NotFound, - self.cl.add_sec_service_to_share_network, - self.sn["id"], "") - - @tc.attr(base.TAG_NEGATIVE, base.TAG_API) - def test_add_sec_service_to_nonexistant_share_network(self): - self.assertRaises(lib_exc.NotFound, - self.cl.add_sec_service_to_share_network, - "wrong_sn_id", self.ss["id"]) - - @tc.attr(base.TAG_NEGATIVE, base.TAG_API) - def test_add_sec_service_to_share_network_with_empty_id(self): - self.assertRaises(lib_exc.NotFound, - self.cl.add_sec_service_to_share_network, - "", self.ss["id"]) - - @tc.attr(base.TAG_NEGATIVE, base.TAG_API) - def test_list_sec_services_for_nonexistant_share_network(self): - self.assertRaises(lib_exc.NotFound, - self.cl.list_sec_services_for_share_network, - "wrong_id") - - @tc.attr(base.TAG_NEGATIVE, base.TAG_API) - def test_delete_nonexistant_sec_service_from_share_network(self): - self.assertRaises(lib_exc.NotFound, - self.cl.remove_sec_service_from_share_network, - self.sn["id"], "wrong_id") - - @tc.attr(base.TAG_NEGATIVE, base.TAG_API) - def test_delete_sec_service_from_nonexistant_share_network(self): - self.assertRaises(lib_exc.NotFound, - self.cl.remove_sec_service_from_share_network, - "wrong_id", self.ss["id"]) - - @tc.attr(base.TAG_NEGATIVE, base.TAG_API) - def test_delete_nonexistant_ss_from_nonexistant_sn(self): - self.assertRaises(lib_exc.NotFound, - self.cl.remove_sec_service_from_share_network, - "wrong_id", "wrong_id") - - @tc.attr(base.TAG_NEGATIVE, base.TAG_API_WITH_BACKEND) - @testtools.skipIf( - not CONF.share.multitenancy_enabled, "Only for multitenancy.") - def test_delete_ss_from_sn_used_by_share_server(self): - sn = self.shares_client.get_share_network( - self.shares_client.share_network_id) - fresh_sn = self.create_share_network( - neutron_net_id=sn["neutron_net_id"], - neutron_subnet_id=sn["neutron_subnet_id"]) - - self.shares_client.add_sec_service_to_share_network( - fresh_sn["id"], self.ss["id"]) - - # Security service with fake data is used, so if we use backend driver - # that fails on wrong data, we expect error here. - # We require any share that uses our share-network. 
- try: - self.create_share( - share_network_id=fresh_sn["id"], cleanup_in_class=False) - except Exception as e: - # we do wait for either 'error' or 'available' status because - # it is the only available statuses for proper deletion. - LOG.warning("Caught exception. It is expected in case backend " - "fails having security-service with improper data " - "that leads to share-server creation error. " - "%s", six.text_type(e)) - - self.assertRaises(lib_exc.Forbidden, - self.cl.remove_sec_service_from_share_network, - fresh_sn["id"], - self.ss["id"]) - - @tc.attr(base.TAG_NEGATIVE, base.TAG_API) - def test_try_map_two_ss_with_same_type_to_sn(self): - # create share network - data = self.generate_share_network_data() - - sn = self.create_share_network(client=self.cl, **data) - self.assertDictContainsSubset(data, sn) - - # create security services with same type - security_services = [] - for i in range(2): - data = self.generate_security_service_data() - ss = self.create_security_service(client=self.cl, **data) - self.assertDictContainsSubset(data, ss) - security_services.insert(i, ss) - - # Add security service to share network - self.cl.add_sec_service_to_share_network( - sn["id"], security_services[0]["id"]) - - # Try to add security service with same type - self.assertRaises(lib_exc.Conflict, - self.cl.add_sec_service_to_share_network, - sn["id"], security_services[1]["id"]) - - @tc.attr(base.TAG_NEGATIVE, base.TAG_API) - def test_try_delete_ss_that_assigned_to_sn(self): - # create share network - data = self.generate_share_network_data() - - sn = self.create_share_network(client=self.cl, **data) - self.assertDictContainsSubset(data, sn) - - # create security service - data = self.generate_security_service_data() - - ss = self.create_security_service(client=self.cl, **data) - self.assertDictContainsSubset(data, ss) - - # Add security service to share network - self.cl.add_sec_service_to_share_network(sn["id"], ss["id"]) - - # Try delete ss, that has been assigned to some sn - self.assertRaises(lib_exc.Forbidden, - self.cl.delete_security_service, - ss["id"], ) - - # remove seurity service from share-network - self.cl.remove_sec_service_from_share_network(sn["id"], ss["id"]) diff --git a/manila_tempest_tests/tests/api/test_security_services_negative.py b/manila_tempest_tests/tests/api/test_security_services_negative.py deleted file mode 100644 index 0814b6d9f5..0000000000 --- a/manila_tempest_tests/tests/api/test_security_services_negative.py +++ /dev/null @@ -1,122 +0,0 @@ -# Copyright 2014 Mirantis Inc. -# All Rights Reserved. -# -# Licensed under the Apache License, Version 2.0 (the "License"); you may -# not use this file except in compliance with the License. You may obtain -# a copy of the License at -# -# http://www.apache.org/licenses/LICENSE-2.0 -# -# Unless required by applicable law or agreed to in writing, software -# distributed under the License is distributed on an "AS IS" BASIS, WITHOUT -# WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. See the -# License for the specific language governing permissions and limitations -# under the License. 
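Taken together, the mapping tests above pin down three behaviours: a security service can be attached to and detached from a share network, attaching a duplicate (or another service of the same type) is a Conflict, and a service that is still attached cannot be deleted (Forbidden). A hedged sketch follows, built only from calls visible in the removed files; the class and method names are illustrative.

from tempest.lib import exceptions as lib_exc
from testtools import testcase as tc

from manila_tempest_tests.tests.api import base


class SecServiceMappingSketch(base.BaseSharesTest):
    """Illustrative only: attach/detach rules for security services."""

    @tc.attr(base.TAG_NEGATIVE, base.TAG_API)
    def test_map_conflict_and_in_use_delete(self):
        sn = self.create_share_network(**self.generate_share_network_data())
        ss = self.create_security_service(
            **self.generate_security_service_data())

        # Attach, then confirm the mapping is listed for the share network.
        self.shares_client.add_sec_service_to_share_network(
            sn["id"], ss["id"])
        listed = self.shares_client.list_sec_services_for_share_network(
            sn["id"])
        self.assertEqual(1, len(listed))
        self.assertEqual(ss["id"], listed[0]["id"])

        # Attaching the same service again (or another of the same type)
        # conflicts with the existing mapping.
        self.assertRaises(lib_exc.Conflict,
                          self.shares_client.add_sec_service_to_share_network,
                          sn["id"], ss["id"])

        # A mapped security service cannot be deleted until it is detached.
        self.assertRaises(lib_exc.Forbidden,
                          self.shares_client.delete_security_service,
                          ss["id"])

        self.shares_client.remove_sec_service_from_share_network(
            sn["id"], ss["id"])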
- -from oslo_log import log -import six -from tempest import config -from tempest.lib import exceptions as lib_exc -import testtools -from testtools import testcase as tc - -from manila_tempest_tests.tests.api import base - -CONF = config.CONF -LOG = log.getLogger(__name__) - - -class SecurityServicesNegativeTest(base.BaseSharesTest): - - @tc.attr(base.TAG_NEGATIVE, base.TAG_API) - def test_try_create_security_service_with_empty_type(self): - self.assertRaises(lib_exc.BadRequest, - self.shares_client.create_security_service, "") - - @tc.attr(base.TAG_NEGATIVE, base.TAG_API) - def test_try_create_security_service_with_wrong_type(self): - self.assertRaises(lib_exc.BadRequest, - self.shares_client.create_security_service, - "wrong_type") - - @tc.attr(base.TAG_NEGATIVE, base.TAG_API) - def test_try_get_security_service_without_id(self): - self.assertRaises(lib_exc.NotFound, - self.shares_client.get_security_service, "") - - @tc.attr(base.TAG_NEGATIVE, base.TAG_API) - def test_try_get_security_service_with_wrong_id(self): - self.assertRaises(lib_exc.NotFound, - self.shares_client.get_security_service, - "wrong_id") - - @tc.attr(base.TAG_NEGATIVE, base.TAG_API) - def test_try_delete_security_service_without_id(self): - self.assertRaises(lib_exc.NotFound, - self.shares_client.delete_security_service, "") - - @tc.attr(base.TAG_NEGATIVE, base.TAG_API) - def test_try_delete_security_service_with_wrong_type(self): - self.assertRaises(lib_exc.NotFound, - self.shares_client.delete_security_service, - "wrong_id") - - @tc.attr(base.TAG_NEGATIVE, base.TAG_API) - def test_try_update_nonexistant_security_service(self): - self.assertRaises(lib_exc.NotFound, - self.shares_client.update_security_service, - "wrong_id", name="name") - - @tc.attr(base.TAG_NEGATIVE, base.TAG_API) - def test_try_update_security_service_with_empty_id(self): - self.assertRaises(lib_exc.NotFound, - self.shares_client.update_security_service, - "", name="name") - - @tc.attr(base.TAG_NEGATIVE, base.TAG_API_WITH_BACKEND) - @testtools.skipIf( - not CONF.share.multitenancy_enabled, "Only for multitenancy.") - def test_try_update_invalid_keys_sh_server_exists(self): - ss_data = self.generate_security_service_data() - ss = self.create_security_service(**ss_data) - - sn = self.shares_client.get_share_network( - self.shares_client.share_network_id) - fresh_sn = self.create_share_network( - neutron_net_id=sn["neutron_net_id"], - neutron_subnet_id=sn["neutron_subnet_id"]) - - self.shares_client.add_sec_service_to_share_network( - fresh_sn["id"], ss["id"]) - - # Security service with fake data is used, so if we use backend driver - # that fails on wrong data, we expect error here. - # We require any share that uses our share-network. - try: - self.create_share( - share_network_id=fresh_sn["id"], cleanup_in_class=False) - except Exception as e: - # we do wait for either 'error' or 'available' status because - # it is the only available statuses for proper deletion. - LOG.warning("Caught exception. It is expected in case backend " - "fails having security-service with improper data " - "that leads to share-server creation error. 
" - "%s", six.text_type(e)) - - self.assertRaises(lib_exc.Forbidden, - self.shares_client.update_security_service, - ss["id"], - user="new_user") - - @tc.attr(base.TAG_NEGATIVE, base.TAG_API) - def test_get_deleted_security_service(self): - data = self.generate_security_service_data() - ss = self.create_security_service(**data) - self.assertDictContainsSubset(data, ss) - - self.shares_client.delete_security_service(ss["id"]) - - # try get deleted security service entity - self.assertRaises(lib_exc.NotFound, - self.shares_client.get_security_service, - ss["id"]) diff --git a/manila_tempest_tests/tests/api/test_share_group_actions.py b/manila_tempest_tests/tests/api/test_share_group_actions.py deleted file mode 100644 index 7196be8732..0000000000 --- a/manila_tempest_tests/tests/api/test_share_group_actions.py +++ /dev/null @@ -1,401 +0,0 @@ -# -*- coding: utf-8 -*- -# Copyright 2016 Andrew Kerr -# All Rights Reserved. -# -# Licensed under the Apache License, Version 2.0 (the "License"); you may -# not use this file except in compliance with the License. You may obtain -# a copy of the License at -# -# http://www.apache.org/licenses/LICENSE-2.0 -# -# Unless required by applicable law or agreed to in writing, software -# distributed under the License is distributed on an "AS IS" BASIS, WITHOUT -# WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. See the -# License for the specific language governing permissions and limitations -# under the License. - -import ddt -from tempest import config -from tempest.lib.common.utils import data_utils -import testtools -from testtools import testcase as tc - -from manila_tempest_tests.common import constants -from manila_tempest_tests.tests.api import base -from manila_tempest_tests import utils - -CONF = config.CONF - - -@testtools.skipUnless( - CONF.share.run_share_group_tests, 'Share Group tests disabled.') -@base.skip_if_microversion_lt(constants.MIN_SHARE_GROUP_MICROVERSION) -@ddt.ddt -class ShareGroupActionsTest(base.BaseSharesTest): - """Covers share group functionality.""" - - @classmethod - def resource_setup(cls): - super(ShareGroupActionsTest, cls).resource_setup() - - # Create first share group - cls.share_group_name = data_utils.rand_name("tempest-sg-name") - cls.share_group_desc = data_utils.rand_name("tempest-sg-description") - cls.share_group = cls.create_share_group( - name=cls.share_group_name, description=cls.share_group_desc) - - # Create second share group for purposes of sorting and snapshot - # filtering - cls.share_group2 = cls.create_share_group( - name=cls.share_group_name, description=cls.share_group_desc) - - # Create 2 shares - inside first and second share groups - cls.share_name = data_utils.rand_name("tempest-share-name") - cls.share_desc = data_utils.rand_name("tempest-share-description") - cls.share_size = 1 - cls.share_size2 = 2 - cls.shares = cls.create_shares([ - {'kwargs': { - 'name': cls.share_name, - 'description': cls.share_desc, - 'size': size, - 'share_group_id': sg_id, - 'experimental': True, - }} for size, sg_id in ((cls.share_size, cls.share_group['id']), - (cls.share_size2, cls.share_group['id']), - (cls.share_size, cls.share_group2['id'])) - ]) - - # Create share group snapshots - cls.sg_snap_name = data_utils.rand_name("tempest-sg-snap-name") - cls.sg_snap_desc = data_utils.rand_name("tempest-sg-snap-desc") - - cls.sg_snapshot = cls.create_share_group_snapshot_wait_for_active( - cls.share_group["id"], - name=cls.sg_snap_name, - description=cls.sg_snap_desc, - ) - - cls.sg_snapshot2 = 
cls.create_share_group_snapshot_wait_for_active( - cls.share_group2['id'], - name=cls.sg_snap_name, - description=cls.sg_snap_desc, - ) - - @tc.attr(base.TAG_POSITIVE, base.TAG_API_WITH_BACKEND) - def test_get_share_group_min_supported_sg_microversion(self): - - # Get share group - share_group = self.shares_v2_client.get_share_group( - self.share_group['id'], - version=constants.MIN_SHARE_GROUP_MICROVERSION, - ) - - # Verify keys - actual_keys = set(share_group.keys()) - self.assertTrue( - constants.SHARE_GROUP_DETAIL_REQUIRED_KEYS.issubset(actual_keys), - 'Not all required keys returned for share group %s. ' - 'Expected at least: %s, found %s' % ( - share_group['id'], - constants.SHARE_GROUP_DETAIL_REQUIRED_KEYS, - actual_keys)) - - # Verify values - self.assertEqual(self.share_group_name, share_group["name"]) - self.assertEqual(self.share_group_desc, share_group["description"]) - - @tc.attr(base.TAG_POSITIVE, base.TAG_API_WITH_BACKEND) - def test_get_share_min_supported_sg_microversion(self): - - # Get share - share = self.shares_v2_client.get_share( - self.shares[0]['id'], - version=constants.MIN_SHARE_GROUP_MICROVERSION, - experimental=True) - - # Verify keys - expected_keys = { - "status", "description", "links", "availability_zone", - "created_at", "share_proto", "name", "snapshot_id", - "id", "size", "share_group_id", - } - actual_keys = set(share.keys()) - self.assertTrue( - expected_keys.issubset(actual_keys), - 'Not all required keys returned for share %s. ' - 'Expected at least: %s, found %s' % ( - share['id'], expected_keys, actual_keys)) - - # Verify values - self.assertEqual(self.share_name, share["name"]) - self.assertEqual(self.share_desc, share["description"]) - self.assertEqual(self.share_size, int(share["size"])) - self.assertEqual(self.share_group["id"], share["share_group_id"]) - - @tc.attr(base.TAG_POSITIVE, base.TAG_API_WITH_BACKEND) - def test_list_share_groups_min(self): - - # List share groups - share_groups = self.shares_v2_client.list_share_groups( - version=constants.MIN_SHARE_GROUP_MICROVERSION) - - # Verify keys - self.assertGreater(len(share_groups), 0) - for sg in share_groups: - keys = set(sg.keys()) - self.assertEqual( - constants.SHARE_GROUP_SIMPLE_KEYS, - keys, - 'Incorrect keys returned for share group %s. ' - 'Expected: %s, found %s' % ( - sg['id'], - constants.SHARE_GROUP_SIMPLE_KEYS, - ','.join(keys))) - - # Share group ids are in list exactly once - for sg_id in (self.share_group["id"], self.share_group2["id"]): - gen = [sg["id"] for sg in share_groups if sg["id"] == sg_id] - msg = ("Expected id %s exactly once in share group list" % sg_id) - self.assertEqual(1, len(gen), msg) - - @tc.attr(base.TAG_POSITIVE, base.TAG_API_WITH_BACKEND) - @ddt.data(constants.MIN_SHARE_GROUP_MICROVERSION, '2.36') - def test_list_share_groups_with_detail_min(self, version): - params = None - if utils.is_microversion_ge(version, '2.36'): - params = {'name~': 'tempest', 'description~': 'tempest'} - # List share groups - share_groups = self.shares_v2_client.list_share_groups( - detailed=True, params=params, version=version) - - # Verify keys - for sg in share_groups: - keys = set(sg.keys()) - self.assertTrue( - constants.SHARE_GROUP_DETAIL_REQUIRED_KEYS.issubset( - keys), - 'Not all required keys returned for share group %s. 
' - 'Expected at least: %s, found %s' % ( - sg['id'], - constants.SHARE_GROUP_DETAIL_REQUIRED_KEYS, - ','.join(keys), - ) - ) - - # Share group ids are in list exactly once - for group_id in (self.share_group["id"], self.share_group2["id"]): - gen = [share_group["id"] for share_group in share_groups - if share_group["id"] == group_id] - msg = ("Expected id %s exactly once in share group list" % - group_id) - self.assertEqual(1, len(gen), msg) - - @tc.attr(base.TAG_POSITIVE, base.TAG_API_WITH_BACKEND) - def test_filter_shares_by_share_group_id_min(self): - shares = self.shares_v2_client.list_shares( - detailed=True, - params={'share_group_id': self.share_group['id']}, - version=constants.MIN_SHARE_GROUP_MICROVERSION, - experimental=True, - ) - - share_ids = [share['id'] for share in shares] - - self.assertEqual( - 2, len(shares), - 'Incorrect number of shares returned. ' - 'Expected 2, got %s' % len(shares)) - self.assertIn( - self.shares[0]['id'], share_ids, - 'Share %s expected in returned list, but got %s' % ( - self.shares[0]['id'], share_ids)) - self.assertIn( - self.shares[1]['id'], share_ids, - 'Share %s expected in returned list, but got %s' % ( - self.shares[0]['id'], share_ids)) - - @tc.attr(base.TAG_POSITIVE, base.TAG_API_WITH_BACKEND) - def test_get_share_group_snapshot_min(self): - # Get share group snapshot - sg_snapshot = self.shares_v2_client.get_share_group_snapshot( - self.sg_snapshot['id'], - version=constants.MIN_SHARE_GROUP_MICROVERSION, - ) - - # Verify keys - actual_keys = set(sg_snapshot.keys()) - self.assertTrue( - constants.SHARE_GROUP_SNAPSHOT_DETAIL_REQUIRED_KEYS.issubset( - actual_keys), - 'Not all required keys returned for share group %s. ' - 'Expected at least: %s, found %s' % ( - sg_snapshot['id'], - constants.SHARE_GROUP_DETAIL_REQUIRED_KEYS, - actual_keys, - ) - ) - - # Verify values - self.assertEqual(self.sg_snap_name, sg_snapshot["name"]) - self.assertEqual(self.sg_snap_desc, sg_snapshot["description"]) - - @tc.attr(base.TAG_POSITIVE, base.TAG_API_WITH_BACKEND) - def test_get_share_group_snapshot_members_min(self): - sg_snapshot = self.shares_v2_client.get_share_group_snapshot( - self.sg_snapshot['id'], - version=constants.MIN_SHARE_GROUP_MICROVERSION, - ) - sg_snapshot_members = sg_snapshot['members'] - member_share_ids = [m['share_id'] for m in sg_snapshot_members] - self.assertEqual( - 2, len(sg_snapshot_members), - 'Unexpected number of share group snapshot members. ' - 'Expected 2, got %s.' % len(sg_snapshot_members)) - # Verify each share is represented in the share group snapshot - # appropriately - for share_id in (self.shares[0]['id'], self.shares[1]['id']): - self.assertIn( - share_id, member_share_ids, - 'Share missing %s missing from share group ' - 'snapshot. Found %s.' 
% (share_id, member_share_ids)) - for share in (self.shares[0], self.shares[1]): - for member in sg_snapshot_members: - if share['id'] == member['share_id']: - self.assertEqual(share['size'], member['size']) - - @tc.attr(base.TAG_POSITIVE, base.TAG_API_WITH_BACKEND) - def test_create_share_group_from_populated_share_group_snapshot_min(self): - - sg_snapshot = self.shares_v2_client.get_share_group_snapshot( - self.sg_snapshot['id'], - version=constants.MIN_SHARE_GROUP_MICROVERSION, - ) - snapshot_members = sg_snapshot['members'] - - new_share_group = self.create_share_group( - cleanup_in_class=False, - source_share_group_snapshot_id=self.sg_snapshot['id'], - version=constants.MIN_SHARE_GROUP_MICROVERSION, - ) - - new_share_group = self.shares_v2_client.get_share_group( - new_share_group['id'], - version=constants.MIN_SHARE_GROUP_MICROVERSION, - ) - - # Verify that share_network information matches source share group - self.assertEqual( - self.share_group['share_network_id'], - new_share_group['share_network_id']) - - new_shares = self.shares_v2_client.list_shares( - params={'share_group_id': new_share_group['id']}, - detailed=True, - version=constants.MIN_SHARE_GROUP_MICROVERSION, - experimental=True, - ) - - # Verify each new share is available - for share in new_shares: - self.assertEqual( - 'available', share['status'], - 'Share %s is not in available status.' % share['id']) - - # Verify each sgsnapshot member is represented in the new sg - # appropriately - share_source_member_ids = [ - share['source_share_group_snapshot_member_id'] - for share in new_shares] - for member in snapshot_members: - self.assertIn( - member['id'], share_source_member_ids, - 'Share group snapshot member %s not represented by ' - 'share group %s.' % (member['id'], new_share_group['id'])) - for share in new_shares: - if (share['source_share_group_snapshot_member_id'] == ( - member['id'])): - self.assertEqual(member['size'], share['size']) - self.assertEqual( - self.share_group['share_network_id'], - share['share_network_id']) - - -@testtools.skipUnless( - CONF.share.run_share_group_tests, 'Share Group tests disabled.') -@base.skip_if_microversion_lt(constants.MIN_SHARE_GROUP_MICROVERSION) -class ShareGroupRenameTest(base.BaseSharesTest): - - @classmethod - def resource_setup(cls): - super(ShareGroupRenameTest, cls).resource_setup() - - # Create share group - cls.share_group_name = data_utils.rand_name("tempest-sg-name") - cls.share_group_desc = data_utils.rand_name("tempest-sg-description") - cls.share_group = cls.create_share_group( - name=cls.share_group_name, - description=cls.share_group_desc, - ) - - @tc.attr(base.TAG_POSITIVE, base.TAG_API_WITH_BACKEND) - def test_update_share_group_min(self): - - # Get share_group - share_group = self.shares_v2_client.get_share_group( - self.share_group['id'], - version=constants.MIN_SHARE_GROUP_MICROVERSION - ) - self.assertEqual(self.share_group_name, share_group["name"]) - self.assertEqual(self.share_group_desc, share_group["description"]) - - # Update share_group - new_name = data_utils.rand_name("tempest-new-name") - new_desc = data_utils.rand_name("tempest-new-description") - updated = self.shares_v2_client.update_share_group( - share_group["id"], - name=new_name, - description=new_desc, - version=constants.MIN_SHARE_GROUP_MICROVERSION, - ) - self.assertEqual(new_name, updated["name"]) - self.assertEqual(new_desc, updated["description"]) - - # Get share_group - share_group = self.shares_v2_client.get_share_group( - self.share_group['id'], - 
version=constants.MIN_SHARE_GROUP_MICROVERSION, - ) - self.assertEqual(new_name, share_group["name"]) - self.assertEqual(new_desc, share_group["description"]) - - @tc.attr(base.TAG_POSITIVE, base.TAG_API_WITH_BACKEND) - def test_create_update_read_share_group_with_unicode_min(self): - value1 = u'ಠ_ಠ' - value2 = u'ಠ_ರೃ' - - # Create share_group - share_group = self.create_share_group( - cleanup_in_class=False, - name=value1, - description=value1, - version=constants.MIN_SHARE_GROUP_MICROVERSION, - ) - self.assertEqual(value1, share_group["name"]) - self.assertEqual(value1, share_group["description"]) - - # Update share group - updated = self.shares_v2_client.update_share_group( - share_group["id"], - name=value2, - description=value2, - version=constants.MIN_SHARE_GROUP_MICROVERSION, - ) - self.assertEqual(value2, updated["name"]) - self.assertEqual(value2, updated["description"]) - - # Get share group - share_group = self.shares_v2_client.get_share_group( - share_group['id'], version=constants.MIN_SHARE_GROUP_MICROVERSION) - self.assertEqual(value2, share_group["name"]) - self.assertEqual(value2, share_group["description"]) diff --git a/manila_tempest_tests/tests/api/test_share_groups.py b/manila_tempest_tests/tests/api/test_share_groups.py deleted file mode 100644 index 68ff647021..0000000000 --- a/manila_tempest_tests/tests/api/test_share_groups.py +++ /dev/null @@ -1,222 +0,0 @@ -# Copyright 2016 Andrew Kerr -# All Rights Reserved. -# -# Licensed under the Apache License, Version 2.0 (the "License"); you may -# not use this file except in compliance with the License. You may obtain -# a copy of the License at -# -# http://www.apache.org/licenses/LICENSE-2.0 -# -# Unless required by applicable law or agreed to in writing, software -# distributed under the License is distributed on an "AS IS" BASIS, WITHOUT -# WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. See the -# License for the specific language governing permissions and limitations -# under the License. - -import ddt -from tempest import config -from tempest.lib import exceptions as lib_exc -import testtools -from testtools import testcase as tc - -from manila_tempest_tests.common import constants -from manila_tempest_tests.tests.api import base - -CONF = config.CONF - - -@testtools.skipUnless( - CONF.share.run_share_group_tests, 'Share Group tests disabled.') -@base.skip_if_microversion_lt(constants.MIN_SHARE_GROUP_MICROVERSION) -@ddt.ddt -class ShareGroupsTest(base.BaseSharesTest): - """Covers share group functionality.""" - - @tc.attr(base.TAG_POSITIVE, base.TAG_API_WITH_BACKEND) - def test_create_populate_delete_share_group_min(self): - # Create a share group - share_group = self.create_share_group( - cleanup_in_class=False, - version=constants.MIN_SHARE_GROUP_MICROVERSION) - keys = set(share_group.keys()) - self.assertTrue( - constants.SHARE_GROUP_DETAIL_REQUIRED_KEYS.issubset(keys), - 'At least one expected element missing from share group ' - 'response. Expected %(expected)s, got %(actual)s.' 
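The share-group tests above keep exercising one basic lifecycle at the minimum supported microversion: create a group, put a share into it, snapshot the group, then tear everything down and wait for each deletion. The sketch below is a condensed, illustrative version built only from helpers visible in the removed tests; the combined ordering of the teardown steps is an assumption.

import testtools
from tempest import config
from testtools import testcase as tc

from manila_tempest_tests.common import constants
from manila_tempest_tests.tests.api import base

CONF = config.CONF


@testtools.skipUnless(
    CONF.share.run_share_group_tests, 'Share Group tests disabled.')
@base.skip_if_microversion_lt(constants.MIN_SHARE_GROUP_MICROVERSION)
class ShareGroupLifecycleSketch(base.BaseSharesTest):
    """Illustrative only: condensed share-group lifecycle."""

    @tc.attr(base.TAG_POSITIVE, base.TAG_API_WITH_BACKEND)
    def test_group_share_snapshot_lifecycle(self):
        sg = self.create_share_group(
            cleanup_in_class=False,
            version=constants.MIN_SHARE_GROUP_MICROVERSION)

        # Shares are placed into a group via share_group_id; the feature is
        # still marked experimental at this microversion.
        share = self.create_share(
            share_group_id=sg['id'],
            cleanup_in_class=False,
            version=constants.MIN_SHARE_GROUP_MICROVERSION,
            experimental=True)

        snap = self.create_share_group_snapshot_wait_for_active(
            sg['id'], cleanup_in_class=False,
            version=constants.MIN_SHARE_GROUP_MICROVERSION)

        # Tear down in reverse order: snapshot, member share, then group.
        self.shares_v2_client.delete_share_group_snapshot(
            snap['id'], version=constants.MIN_SHARE_GROUP_MICROVERSION)
        self.shares_v2_client.wait_for_resource_deletion(
            share_group_snapshot_id=snap['id'])

        self.shares_v2_client.delete_share(
            share['id'], params={'share_group_id': sg['id']},
            version=constants.MIN_SHARE_GROUP_MICROVERSION)
        self.shares_client.wait_for_resource_deletion(share_id=share['id'])

        self.shares_v2_client.delete_share_group(
            sg['id'], version=constants.MIN_SHARE_GROUP_MICROVERSION)
        self.shares_v2_client.wait_for_resource_deletion(
            share_group_id=sg['id'])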
% { - "expected": constants.SHARE_GROUP_DETAIL_REQUIRED_KEYS, - "actual": keys} - ) - # Populate - share = self.create_share( - share_group_id=share_group['id'], - cleanup_in_class=False, - version=constants.MIN_SHARE_GROUP_MICROVERSION, - experimental=True) - - # Delete - params = {"share_group_id": share_group['id']} - self.shares_v2_client.delete_share( - share['id'], - params=params, - version=constants.MIN_SHARE_GROUP_MICROVERSION) - self.shares_client.wait_for_resource_deletion(share_id=share['id']) - self.shares_v2_client.delete_share_group( - share_group['id'], version=constants.MIN_SHARE_GROUP_MICROVERSION) - self.shares_v2_client.wait_for_resource_deletion( - share_group_id=share_group['id']) - - # Verify - self.assertRaises( - lib_exc.NotFound, - self.shares_v2_client.get_share_group, share_group['id']) - self.assertRaises( - lib_exc.NotFound, self.shares_client.get_share, share['id']) - - @tc.attr(base.TAG_POSITIVE, base.TAG_API_WITH_BACKEND) - def test_create_delete_empty_share_group_snapshot_min(self): - # Create base share group - share_group = self.create_share_group( - cleanup_in_class=False, - version=constants.MIN_SHARE_GROUP_MICROVERSION) - - # Create share group snapshot - sg_snapshot = self.create_share_group_snapshot_wait_for_active( - share_group["id"], - cleanup_in_class=False, - version=constants.MIN_SHARE_GROUP_MICROVERSION) - - keys = set(sg_snapshot.keys()) - self.assertTrue( - constants.SHARE_GROUP_SNAPSHOT_DETAIL_REQUIRED_KEYS.issubset(keys), - 'At least one expected element missing from share group snapshot ' - 'response. Expected %(e)s, got %(a)s.' % { - "e": constants.SHARE_GROUP_SNAPSHOT_DETAIL_REQUIRED_KEYS, - "a": keys}) - - sg_snapshot_members = sg_snapshot['members'] - self.assertEmpty( - sg_snapshot_members, - 'Expected 0 share_group_snapshot members, got %s' % len( - sg_snapshot_members)) - - # Delete snapshot - self.shares_v2_client.delete_share_group_snapshot( - sg_snapshot["id"], version=constants.MIN_SHARE_GROUP_MICROVERSION) - self.shares_v2_client.wait_for_resource_deletion( - share_group_snapshot_id=sg_snapshot["id"]) - self.assertRaises( - lib_exc.NotFound, - self.shares_v2_client.get_share_group_snapshot, - sg_snapshot['id'], - version=constants.MIN_SHARE_GROUP_MICROVERSION) - - @tc.attr(base.TAG_POSITIVE, base.TAG_API_WITH_BACKEND) - def test_create_share_group_from_empty_share_group_snapshot_min(self): - # Create base share group - share_group = self.create_share_group( - cleanup_in_class=False, - version=constants.MIN_SHARE_GROUP_MICROVERSION) - - # Create share group snapshot - sg_snapshot = self.create_share_group_snapshot_wait_for_active( - share_group["id"], cleanup_in_class=False, - version=constants.MIN_SHARE_GROUP_MICROVERSION) - - snapshot_members = sg_snapshot['members'] - - self.assertEmpty( - snapshot_members, - 'Expected 0 share group snapshot members, got %s' % - len(snapshot_members)) - - new_share_group = self.create_share_group( - cleanup_in_class=False, - source_share_group_snapshot_id=sg_snapshot['id'], - version=constants.MIN_SHARE_GROUP_MICROVERSION) - - new_shares = self.shares_v2_client.list_shares( - params={'share_group_id': new_share_group['id']}, - version=constants.MIN_SHARE_GROUP_MICROVERSION, experimental=True) - - self.assertEmpty( - new_shares, 'Expected 0 new shares, got %s' % len(new_shares)) - - msg = ('Expected source_ishare_group_snapshot_id %s ' - 'as source of share group %s' % ( - sg_snapshot['id'], - new_share_group['source_share_group_snapshot_id'])) - self.assertEqual( - 
new_share_group['source_share_group_snapshot_id'], - sg_snapshot['id'], - msg) - - msg = ('Unexpected share_types on new share group. Expected ' - '%s, got %s.' % (share_group['share_types'], - new_share_group['share_types'])) - self.assertEqual( - sorted(share_group['share_types']), - sorted(new_share_group['share_types']), msg) - - # Assert the share_network information is the same - msg = 'Expected share_network %s as share_network of cg %s' % ( - share_group['share_network_id'], - new_share_group['share_network_id']) - self.assertEqual( - share_group['share_network_id'], - new_share_group['share_network_id'], - msg) - - @base.skip_if_microversion_lt("2.34") - @tc.attr(base.TAG_POSITIVE, base.TAG_API_WITH_BACKEND) - @ddt.data( - 'sg', 'sg_and_share', 'none', - ) - def test_create_sg_and_share_specifying_az(self, where_specify_az): - # Get list of existing availability zones, at least one always - # should exist - azs = self.shares_v2_client.list_availability_zones() - - sg_kwargs = { - 'version': '2.34', - 'cleanup_in_class': False, - } - if where_specify_az in ('sg', 'sg_and_share'): - sg_kwargs['availability_zone'] = azs[0]['name'] - - # Create share group - share_group = self.create_share_group(**sg_kwargs) - - # Get latest share group info - share_group = self.shares_v2_client.get_share_group( - share_group['id'], '2.34') - - self.assertIn('availability_zone', share_group) - if where_specify_az in ('sg', 'sg_and_share'): - self.assertEqual(azs[0]['name'], share_group['availability_zone']) - else: - self.assertIn( - share_group['availability_zone'], [az['name'] for az in azs]) - - # Test 'consistent_snapshot_support' as part of 2.33 API change - self.assertIn('consistent_snapshot_support', share_group) - self.assertIn( - share_group['consistent_snapshot_support'], ('host', 'pool', None)) - - s_kwargs = { - 'share_group_id': share_group['id'], - 'version': '2.33', - 'cleanup_in_class': False, - 'experimental': True, - } - if where_specify_az == 'sg_and_share': - s_kwargs['availability_zone'] = azs[0]['name'] - - # Create share in share group - share = self.create_share(**s_kwargs) - - # Get latest share info - share = self.shares_v2_client.get_share(share['id'], '2.34') - - # Verify that share always has the same AZ as share group does - self.assertEqual( - share_group['availability_zone'], share['availability_zone']) diff --git a/manila_tempest_tests/tests/api/test_share_groups_negative.py b/manila_tempest_tests/tests/api/test_share_groups_negative.py deleted file mode 100644 index af29879cbf..0000000000 --- a/manila_tempest_tests/tests/api/test_share_groups_negative.py +++ /dev/null @@ -1,273 +0,0 @@ -# Copyright 2016 Andrew Kerr -# All Rights Reserved. -# -# Licensed under the Apache License, Version 2.0 (the "License"); you may -# not use this file except in compliance with the License. You may obtain -# a copy of the License at -# -# http://www.apache.org/licenses/LICENSE-2.0 -# -# Unless required by applicable law or agreed to in writing, software -# distributed under the License is distributed on an "AS IS" BASIS, WITHOUT -# WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. See the -# License for the specific language governing permissions and limitations -# under the License. 
- -from tempest import config -from tempest.lib.common.utils import data_utils -from tempest.lib import exceptions as lib_exc -import testtools -from testtools import testcase as tc - -from manila_tempest_tests.common import constants -from manila_tempest_tests.tests.api import base - -CONF = config.CONF - - -@testtools.skipUnless( - CONF.share.run_share_group_tests, 'Share Group tests disabled.') -@base.skip_if_microversion_lt(constants.MIN_SHARE_GROUP_MICROVERSION) -class ShareGroupsNegativeTest(base.BaseSharesTest): - - @classmethod - def resource_setup(cls): - super(ShareGroupsNegativeTest, cls).resource_setup() - # Create a share group - cls.share_group_name = data_utils.rand_name("tempest-sg-name") - cls.share_group_desc = data_utils.rand_name("tempest-sg-description") - cls.share_group = cls.create_share_group( - name=cls.share_group_name, - description=cls.share_group_desc - ) - # Create a share in the share group - cls.share_name = data_utils.rand_name("tempest-share-name") - cls.share_desc = data_utils.rand_name("tempest-share-description") - cls.share_size = 1 - cls.share = cls.create_share( - name=cls.share_name, - description=cls.share_desc, - size=cls.share_size, - share_group_id=cls.share_group['id'], - experimental=True, - ) - # Create a share group snapshot of the share group - cls.sg_snap_name = data_utils.rand_name("tempest-sg-snap-name") - cls.sg_snap_desc = data_utils.rand_name( - "tempest-group-snap-description") - cls.sg_snapshot = cls.create_share_group_snapshot_wait_for_active( - cls.share_group["id"], - name=cls.sg_snap_name, - description=cls.sg_snap_desc - ) - - @tc.attr(base.TAG_NEGATIVE, base.TAG_API_WITH_BACKEND) - def test_create_sg_with_invalid_source_sg_snapshot_id_value_min(self): - self.assertRaises( - lib_exc.BadRequest, - self.create_share_group, - source_share_group_snapshot_id='foobar', - cleanup_in_class=False, - version=constants.MIN_SHARE_GROUP_MICROVERSION) - - @tc.attr(base.TAG_NEGATIVE, base.TAG_API_WITH_BACKEND) - def test_create_sg_with_nonexistent_source_sg_snapshot_id_value_min(self): - self.assertRaises( - lib_exc.BadRequest, - self.create_share_group, - source_share_group_snapshot_id=self.share['id'], - cleanup_in_class=False, - version=constants.MIN_SHARE_GROUP_MICROVERSION) - - @tc.attr(base.TAG_NEGATIVE, base.TAG_API_WITH_BACKEND) - def test_create_sg_with_invalid_share_network_id_value_min(self): - self.assertRaises( - lib_exc.BadRequest, - self.create_share_group, - share_network_id='foobar', - cleanup_in_class=False, - version=constants.MIN_SHARE_GROUP_MICROVERSION) - - @tc.attr(base.TAG_NEGATIVE, base.TAG_API_WITH_BACKEND) - def test_create_group_with_nonexistent_share_network_id_value_min(self): - self.assertRaises( - lib_exc.BadRequest, - self.create_share_group, - share_network_id=self.share['id'], - cleanup_in_class=False, - version=constants.MIN_SHARE_GROUP_MICROVERSION) - - @tc.attr(base.TAG_NEGATIVE, base.TAG_API_WITH_BACKEND) - def test_create_sg_with_invalid_share_type_id_value_min(self): - self.assertRaises( - lib_exc.BadRequest, - self.create_share_group, - share_type_ids=['foobar'], - cleanup_in_class=False, - version=constants.MIN_SHARE_GROUP_MICROVERSION) - - @tc.attr(base.TAG_NEGATIVE, base.TAG_API_WITH_BACKEND) - def test_create_sg_with_nonexistent_share_type_id_value_min(self): - self.assertRaises( - lib_exc.BadRequest, - self.create_share_group, - share_type_ids=[self.share['id']], - cleanup_in_class=False, - version=constants.MIN_SHARE_GROUP_MICROVERSION) - - @tc.attr(base.TAG_NEGATIVE, base.TAG_API_WITH_BACKEND) 
- def test_create_sg_snapshot_with_invalid_sg_id_value_min(self): - self.assertRaises( - lib_exc.BadRequest, - self.create_share_group_snapshot_wait_for_active, - 'foobar', - cleanup_in_class=False, - version=constants.MIN_SHARE_GROUP_MICROVERSION) - - @tc.attr(base.TAG_NEGATIVE, base.TAG_API_WITH_BACKEND) - def test_create_sg_snapshot_with_nonexistent_sg_id_value_min(self): - self.assertRaises( - lib_exc.BadRequest, - self.create_share_group_snapshot_wait_for_active, - self.share['id'], - cleanup_in_class=False, - version=constants.MIN_SHARE_GROUP_MICROVERSION) - - @tc.attr(base.TAG_NEGATIVE, base.TAG_API_WITH_BACKEND) - def test_get_sg_with_invalid_id_min(self): - self.assertRaises( - lib_exc.NotFound, - self.shares_v2_client.get_share_group, - "invalid_share_group_id", - version=constants.MIN_SHARE_GROUP_MICROVERSION) - - @tc.attr(base.TAG_NEGATIVE, base.TAG_API_WITH_BACKEND) - def test_get_sg_without_passing_group_id_min(self): - self.assertRaises( - lib_exc.NotFound, - self.shares_v2_client.get_share_group, - '', version=constants.MIN_SHARE_GROUP_MICROVERSION) - - @tc.attr(base.TAG_NEGATIVE, base.TAG_API_WITH_BACKEND) - def test_update_sg_with_invalid_id_min(self): - self.assertRaises( - lib_exc.NotFound, - self.shares_v2_client.update_share_group, - 'invalid_share_group_id', - name='new_name', - description='new_description', - version=constants.MIN_SHARE_GROUP_MICROVERSION) - - @tc.attr(base.TAG_NEGATIVE, base.TAG_API_WITH_BACKEND) - def test_delete_sg_with_invalid_id_min(self): - self.assertRaises( - lib_exc.NotFound, - self.shares_v2_client.delete_share_group, - "invalid_share_group_id", - version=constants.MIN_SHARE_GROUP_MICROVERSION) - - @tc.attr(base.TAG_NEGATIVE, base.TAG_API_WITH_BACKEND) - def test_delete_sg_without_passing_sg_id_min(self): - self.assertRaises( - lib_exc.NotFound, - self.shares_v2_client.delete_share_group, - '', version=constants.MIN_SHARE_GROUP_MICROVERSION) - - @tc.attr(base.TAG_NEGATIVE, base.TAG_API_WITH_BACKEND) - def test_delete_sg_in_use_by_sg_snapshot_min(self): - self.assertRaises( - lib_exc.Conflict, - self.shares_v2_client.delete_share_group, - self.share_group['id'], - version=constants.MIN_SHARE_GROUP_MICROVERSION) - - @tc.attr(base.TAG_NEGATIVE, base.TAG_API_WITH_BACKEND) - def test_delete_share_in_use_by_sg_snapshot_min(self): - params = {'share_group_id': self.share['share_group_id']} - self.assertRaises( - lib_exc.Forbidden, - self.shares_v2_client.delete_share, - self.share['id'], - params=params, - version=constants.MIN_SHARE_GROUP_MICROVERSION) - - @tc.attr(base.TAG_NEGATIVE, base.TAG_API_WITH_BACKEND) - def test_delete_sg_containing_a_share_min(self): - self.assertRaises( - lib_exc.Conflict, - self.shares_v2_client.delete_share_group, - self.share_group['id'], - version=constants.MIN_SHARE_GROUP_MICROVERSION) - - # Verify share group is not put into error state from conflict - sg = self.shares_v2_client.get_share_group( - self.share_group['id'], - version=constants.MIN_SHARE_GROUP_MICROVERSION) - self.assertEqual('available', sg['status']) - - @tc.attr(base.TAG_NEGATIVE, base.TAG_API_WITH_BACKEND) - def test_filter_shares_on_invalid_group_id_min(self): - shares = self.shares_v2_client.list_shares( - detailed=True, - params={'share_group_id': 'foobar'}, - version=constants.MIN_SHARE_GROUP_MICROVERSION, - ) - self.assertEqual(0, len(shares), 'Incorrect number of shares returned') - - @tc.attr(base.TAG_NEGATIVE, base.TAG_API_WITH_BACKEND) - def test_filter_shares_on_nonexistent_group_id_min(self): - shares = 
self.shares_v2_client.list_shares( - detailed=True, - params={'share_group_id': self.share['id']}, - version=constants.MIN_SHARE_GROUP_MICROVERSION, - ) - self.assertEqual(0, len(shares), 'Incorrect number of shares returned') - - @tc.attr(base.TAG_NEGATIVE, base.TAG_API_WITH_BACKEND) - def test_filter_shares_on_empty_share_group_id_min(self): - share_group = self.create_share_group( - name='tempest_sg', - description='tempest_sg_desc', - cleanup_in_class=False, - version=constants.MIN_SHARE_GROUP_MICROVERSION, - ) - shares = self.shares_v2_client.list_shares( - detailed=True, - params={'share_group_id': share_group['id']}, - version=constants.MIN_SHARE_GROUP_MICROVERSION, - ) - self.assertEqual(0, len(shares), 'Incorrect number of shares returned') - - @tc.attr(base.TAG_NEGATIVE, base.TAG_API_WITH_BACKEND) - def test_create_sg_with_nonexistent_az_min(self): - self.assertRaises( - lib_exc.NotFound, - self.shares_v2_client.create_share_group, - name='tempest_sg', - description='tempest_sg_desc', - availability_zone='fake_nonexistent_az', - version=constants.MIN_SHARE_GROUP_MICROVERSION) - - @base.skip_if_microversion_lt("2.34") - @tc.attr(base.TAG_NEGATIVE, base.TAG_API_WITH_BACKEND) - def test_create_sg_and_share_with_different_azs(self): - azs = self.shares_v2_client.list_availability_zones() - - if len(azs) < 2: - raise self.skipException( - 'Test requires presence of at least 2 availability zones.') - else: - share_group = self.shares_v2_client.get_share_group( - self.share_group['id'], '2.34') - different_az = [ - az['name'] - for az in azs - if az['name'] != share_group['availability_zone'] - ][0] - - self.assertRaises( - lib_exc.BadRequest, - self.create_share, - share_group_id=self.share_group['id'], - availability_zone=different_az, - version='2.34') diff --git a/manila_tempest_tests/tests/api/test_share_networks.py b/manila_tempest_tests/tests/api/test_share_networks.py deleted file mode 100644 index 4a44c12240..0000000000 --- a/manila_tempest_tests/tests/api/test_share_networks.py +++ /dev/null @@ -1,298 +0,0 @@ -# Copyright 2014 Mirantis Inc. -# All Rights Reserved. -# -# Licensed under the Apache License, Version 2.0 (the "License"); you may -# not use this file except in compliance with the License. You may obtain -# a copy of the License at -# -# http://www.apache.org/licenses/LICENSE-2.0 -# -# Unless required by applicable law or agreed to in writing, software -# distributed under the License is distributed on an "AS IS" BASIS, WITHOUT -# WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. See the -# License for the specific language governing permissions and limitations -# under the License. 
- -from tempest import config -import testtools -from testtools import testcase as tc - -from manila_tempest_tests.tests.api import base -from manila_tempest_tests import utils - -CONF = config.CONF - - -class ShareNetworkListMixin(object): - - @tc.attr("gate", "smoke", ) - @tc.attr(base.TAG_POSITIVE, base.TAG_API) - def test_list_share_networks(self): - listed = self.shares_client.list_share_networks() - any(self.sn_with_ldap_ss["id"] in sn["id"] for sn in listed) - - # verify keys - keys = ["name", "id"] - [self.assertIn(key, sn.keys()) for sn in listed for key in keys] - - @tc.attr(base.TAG_POSITIVE, base.TAG_API) - def test_try_list_share_networks_all_tenants(self): - listed = self.shares_client.list_share_networks_with_detail( - params={'all_tenants': 1}) - any(self.sn_with_ldap_ss["id"] in sn["id"] for sn in listed) - - # verify keys - keys = ["name", "id"] - [self.assertIn(key, sn.keys()) for sn in listed for key in keys] - - @tc.attr(base.TAG_POSITIVE, base.TAG_API) - def test_try_list_share_networks_project_id(self): - listed = self.shares_client.list_share_networks_with_detail( - params={'project_id': 'some_project'}) - any(self.sn_with_ldap_ss["id"] in sn["id"] for sn in listed) - - # verify keys - keys = ["name", "id"] - [self.assertIn(key, sn.keys()) for sn in listed for key in keys] - - @tc.attr(base.TAG_POSITIVE, base.TAG_API) - def test_list_share_networks_with_detail(self): - listed = self.shares_v2_client.list_share_networks_with_detail() - any(self.sn_with_ldap_ss["id"] in sn["id"] for sn in listed) - - # verify keys - keys = [ - "name", "id", "description", "network_type", - "project_id", "cidr", "ip_version", - "neutron_net_id", "neutron_subnet_id", - "created_at", "updated_at", "segmentation_id", - ] - - # In v2.18 and beyond, we expect gateway. - if utils.is_microversion_supported('2.18'): - keys.append('gateway') - - # In v2.20 and beyond, we expect mtu. 
- if utils.is_microversion_supported('2.20'): - keys.append('mtu') - - [self.assertIn(key, sn.keys()) for sn in listed for key in keys] - - @tc.attr(base.TAG_POSITIVE, base.TAG_API) - def test_list_share_networks_filter_by_ss(self): - listed = self.shares_client.list_share_networks_with_detail( - {'security_service_id': self.ss_ldap['id']}) - self.assertTrue(any(self.sn_with_ldap_ss['id'] == sn['id'] - for sn in listed)) - for sn in listed: - ss_list = self.shares_client.list_sec_services_for_share_network( - sn['id']) - self.assertTrue(any(ss['id'] == self.ss_ldap['id'] - for ss in ss_list)) - - @tc.attr(base.TAG_POSITIVE, base.TAG_API) - @base.skip_if_microversion_lt("2.36") - def test_list_share_networks_like_filter(self): - valid_filter_opts = { - 'name': 'sn_with_ldap_ss', - 'description': 'fake', - } - - listed = self.shares_v2_client.list_share_networks_with_detail( - {'name~': 'ldap_ss', 'description~': 'fa'}) - self.assertTrue(any(self.sn_with_ldap_ss['id'] == sn['id'] - for sn in listed)) - for sn in listed: - self.assertTrue(all(value in sn[key] for key, value in - valid_filter_opts.items())) - - @tc.attr(base.TAG_POSITIVE, base.TAG_API) - def test_list_share_networks_all_filter_opts(self): - valid_filter_opts = { - 'created_before': '2002-10-10', - 'created_since': '2001-01-01', - 'neutron_net_id': '1111', - 'neutron_subnet_id': '2222', - 'network_type': 'vlan', - 'segmentation_id': 1000, - 'cidr': '10.0.0.0/24', - 'ip_version': 4, - 'name': 'sn_with_ldap_ss' - } - - listed = self.shares_client.list_share_networks_with_detail( - valid_filter_opts) - self.assertTrue(any(self.sn_with_ldap_ss['id'] == sn['id'] - for sn in listed)) - created_before = valid_filter_opts.pop('created_before') - created_since = valid_filter_opts.pop('created_since') - for sn in listed: - self.assertTrue(all(sn[key] == value for key, value in - valid_filter_opts.items())) - self.assertLessEqual(sn['created_at'], created_before) - self.assertGreaterEqual(sn['created_at'], created_since) - - -class ShareNetworksTest(base.BaseSharesTest, ShareNetworkListMixin): - - @classmethod - def resource_setup(cls): - super(ShareNetworksTest, cls).resource_setup() - ss_data = cls.generate_security_service_data() - cls.ss_ldap = cls.create_security_service(**ss_data) - - cls.data_sn_with_ldap_ss = { - 'name': 'sn_with_ldap_ss', - 'neutron_net_id': '1111', - 'neutron_subnet_id': '2222', - 'created_at': '2002-02-02', - 'updated_at': None, - 'network_type': 'vlan', - 'segmentation_id': 1000, - 'cidr': '10.0.0.0/24', - 'ip_version': 4, - 'description': 'fake description', - } - cls.sn_with_ldap_ss = cls.create_share_network( - cleanup_in_class=True, - **cls.data_sn_with_ldap_ss) - - cls.shares_client.add_sec_service_to_share_network( - cls.sn_with_ldap_ss["id"], - cls.ss_ldap["id"]) - - cls.data_sn_with_kerberos_ss = { - 'name': 'sn_with_kerberos_ss', - 'created_at': '2003-03-03', - 'updated_at': None, - 'neutron_net_id': 'test net id', - 'neutron_subnet_id': 'test subnet id', - 'network_type': 'local', - 'segmentation_id': 2000, - 'cidr': '10.0.0.0/13', - 'ip_version': 6, - 'description': 'fake description', - } - - cls.ss_kerberos = cls.create_security_service( - ss_type='kerberos', - **cls.data_sn_with_ldap_ss) - - cls.sn_with_kerberos_ss = cls.create_share_network( - cleanup_in_class=True, - **cls.data_sn_with_kerberos_ss) - - cls.shares_client.add_sec_service_to_share_network( - cls.sn_with_kerberos_ss["id"], - cls.ss_kerberos["id"]) - - @tc.attr(base.TAG_POSITIVE, base.TAG_API) - def 
test_create_delete_share_network(self): - # generate data for share network - data = self.generate_share_network_data() - - # create share network - created = self.shares_client.create_share_network(**data) - self.assertDictContainsSubset(data, created) - - # Delete share_network - self.shares_client.delete_share_network(created["id"]) - - @tc.attr(base.TAG_POSITIVE, base.TAG_API) - def test_get_share_network(self): - get = self.shares_client.get_share_network(self.sn_with_ldap_ss["id"]) - self.assertEqual('2002-02-02T00:00:00.000000', get['created_at']) - data = self.data_sn_with_ldap_ss.copy() - del data['created_at'] - self.assertDictContainsSubset(data, get) - - @tc.attr(base.TAG_POSITIVE, base.TAG_API) - def test_update_share_network(self): - update_data = self.generate_share_network_data() - updated = self.shares_client.update_share_network( - self.sn_with_ldap_ss["id"], - **update_data) - self.assertDictContainsSubset(update_data, updated) - - @tc.attr(base.TAG_POSITIVE, base.TAG_API_WITH_BACKEND) - @testtools.skipIf( - not CONF.share.multitenancy_enabled, "Only for multitenancy.") - def test_update_valid_keys_sh_server_exists(self): - self.create_share(cleanup_in_class=False) - update_dict = { - "name": "new_name", - "description": "new_description", - } - updated = self.shares_client.update_share_network( - self.shares_client.share_network_id, **update_dict) - self.assertDictContainsSubset(update_dict, updated) - - @tc.attr(base.TAG_POSITIVE, base.TAG_API) - def test_recreate_share_network(self): - # generate data for share network - data = self.generate_share_network_data() - - # create share network - sn1 = self.shares_client.create_share_network(**data) - self.assertDictContainsSubset(data, sn1) - - # Delete first share network - self.shares_client.delete_share_network(sn1["id"]) - - # create second share network with same data - sn2 = self.shares_client.create_share_network(**data) - self.assertDictContainsSubset(data, sn2) - - # Delete second share network - self.shares_client.delete_share_network(sn2["id"]) - - @tc.attr(base.TAG_POSITIVE, base.TAG_API) - def test_create_two_share_networks_with_same_net_and_subnet(self): - # generate data for share network - data = self.generate_share_network_data() - - # create first share network - sn1 = self.create_share_network(**data) - self.assertDictContainsSubset(data, sn1) - - # create second share network - sn2 = self.create_share_network(**data) - self.assertDictContainsSubset(data, sn2) - - @testtools.skipUnless(CONF.share.create_networks_when_multitenancy_enabled, - "Only for setups with network creation.") - @testtools.skipUnless(CONF.share.multitenancy_enabled, - "Only for multitenancy.") - @testtools.skipUnless(CONF.service_available.neutron, "Only with neutron.") - @base.skip_if_microversion_lt("2.18") - @tc.attr(base.TAG_POSITIVE, base.TAG_API_WITH_BACKEND) - def test_gateway_with_neutron(self): - subnet_client = self.subnets_client - - self.create_share(cleanup_in_class=False) - share_net_details = self.shares_v2_client.get_share_network( - self.shares_v2_client.share_network_id) - subnet_details = subnet_client.show_subnet( - share_net_details['neutron_subnet_id']) - - self.assertEqual(subnet_details['subnet']['gateway_ip'], - share_net_details['gateway']) - - @testtools.skipUnless(CONF.share.create_networks_when_multitenancy_enabled, - "Only for setups with network creation.") - @testtools.skipUnless(CONF.share.multitenancy_enabled, - "Only for multitenancy.") - @testtools.skipUnless(CONF.service_available.neutron, "Only 
with neutron.") - @base.skip_if_microversion_lt("2.20") - @tc.attr(base.TAG_POSITIVE, base.TAG_API_WITH_BACKEND) - def test_mtu_with_neutron(self): - network_client = self.networks_client - - self.create_share(cleanup_in_class=False) - share_net_details = self.shares_v2_client.get_share_network( - self.shares_v2_client.share_network_id) - network_details = network_client.show_network( - share_net_details['neutron_net_id']) - - self.assertEqual(network_details['network']['mtu'], - share_net_details['mtu']) diff --git a/manila_tempest_tests/tests/api/test_share_networks_negative.py b/manila_tempest_tests/tests/api/test_share_networks_negative.py deleted file mode 100644 index 02bbdb9a45..0000000000 --- a/manila_tempest_tests/tests/api/test_share_networks_negative.py +++ /dev/null @@ -1,134 +0,0 @@ -# Copyright 2014 Mirantis Inc. -# All Rights Reserved. -# -# Licensed under the Apache License, Version 2.0 (the "License"); you may -# not use this file except in compliance with the License. You may obtain -# a copy of the License at -# -# http://www.apache.org/licenses/LICENSE-2.0 -# -# Unless required by applicable law or agreed to in writing, software -# distributed under the License is distributed on an "AS IS" BASIS, WITHOUT -# WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. See the -# License for the specific language governing permissions and limitations -# under the License. - -from tempest import config -from tempest.lib import exceptions as lib_exc -import testtools -from testtools import testcase as tc - -from manila_tempest_tests.tests.api import base - -CONF = config.CONF - - -class ShareNetworksNegativeTest(base.BaseSharesTest): - - @tc.attr(base.TAG_NEGATIVE, base.TAG_API) - def test_try_get_share_network_without_id(self): - self.assertRaises(lib_exc.NotFound, - self.shares_client.get_share_network, "") - - @tc.attr(base.TAG_NEGATIVE, base.TAG_API) - def test_try_get_share_network_with_wrong_id(self): - self.assertRaises(lib_exc.NotFound, - self.shares_client.get_share_network, "wrong_id") - - @tc.attr(base.TAG_NEGATIVE, base.TAG_API) - def test_try_delete_share_network_without_id(self): - self.assertRaises(lib_exc.NotFound, - self.shares_client.delete_share_network, "") - - @tc.attr(base.TAG_NEGATIVE, base.TAG_API) - def test_try_delete_share_network_with_wrong_type(self): - self.assertRaises(lib_exc.NotFound, - self.shares_client.delete_share_network, "wrong_id") - - @tc.attr(base.TAG_NEGATIVE, base.TAG_API) - def test_try_update_nonexistant_share_network(self): - self.assertRaises(lib_exc.NotFound, - self.shares_client.update_share_network, - "wrong_id", name="name") - - @tc.attr(base.TAG_NEGATIVE, base.TAG_API) - def test_try_update_share_network_with_empty_id(self): - self.assertRaises(lib_exc.NotFound, - self.shares_client.update_share_network, - "", name="name") - - @tc.attr(base.TAG_NEGATIVE, base.TAG_API_WITH_BACKEND) - @testtools.skipIf( - not CONF.share.multitenancy_enabled, "Only for multitenancy.") - def test_try_update_invalid_keys_sh_server_exists(self): - self.create_share(cleanup_in_class=False) - - self.assertRaises(lib_exc.Forbidden, - self.shares_client.update_share_network, - self.shares_client.share_network_id, - neutron_net_id="new_net_id") - - @tc.attr(base.TAG_NEGATIVE, base.TAG_API) - def test_try_get_deleted_share_network(self): - data = self.generate_share_network_data() - sn = self.create_share_network(**data) - self.assertDictContainsSubset(data, sn) - - self.shares_client.delete_share_network(sn["id"]) - - # try get deleted share 
network entity - self.assertRaises(lib_exc.NotFound, - self.shares_client.get_security_service, - sn["id"]) - - @tc.attr(base.TAG_NEGATIVE, base.TAG_API) - def test_try_list_share_networks_wrong_created_since_value(self): - self.assertRaises( - lib_exc.BadRequest, - self.shares_client.list_share_networks_with_detail, - params={'created_since': '2014-10-23T08:31:58.000000'}) - - @tc.attr(base.TAG_NEGATIVE, base.TAG_API) - def test_try_list_share_networks_wrong_created_before_value(self): - self.assertRaises( - lib_exc.BadRequest, - self.shares_client.list_share_networks_with_detail, - params={'created_before': '2014-10-23T08:31:58.000000'}) - - @tc.attr(base.TAG_NEGATIVE, base.TAG_API_WITH_BACKEND) - @testtools.skipIf(not CONF.share.multitenancy_enabled, - 'Can run only with drivers that do handle share servers ' - 'creation. Skipping.') - @testtools.skipIf(CONF.share.share_network_id != "", - "This test is not suitable for pre-existing " - "share_network.") - def test_try_delete_share_network_with_existing_shares(self): - # Get valid network data for successful share creation - share_network = self.shares_client.get_share_network( - self.shares_client.share_network_id) - new_sn = self.create_share_network( - neutron_net_id=share_network['neutron_net_id'], - neutron_subnet_id=share_network['neutron_subnet_id'], - cleanup_in_class=False) - - # Create share with share network - self.create_share( - share_network_id=new_sn['id'], cleanup_in_class=False) - - # Try delete share network - self.assertRaises( - lib_exc.Conflict, - self.shares_client.delete_share_network, new_sn['id']) - - @tc.attr(base.TAG_NEGATIVE, base.TAG_API_WITH_BACKEND) - @base.skip_if_microversion_not_supported("2.35") - def test_list_shares_with_like_filter_not_exist(self): - filters = { - 'name~': 'fake_not_exist', - 'description~': 'fake_not_exist', - } - share_networks = ( - self.shares_v2_client.list_share_networks_with_detail( - params=filters)) - - self.assertEqual(0, len(share_networks)) diff --git a/manila_tempest_tests/tests/api/test_share_types_negative.py b/manila_tempest_tests/tests/api/test_share_types_negative.py deleted file mode 100644 index 8a243256e0..0000000000 --- a/manila_tempest_tests/tests/api/test_share_types_negative.py +++ /dev/null @@ -1,64 +0,0 @@ -# Copyright 2014 OpenStack Foundation -# All Rights Reserved. -# -# Licensed under the Apache License, Version 2.0 (the "License"); you may -# not use this file except in compliance with the License. You may obtain -# a copy of the License at -# -# http://www.apache.org/licenses/LICENSE-2.0 -# -# Unless required by applicable law or agreed to in writing, software -# distributed under the License is distributed on an "AS IS" BASIS, WITHOUT -# WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. See the -# License for the specific language governing permissions and limitations -# under the License. 
- -from tempest.lib.common.utils import data_utils -from tempest.lib import exceptions as lib_exc -from testtools import testcase as tc - -from manila_tempest_tests.tests.api import base - - -class ShareTypesNegativeTest(base.BaseSharesMixedTest): - - @classmethod - def _create_share_type(cls): - name = data_utils.rand_name("unique_st_name") - extra_specs = cls.add_extra_specs_to_dict() - return cls.create_share_type( - name, extra_specs=extra_specs, - client=cls.admin_client) - - @classmethod - def resource_setup(cls): - super(ShareTypesNegativeTest, cls).resource_setup() - cls.admin_client = cls.admin_shares_v2_client - cls.st = cls._create_share_type() - - @tc.attr(base.TAG_NEGATIVE, base.TAG_API) - def test_try_create_share_type_with_user(self): - self.assertRaises(lib_exc.Forbidden, - self.create_share_type, - data_utils.rand_name("used_user_creds"), - client=self.shares_client) - - @tc.attr(base.TAG_NEGATIVE, base.TAG_API) - def test_try_delete_share_type_with_user(self): - self.assertRaises(lib_exc.Forbidden, - self.shares_client.delete_share_type, - self.st["share_type"]["id"]) - - @tc.attr(base.TAG_NEGATIVE, base.TAG_API) - def test_try_add_access_to_share_type_with_user(self): - self.assertRaises(lib_exc.Forbidden, - self.shares_client.add_access_to_share_type, - self.st['share_type']['id'], - self.shares_client.tenant_id) - - @tc.attr(base.TAG_NEGATIVE, base.TAG_API) - def test_try_remove_access_from_share_type_with_user(self): - self.assertRaises(lib_exc.Forbidden, - self.shares_client.remove_access_from_share_type, - self.st['share_type']['id'], - self.shares_client.tenant_id) diff --git a/manila_tempest_tests/tests/api/test_shares.py b/manila_tempest_tests/tests/api/test_shares.py deleted file mode 100644 index 2543cf5f3e..0000000000 --- a/manila_tempest_tests/tests/api/test_shares.py +++ /dev/null @@ -1,236 +0,0 @@ -# Copyright 2014 mirantis Inc. -# All Rights Reserved. -# -# Licensed under the Apache License, Version 2.0 (the "License"); you may -# not use this file except in compliance with the License. You may obtain -# a copy of the License at -# -# http://www.apache.org/licenses/LICENSE-2.0 -# -# Unless required by applicable law or agreed to in writing, software -# distributed under the License is distributed on an "AS IS" BASIS, WITHOUT -# WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. See the -# License for the specific language governing permissions and limitations -# under the License. 
- -from tempest import config -from tempest.lib import exceptions as lib_exc -import testtools -from testtools import testcase as tc - -from manila_tempest_tests.tests.api import base -from manila_tempest_tests import utils - -CONF = config.CONF - - -class SharesNFSTest(base.BaseSharesTest): - """Covers share functionality, that is related to NFS share type.""" - protocol = "nfs" - - @classmethod - def resource_setup(cls): - super(SharesNFSTest, cls).resource_setup() - if cls.protocol not in CONF.share.enable_protocols: - message = "%s tests are disabled" % cls.protocol - raise cls.skipException(message) - cls.share = cls.create_share(cls.protocol) - - @tc.attr(base.TAG_POSITIVE, base.TAG_BACKEND) - def test_create_get_delete_share(self): - - share = self.create_share(self.protocol) - detailed_elements = {'name', 'id', 'availability_zone', - 'description', 'project_id', - 'created_at', 'share_proto', 'metadata', - 'size', 'snapshot_id', 'share_network_id', - 'status', 'share_type', 'volume_type', 'links', - 'is_public'} - msg = ( - "At least one expected element missing from share " - "response. Expected %(expected)s, got %(actual)s." % { - "expected": detailed_elements, - "actual": share.keys(), - } - ) - self.assertTrue(detailed_elements.issubset(share.keys()), msg) - self.assertFalse(share['is_public']) - - # The 'status' of the share returned by the create API must be - # set and have value either 'creating' or - # 'available' (if share creation is really fast as in - # case of Dummy driver). - self.assertIn(share['status'], ('creating', 'available')) - - # Get share using v 2.1 - we expect key 'snapshot_support' to be absent - share_get = self.shares_v2_client.get_share(share['id'], version='2.1') - detailed_elements.add('export_location') - self.assertTrue(detailed_elements.issubset(share_get.keys()), msg) - - # Get share using v 2.2 - we expect key 'snapshot_support' to exist - share_get = self.shares_v2_client.get_share(share['id'], version='2.2') - detailed_elements.add('snapshot_support') - self.assertTrue(detailed_elements.issubset(share_get.keys()), msg) - - if utils.is_microversion_supported('2.9'): - # Get share using v 2.9 - key 'export_location' is expected - # to be absent - share_get = self.shares_v2_client.get_share( - share['id'], version='2.9') - detailed_elements.remove('export_location') - self.assertTrue(detailed_elements.issubset(share_get.keys()), msg) - - # In v 2.11 and beyond, we expect key 'replication_type' in the - # share data returned by the share create API. - if utils.is_microversion_supported('2.11'): - detailed_elements.add('replication_type') - self.assertTrue(detailed_elements.issubset(share.keys()), msg) - - # In v 2.16 and beyond, we add user_id in show/create/manage - # share echo. - if utils.is_microversion_supported('2.16'): - detailed_elements.add('user_id') - self.assertTrue(detailed_elements.issubset(share.keys()), msg) - - # In v 2.24 and beyond, we add create_share_from_snapshot_support in - # show/create/manage share echo. 
- if utils.is_microversion_supported('2.24'): - detailed_elements.add('create_share_from_snapshot_support') - self.assertTrue(detailed_elements.issubset(share.keys()), msg) - - # Delete share - self.shares_v2_client.delete_share(share['id']) - self.shares_v2_client.wait_for_resource_deletion(share_id=share['id']) - self.assertRaises(lib_exc.NotFound, - self.shares_v2_client.get_share, - share['id']) - - @tc.attr(base.TAG_POSITIVE, base.TAG_BACKEND) - @testtools.skipUnless(CONF.share.run_snapshot_tests, - "Snapshot tests are disabled.") - def test_create_delete_snapshot(self): - - # create snapshot - snap = self.create_snapshot_wait_for_active(self.share["id"]) - - detailed_elements = {'name', 'id', 'description', - 'created_at', 'share_proto', 'size', 'share_size', - 'share_id', 'status', 'links'} - msg = ( - "At least one expected element missing from share " - "response. Expected %(expected)s, got %(actual)s." % { - "expected": detailed_elements, - "actual": snap.keys(), - } - ) - self.assertTrue(detailed_elements.issubset(snap.keys()), msg) - - # In v2.17 and beyond, we expect user_id and project_id keys - if utils.is_microversion_supported('2.17'): - detailed_elements.update({'user_id', 'project_id'}) - self.assertTrue(detailed_elements.issubset(snap.keys()), msg) - else: - self.assertNotIn('user_id', detailed_elements) - self.assertNotIn('project_id', detailed_elements) - - # delete snapshot - self.shares_client.delete_snapshot(snap["id"]) - self.shares_client.wait_for_resource_deletion(snapshot_id=snap["id"]) - self.assertRaises(lib_exc.NotFound, - self.shares_client.get_snapshot, snap['id']) - - @tc.attr(base.TAG_POSITIVE, base.TAG_BACKEND) - @testtools.skipUnless(CONF.share.run_snapshot_tests, - "Snapshot tests are disabled.") - @testtools.skipUnless( - CONF.share.capability_create_share_from_snapshot_support, - "Create share from snapshot tests are disabled.") - def test_create_share_from_snapshot(self): - # If multitenant driver used, share_network will be provided by default - - # create snapshot - snap = self.create_snapshot_wait_for_active( - self.share["id"], cleanup_in_class=False) - - # create share from snapshot - s2 = self.create_share( - self.protocol, snapshot_id=snap["id"], cleanup_in_class=False) - - # The 'status' of the share returned by the create API must be - # set and have value either 'creating' or - # 'available' (if share creation is really fast as in - # case of Dummy driver). - self.assertIn(s2['status'], ('creating', 'available')) - - # verify share, created from snapshot - get = self.shares_client.get_share(s2["id"]) - msg = ("Expected snapshot_id %s as " - "source of share %s" % (snap["id"], get["snapshot_id"])) - self.assertEqual(get["snapshot_id"], snap["id"], msg) - - @tc.attr(base.TAG_POSITIVE, base.TAG_BACKEND) - @testtools.skipIf(not CONF.share.multitenancy_enabled, - "Only for multitenancy.") - @testtools.skipUnless(CONF.share.run_snapshot_tests, - "Snapshot tests are disabled.") - @testtools.skipUnless( - CONF.share.capability_create_share_from_snapshot_support, - "Create share from snapshot tests are disabled.") - def test_create_share_from_snapshot_share_network_not_provided(self): - # We expect usage of share network from parent's share - # when creating share from snapshot using a driver that supports - # multi-tenancy. 
- - # get parent share - parent = self.shares_client.get_share(self.share["id"]) - - # create snapshot - snap = self.create_snapshot_wait_for_active( - self.share["id"], cleanup_in_class=False) - - # create share from snapshot - child = self.create_share( - self.protocol, snapshot_id=snap["id"], cleanup_in_class=False) - - # The 'status' of the share returned by the create API must be - # set and have value either 'creating' or - # 'available' (if share creation is really fast as in - # case of Dummy driver). - self.assertIn(child['status'], ('creating', 'available')) - - # verify share, created from snapshot - get = self.shares_client.get_share(child["id"]) - keys = { - "share": self.share["id"], - "actual_sn": get["share_network_id"], - "expected_sn": parent["share_network_id"], - } - msg = ("Expected share_network_id %(expected_sn)s for " - "share %(share)s, but %(actual_sn)s found." % keys) - self.assertEqual( - get["share_network_id"], parent["share_network_id"], msg) - - -class SharesCIFSTest(SharesNFSTest): - """Covers share functionality, that is related to CIFS share type.""" - protocol = "cifs" - - -class SharesGLUSTERFSTest(SharesNFSTest): - """Covers share functionality that is related to GLUSTERFS share type.""" - protocol = "glusterfs" - - -class SharesHDFSTest(SharesNFSTest): - """Covers share functionality that is related to HDFS share type.""" - protocol = "hdfs" - - -class SharesCephFSTest(SharesNFSTest): - """Covers share functionality that is related to CEPHFS share type.""" - protocol = "cephfs" - - -class SharesMapRFSTest(SharesNFSTest): - """Covers share functionality that is related to MapRFS share type.""" - protocol = "maprfs" diff --git a/manila_tempest_tests/tests/api/test_shares_actions.py b/manila_tempest_tests/tests/api/test_shares_actions.py deleted file mode 100644 index 7d28d8372e..0000000000 --- a/manila_tempest_tests/tests/api/test_shares_actions.py +++ /dev/null @@ -1,723 +0,0 @@ -# Copyright 2014 Mirantis Inc. -# All Rights Reserved. -# -# Licensed under the Apache License, Version 2.0 (the "License"); you may -# not use this file except in compliance with the License. You may obtain -# a copy of the License at -# -# http://www.apache.org/licenses/LICENSE-2.0 -# -# Unless required by applicable law or agreed to in writing, software -# distributed under the License is distributed on an "AS IS" BASIS, WITHOUT -# WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. See the -# License for the specific language governing permissions and limitations -# under the License. 
- -import ddt -import six -from tempest import config -from tempest.lib.common.utils import data_utils -import testtools -from testtools import testcase as tc - -from manila_tempest_tests.common import constants -from manila_tempest_tests.tests.api import base -from manila_tempest_tests import utils - -CONF = config.CONF -LATEST_MICROVERSION = CONF.share.max_api_microversion - - -@ddt.ddt -class SharesActionsTest(base.BaseSharesTest): - """Covers share functionality, that doesn't related to share type.""" - - @classmethod - def resource_setup(cls): - super(SharesActionsTest, cls).resource_setup() - - cls.shares = [] - - # create share - cls.share_name = data_utils.rand_name("tempest-share-name") - cls.share_desc = data_utils.rand_name("tempest-share-description") - cls.metadata = { - 'foo_key_share_1': 'foo_value_share_1', - 'bar_key_share_1': 'foo_value_share_1', - } - cls.shares.append(cls.create_share( - name=cls.share_name, - description=cls.share_desc, - metadata=cls.metadata, - )) - - if CONF.share.run_snapshot_tests: - # create snapshot - cls.snap_name = data_utils.rand_name("tempest-snapshot-name") - cls.snap_desc = data_utils.rand_name( - "tempest-snapshot-description") - cls.snap = cls.create_snapshot_wait_for_active( - cls.shares[0]["id"], cls.snap_name, cls.snap_desc) - - if CONF.share.capability_create_share_from_snapshot_support: - - # create second share from snapshot for purposes of sorting and - # snapshot filtering - cls.share_name2 = data_utils.rand_name("tempest-share-name") - cls.share_desc2 = data_utils.rand_name( - "tempest-share-description") - cls.metadata2 = { - 'foo_key_share_2': 'foo_value_share_2', - 'bar_key_share_2': 'foo_value_share_2', - } - cls.shares.append(cls.create_share( - name=cls.share_name2, - description=cls.share_desc2, - metadata=cls.metadata2, - snapshot_id=cls.snap['id'], - )) - - def _get_share(self, version): - - # get share - share = self.shares_v2_client.get_share( - self.shares[0]['id'], version=six.text_type(version)) - - # verify keys - expected_keys = [ - "status", "description", "links", "availability_zone", - "created_at", "project_id", "volume_type", "share_proto", "name", - "snapshot_id", "id", "size", "share_network_id", "metadata", - "snapshot_id", "is_public", - ] - if utils.is_microversion_lt(version, '2.9'): - expected_keys.extend(["export_location", "export_locations"]) - if utils.is_microversion_ge(version, '2.2'): - expected_keys.append("snapshot_support") - if utils.is_microversion_ge(version, '2.5'): - expected_keys.append("share_type_name") - if utils.is_microversion_ge(version, '2.10'): - expected_keys.append("access_rules_status") - if utils.is_microversion_ge(version, '2.11'): - expected_keys.append("replication_type") - if utils.is_microversion_ge(version, '2.16'): - expected_keys.append("user_id") - if utils.is_microversion_ge(version, '2.24'): - expected_keys.append("create_share_from_snapshot_support") - if utils.is_microversion_ge(version, - constants.REVERT_TO_SNAPSHOT_MICROVERSION): - expected_keys.append("revert_to_snapshot_support") - actual_keys = list(share.keys()) - [self.assertIn(key, actual_keys) for key in expected_keys] - - # verify values - msg = "Expected name: '%s', actual name: '%s'" % (self.share_name, - share["name"]) - self.assertEqual(self.share_name, six.text_type(share["name"]), msg) - - msg = ("Expected description: '%s', " - "actual description: '%s'" % (self.share_desc, - share["description"])) - self.assertEqual( - self.share_desc, six.text_type(share["description"]), msg) - - msg = 
"Expected size: '%s', actual size: '%s'" % ( - CONF.share.share_size, share["size"]) - self.assertEqual(CONF.share.share_size, int(share["size"]), msg) - - @tc.attr(base.TAG_POSITIVE, base.TAG_API_WITH_BACKEND) - def test_get_share_v2_1(self): - self._get_share('2.1') - - @tc.attr(base.TAG_POSITIVE, base.TAG_API_WITH_BACKEND) - def test_get_share_with_snapshot_support_key(self): - self._get_share('2.2') - - @tc.attr(base.TAG_POSITIVE, base.TAG_API_WITH_BACKEND) - @utils.skip_if_microversion_not_supported('2.6') - def test_get_share_with_share_type_name_key(self): - self._get_share('2.6') - - @tc.attr(base.TAG_POSITIVE, base.TAG_API_WITH_BACKEND) - @utils.skip_if_microversion_not_supported('2.9') - def test_get_share_export_locations_removed(self): - self._get_share('2.9') - - @tc.attr(base.TAG_POSITIVE, base.TAG_API_WITH_BACKEND) - @utils.skip_if_microversion_not_supported('2.10') - def test_get_share_with_access_rules_status(self): - self._get_share('2.10') - - @tc.attr(base.TAG_POSITIVE, base.TAG_API_WITH_BACKEND) - @utils.skip_if_microversion_not_supported('2.11') - def test_get_share_with_replication_type_key(self): - self._get_share('2.11') - - @tc.attr(base.TAG_POSITIVE, base.TAG_API_WITH_BACKEND) - @utils.skip_if_microversion_not_supported('2.16') - def test_get_share_with_user_id(self): - self._get_share('2.16') - - @tc.attr(base.TAG_POSITIVE, base.TAG_API_WITH_BACKEND) - @utils.skip_if_microversion_not_supported('2.24') - def test_get_share_with_create_share_from_snapshot_support(self): - self._get_share('2.24') - - @tc.attr(base.TAG_POSITIVE, base.TAG_API_WITH_BACKEND) - @utils.skip_if_microversion_not_supported( - constants.REVERT_TO_SNAPSHOT_MICROVERSION) - def test_get_share_with_revert_to_snapshot_support(self): - self._get_share(constants.REVERT_TO_SNAPSHOT_MICROVERSION) - - @tc.attr(base.TAG_POSITIVE, base.TAG_API_WITH_BACKEND) - def test_list_shares(self): - - # list shares - shares = self.shares_v2_client.list_shares() - - # verify keys - keys = ["name", "id", "links"] - [self.assertIn(key, sh.keys()) for sh in shares for key in keys] - - # our share id in list and have no duplicates - for share in self.shares: - gen = [sid["id"] for sid in shares if sid["id"] in share["id"]] - msg = "expected id lists %s times in share list" % (len(gen)) - self.assertEqual(1, len(gen), msg) - - def _list_shares_with_detail(self, version): - - # list shares - shares = self.shares_v2_client.list_shares_with_detail( - version=six.text_type(version)) - - # verify keys - keys = [ - "status", "description", "links", "availability_zone", - "created_at", "project_id", "volume_type", "share_proto", "name", - "snapshot_id", "id", "size", "share_network_id", "metadata", - "snapshot_id", "is_public", "share_type", - ] - if utils.is_microversion_lt(version, '2.9'): - keys.extend(["export_location", "export_locations"]) - if utils.is_microversion_ge(version, '2.2'): - keys.append("snapshot_support") - if utils.is_microversion_ge(version, '2.6'): - keys.append("share_type_name") - if utils.is_microversion_ge(version, '2.10'): - keys.append("access_rules_status") - if utils.is_microversion_ge(version, '2.11'): - keys.append("replication_type") - if utils.is_microversion_ge(version, '2.16'): - keys.append("user_id") - if utils.is_microversion_ge(version, '2.24'): - keys.append("create_share_from_snapshot_support") - if utils.is_microversion_ge(version, - constants.REVERT_TO_SNAPSHOT_MICROVERSION): - keys.append("revert_to_snapshot_support") - [self.assertIn(key, sh.keys()) for sh in shares for key in 
keys] - - # our shares in list and have no duplicates - for share in self.shares: - gen = [sid["id"] for sid in shares if sid["id"] in share["id"]] - msg = "expected id lists %s times in share list" % (len(gen)) - self.assertEqual(1, len(gen), msg) - - @tc.attr(base.TAG_POSITIVE, base.TAG_API_WITH_BACKEND) - def test_list_shares_with_detail_v2_1(self): - self._list_shares_with_detail('2.1') - - @tc.attr(base.TAG_POSITIVE, base.TAG_API_WITH_BACKEND) - def test_list_shares_with_detail_and_snapshot_support_key(self): - self._list_shares_with_detail('2.2') - - @tc.attr(base.TAG_POSITIVE, base.TAG_API_WITH_BACKEND) - @utils.skip_if_microversion_not_supported('2.6') - def test_list_shares_with_detail_share_type_name_key(self): - self._list_shares_with_detail('2.6') - - @tc.attr(base.TAG_POSITIVE, base.TAG_API_WITH_BACKEND) - @utils.skip_if_microversion_not_supported('2.9') - def test_list_shares_with_detail_export_locations_removed(self): - self._list_shares_with_detail('2.9') - - @tc.attr(base.TAG_POSITIVE, base.TAG_API_WITH_BACKEND) - @utils.skip_if_microversion_not_supported('2.10') - def test_list_shares_with_detail_with_access_rules_status(self): - self._list_shares_with_detail('2.10') - - @tc.attr(base.TAG_POSITIVE, base.TAG_API_WITH_BACKEND) - @utils.skip_if_microversion_not_supported('2.11') - def test_list_shares_with_detail_replication_type_key(self): - self._list_shares_with_detail('2.11') - - @tc.attr(base.TAG_POSITIVE, base.TAG_API_WITH_BACKEND) - @utils.skip_if_microversion_not_supported('2.16') - def test_list_shares_with_user_id(self): - self._list_shares_with_detail('2.16') - - @tc.attr(base.TAG_POSITIVE, base.TAG_API_WITH_BACKEND) - def test_list_shares_with_detail_and_create_share_from_snapshot_support( - self): - self._list_shares_with_detail('2.24') - - @tc.attr(base.TAG_POSITIVE, base.TAG_API_WITH_BACKEND) - @utils.skip_if_microversion_not_supported( - constants.REVERT_TO_SNAPSHOT_MICROVERSION) - def test_list_shares_with_detail_with_revert_to_snapshot_support(self): - self._list_shares_with_detail( - constants.REVERT_TO_SNAPSHOT_MICROVERSION) - - @tc.attr(base.TAG_POSITIVE, base.TAG_API_WITH_BACKEND) - def test_list_shares_with_detail_filter_by_metadata(self): - filters = {'metadata': self.metadata} - - # list shares - shares = self.shares_client.list_shares_with_detail(params=filters) - - # verify response - self.assertGreater(len(shares), 0) - for share in shares: - self.assertDictContainsSubset( - filters['metadata'], share['metadata']) - if CONF.share.capability_create_share_from_snapshot_support: - self.assertFalse(self.shares[1]['id'] in [s['id'] for s in shares]) - - @tc.attr(base.TAG_POSITIVE, base.TAG_API_WITH_BACKEND) - @testtools.skipIf( - not CONF.share.multitenancy_enabled, "Only for multitenancy.") - def test_list_shares_with_detail_filter_by_share_network_id(self): - base_share = self.shares_client.get_share(self.shares[0]['id']) - filters = {'share_network_id': base_share['share_network_id']} - - # list shares - shares = self.shares_client.list_shares_with_detail(params=filters) - - # verify response - self.assertGreater(len(shares), 0) - for share in shares: - self.assertEqual( - filters['share_network_id'], share['share_network_id']) - - @tc.attr(base.TAG_POSITIVE, base.TAG_API_WITH_BACKEND) - @testtools.skipUnless(CONF.share.run_snapshot_tests, - "Snapshot tests are disabled.") - @testtools.skipUnless( - CONF.share.capability_create_share_from_snapshot_support, - "Create share from snapshot tests are disabled.") - def 
test_list_shares_with_detail_filter_by_snapshot_id(self): - filters = {'snapshot_id': self.snap['id']} - - # list shares - shares = self.shares_client.list_shares_with_detail(params=filters) - - # verify response - self.assertGreater(len(shares), 0) - for share in shares: - self.assertEqual(filters['snapshot_id'], share['snapshot_id']) - self.assertFalse(self.shares[0]['id'] in [s['id'] for s in shares]) - - @tc.attr(base.TAG_POSITIVE, base.TAG_API_WITH_BACKEND) - def test_list_shares_with_detail_with_asc_sorting(self): - filters = {'sort_key': 'created_at', 'sort_dir': 'asc'} - - # list shares - shares = self.shares_client.list_shares_with_detail(params=filters) - - # verify response - self.assertGreater(len(shares), 0) - sorted_list = [share['created_at'] for share in shares] - self.assertEqual(sorted(sorted_list), sorted_list) - - @tc.attr(base.TAG_POSITIVE, base.TAG_API_WITH_BACKEND) - def test_list_shares_with_detail_filter_by_existed_name(self): - # list shares by name, at least one share is expected - params = {"name": self.share_name} - shares = self.shares_client.list_shares_with_detail(params) - self.assertEqual(self.share_name, shares[0]["name"]) - - @tc.attr(base.TAG_POSITIVE, base.TAG_API_WITH_BACKEND) - @base.skip_if_microversion_lt("2.36") - def test_list_shares_with_detail_filter_by_existed_description(self): - # list shares by description, at least one share is expected - params = {"description": self.share_desc} - shares = self.shares_v2_client.list_shares_with_detail(params) - self.assertEqual(self.share_name, shares[0]["name"]) - - @tc.attr(base.TAG_POSITIVE, base.TAG_API_WITH_BACKEND) - @base.skip_if_microversion_lt("2.36") - def test_list_shares_with_detail_filter_by_inexact_name(self): - # list shares by name, at least one share is expected - params = {"name~": 'tempest-share'} - shares = self.shares_v2_client.list_shares_with_detail(params) - for share in shares: - self.assertIn('tempest-share', share["name"]) - - @tc.attr(base.TAG_POSITIVE, base.TAG_API_WITH_BACKEND) - def test_list_shares_with_detail_filter_by_fake_name(self): - # list shares by fake name, no shares are expected - params = {"name": data_utils.rand_name("fake-nonexistent-name")} - shares = self.shares_client.list_shares_with_detail(params) - self.assertEqual(0, len(shares)) - - @tc.attr(base.TAG_POSITIVE, base.TAG_API_WITH_BACKEND) - def test_list_shares_with_detail_filter_by_active_status(self): - # list shares by active status, at least one share is expected - params = {"status": "available"} - shares = self.shares_client.list_shares_with_detail(params) - self.assertGreater(len(shares), 0) - for share in shares: - self.assertEqual(params["status"], share["status"]) - - @tc.attr(base.TAG_POSITIVE, base.TAG_API_WITH_BACKEND) - def test_list_shares_with_detail_filter_by_fake_status(self): - # list shares by fake status, no shares are expected - params = {"status": 'fake'} - shares = self.shares_client.list_shares_with_detail(params) - self.assertEqual(0, len(shares)) - - @tc.attr(base.TAG_POSITIVE, base.TAG_API_WITH_BACKEND) - def test_list_shares_with_detail_filter_by_all_tenants(self): - # non-admin user can get shares only from his project - params = {"all_tenants": 1} - shares = self.shares_client.list_shares_with_detail(params) - self.assertGreater(len(shares), 0) - - # get share with detailed info, we need its 'project_id' - share = self.shares_client.get_share(self.shares[0]["id"]) - project_id = share["project_id"] - for share in shares: - self.assertEqual(project_id, share["project_id"]) - 
- @tc.attr(base.TAG_POSITIVE, base.TAG_API_WITH_BACKEND) - @base.skip_if_microversion_lt("2.42") - def test_list_shares_with_detail_with_count(self): - # list shares by name, at least one share is expected - params = {"with_count": 'true'} - shares = self.shares_v2_client.list_shares_with_detail(params) - self.assertGreater(shares["count"], 0) - - @tc.attr(base.TAG_POSITIVE, base.TAG_API_WITH_BACKEND) - def test_list_shares_public_with_detail(self): - public_share = self.create_share( - name='public_share', - description='public_share_desc', - is_public=True, - cleanup_in_class=False - ) - private_share = self.create_share( - name='private_share', - description='private_share_desc', - is_public=False, - cleanup_in_class=False - ) - - params = {"is_public": True} - isolated_client = self.get_client_with_isolated_creds( - type_of_creds='alt') - shares = isolated_client.list_shares_with_detail(params) - - keys = [ - "status", "description", "links", "availability_zone", - "created_at", "export_location", "share_proto", - "name", "snapshot_id", "id", "size", "project_id", "is_public", - ] - [self.assertIn(key, sh.keys()) for sh in shares for key in keys] - - gen = [sid["id"] for sid in shares if sid["id"] == public_share["id"]] - msg = "expected id lists %s times in share list" % (len(gen)) - self.assertEqual(1, len(gen), msg) - - self.assertFalse(any([s["id"] == private_share["id"] for s in shares])) - - @tc.attr(base.TAG_POSITIVE, base.TAG_API_WITH_BACKEND) - @testtools.skipUnless(CONF.share.run_snapshot_tests, - "Snapshot tests are disabled.") - @ddt.data(None, '2.16', LATEST_MICROVERSION) - def test_get_snapshot(self, version): - - # get snapshot - if version is None: - snapshot = self.shares_client.get_snapshot(self.snap["id"]) - else: - utils.skip_if_microversion_not_supported(version) - snapshot = self.shares_v2_client.get_snapshot( - self.snap["id"], version=version) - - # verify keys - expected_keys = ["status", "links", "share_id", "name", - "share_proto", "created_at", - "description", "id", "share_size", "size"] - if version and utils.is_microversion_ge(version, '2.17'): - expected_keys.extend(["user_id", "project_id"]) - actual_keys = snapshot.keys() - - # strict key check - self.assertEqual(set(expected_keys), set(actual_keys)) - - # verify data - msg = "Expected name: '%s', actual name: '%s'" % (self.snap_name, - snapshot["name"]) - self.assertEqual(self.snap_name, snapshot["name"], msg) - - msg = ("Expected description: '%s' actual description: '%s'" % - (self.snap_desc, snapshot["description"])) - self.assertEqual(self.snap_desc, snapshot["description"], msg) - - msg = ("Expected share_id: '%s', actual share_id: '%s'" % - (self.shares[0]["id"], snapshot["share_id"])) - self.assertEqual(self.shares[0]["id"], snapshot["share_id"], msg) - - # Verify that the user_id and project_id are same as the one for - # the base share - if version and utils.is_microversion_ge(version, '2.17'): - msg = ("Expected %(key)s in snapshot: '%(expected)s', " - "actual %(key)s in snapshot: '%(actual)s'") - self.assertEqual(self.shares[0]['user_id'], - snapshot['user_id'], - msg % { - 'expected': self.shares[0]['user_id'], - 'actual': snapshot['user_id'], - 'key': 'user_id'}) - self.assertEqual(self.shares[0]['project_id'], - snapshot['project_id'], - msg % { - 'expected': self.shares[0]['project_id'], - 'actual': snapshot['project_id'], - 'key': 'project_id'}) - - @tc.attr(base.TAG_POSITIVE, base.TAG_API_WITH_BACKEND) - @testtools.skipUnless(CONF.share.run_snapshot_tests, - "Snapshot tests are 
disabled.") - def test_list_snapshots(self): - - # list share snapshots - snaps = self.shares_client.list_snapshots() - - # verify keys - keys = ["id", "name", "links"] - [self.assertIn(key, sn.keys()) for sn in snaps for key in keys] - - # our share id in list and have no duplicates - gen = [sid["id"] for sid in snaps if sid["id"] in self.snap["id"]] - msg = "expected id lists %s times in share list" % (len(gen)) - self.assertEqual(1, len(gen), msg) - - @tc.attr(base.TAG_POSITIVE, base.TAG_API_WITH_BACKEND) - @testtools.skipUnless(CONF.share.run_snapshot_tests, - "Snapshot tests are disabled.") - @ddt.data(None, '2.16', '2.36', LATEST_MICROVERSION) - def test_list_snapshots_with_detail(self, version): - params = None - if version and utils.is_microversion_ge(version, '2.36'): - params = {'name~': 'tempest', 'description~': 'tempest'} - # list share snapshots - if version is None: - snaps = self.shares_client.list_snapshots_with_detail() - else: - utils.skip_if_microversion_not_supported(version) - snaps = self.shares_v2_client.list_snapshots_with_detail( - version=version, params=params) - - # verify keys - expected_keys = ["status", "links", "share_id", "name", - "share_proto", "created_at", "description", "id", - "share_size", "size"] - if version and utils.is_microversion_ge(version, '2.17'): - expected_keys.extend(["user_id", "project_id"]) - - # strict key check - [self.assertEqual(set(expected_keys), set(s.keys())) for s in snaps] - - # our share id in list and have no duplicates - gen = [sid["id"] for sid in snaps if sid["id"] in self.snap["id"]] - msg = "expected id lists %s times in share list" % (len(gen)) - self.assertEqual(1, len(gen), msg) - - @tc.attr(base.TAG_POSITIVE, base.TAG_API_WITH_BACKEND) - @testtools.skipUnless(CONF.share.run_snapshot_tests, - "Snapshot tests are disabled.") - def test_list_snapshots_with_detail_use_limit(self): - for l, o in [('1', '1'), ('0', '1')]: - filters = { - 'limit': l, - 'offset': o, - 'share_id': self.shares[0]['id'], - } - - # list snapshots - snaps = self.shares_client.list_snapshots_with_detail( - params=filters) - - # Our snapshot should not be listed - self.assertEqual(0, len(snaps)) - - # Only our one snapshot should be listed - snaps = self.shares_client.list_snapshots_with_detail( - params={'limit': '1', 'offset': '0', - 'share_id': self.shares[0]['id']}) - - self.assertEqual(1, len(snaps['snapshots'])) - self.assertEqual(self.snap['id'], snaps['snapshots'][0]['id']) - - @tc.attr(base.TAG_POSITIVE, base.TAG_API_WITH_BACKEND) - @testtools.skipUnless(CONF.share.run_snapshot_tests, - "Snapshot tests are disabled.") - def test_list_snapshots_with_detail_filter_by_status_and_name(self): - filters = {'status': 'available', 'name': self.snap_name} - - # list snapshots - snaps = self.shares_client.list_snapshots_with_detail( - params=filters) - - # verify response - self.assertGreater(len(snaps), 0) - for snap in snaps: - self.assertEqual(filters['status'], snap['status']) - self.assertEqual(filters['name'], snap['name']) - - @tc.attr(base.TAG_POSITIVE, base.TAG_API_WITH_BACKEND) - @testtools.skipUnless(CONF.share.run_snapshot_tests, - "Snapshot tests are disabled.") - @base.skip_if_microversion_not_supported("2.35") - def test_list_snapshots_with_detail_filter_by_description(self): - filters = {'description': self.snap_desc} - - # list snapshots - snaps = self.shares_client.list_snapshots_with_detail( - params=filters) - - # verify response - self.assertGreater(len(snaps), 0) - for snap in snaps: - 
self.assertEqual(filters['description'], snap['description']) - - @tc.attr(base.TAG_POSITIVE, base.TAG_API_WITH_BACKEND) - @testtools.skipUnless(CONF.share.run_snapshot_tests, - "Snapshot tests are disabled.") - def test_list_snapshots_with_detail_and_asc_sorting(self): - filters = {'sort_key': 'share_id', 'sort_dir': 'asc'} - - # list snapshots - snaps = self.shares_client.list_snapshots_with_detail( - params=filters) - - # verify response - self.assertGreater(len(snaps), 0) - sorted_list = [snap['share_id'] for snap in snaps] - self.assertEqual(sorted(sorted_list), sorted_list) - - @tc.attr(base.TAG_POSITIVE, base.TAG_BACKEND) - @testtools.skipUnless( - CONF.share.run_extend_tests, - "Share extend tests are disabled.") - def test_extend_share(self): - share = self.create_share(cleanup_in_class=False) - new_size = int(share['size']) + 1 - - # extend share and wait for active status - self.shares_v2_client.extend_share(share['id'], new_size) - self.shares_client.wait_for_share_status(share['id'], 'available') - - # check state and new size - share_get = self.shares_v2_client.get_share(share['id']) - msg = ( - "Share could not be extended. " - "Expected %(expected)s, got %(actual)s." % { - "expected": new_size, - "actual": share_get['size'], - } - ) - self.assertEqual(new_size, share_get['size'], msg) - - @tc.attr(base.TAG_POSITIVE, base.TAG_BACKEND) - @testtools.skipUnless( - CONF.share.run_shrink_tests, - "Share shrink tests are disabled.") - def test_shrink_share(self): - size = CONF.share.share_size + 1 - share = self.create_share(size=size, cleanup_in_class=False) - new_size = int(share['size']) - 1 - - # shrink share and wait for active status - self.shares_v2_client.shrink_share(share['id'], new_size) - self.shares_client.wait_for_share_status(share['id'], 'available') - - # check state and new size - share_get = self.shares_v2_client.get_share(share['id']) - msg = ( - "Share could not be shrunk. " - "Expected %(expected)s, got %(actual)s." 
% { - "expected": new_size, - "actual": share_get['size'], - } - ) - self.assertEqual(new_size, share_get['size'], msg) - - -class SharesRenameTest(base.BaseSharesTest): - - @classmethod - def resource_setup(cls): - super(SharesRenameTest, cls).resource_setup() - - # create share - cls.share_name = data_utils.rand_name("tempest-share-name") - cls.share_desc = data_utils.rand_name("tempest-share-description") - cls.share = cls.create_share( - name=cls.share_name, description=cls.share_desc) - - if CONF.share.run_snapshot_tests: - # create snapshot - cls.snap_name = data_utils.rand_name("tempest-snapshot-name") - cls.snap_desc = data_utils.rand_name( - "tempest-snapshot-description") - cls.snap = cls.create_snapshot_wait_for_active( - cls.share["id"], cls.snap_name, cls.snap_desc) - - @tc.attr(base.TAG_POSITIVE, base.TAG_API_WITH_BACKEND) - def test_update_share(self): - - # get share - share = self.shares_client.get_share(self.share['id']) - self.assertEqual(self.share_name, share["name"]) - self.assertEqual(self.share_desc, share["description"]) - self.assertFalse(share["is_public"]) - - # update share - new_name = data_utils.rand_name("tempest-new-name") - new_desc = data_utils.rand_name("tempest-new-description") - updated = self.shares_client.update_share( - share["id"], new_name, new_desc, is_public=True) - self.assertEqual(new_name, updated["name"]) - self.assertEqual(new_desc, updated["description"]) - self.assertTrue(updated["is_public"]) - - # get share - share = self.shares_client.get_share(self.share['id']) - self.assertEqual(new_name, share["name"]) - self.assertEqual(new_desc, share["description"]) - self.assertTrue(share["is_public"]) - - @tc.attr(base.TAG_POSITIVE, base.TAG_API_WITH_BACKEND) - @testtools.skipUnless(CONF.share.run_snapshot_tests, - "Snapshot tests are disabled.") - def test_rename_snapshot(self): - - # get snapshot - get = self.shares_client.get_snapshot(self.snap["id"]) - self.assertEqual(self.snap_name, get["name"]) - self.assertEqual(self.snap_desc, get["description"]) - - # rename snapshot - new_name = data_utils.rand_name("tempest-new-name-for-snapshot") - new_desc = data_utils.rand_name("tempest-new-description-for-snapshot") - renamed = self.shares_client.rename_snapshot( - self.snap["id"], new_name, new_desc) - self.assertEqual(new_name, renamed["name"]) - self.assertEqual(new_desc, renamed["description"]) - - # get snapshot - get = self.shares_client.get_snapshot(self.snap["id"]) - self.assertEqual(new_name, get["name"]) - self.assertEqual(new_desc, get["description"]) diff --git a/manila_tempest_tests/tests/api/test_shares_actions_negative.py b/manila_tempest_tests/tests/api/test_shares_actions_negative.py deleted file mode 100644 index 6d1fa6c8e5..0000000000 --- a/manila_tempest_tests/tests/api/test_shares_actions_negative.py +++ /dev/null @@ -1,245 +0,0 @@ -# Copyright 2015 Mirantis Inc. -# All Rights Reserved. -# -# Licensed under the Apache License, Version 2.0 (the "License"); you may -# not use this file except in compliance with the License. You may obtain -# a copy of the License at -# -# http://www.apache.org/licenses/LICENSE-2.0 -# -# Unless required by applicable law or agreed to in writing, software -# distributed under the License is distributed on an "AS IS" BASIS, WITHOUT -# WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. See the -# License for the specific language governing permissions and limitations -# under the License. 
- -import ddt -from tempest import config -from tempest.lib.common.utils import data_utils -from tempest.lib import exceptions as lib_exc -import testtools -from testtools import testcase as tc - -from manila_tempest_tests.tests.api import base - -CONF = config.CONF - - -@ddt.ddt -class SharesActionsNegativeTest(base.BaseSharesMixedTest): - @classmethod - def resource_setup(cls): - super(SharesActionsNegativeTest, cls).resource_setup() - cls.admin_client = cls.admin_shares_v2_client - cls.share_name = data_utils.rand_name("tempest-share-name") - cls.share_desc = data_utils.rand_name("tempest-share-description") - cls.share = cls.create_share( - name=cls.share_name, - description=cls.share_desc) - if CONF.share.run_snapshot_tests: - # create snapshot - cls.snap_name = data_utils.rand_name("tempest-snapshot-name") - cls.snap_desc = data_utils.rand_name( - "tempest-snapshot-description") - cls.snap = cls.create_snapshot_wait_for_active( - cls.share["id"], cls.snap_name, cls.snap_desc) - - @tc.attr(base.TAG_NEGATIVE, base.TAG_API_WITH_BACKEND) - @testtools.skipUnless( - CONF.share.run_extend_tests, - "Share extend tests are disabled.") - @testtools.skipUnless( - CONF.share.run_quota_tests, - "Quota tests are disabled.") - def test_share_extend_over_quota(self): - tenant_quotas = self.shares_client.show_quotas( - self.shares_client.tenant_id) - new_size = int(tenant_quotas["gigabytes"]) + 1 - - # extend share with over quota and check result - self.assertRaises(lib_exc.Forbidden, - self.shares_client.extend_share, - self.share['id'], - new_size) - - @tc.attr(base.TAG_NEGATIVE, base.TAG_API_WITH_BACKEND) - @testtools.skipUnless( - CONF.share.run_extend_tests, - "Share extend tests are disabled.") - def test_share_extend_with_less_size(self): - new_size = int(self.share['size']) - 1 - - # extend share with invalid size and check result - self.assertRaises(lib_exc.BadRequest, - self.shares_client.extend_share, - self.share['id'], - new_size) - - @tc.attr(base.TAG_NEGATIVE, base.TAG_API_WITH_BACKEND) - @testtools.skipUnless( - CONF.share.run_extend_tests, - "Share extend tests are disabled.") - def test_share_extend_with_same_size(self): - new_size = int(self.share['size']) - - # extend share with invalid size and check result - self.assertRaises(lib_exc.BadRequest, - self.shares_client.extend_share, - self.share['id'], - new_size) - - @tc.attr(base.TAG_NEGATIVE, base.TAG_API_WITH_BACKEND) - @testtools.skipUnless( - CONF.share.run_extend_tests, - "Share extend tests are disabled.") - def test_share_extend_with_invalid_share_state(self): - share = self.create_share(cleanup_in_class=False) - new_size = int(share['size']) + 1 - - # set "error" state - self.admin_client.reset_state(share['id']) - - # run extend operation on same share and check result - self.assertRaises(lib_exc.BadRequest, - self.shares_client.extend_share, - share['id'], - new_size) - - @tc.attr(base.TAG_NEGATIVE, base.TAG_API_WITH_BACKEND) - @testtools.skipUnless( - CONF.share.run_shrink_tests, - "Share shrink tests are disabled.") - def test_share_shrink_with_greater_size(self): - new_size = int(self.share['size']) + 1 - - # shrink share with invalid size and check result - self.assertRaises(lib_exc.BadRequest, - self.shares_client.shrink_share, - self.share['id'], - new_size) - - @tc.attr(base.TAG_NEGATIVE, base.TAG_API_WITH_BACKEND) - @testtools.skipUnless( - CONF.share.run_shrink_tests, - "Share shrink tests are disabled.") - def test_share_shrink_with_same_size(self): - new_size = int(self.share['size']) - - # shrink share with 
invalid size and check result - self.assertRaises(lib_exc.BadRequest, - self.shares_client.shrink_share, - self.share['id'], - new_size) - - @tc.attr(base.TAG_NEGATIVE, base.TAG_API_WITH_BACKEND) - @testtools.skipUnless( - CONF.share.run_shrink_tests, - "Share shrink tests are disabled.") - def test_share_shrink_with_invalid_share_state(self): - size = CONF.share.share_size + 1 - share = self.create_share(size=size, cleanup_in_class=False) - new_size = int(share['size']) - 1 - - # set "error" state - self.admin_client.reset_state(share['id']) - - # run shrink operation on same share and check result - self.assertRaises(lib_exc.BadRequest, - self.shares_client.shrink_share, - share['id'], - new_size) - - @tc.attr(base.TAG_NEGATIVE, base.TAG_API_WITH_BACKEND) - @base.skip_if_microversion_not_supported("2.34") - @ddt.data('path', 'id') - def test_list_shares_with_export_location_and_invalid_version( - self, export_location_type): - # In API versions get_microversion_as_tuple(right) - - -def is_microversion_ge(left, right): - """Is microversion for left is greater than or equal to the right one.""" - return get_microversion_as_tuple(left) >= get_microversion_as_tuple(right) - - -def is_microversion_eq(left, right): - """Is microversion for left is equal to the right one.""" - return get_microversion_as_tuple(left) == get_microversion_as_tuple(right) - - -def is_microversion_ne(left, right): - """Is microversion for left is not equal to the right one.""" - return get_microversion_as_tuple(left) != get_microversion_as_tuple(right) - - -def is_microversion_le(left, right): - """Is microversion for left is less than or equal to the right one.""" - return get_microversion_as_tuple(left) <= get_microversion_as_tuple(right) - - -def is_microversion_lt(left, right): - """Is microversion for left is less than the right one.""" - return get_microversion_as_tuple(left) < get_microversion_as_tuple(right) - - -def is_microversion_supported(microversion): - bottom = get_microversion_as_tuple(CONF.share.min_api_microversion) - microversion = get_microversion_as_tuple(microversion) - top = get_microversion_as_tuple(CONF.share.max_api_microversion) - return bottom <= microversion <= top - - -def skip_if_microversion_not_supported(microversion): - """Decorator for tests that are microversion-specific.""" - if not is_microversion_supported(microversion): - reason = ("Skipped. Test requires microversion '%s'." % microversion) - return testtools.skip(reason) - return lambda f: f - - -def skip_if_microversion_lt(microversion): - """Decorator for tests that are microversion-specific.""" - if is_microversion_lt(CONF.share.max_api_microversion, microversion): - reason = ("Skipped. Test requires microversion greater than or " - "equal to '%s'." % microversion) - return testtools.skip(reason) - return lambda f: f - - -def rand_ip(network=False): - """This uses the TEST-NET-3 range of reserved IP addresses. - - Using this range, which are reserved solely for use in - documentation and example source code, should avoid any potential - conflicts in real-world testing. - """ - test_net_3 = '203.0.113.' 
- address = test_net_3 + six.text_type(random.randint(0, 255)) - if network: - mask_length = six.text_type(random.randint(24, 32)) - address = '/'.join((address, mask_length)) - ip_network = ip.IPNetwork(address) - return '/'.join((six.text_type(ip_network.network), mask_length)) - return address - - -def rand_ipv6_ip(network=False): - """This uses the IPv6 documentation range of 2001:DB8::/32""" - ran_add = ["%x" % random.randrange(0, 16**4) for i in range(6)] - address = "2001:0DB8:" + ":".join(ran_add) - if network: - mask_length = six.text_type(random.randint(32, 128)) - address = '/'.join((address, mask_length)) - ip_network = ip.IPNetwork(address) - return '/'.join((six.text_type(ip_network.network), mask_length)) - return address - - -def choose_matching_backend(share, pools, share_type): - extra_specs = {} - # fix extra specs with string values instead of boolean - for k, v in share_type['extra_specs'].items(): - extra_specs[k] = (True if six.text_type(v).lower() == 'true' - else False if six.text_type(v).lower() == 'false' - else v) - selected_pool = next( - (x for x in pools if (x['name'] != share['host'] and all( - y in x['capabilities'].items() for y in extra_specs.items()))), - None) - - return selected_pool - - -def get_configured_extra_specs(variation=None): - """Retrieve essential extra specs according to configuration in tempest. - - :param variation: can assume possible values: None to be as configured in - tempest; 'opposite_driver_modes' for as configured in tempest but - inverse driver mode; 'invalid' for inverse as configured in tempest, - ideal for negative tests. - :return: dict containing essential extra specs. - """ - - extra_specs = {'storage_protocol': CONF.share.capability_storage_protocol} - - if variation == 'invalid': - extra_specs['driver_handles_share_servers'] = ( - not CONF.share.multitenancy_enabled) - extra_specs['snapshot_support'] = ( - not CONF.share.capability_snapshot_support) - - elif variation == 'opposite_driver_modes': - extra_specs['driver_handles_share_servers'] = ( - not CONF.share.multitenancy_enabled) - extra_specs['snapshot_support'] = ( - CONF.share.capability_snapshot_support) - - else: - extra_specs['driver_handles_share_servers'] = ( - CONF.share.multitenancy_enabled) - extra_specs['snapshot_support'] = ( - CONF.share.capability_snapshot_support) - - return extra_specs diff --git a/playbooks/legacy/manila-tempest-dsvm-container-scenario-custom-image/run.yaml b/playbooks/legacy/manila-tempest-dsvm-container-scenario-custom-image/run.yaml index 7d5d7978da..08c931eba1 100644 --- a/playbooks/legacy/manila-tempest-dsvm-container-scenario-custom-image/run.yaml +++ b/playbooks/legacy/manila-tempest-dsvm-container-scenario-custom-image/run.yaml @@ -54,6 +54,7 @@ # Keep localrc to be able to set some vars in pre_test_hook export KEEP_LOCALRC=1 + export PROJECTS="openstack/manila-tempest-plugin $PROJECTS" function pre_test_hook { # Build custom image if needed diff --git a/playbooks/legacy/manila-tempest-dsvm-generic-no-share-servers/run.yaml b/playbooks/legacy/manila-tempest-dsvm-generic-no-share-servers/run.yaml index 28ec904550..b2be8cc7f7 100644 --- a/playbooks/legacy/manila-tempest-dsvm-generic-no-share-servers/run.yaml +++ b/playbooks/legacy/manila-tempest-dsvm-generic-no-share-servers/run.yaml @@ -54,6 +54,7 @@ # Keep localrc to be able to set some vars in pre_test_hook export KEEP_LOCALRC=1 + export PROJECTS="openstack/manila-tempest-plugin $PROJECTS" function pre_test_hook { # Build custom image if needed diff --git 
a/playbooks/legacy/manila-tempest-dsvm-generic-scenario-custom-image/run.yaml b/playbooks/legacy/manila-tempest-dsvm-generic-scenario-custom-image/run.yaml index daa49cee9a..0f287d214d 100644 --- a/playbooks/legacy/manila-tempest-dsvm-generic-scenario-custom-image/run.yaml +++ b/playbooks/legacy/manila-tempest-dsvm-generic-scenario-custom-image/run.yaml @@ -54,6 +54,7 @@ # Keep localrc to be able to set some vars in pre_test_hook export KEEP_LOCALRC=1 + export PROJECTS="openstack/manila-tempest-plugin $PROJECTS" function pre_test_hook { # Build custom image if needed diff --git a/playbooks/legacy/manila-tempest-dsvm-glusterfs-native-heketi/run.yaml b/playbooks/legacy/manila-tempest-dsvm-glusterfs-native-heketi/run.yaml index b905cf9bf4..1643467bf2 100644 --- a/playbooks/legacy/manila-tempest-dsvm-glusterfs-native-heketi/run.yaml +++ b/playbooks/legacy/manila-tempest-dsvm-glusterfs-native-heketi/run.yaml @@ -57,6 +57,7 @@ # Keep localrc to be able to set some vars in pre_test_hook export KEEP_LOCALRC=1 + export PROJECTS="openstack/manila-tempest-plugin $PROJECTS" function pre_test_hook { # Configure devstack to run manila installation without handling of share servers diff --git a/playbooks/legacy/manila-tempest-dsvm-glusterfs-native/run.yaml b/playbooks/legacy/manila-tempest-dsvm-glusterfs-native/run.yaml index 87d004ec3f..0e629800b3 100644 --- a/playbooks/legacy/manila-tempest-dsvm-glusterfs-native/run.yaml +++ b/playbooks/legacy/manila-tempest-dsvm-glusterfs-native/run.yaml @@ -57,6 +57,7 @@ # Keep localrc to be able to set some vars in pre_test_hook export KEEP_LOCALRC=1 + export PROJECTS="openstack/manila-tempest-plugin $PROJECTS" function pre_test_hook { # Configure devstack to run manila installation without handling of share servers diff --git a/playbooks/legacy/manila-tempest-dsvm-glusterfs-nfs-heketi/run.yaml b/playbooks/legacy/manila-tempest-dsvm-glusterfs-nfs-heketi/run.yaml index 09e269c2a2..7a813062e5 100644 --- a/playbooks/legacy/manila-tempest-dsvm-glusterfs-nfs-heketi/run.yaml +++ b/playbooks/legacy/manila-tempest-dsvm-glusterfs-nfs-heketi/run.yaml @@ -57,6 +57,7 @@ # Keep localrc to be able to set some vars in pre_test_hook export KEEP_LOCALRC=1 + export PROJECTS="openstack/manila-tempest-plugin $PROJECTS" function pre_test_hook { # Configure devstack to run manila installation without handling of share servers diff --git a/playbooks/legacy/manila-tempest-dsvm-glusterfs-nfs/run.yaml b/playbooks/legacy/manila-tempest-dsvm-glusterfs-nfs/run.yaml index fc39c868bb..6338027b08 100644 --- a/playbooks/legacy/manila-tempest-dsvm-glusterfs-nfs/run.yaml +++ b/playbooks/legacy/manila-tempest-dsvm-glusterfs-nfs/run.yaml @@ -53,6 +53,7 @@ export ENABLED_SERVICES=tempest export DEVSTACK_PROJECT_FROM_GIT="python-manilaclient" export PROJECTS="openstack/devstack-plugin-glusterfs $PROJECTS" + export PROJECTS="openstack/manila-tempest-plugin $PROJECTS" # Keep localrc to be able to set some vars in pre_test_hook export KEEP_LOCALRC=1 diff --git a/playbooks/legacy/manila-tempest-dsvm-hdfs/run.yaml b/playbooks/legacy/manila-tempest-dsvm-hdfs/run.yaml index 20524053b5..28634499b3 100644 --- a/playbooks/legacy/manila-tempest-dsvm-hdfs/run.yaml +++ b/playbooks/legacy/manila-tempest-dsvm-hdfs/run.yaml @@ -50,6 +50,7 @@ # Keep localrc to be able to set some vars in pre_test_hook export KEEP_LOCALRC=1 + export PROJECTS="openstack/manila-tempest-plugin $PROJECTS" function pre_test_hook { # Configure devstack to run manila installation without handling of share servers diff --git 
a/playbooks/legacy/manila-tempest-dsvm-mysql-generic/run.yaml b/playbooks/legacy/manila-tempest-dsvm-mysql-generic/run.yaml index 83de354dd7..87b2f843be 100644 --- a/playbooks/legacy/manila-tempest-dsvm-mysql-generic/run.yaml +++ b/playbooks/legacy/manila-tempest-dsvm-mysql-generic/run.yaml @@ -53,6 +53,7 @@ # Keep localrc to be able to set some vars in pre_test_hook export KEEP_LOCALRC=1 + export PROJECTS="openstack/manila-tempest-plugin $PROJECTS" function pre_test_hook { # Build custom image if needed diff --git a/playbooks/legacy/manila-tempest-dsvm-postgres-container/run.yaml b/playbooks/legacy/manila-tempest-dsvm-postgres-container/run.yaml index eba61e68d6..c503bff907 100644 --- a/playbooks/legacy/manila-tempest-dsvm-postgres-container/run.yaml +++ b/playbooks/legacy/manila-tempest-dsvm-postgres-container/run.yaml @@ -54,6 +54,7 @@ # Keep localrc to be able to set some vars in pre_test_hook export KEEP_LOCALRC=1 + export PROJECTS="openstack/manila-tempest-plugin $PROJECTS" function pre_test_hook { # Build custom image if needed diff --git a/playbooks/legacy/manila-tempest-dsvm-postgres-generic-singlebackend/run.yaml b/playbooks/legacy/manila-tempest-dsvm-postgres-generic-singlebackend/run.yaml index 9310e03fd3..5922c1e4c3 100644 --- a/playbooks/legacy/manila-tempest-dsvm-postgres-generic-singlebackend/run.yaml +++ b/playbooks/legacy/manila-tempest-dsvm-postgres-generic-singlebackend/run.yaml @@ -54,6 +54,7 @@ # Keep localrc to be able to set some vars in pre_test_hook export KEEP_LOCALRC=1 + export PROJECTS="openstack/manila-tempest-plugin $PROJECTS" function pre_test_hook { # Build custom image if needed diff --git a/playbooks/legacy/manila-tempest-dsvm-postgres-zfsonlinux/run.yaml b/playbooks/legacy/manila-tempest-dsvm-postgres-zfsonlinux/run.yaml index c9f6c12e26..ffd078d505 100644 --- a/playbooks/legacy/manila-tempest-dsvm-postgres-zfsonlinux/run.yaml +++ b/playbooks/legacy/manila-tempest-dsvm-postgres-zfsonlinux/run.yaml @@ -54,6 +54,7 @@ # Keep localrc to be able to set some vars in pre_test_hook export KEEP_LOCALRC=1 + export PROJECTS="openstack/manila-tempest-plugin $PROJECTS" function pre_test_hook { # Build custom image if needed diff --git a/playbooks/legacy/manila-tempest-dsvm-scenario/run.yaml b/playbooks/legacy/manila-tempest-dsvm-scenario/run.yaml index a54560e260..eb687c6dc8 100644 --- a/playbooks/legacy/manila-tempest-dsvm-scenario/run.yaml +++ b/playbooks/legacy/manila-tempest-dsvm-scenario/run.yaml @@ -53,6 +53,7 @@ # Keep localrc to be able to set some vars in pre_test_hook export KEEP_LOCALRC=1 + export PROJECTS="openstack/manila-tempest-plugin $PROJECTS" function pre_test_hook { # Build custom image if needed diff --git a/playbooks/legacy/manila-tempest-minimal-dsvm-cephfs-native-centos-7/run.yaml b/playbooks/legacy/manila-tempest-minimal-dsvm-cephfs-native-centos-7/run.yaml index 8ab041a9a7..d0b89be5af 100644 --- a/playbooks/legacy/manila-tempest-minimal-dsvm-cephfs-native-centos-7/run.yaml +++ b/playbooks/legacy/manila-tempest-minimal-dsvm-cephfs-native-centos-7/run.yaml @@ -75,6 +75,7 @@ export PROJECTS="openstack/devstack-plugin-ceph $PROJECTS" export DEVSTACK_PROJECT_FROM_GIT="python-manilaclient" export KEEP_LOCALRC=1 + export PROJECTS="openstack/manila-tempest-plugin $PROJECTS" function pre_test_hook { # Configure Manila with a CephFS Native or NFS driver backend. 
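Reviewer note on the manila_tempest_tests/utils.py hunks deleted earlier in this patch: the microversion helpers carry over to openstack/manila-tempest-plugin, and the pattern is easiest to see in isolation. The sketch below is illustrative only; the hard-coded version bounds stand in for the CONF.share.min_api_microversion / max_api_microversion options the real helpers read, and the example test class is invented.

```python
# Self-contained sketch of the microversion helpers removed in this patch.
# The literal bounds below are assumptions for illustration; the real code
# reads them from tempest's CONF.share options.
import testtools


def get_microversion_as_tuple(microversion_str):
    # '2.36' -> (2, 36): tuples compare numerically, so (2, 9) < (2, 10),
    # which plain string comparison would get wrong.
    return tuple(int(part) for part in microversion_str.split('.'))


MIN_API_MICROVERSION = '2.0'   # assumed stand-in for CONF.share.min_api_microversion
MAX_API_MICROVERSION = '2.40'  # assumed stand-in for CONF.share.max_api_microversion


def is_microversion_supported(microversion):
    bottom = get_microversion_as_tuple(MIN_API_MICROVERSION)
    top = get_microversion_as_tuple(MAX_API_MICROVERSION)
    return bottom <= get_microversion_as_tuple(microversion) <= top


def skip_if_microversion_not_supported(microversion):
    """Skip a test (or test class) outside the configured version range."""
    if not is_microversion_supported(microversion):
        return testtools.skip(
            "Skipped. Test requires microversion '%s'." % microversion)
    return lambda f: f


class MicroversionExampleTest(testtools.TestCase):

    @skip_if_microversion_not_supported('2.36')
    def test_description_filter_is_available(self):
        # Runs only when 2.36 lies between the configured min/max bounds.
        self.assertTrue(is_microversion_supported('2.36'))
```

Comparing versions as integer tuples rather than strings is the whole point of get_microversion_as_tuple(); everything else in the helper family is a thin wrapper over that comparison.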
diff --git a/playbooks/legacy/manila-tempest-minimal-dsvm-cephfs-nfs-centos-7/run.yaml b/playbooks/legacy/manila-tempest-minimal-dsvm-cephfs-nfs-centos-7/run.yaml index ed235a30fa..6061f92a5d 100644 --- a/playbooks/legacy/manila-tempest-minimal-dsvm-cephfs-nfs-centos-7/run.yaml +++ b/playbooks/legacy/manila-tempest-minimal-dsvm-cephfs-nfs-centos-7/run.yaml @@ -75,6 +75,7 @@ export PROJECTS="openstack/devstack-plugin-ceph $PROJECTS" export DEVSTACK_PROJECT_FROM_GIT="python-manilaclient" export KEEP_LOCALRC=1 + export PROJECTS="openstack/manila-tempest-plugin $PROJECTS" function pre_test_hook { # Configure Manila with a CephFS Native or NFS driver backend. diff --git a/playbooks/legacy/manila-tempest-minimal-dsvm-dummy/run.yaml b/playbooks/legacy/manila-tempest-minimal-dsvm-dummy/run.yaml index 1e7ea355ec..49674da561 100644 --- a/playbooks/legacy/manila-tempest-minimal-dsvm-dummy/run.yaml +++ b/playbooks/legacy/manila-tempest-minimal-dsvm-dummy/run.yaml @@ -62,6 +62,7 @@ # Keep localrc to be able to set some vars in pre_test_hook export KEEP_LOCALRC=1 + export PROJECTS="openstack/manila-tempest-plugin $PROJECTS" function pre_test_hook { # 'dhss' - acronym for 'Driver Handles Share Servers', diff --git a/playbooks/legacy/manila-tempest-minimal-dsvm-lvm-centos-7/run.yaml b/playbooks/legacy/manila-tempest-minimal-dsvm-lvm-centos-7/run.yaml index 5117850753..926c79d9bf 100644 --- a/playbooks/legacy/manila-tempest-minimal-dsvm-lvm-centos-7/run.yaml +++ b/playbooks/legacy/manila-tempest-minimal-dsvm-lvm-centos-7/run.yaml @@ -71,6 +71,7 @@ # Keep localrc to be able to set some vars in pre_test_hook export KEEP_LOCALRC=1 + export PROJECTS="openstack/manila-tempest-plugin $PROJECTS" function pre_test_hook { # 'dhss' - acronym for 'Driver Handles Share Servers', diff --git a/playbooks/legacy/manila-tempest-minimal-py35-dsvm-cephfs-native-centos-7/run.yaml b/playbooks/legacy/manila-tempest-minimal-py35-dsvm-cephfs-native-centos-7/run.yaml index 4d04a0460b..04a2c03740 100644 --- a/playbooks/legacy/manila-tempest-minimal-py35-dsvm-cephfs-native-centos-7/run.yaml +++ b/playbooks/legacy/manila-tempest-minimal-py35-dsvm-cephfs-native-centos-7/run.yaml @@ -82,6 +82,7 @@ export ENABLED_SERVICES=tempest export PROJECTS="openstack/python-manilaclient openstack/devstack-plugin-ceph $PROJECTS" export KEEP_LOCALRC=1 + export PROJECTS="openstack/manila-tempest-plugin $PROJECTS" function pre_test_hook { # Configure Manila with a CephFS Native or NFS driver backend. diff --git a/playbooks/legacy/manila-tempest-minimal-py35-dsvm-cephfs-nfs-centos-7/run.yaml b/playbooks/legacy/manila-tempest-minimal-py35-dsvm-cephfs-nfs-centos-7/run.yaml index 8afba73f6a..b6a00799ad 100644 --- a/playbooks/legacy/manila-tempest-minimal-py35-dsvm-cephfs-nfs-centos-7/run.yaml +++ b/playbooks/legacy/manila-tempest-minimal-py35-dsvm-cephfs-nfs-centos-7/run.yaml @@ -82,6 +82,7 @@ export ENABLED_SERVICES=tempest export PROJECTS="openstack/python-manilaclient openstack/devstack-plugin-ceph $PROJECTS" export KEEP_LOCALRC=1 + export PROJECTS="openstack/manila-tempest-plugin $PROJECTS" function pre_test_hook { # Configure Manila with a CephFS Native or NFS driver backend. 
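A similar note for the rand_ip()/rand_ipv6_ip() helpers in the same deleted utils module: they deliberately draw addresses from the documentation ranges (TEST-NET-3, 203.0.113.0/24, and 2001:db8::/32) so generated access rules can never collide with real infrastructure. A stdlib-only sketch of the same idea, substituting ipaddress for the netaddr and six dependencies used by the original, could look like:

```python
# Illustrative stand-in for the deleted rand_ip()/rand_ipv6_ip() helpers,
# using only the standard library.
import ipaddress
import random


def rand_ip(network=False):
    # Pick a host in TEST-NET-3 (203.0.113.0/24), reserved for documentation.
    address = '203.0.113.%d' % random.randint(0, 255)
    if network:
        prefix = random.randint(24, 32)
        net = ipaddress.ip_network('%s/%d' % (address, prefix), strict=False)
        return str(net)          # e.g. '203.0.113.48/28'
    return address


def rand_ipv6_ip(network=False):
    # Pick an address in the IPv6 documentation range 2001:db8::/32.
    groups = ':'.join('%x' % random.randrange(16 ** 4) for _ in range(6))
    address = '2001:db8:' + groups
    if network:
        prefix = random.randint(32, 128)
        net = ipaddress.ip_network('%s/%d' % (address, prefix), strict=False)
        return str(net)
    return address


if __name__ == '__main__':
    print(rand_ip(), rand_ip(network=True))
    print(rand_ipv6_ip(), rand_ipv6_ip(network=True))
```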
diff --git a/releasenotes/notes/remove-intree-tempest-plugin-9fcf6edbeba47cba.yaml b/releasenotes/notes/remove-intree-tempest-plugin-9fcf6edbeba47cba.yaml new file mode 100644 index 0000000000..42482967c1 --- /dev/null +++ b/releasenotes/notes/remove-intree-tempest-plugin-9fcf6edbeba47cba.yaml @@ -0,0 +1,8 @@ +--- +other: + - | + Remove the in-tree manila tempest plugin because it + now lives in the new repository, openstack/manila-tempest-plugin. + From now on, changes to manila tempest tests should be + made in that repository. + diff --git a/setup.cfg b/setup.cfg index 96ac4a120f..39d54e7c99 100644 --- a/setup.cfg +++ b/setup.cfg @@ -30,7 +30,6 @@ data_files = etc/manila/rootwrap.d = etc/manila/rootwrap.d/* packages = manila - manila_tempest_tests [entry_points] console_scripts = @@ -87,8 +86,6 @@ manila.share.drivers.dell_emc.plugins = manila.tests.scheduler.fakes = FakeWeigher1 = manila.tests.scheduler.fakes:FakeWeigher1 FakeWeigher2 = manila.tests.scheduler.fakes:FakeWeigher2 -tempest.test_plugins = - manila_tests = manila_tempest_tests.plugin:ManilaTempestPlugin [build_sphinx] all_files = 1 @@ -98,7 +95,7 @@ builders = html warning-is-error = 1 [egg_info] -tag_build = +tag_build = tag_date = 0 tag_svn_revision = 0 diff --git a/tox.ini b/tox.ini index 4c43824e18..bdf67a93d4 100644 --- a/tox.ini +++ b/tox.ini @@ -50,7 +50,6 @@ commands = tools/check_logging.sh \ run_tests.sh {toxinidir}/tools/check_exec.py {toxinidir}/manila - {toxinidir}/tools/check_exec.py {toxinidir}/manila_tempest_tests {toxinidir}/tools/check_logging.sh {toxinidir}/manila [testenv:genconfig]
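Finally, a note on the scheduler-capability helper choose_matching_backend() removed with the utils module: it normalizes string-typed extra specs ('True'/'False') to booleans, then picks the first pool, other than the share's current host, whose capabilities contain every spec. A minimal sketch of that idea follows; the function name, pool data, and share-type payload below are made up for illustration and are not taken from this patch.

```python
# Sketch of the pool-matching idea behind the deleted choose_matching_backend().
def normalize_extra_specs(extra_specs):
    # Extra specs arrive as strings; turn 'True'/'False' into real booleans
    # so they can be compared against pool capability values.
    normalized = {}
    for key, value in extra_specs.items():
        lowered = str(value).lower()
        normalized[key] = (True if lowered == 'true'
                           else False if lowered == 'false'
                           else value)
    return normalized


def choose_matching_pool(share, pools, share_type):
    specs = normalize_extra_specs(share_type['extra_specs'])
    # First pool that is not the share's current host and whose capabilities
    # include every normalized extra spec; None if nothing matches.
    return next(
        (pool for pool in pools
         if pool['name'] != share['host']
         and all(item in pool['capabilities'].items()
                 for item in specs.items())),
        None)


if __name__ == '__main__':
    pools = [
        {'name': 'host1@backend#pool1',
         'capabilities': {'driver_handles_share_servers': False,
                          'snapshot_support': True}},
        {'name': 'host2@backend#pool2',
         'capabilities': {'driver_handles_share_servers': True,
                          'snapshot_support': True}},
    ]
    share = {'host': 'host1@backend#pool1'}
    share_type = {'extra_specs': {'driver_handles_share_servers': 'True',
                                  'snapshot_support': 'True'}}
    # Prints 'host2@backend#pool2': the same-host pool is excluded and the
    # other pool satisfies both capabilities.
    print(choose_matching_pool(share, pools, share_type)['name'])
```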