From 9f637252a03168fae425567b2a50d304c68233bb Mon Sep 17 00:00:00 2001 From: rabi Date: Thu, 21 Dec 2017 10:28:03 +0530 Subject: [PATCH] Add functional test for octavia lbaas Change-Id: I22558f2df4cc0eb6ab4dfbbec7041d24c7a298ac --- .../tests/functional/templates/lb_member.yaml | 61 +++++++++++ .../functional/templates/octavia_lbaas.yaml | 86 +++++++++++++++ .../tests/functional/test_octavia_lbaas.py | 101 ++++++++++++++++++ 3 files changed, 248 insertions(+) create mode 100644 heat_tempest_plugin/tests/functional/templates/lb_member.yaml create mode 100644 heat_tempest_plugin/tests/functional/templates/octavia_lbaas.yaml create mode 100644 heat_tempest_plugin/tests/functional/test_octavia_lbaas.py diff --git a/heat_tempest_plugin/tests/functional/templates/lb_member.yaml b/heat_tempest_plugin/tests/functional/templates/lb_member.yaml new file mode 100644 index 0000000..0afa754 --- /dev/null +++ b/heat_tempest_plugin/tests/functional/templates/lb_member.yaml @@ -0,0 +1,61 @@ +heat_template_version: pike +parameters: + image: + type: string + flavor: + type: string + network: + type: string + sec_group: + type: string + pool: + type: string + app_port: + type: number + timeout: + type: number + default: 120 + subnet: + type: string + +resources: + server: + type: OS::Nova::Server + properties: + image: {get_param: image} + flavor: {get_param: flavor} + networks: + - network: {get_param: network} + security_groups: + - {get_param: sec_group} + user_data_format: RAW + user_data: + str_replace: + template: | + #! /bin/sh -v + Body=$(hostname) + Response="HTTP/1.1 200 OK\r\nContent-Length: ${#Body}\r\n\r\n$Body" + wc_notify --data-binary '{"status": "SUCCESS"}' + while true ; do echo -e $Response | nc -llp PORT; done + params: + PORT: {get_param: app_port} + wc_notify: { get_attr: [handle, curl_cli]} + + handle: + type: OS::Heat::WaitConditionHandle + + waiter: + type: OS::Heat::WaitCondition + depends_on: server + properties: + timeout: {get_param: timeout} + handle: {get_resource: handle} + + pool_member: + type: OS::Octavia::PoolMember + depends_on: waiter + properties: + address: {get_attr: [server, networks, {get_param: network}, 0]} + pool: {get_param: pool} + protocol_port: {get_param: app_port} + subnet: {get_param: subnet} diff --git a/heat_tempest_plugin/tests/functional/templates/octavia_lbaas.yaml b/heat_tempest_plugin/tests/functional/templates/octavia_lbaas.yaml new file mode 100644 index 0000000..d20ae60 --- /dev/null +++ b/heat_tempest_plugin/tests/functional/templates/octavia_lbaas.yaml @@ -0,0 +1,86 @@ +heat_template_version: pike +parameters: + app_port: + type: number + default: 8080 + flavor: + type: string + default: m1.nano + image: + type: string + default: cirros-0.3.5-x86_64-disk + lb_port: + type: number + default: 80 + network: + type: string + default: heat-net + subnet: + type: string + default: heat-subnet + member_count: + type: number + default: 1 + lb_algorithm: + type: string + default: ROUND_ROBIN + +resources: + sec_group: + type: OS::Neutron::SecurityGroup + properties: + rules: + - remote_ip_prefix: 0.0.0.0/0 + protocol: tcp + port_range_min: {get_param: app_port} + port_range_max: {get_param: app_port} + + pool_members: + type: OS::Heat::ResourceGroup + properties: + count: {get_param: member_count} + resource_def: + type: OS::Test::PoolMember + properties: + image: {get_param: image} + flavor: {get_param: flavor} + pool: {get_resource: pool} + app_port: {get_param: app_port} + network: {get_param: network} + sec_group: {get_resource: sec_group} + subnet: 
{get_param: subnet} + + monitor: + type: OS::Octavia::HealthMonitor + properties: + delay: 3 + type: HTTP + timeout: 3 + max_retries: 3 + pool: {get_resource: pool} + + pool: + type: OS::Octavia::Pool + properties: + lb_algorithm: {get_param: lb_algorithm} + protocol: HTTP + listener: {get_resource: listener} + + listener: + type: OS::Octavia::Listener + properties: + loadbalancer: {get_resource: loadbalancer} + protocol: HTTP + protocol_port: {get_param: lb_port} + + loadbalancer: + type: OS::Octavia::LoadBalancer + properties: + vip_subnet: {get_param: subnet} +outputs: + loadbalancer: + value: {get_attr: [loadbalancer, show]} + pool: + value: {get_attr: [pool, show]} + listener: + value: {get_attr: [listener, show]} diff --git a/heat_tempest_plugin/tests/functional/test_octavia_lbaas.py b/heat_tempest_plugin/tests/functional/test_octavia_lbaas.py new file mode 100644 index 0000000..b1e500c --- /dev/null +++ b/heat_tempest_plugin/tests/functional/test_octavia_lbaas.py @@ -0,0 +1,101 @@ +# Licensed under the Apache License, Version 2.0 (the "License"); you may +# not use this file except in compliance with the License. You may obtain +# a copy of the License at +# +# http://www.apache.org/licenses/LICENSE-2.0 +# +# Unless required by applicable law or agreed to in writing, software +# distributed under the License is distributed on an "AS IS" BASIS, WITHOUT +# WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. See the +# License for the specific language governing permissions and limitations +# under the License. + +from tempest.lib import decorators + +from heat_tempest_plugin.common import test +from heat_tempest_plugin.tests.functional import functional_base + + +@test.requires_resource_type('OS::Octavia::LoadBalancer') +class LoadBalancerTest(functional_base.FunctionalTestsBase): + def setUp(self): + super(LoadBalancerTest, self).setUp() + self.template_name = 'octavia_lbaas.yaml' + self.member_template_name = 'lb_member.yaml' + self.sub_dir = 'templates' + + def _create_stack(self): + self.parameters = { + 'flavor': self.conf.minimal_instance_type, + 'image': self.conf.minimal_image_ref, + 'network': self.conf.fixed_network_name, + 'subnet': self.conf.fixed_subnet_name + } + member_template = self._load_template( + __file__, self.member_template_name, self.sub_dir + ) + self.files = {'lb_member.yaml': member_template} + self.env = {'resource_registry': { + 'OS::Test::PoolMember': 'lb_member.yaml'}} + + self.template = self._load_template(__file__, self.template_name, + self.sub_dir) + return self.stack_create(template=self.template, + parameters=self.parameters, + files=self.files, + environment=self.env) + + @decorators.idempotent_id('5d2c4452-4433-4438-899c-7711c01d3c50') + def test_create_update_loadbalancer(self): + stack_identifier = self._create_stack() + stack = self.client.stacks.get(stack_identifier) + output = self._stack_output(stack, 'loadbalancer') + self.assertEqual('ONLINE', output['operating_status']) + self.parameters['lb_algorithm'] = 'SOURCE_IP' + + self.update_stack(stack_identifier, + template=self.template, + parameters=self.parameters, + files=self.files, + environment=self.env) + stack = self.client.stacks.get(stack_identifier) + + output = self._stack_output(stack, 'loadbalancer') + self.assertEqual('ONLINE', output['operating_status']) + output = self._stack_output(stack, 'pool') + self.assertEqual('SOURCE_IP', output['lb_algorithm']) + + @decorators.idempotent_id('970e91af-1be8-4990-837b-66f9b5aff2b9') + def test_add_delete_poolmember(self): + 
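+        # member_count sets the size of the OS::Heat::ResourceGroup of
+        # OS::Test::PoolMember resources in octavia_lbaas.yaml; the two
+        # updates below scale the Octavia pool to two members and back to
+        # one, and the result is checked through the pool's 'members' output.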
+        stack_identifier = self._create_stack()
+        stack = self.client.stacks.get(stack_identifier)
+        output = self._stack_output(stack, 'loadbalancer')
+        self.assertEqual('ONLINE', output['operating_status'])
+        output = self._stack_output(stack, 'pool')
+        self.assertEqual(1, len(output['members']))
+        # add pool member
+        self.parameters['member_count'] = 2
+        self.update_stack(stack_identifier,
+                          template=self.template,
+                          parameters=self.parameters,
+                          files=self.files,
+                          environment=self.env)
+        stack = self.client.stacks.get(stack_identifier)
+
+        output = self._stack_output(stack, 'loadbalancer')
+        self.assertEqual('ONLINE', output['operating_status'])
+        output = self._stack_output(stack, 'pool')
+        self.assertEqual(2, len(output['members']))
+        # delete pool member
+        self.parameters['member_count'] = 1
+        self.update_stack(stack_identifier,
+                          template=self.template,
+                          parameters=self.parameters,
+                          files=self.files,
+                          environment=self.env)
+        stack = self.client.stacks.get(stack_identifier)
+
+        output = self._stack_output(stack, 'loadbalancer')
+        self.assertEqual('ONLINE', output['operating_status'])
+        output = self._stack_output(stack, 'pool')
+        self.assertEqual(1, len(output['members']))
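
The test drives both templates through in-memory wiring: _create_stack passes lb_member.yaml via the files argument and maps the OS::Test::PoolMember alias onto it through a resource_registry environment. For exercising the templates outside tempest, the same wiring can be written as an ordinary Heat environment file; the sketch below is illustrative only (the env.yaml name, the parameter values copied from the template defaults, and the openstack stack create invocation are assumptions, not part of this change).

# env.yaml -- illustrative stand-in for self.files/self.env in the test
resource_registry:
  OS::Test::PoolMember: lb_member.yaml

parameters:
  # devstack-style values matching the defaults in octavia_lbaas.yaml;
  # the tempest test reads these from heat_plugin config options instead
  image: cirros-0.3.5-x86_64-disk
  flavor: m1.nano
  network: heat-net
  subnet: heat-subnet
  member_count: 1

# created with something like:
#   openstack stack create -t octavia_lbaas.yaml -e env.yaml --wait lb-test

With the alias registered, scaling pool members is just a parameter update (member_count), which is exactly what test_add_delete_poolmember asserts by re-reading the pool output after each update.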