Add caching of metadata

If configured, the resulting metadata tree is added to the cache, and upon the next request the data is retrieved from the cache. This reduces the time of the second and subsequent sequential requests to the metadata from approximately 2.5 seconds to 0.1 seconds.

Change-Id: Ia1408f6ef407eb97db1789b5b60d6b36b162ba4d
This commit is contained in:
parent
1755447b73
commit
7939ce17e4
|
@ -44,6 +44,8 @@ EC2API_ADMIN_USER=${EC2API_ADMIN_USER:-ec2api}
|
|||
|
||||
EC2API_KEYSTONE_SIGNING_DIR=${EC2API_KEYSTONE_SIGNING_DIR:-/tmp/keystone-signing-ec2api}
|
||||
|
||||
CACHE_BACKEND="oslo_cache.dict"
|
||||
|
||||
# Support entry points installation of console scripts
|
||||
if [[ -d $EC2API_DIR/bin ]]; then
|
||||
EC2API_BIN_DIR=$EC2API_DIR/bin
|
||||
|
@ -215,6 +217,9 @@ function configure_ec2api {
|
|||
iniset $NOVA_CONF DEFAULT metadata_port 8789
|
||||
iniset $NOVA_CONF neutron service_metadata_proxy True
|
||||
fi
|
||||
iniset $EC2API_CONF_FILE cache enabled True
|
||||
iniset $EC2API_CONF_FILE cache backend "$CACHE_BACKEND"
|
||||
|
||||
}
|
||||
|
||||
|
||||
|
|
|
@ -17,6 +17,7 @@ import hmac
|
|||
import posixpath
|
||||
|
||||
import httplib2
|
||||
from oslo_cache import core as cache_core
|
||||
from oslo_config import cfg
|
||||
from oslo_log import log as logging
|
||||
import six
|
||||
|
@ -62,14 +63,30 @@ metadata_opts = [
|
|||
default='',
|
||||
help=_('Shared secret to sign instance-id request'),
|
||||
secret=True),
|
||||
cfg.IntOpt("cache_expiration",
|
||||
default=15,
|
||||
min=0,
|
||||
help=_('This option is the time (in seconds) to cache metadata. '
|
||||
'Increasing this setting should improve response times of the '
|
||||
'metadata API when under heavy load. Higher values may '
|
||||
'increase memory usage, and result in longer times for host '
|
||||
'metadata changes to take effect.'))
|
||||
]
|
||||
|
||||
CONF.register_opts(metadata_opts, group='metadata')
|
||||
cache_core.configure(CONF)
|
||||
|
||||
|
||||
class MetadataRequestHandler(wsgi.Application):
|
||||
"""Serve metadata."""
|
||||
|
||||
def __init__(self):
    """Set up the oslo.cache region used to cache instance metadata.

    If the [cache] section is not enabled in the configuration, the
    handler still works, but every request rebuilds the metadata tree;
    warn the operator about that.
    """
    if not CONF.cache.enabled:
        LOG.warning("Metadata doesn't use cache. "
                    "Configure cache options to use cache.")
    # The region is created unconditionally; with caching disabled it
    # degrades to a no-op backend.
    self.cache_region = cache_core.create_region()
    cache_core.configure_cache_region(CONF, self.cache_region)
|
||||
|
||||
@webob.dec.wsgify(RequestClass=wsgi.Request)
|
||||
def __call__(self, req):
|
||||
LOG.debug('Request: %s', req)
|
||||
|
@ -256,7 +273,8 @@ class MetadataRequestHandler(wsgi.Application):
|
|||
context.project_id = requester['project_id']
|
||||
return api.get_metadata_item(context, path_tokens,
|
||||
requester['os_instance_id'],
|
||||
requester['private_ip'])
|
||||
requester['private_ip'],
|
||||
self.cache_region)
|
||||
|
||||
def _add_response_data(self, response, data):
|
||||
if isinstance(data, six.text_type):
|
||||
|
|
|
@ -16,6 +16,8 @@ import base64
|
|||
import itertools
|
||||
|
||||
from novaclient import exceptions as nova_exception
|
||||
from oslo_cache import core as cache_core
|
||||
from oslo_config import cfg
|
||||
from oslo_log import log as logging
|
||||
import six
|
||||
|
||||
|
@ -25,6 +27,7 @@ from ec2api.api import instance as instance_api
|
|||
from ec2api import exception
|
||||
from ec2api.i18n import _
|
||||
|
||||
CONF = cfg.CONF
|
||||
LOG = logging.getLogger(__name__)
|
||||
|
||||
VERSIONS = [
|
||||
|
@ -89,27 +92,35 @@ def get_os_instance_and_project_id_by_provider_id(context, provider_id,
|
|||
return os_instance_id, project_id
|
||||
|
||||
|
||||
def get_metadata_item(context, path_tokens, os_instance_id, remote_ip,
                      cache_region):
    """Return the metadata item addressed by path_tokens for an instance.

    The full metadata tree for the instance is stored in cache_region
    (keyed by the OS instance id) so subsequent requests are served
    without rebuilding it; entries expire after
    CONF.metadata.cache_expiration seconds.

    :param context: request context
    :param path_tokens: path components; the first one is the metadata
        API version ('latest' maps to the newest supported version)
    :param os_instance_id: OS id of the instance the request is for
    :param remote_ip: requester IP, used when building the metadata tree
    :param cache_region: oslo.cache region used to store/fetch the tree
    :raises exception.EC2MetadataNotFound: for an unknown version, a
        tenant mismatch, or a path absent from the tree
    """
    version = path_tokens[0]
    if version == "latest":
        version = VERSIONS[-1]
    elif version not in VERSIONS:
        raise exception.EC2MetadataNotFound()

    cache_key = 'metadata-%s' % os_instance_id
    cache = cache_region.get(
        cache_key, expiration_time=CONF.metadata.cache_expiration)
    if cache and cache != cache_core.NO_VALUE:
        # The owner check must be repeated on cache hits so a cached
        # tree is never served to a different tenant.
        _check_instance_owner(context, os_instance_id, cache['owner_id'])
        LOG.debug("Using cached metadata for instance %s", os_instance_id)
    else:
        ec2_instance, ec2_reservation = (
            _get_ec2_instance_and_reservation(context, os_instance_id))

        _check_instance_owner(context, os_instance_id,
                              ec2_reservation['ownerId'])

        metadata = _build_metadata(context, ec2_instance, ec2_reservation,
                                   os_instance_id, remote_ip)
        # Store the owner id alongside the tree so future cache hits
        # can still validate the requesting tenant.
        cache = {'metadata': metadata,
                 'owner_id': ec2_reservation['ownerId']}

        cache_region.set(cache_key, cache)

    metadata = cache['metadata']
    metadata = _cut_down_to_version(metadata, version)
    metadata_item = _find_path_in_tree(metadata, path_tokens[1:])
    return _format_metadata_item(metadata_item)
|
||||
|
@ -136,6 +147,18 @@ def _get_ec2_instance_and_reservation(context, os_instance_id):
|
|||
return ec2_instance, ec2_reservation
|
||||
|
||||
|
||||
def _check_instance_owner(context, os_instance_id, owner_id):
    """Verify that the request's tenant owns the instance.

    :raises exception.EC2MetadataNotFound: when the tenant ids differ
    """
    # NOTE(ft): check for case of Neutron metadata proxy.
    # It sends project_id as X-Tenant-ID HTTP header.
    # We make sure it's correct
    if context.project_id != owner_id:
        LOG.warning(_('Tenant_id %(tenant_id)s does not match tenant_id '
                      'of instance %(instance_id)s.'),
                    {'tenant_id': context.project_id,
                     'instance_id': os_instance_id})
        raise exception.EC2MetadataNotFound()
|
||||
|
||||
|
||||
def _build_metadata(context, ec2_instance, ec2_reservation,
|
||||
os_instance_id, remote_ip):
|
||||
metadata = {
|
||||
|
|
|
@ -145,6 +145,7 @@ class ProxyTestCase(test_base.BaseTestCase):
|
|||
'project_id': mock.sentinel.project_id,
|
||||
'private_ip': mock.sentinel.private_ip}
|
||||
get_metadata_item.return_value = 'fake_item'
|
||||
self.handler.cache_region = 'fake_region'
|
||||
|
||||
retval = self.handler._get_metadata(['fake_ver', 'fake_attr'],
|
||||
requester)
|
||||
|
@ -152,7 +153,8 @@ class ProxyTestCase(test_base.BaseTestCase):
|
|||
get_context.assert_called_with()
|
||||
get_metadata_item.assert_called_with(
|
||||
get_context.return_value, ['fake_ver', 'fake_attr'],
|
||||
mock.sentinel.os_instance_id, mock.sentinel.private_ip)
|
||||
mock.sentinel.os_instance_id, mock.sentinel.private_ip,
|
||||
'fake_region')
|
||||
self.assertEqual(mock.sentinel.project_id,
|
||||
get_context.return_value.project_id)
|
||||
|
||||
|
|
|
@ -17,6 +17,8 @@ import copy
|
|||
|
||||
import mock
|
||||
from novaclient import exceptions as nova_exception
|
||||
from oslo_cache import core as cache_core
|
||||
from oslo_config import cfg
|
||||
import six
|
||||
|
||||
from ec2api import exception
|
||||
|
@ -26,7 +28,7 @@ from ec2api.tests.unit import fakes
|
|||
from ec2api.tests.unit import matchers
|
||||
from ec2api.tests.unit import tools
|
||||
|
||||
|
||||
CONF = cfg.CONF
|
||||
FAKE_USER_DATA = u'fake_user_data-' + six.unichr(1071)
|
||||
|
||||
|
||||
|
@ -46,9 +48,15 @@ class MetadataApiTestCase(base.ApiTestCase):
|
|||
self.instance_api.describe_instance_attribute.return_value = {
|
||||
'instanceId': fakes.ID_EC2_INSTANCE_1,
|
||||
'userData': {'value': userDataValue}}
|
||||
self.configure(enabled=False, group='cache')
|
||||
self._init_cache_region()
|
||||
|
||||
self.fake_context = base.create_context()
|
||||
|
||||
def _init_cache_region(self):
    # (Re)build the cache region from the current CONF values so tests
    # can flip [cache] options and get a region reflecting them.
    self.cache_region = cache_core.create_region()
    cache_core.configure_cache_region(CONF, self.cache_region)
|
||||
|
||||
def test_get_version_list(self):
|
||||
retval = api.get_version_list()
|
||||
self.assertEqual('\n'.join(api.VERSIONS + ['latest']), retval)
|
||||
|
@ -86,13 +94,15 @@ class MetadataApiTestCase(base.ApiTestCase):
|
|||
def test_get_version_root(self):
|
||||
retval = api.get_metadata_item(self.fake_context, ['2009-04-04'],
|
||||
fakes.ID_OS_INSTANCE_1,
|
||||
fakes.IP_NETWORK_INTERFACE_2)
|
||||
fakes.IP_NETWORK_INTERFACE_2,
|
||||
self.cache_region)
|
||||
self.assertEqual('meta-data/\nuser-data', retval)
|
||||
|
||||
self.assertRaises(
|
||||
exception.EC2MetadataNotFound,
|
||||
api.get_metadata_item, self.fake_context, ['9999-99-99'],
|
||||
fakes.ID_OS_INSTANCE_1, fakes.IP_NETWORK_INTERFACE_2)
|
||||
fakes.ID_OS_INSTANCE_1, fakes.IP_NETWORK_INTERFACE_2,
|
||||
self.cache_region)
|
||||
|
||||
self.db_api.get_items_ids.assert_called_with(
|
||||
self.fake_context, 'i', item_ids=None,
|
||||
|
@ -106,14 +116,16 @@ class MetadataApiTestCase(base.ApiTestCase):
|
|||
self.assertRaises(exception.EC2MetadataNotFound,
|
||||
api.get_metadata_item, self.fake_context,
|
||||
['9999-99-99', 'user-data-invalid'],
|
||||
fakes.ID_OS_INSTANCE_1, fakes.IP_NETWORK_INTERFACE_2)
|
||||
fakes.ID_OS_INSTANCE_1, fakes.IP_NETWORK_INTERFACE_2,
|
||||
self.cache_region)
|
||||
|
||||
def test_mismatch_project_id(self):
|
||||
self.fake_context.project_id = fakes.random_os_id()
|
||||
self.assertRaises(
|
||||
exception.EC2MetadataNotFound,
|
||||
api.get_metadata_item, self.fake_context, ['2009-04-04'],
|
||||
fakes.ID_OS_INSTANCE_1, fakes.IP_NETWORK_INTERFACE_2)
|
||||
fakes.ID_OS_INSTANCE_1, fakes.IP_NETWORK_INTERFACE_2,
|
||||
self.cache_region)
|
||||
|
||||
def test_non_existing_instance(self):
|
||||
self.instance_api.describe_instances.return_value = {
|
||||
|
@ -121,12 +133,14 @@ class MetadataApiTestCase(base.ApiTestCase):
|
|||
self.assertRaises(
|
||||
exception.EC2MetadataNotFound,
|
||||
api.get_metadata_item, self.fake_context, ['2009-04-04'],
|
||||
fakes.ID_OS_INSTANCE_1, fakes.IP_NETWORK_INTERFACE_2)
|
||||
fakes.ID_OS_INSTANCE_1, fakes.IP_NETWORK_INTERFACE_2,
|
||||
self.cache_region)
|
||||
|
||||
def test_user_data(self):
|
||||
retval = api.get_metadata_item(
|
||||
self.fake_context, ['2009-04-04', 'user-data'],
|
||||
fakes.ID_OS_INSTANCE_1, fakes.IP_NETWORK_INTERFACE_2)
|
||||
fakes.ID_OS_INSTANCE_1, fakes.IP_NETWORK_INTERFACE_2,
|
||||
self.cache_region)
|
||||
self.assertEqual(FAKE_USER_DATA, retval)
|
||||
|
||||
def test_no_user_data(self):
|
||||
|
@ -136,7 +150,8 @@ class MetadataApiTestCase(base.ApiTestCase):
|
|||
exception.EC2MetadataNotFound,
|
||||
api.get_metadata_item, self.fake_context,
|
||||
['2009-04-04', 'user-data'],
|
||||
fakes.ID_OS_INSTANCE_1, fakes.IP_NETWORK_INTERFACE_2)
|
||||
fakes.ID_OS_INSTANCE_1, fakes.IP_NETWORK_INTERFACE_2,
|
||||
self.cache_region)
|
||||
|
||||
def test_security_groups(self):
|
||||
self.instance_api.describe_instances.return_value = {
|
||||
|
@ -144,7 +159,8 @@ class MetadataApiTestCase(base.ApiTestCase):
|
|||
retval = api.get_metadata_item(
|
||||
self.fake_context,
|
||||
['2009-04-04', 'meta-data', 'security-groups'],
|
||||
fakes.ID_OS_INSTANCE_2, fakes.IP_NETWORK_INTERFACE_1)
|
||||
fakes.ID_OS_INSTANCE_2, fakes.IP_NETWORK_INTERFACE_1,
|
||||
self.cache_region)
|
||||
self.assertEqual('\n'.join(['groupname3']),
|
||||
retval)
|
||||
|
||||
|
@ -152,14 +168,16 @@ class MetadataApiTestCase(base.ApiTestCase):
|
|||
retval = api.get_metadata_item(
|
||||
self.fake_context,
|
||||
['2009-04-04', 'meta-data', 'local-hostname'],
|
||||
fakes.ID_OS_INSTANCE_1, fakes.IP_NETWORK_INTERFACE_2)
|
||||
fakes.ID_OS_INSTANCE_1, fakes.IP_NETWORK_INTERFACE_2,
|
||||
self.cache_region)
|
||||
self.assertEqual(fakes.EC2_INSTANCE_1['privateDnsName'], retval)
|
||||
|
||||
def test_local_ipv4(self):
|
||||
retval = api.get_metadata_item(
|
||||
self.fake_context,
|
||||
['2009-04-04', 'meta-data', 'local-ipv4'],
|
||||
fakes.ID_OS_INSTANCE_1, fakes.IP_NETWORK_INTERFACE_2)
|
||||
fakes.ID_OS_INSTANCE_1, fakes.IP_NETWORK_INTERFACE_2,
|
||||
self.cache_region)
|
||||
self.assertEqual(fakes.IP_NETWORK_INTERFACE_2, retval)
|
||||
|
||||
def test_local_ipv4_from_address(self):
|
||||
|
@ -168,14 +186,16 @@ class MetadataApiTestCase(base.ApiTestCase):
|
|||
retval = api.get_metadata_item(
|
||||
self.fake_context,
|
||||
['2009-04-04', 'meta-data', 'local-ipv4'],
|
||||
fakes.ID_OS_INSTANCE_2, fakes.IP_NETWORK_INTERFACE_1)
|
||||
fakes.ID_OS_INSTANCE_2, fakes.IP_NETWORK_INTERFACE_1,
|
||||
self.cache_region)
|
||||
self.assertEqual(fakes.IP_NETWORK_INTERFACE_1, retval)
|
||||
|
||||
def test_pubkey_name(self):
|
||||
retval = api.get_metadata_item(
|
||||
self.fake_context,
|
||||
['2009-04-04', 'meta-data', 'public-keys'],
|
||||
fakes.ID_OS_INSTANCE_1, fakes.IP_NETWORK_INTERFACE_2)
|
||||
fakes.ID_OS_INSTANCE_1, fakes.IP_NETWORK_INTERFACE_2,
|
||||
self.cache_region)
|
||||
self.assertEqual('0=%s' % fakes.NAME_KEY_PAIR, retval)
|
||||
|
||||
def test_pubkey(self):
|
||||
|
@ -187,7 +207,8 @@ class MetadataApiTestCase(base.ApiTestCase):
|
|||
retval = api.get_metadata_item(
|
||||
self.fake_context,
|
||||
['2009-04-04', 'meta-data', 'public-keys', '0', 'openssh-key'],
|
||||
fakes.ID_OS_INSTANCE_1, fakes.IP_NETWORK_INTERFACE_2)
|
||||
fakes.ID_OS_INSTANCE_1, fakes.IP_NETWORK_INTERFACE_2,
|
||||
self.cache_region)
|
||||
self.assertEqual(fakes.PUBLIC_KEY_KEY_PAIR, retval)
|
||||
self.nova.servers.get.assert_called_once_with(fakes.ID_OS_INSTANCE_1)
|
||||
self.nova.keypairs._get.assert_called_once_with(
|
||||
|
@ -201,34 +222,39 @@ class MetadataApiTestCase(base.ApiTestCase):
|
|||
api.get_metadata_item,
|
||||
self.fake_context,
|
||||
['2009-04-04', 'meta-data', 'public-keys', '0', 'openssh-key'],
|
||||
fakes.ID_OS_INSTANCE_1, fakes.IP_NETWORK_INTERFACE_2)
|
||||
fakes.ID_OS_INSTANCE_1, fakes.IP_NETWORK_INTERFACE_2,
|
||||
self.cache_region)
|
||||
|
||||
def test_image_type_ramdisk(self):
|
||||
retval = api.get_metadata_item(
|
||||
self.fake_context,
|
||||
['2009-04-04', 'meta-data', 'ramdisk-id'],
|
||||
fakes.ID_OS_INSTANCE_1, fakes.IP_NETWORK_INTERFACE_2)
|
||||
fakes.ID_OS_INSTANCE_1, fakes.IP_NETWORK_INTERFACE_2,
|
||||
self.cache_region)
|
||||
self.assertEqual(fakes.ID_EC2_IMAGE_ARI_1, retval)
|
||||
|
||||
def test_image_type_kernel(self):
|
||||
retval = api.get_metadata_item(
|
||||
self.fake_context,
|
||||
['2009-04-04', 'meta-data', 'kernel-id'],
|
||||
fakes.ID_OS_INSTANCE_1, fakes.IP_NETWORK_INTERFACE_2)
|
||||
fakes.ID_OS_INSTANCE_1, fakes.IP_NETWORK_INTERFACE_2,
|
||||
self.cache_region)
|
||||
self.assertEqual(fakes.ID_EC2_IMAGE_AKI_1, retval)
|
||||
|
||||
def test_check_version(self):
|
||||
retval = api.get_metadata_item(
|
||||
self.fake_context,
|
||||
['2009-04-04', 'meta-data', 'block-device-mapping'],
|
||||
fakes.ID_OS_INSTANCE_1, fakes.IP_NETWORK_INTERFACE_2)
|
||||
fakes.ID_OS_INSTANCE_1, fakes.IP_NETWORK_INTERFACE_2,
|
||||
self.cache_region)
|
||||
self.assertIsNotNone(retval)
|
||||
|
||||
self.assertRaises(
|
||||
exception.EC2MetadataNotFound,
|
||||
api.get_metadata_item, self.fake_context,
|
||||
['2007-08-29', 'meta-data', 'block-device-mapping'],
|
||||
fakes.ID_OS_INSTANCE_1, fakes.IP_NETWORK_INTERFACE_2)
|
||||
fakes.ID_OS_INSTANCE_1, fakes.IP_NETWORK_INTERFACE_2,
|
||||
self.cache_region)
|
||||
|
||||
def test_format_instance_mapping(self):
|
||||
retval = api._build_block_device_mappings(
|
||||
|
@ -246,16 +272,38 @@ class MetadataApiTestCase(base.ApiTestCase):
|
|||
self.assertThat(retval,
|
||||
matchers.DictMatches(expected))
|
||||
|
||||
def test_metadata_cache(self):
    """Check that the second metadata request is served from the cache."""
    # Enable the in-memory dict backend and rebuild the region so this
    # test exercises the real caching path.
    self.configure(enabled=True, group='cache')
    self.configure(backend='oslo_cache.dict', group='cache')
    self._init_cache_region()
    retval = api.get_metadata_item(
        self.fake_context,
        ['2009-04-04', 'meta-data', 'local-ipv4'],
        fakes.ID_OS_INSTANCE_1, fakes.IP_NETWORK_INTERFACE_2,
        self.cache_region)
    self.assertEqual(fakes.IP_NETWORK_INTERFACE_2, retval)
    self.nova.servers.get.assert_called_once_with(fakes.ID_OS_INSTANCE_1)
    self.nova.servers.get.reset_mock()

    # The second request for the same instance must hit the cache and
    # not touch nova again.
    retval = api.get_metadata_item(
        self.fake_context,
        ['2009-04-04', 'meta-data', 'instance-id'],
        fakes.ID_OS_INSTANCE_1, fakes.IP_NETWORK_INTERFACE_2,
        self.cache_region)
    self.assertEqual(fakes.ID_EC2_INSTANCE_1, retval)
    self.nova.servers.get.assert_not_called()
|
||||
|
||||
|
||||
class MetadataApiIntegralTestCase(base.ApiTestCase):
|
||||
# TODO(ft): 'execute' feature isn't used here, but some mocks and
|
||||
# fake context are. ApiTestCase should be split to some classes to use
|
||||
# its feature optimally
|
||||
|
||||
@mock.patch('ec2api.metadata.api.cache_core.create_region')
|
||||
@mock.patch('ec2api.api.instance.security_group_api')
|
||||
@mock.patch('ec2api.api.instance.network_interface_api')
|
||||
def test_get_metadata_integral(self, network_interface_api,
|
||||
security_group_api):
|
||||
security_group_api, create_region):
|
||||
fake_context = base.create_context(is_os_admin=True)
|
||||
|
||||
self.set_mock_db_items(
|
||||
|
@ -284,13 +332,16 @@ class MetadataApiIntegralTestCase(base.ApiTestCase):
|
|||
security_group_api.describe_security_groups.return_value = {
|
||||
'securityGroupInfo': [fakes.EC2_SECURITY_GROUP_1,
|
||||
fakes.EC2_SECURITY_GROUP_3]}
|
||||
create_region.get.return_value = cache_core.NO_VALUE
|
||||
|
||||
retval = api.get_metadata_item(
|
||||
fake_context, ['latest', 'meta-data', 'instance-id'],
|
||||
fakes.ID_OS_INSTANCE_1, fakes.IP_NETWORK_INTERFACE_2)
|
||||
fakes.ID_OS_INSTANCE_1, fakes.IP_NETWORK_INTERFACE_2,
|
||||
create_region)
|
||||
self.assertEqual(fakes.ID_EC2_INSTANCE_1, retval)
|
||||
|
||||
retval = api.get_metadata_item(
|
||||
fake_context, ['latest', 'meta-data', 'instance-id'],
|
||||
fakes.ID_OS_INSTANCE_2, '10.200.1.15')
|
||||
fakes.ID_OS_INSTANCE_2, '10.200.1.15',
|
||||
create_region)
|
||||
self.assertEqual(fakes.ID_EC2_INSTANCE_2, retval)
|
||||
|
|
|
@ -17,6 +17,8 @@ APIPASTE_FILE=$CONF_DIR/api-paste.ini
|
|||
|
||||
AUTH_CACHE_DIR=${AUTH_CACHE_DIR:-/var/cache/ec2api}
|
||||
|
||||
CACHE_BACKEND='oslo_cache.dict'
|
||||
|
||||
#Check for environment
|
||||
if [[ -z "$OS_AUTH_URL" || -z "$OS_USERNAME" || -z "$OS_PASSWORD" ]]; then
|
||||
echo "Please set OS_AUTH_URL, OS_USERNAME, OS_PASSWORD"
|
||||
|
@ -288,6 +290,9 @@ iniset $CONF_FILE $GROUP_AUTHTOKEN project_domain_name $SERVICE_DOMAIN_NAME
|
|||
iniset $CONF_FILE $GROUP_AUTHTOKEN user_domain_name $SERVICE_DOMAIN_NAME
|
||||
iniset $CONF_FILE $GROUP_AUTHTOKEN auth_type password
|
||||
|
||||
GROUP_CACHE="cache"
|
||||
iniset $CONF_FILE $GROUP_CACHE enabled True
|
||||
iniset $CONF_FILE $GROUP_CACHE backend "$CACHE_BACKEND"
|
||||
|
||||
if [[ -f "$NOVA_CONF" ]]; then
|
||||
# NOTE(ft): use swift instead internal s3 server if enabled
|
||||
|
|
|
@ -8,6 +8,7 @@ eventlet!=0.18.3,<0.21.0,>=0.18.2 # MIT
|
|||
greenlet>=0.3.2 # MIT
|
||||
httplib2>=0.7.5 # MIT
|
||||
lxml!=3.7.0,>=2.3 # BSD
|
||||
oslo.cache>=1.5.0 # Apache-2.0
|
||||
oslo.config>=4.0.0 # Apache-2.0
|
||||
oslo.concurrency>=3.8.0 # Apache-2.0
|
||||
oslo.context>=2.14.0 # Apache-2.0
|
||||
|
|
Loading…
Reference in New Issue