Distributed cache namespace to invalidate regions

dogpile.cache's region invalidation is not designed to work across
processes. This patch enables distributed invalidation of keys in a
region.

Instead of using a static cache key, we use the original cache key
and append a dynamic value to it. This value is looked up in
memcached using the region name as the key. So any time the value
stored under the region key changes, the cache keys in that region
are effectively invalidated.
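
As an illustration, a minimal standalone sketch of the scheme (the
'<<<region>>>:' key prefix and os.urandom(10) mirror the patch
below; the client argument stands in for any shared memcached
client and is purely illustrative):

    import os

    REGION_KEY_PREFIX = '<<<region>>>:'

    def region_id(client, region_name):
        # The region's current id lives in the shared backend under
        # a well-known key derived from the region name.
        key = REGION_KEY_PREFIX + region_name
        rid = client.get(key)
        if rid is None:
            rid = os.urandom(10)
            client.set(key, rid)
        return rid

    def mangle_key(client, region_name, key):
        # Every cache key gets the current region id appended...
        return '%s:%s' % (key, region_id(client, region_name))

    def invalidate_region(client, region_name):
        # ...so rotating the id orphans every key previously written
        # in the region, invalidating all of them at once.
        client.set(REGION_KEY_PREFIX + region_name, os.urandom(10))

Readers always mangle with the latest region id, so no per-key
deletion is needed.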

Conflicts:
	keystone/assignment/core.py: imports were fixed
	keystone/catalog/core.py: imports were fixed
	keystone/common/cache/core.py:
                dogpile.cache doesn't have invalidation strategies
                in 0.5.8. Because of that, call to
                region.invalidate was redefined.
	keystone/identity/core.py:
                there is no per-region cache in id_mapping_api
                in Mitaka
	keystone/revoke/core.py:
                there is no per-region cache in revocations
                in Mitaka
	keystone/server/backends.py:
                removed configuration of regions which were added
                only in Newton
	keystone/tests/unit/test_v3_assignment.py:
                conflict due to freezegun being used in Newton and
                not used in Mitaka
	keystone/token/provider.py:
                there is no per-region cache in token providers
                in Mitaka

Closes-Bug: #1590779
Change-Id: Ib80d41d43ef815b37282d72ad68e7aa8e1ff354e
(cherry picked from commit 42eda48c78)
Author: David Stanek, 2016-08-01 21:06:50 +00:00
Committer: Boris Bobrov
parent 3a1e41fecf
commit f1d9c54ef0
6 changed files with 347 additions and 75 deletions

keystone/assignment/core.py

@@ -18,7 +18,6 @@ import abc
import copy
import functools

from oslo_cache import core as oslo_cache
from oslo_config import cfg
from oslo_log import log
from oslo_log import versionutils
@@ -44,7 +43,7 @@ MEMOIZE = cache.get_memoization_decorator(group='role')

# This builds a discrete cache region dedicated to role assignments computed
# for a given user + project/domain pair. Any write operation to add or remove
# any role assignment should invalidate this entire cache region.
COMPUTED_ASSIGNMENTS_REGION = oslo_cache.create_region()
COMPUTED_ASSIGNMENTS_REGION = cache.create_region(name='computed assignments')
MEMOIZE_COMPUTED_ASSIGNMENTS = cache.get_memoization_decorator(
    group='role',
    region=COMPUTED_ASSIGNMENTS_REGION)

keystone/catalog/core.py

@@ -18,7 +18,6 @@

import abc
import itertools

from oslo_cache import core as oslo_cache
from oslo_config import cfg
from oslo_log import log
import six
@@ -49,7 +48,7 @@ MEMOIZE = cache.get_memoization_decorator(group='catalog')

# computed for a given user + project pair. Any write operation to create,
# modify or delete elements of the service catalog should invalidate this
# entire cache region.
COMPUTED_CATALOG_REGION = oslo_cache.create_region()
COMPUTED_CATALOG_REGION = cache.create_region(name='computed catalog region')
MEMOIZE_COMPUTED_CATALOG = cache.get_memoization_decorator(
    group='catalog',
    region=COMPUTED_CATALOG_REGION)

keystone/common/cache/core.py

@@ -13,8 +13,11 @@
# under the License.

"""Keystone Caching Layer Implementation."""

import os

import dogpile.cache
from dogpile.cache import api
from dogpile.cache import util

from oslo_cache import core as cache
from oslo_config import cfg

@@ -25,6 +28,90 @@ CONF = cfg.CONF

CACHE_REGION = cache.create_region()


class RegionInvalidationManager(object):

    REGION_KEY_PREFIX = '<<<region>>>:'

    def __init__(self, invalidation_region, region_name):
        self._invalidation_region = invalidation_region
        self._region_key = self.REGION_KEY_PREFIX + region_name

    def _generate_new_id(self):
        return os.urandom(10)

    @property
    def region_id(self):
        return self._invalidation_region.get_or_create(
            self._region_key, self._generate_new_id, expiration_time=-1)

    def invalidate_region(self):
        new_region_id = self._generate_new_id()
        self._invalidation_region.set(self._region_key, new_region_id)
        return new_region_id

    def is_region_key(self, key):
        return key == self._region_key


class DistributedInvalidationStrategy(object):

    def __init__(self, region_manager):
        self._region_manager = region_manager

    def invalidate(self, hard=None):
        self._region_manager.invalidate_region()

    def is_invalidated(self, timestamp):
        return False

    def was_hard_invalidated(self):
        return False

    def is_hard_invalidated(self, timestamp):
        return False

    def was_soft_invalidated(self):
        return False

    def is_soft_invalidated(self, timestamp):
        return False


def key_mangler_factory(invalidation_manager, orig_key_mangler):
    def key_mangler(key):
        # NOTE(dstanek): Since *all* keys go through the key mangler we
        # need to make sure the region keys don't get the region_id added.
        # If it were, there would be no way to get to it, making the cache
        # effectively useless.
        if not invalidation_manager.is_region_key(key):
            key = '%s:%s' % (key, invalidation_manager.region_id)
        if orig_key_mangler:
            key = orig_key_mangler(key)
        return key
    return key_mangler


def create_region(name):
    """Create a dogpile region.

    Wraps oslo_cache.core.create_region. This is used to ensure that the
    Region is properly patched and allows us to more easily specify a
    region name.

    :param str name: The region name
    :returns: The new region.
    :rtype: :class:`dogpile.cache.region.CacheRegion`
    """
    region = cache.create_region()
    region.name = name  # oslo.cache doesn't allow this yet
    return region


CACHE_REGION = create_region(name='shared default')
CACHE_INVALIDATION_REGION = create_region(name='invalidation region')


def configure_cache(region=None):
    if region is None:
        region = CACHE_REGION

@@ -38,6 +125,55 @@ def configure_cache(region=None):

    if not configured:
        region.wrap(_context_cache._ResponseCacheProxy)

        region_manager = RegionInvalidationManager(
            CACHE_INVALIDATION_REGION, region.name)
        region.key_mangler = key_mangler_factory(
            region_manager, region.key_mangler)

        strategy = DistributedInvalidationStrategy(region_manager)

        def invalidate_by_strategy(*args, **kwargs):
            return strategy.invalidate()

        region.invalidate = invalidate_by_strategy


def _sha1_mangle_key(key):
    """Wrapper for dogpile's sha1_mangle_key.

    dogpile's sha1_mangle_key function expects an encoded string, so we
    should take steps to properly handle multiple inputs before passing
    the key through.

    NOTE(dstanek): this was copied directly from oslo_cache
    """
    try:
        key = key.encode('utf-8', errors='xmlcharrefreplace')
    except (UnicodeError, AttributeError):
        # NOTE(stevemar): if encoding fails just continue anyway.
        pass
    return util.sha1_mangle_key(key)


def configure_invalidation_region():
    if CACHE_INVALIDATION_REGION.is_configured:
        return

    # NOTE(dstanek): Configuring this region manually so that we control
    # the expiration and can ensure that the keys don't expire.
    config_dict = cache._build_cache_config(CONF)
    config_dict['expiration_time'] = None  # we don't want an expiration

    CACHE_INVALIDATION_REGION.configure_from_config(
        config_dict, '%s.' % CONF.cache.config_prefix)

    # NOTE(morganfainberg): if the backend requests the use of a
    # key_mangler, we should respect that key_mangler function. If a
    # key_mangler is not defined by the backend, use the sha1_mangle_key
    # mangler provided by dogpile.cache. This ensures we always use a
    # fixed size cache-key.
    if CACHE_INVALIDATION_REGION.key_mangler is None:
        CACHE_INVALIDATION_REGION.key_mangler = _sha1_mangle_key


def get_memoization_decorator(group, expiration_group=None, region=None):
    if region is None:

@@ -62,63 +198,3 @@ dogpile.cache.register_backend(
    'keystone.cache.memcache_pool',
    'keystone.common.cache.backends.memcache_pool',
    'PooledMemcachedBackend')


# TODO(morganfainberg): Move this logic up into oslo.cache directly
# so we can handle region-wide invalidations or alternatively propose
# a fix to dogpile.cache to make region-wide invalidates possible to
# work across distributed processes.
class _RegionInvalidator(object):

    def __init__(self, region, region_name):
        self.region = region
        self.region_name = region_name
        region_key = '_RegionExpiration.%(type)s.%(region_name)s'
        self.soft_region_key = region_key % {'type': 'soft',
                                             'region_name': self.region_name}
        self.hard_region_key = region_key % {'type': 'hard',
                                             'region_name': self.region_name}

    @property
    def hard_invalidated(self):
        invalidated = self.region.backend.get(self.hard_region_key)
        if invalidated is not api.NO_VALUE:
            return invalidated.payload
        return None

    @hard_invalidated.setter
    def hard_invalidated(self, value):
        self.region.set(self.hard_region_key, value)

    @hard_invalidated.deleter
    def hard_invalidated(self):
        self.region.delete(self.hard_region_key)

    @property
    def soft_invalidated(self):
        invalidated = self.region.backend.get(self.soft_region_key)
        if invalidated is not api.NO_VALUE:
            return invalidated.payload
        return None

    @soft_invalidated.setter
    def soft_invalidated(self, value):
        self.region.set(self.soft_region_key, value)

    @soft_invalidated.deleter
    def soft_invalidated(self):
        self.region.delete(self.soft_region_key)


def apply_invalidation_patch(region, region_name):
    """Patch the region interfaces to ensure we share the expiration time.

    This method is used to patch region.invalidate, region._hard_invalidated,
    and region._soft_invalidated.
    """
    # Patch the region object. This logic needs to be moved up into dogpile
    # itself. Patching the internal interfaces, unfortunately, is the only
    # way to handle this at the moment.
    invalidator = _RegionInvalidator(region=region, region_name=region_name)
    setattr(region, '_hard_invalidated', invalidator.hard_invalidated)
    setattr(region, '_soft_invalidated', invalidator.soft_invalidated)

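For orientation, a minimal usage sketch of the interface added above
(assuming a deployment-configured [cache] backend such as memcached;
the region name is illustrative):

    from keystone.common import cache

    region = cache.create_region(name='example region')
    cache.configure_cache(region=region)   # installs the dynamic key mangler
    cache.configure_invalidation_region()  # shared store for region ids

    region.set('some-key', 'some-value')
    region.invalidate()  # rotates the region id in the shared backend, so
                         # 'some-key' becomes a miss in every process
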
keystone/server/backends.py

@@ -31,13 +31,8 @@ def load_backends():

    # Configure and build the cache
    cache.configure_cache()
    cache.configure_cache(region=catalog.COMPUTED_CATALOG_REGION)
    cache.apply_invalidation_patch(
        region=catalog.COMPUTED_CATALOG_REGION,
        region_name=catalog.COMPUTED_CATALOG_REGION.name)
    cache.configure_cache(region=assignment.COMPUTED_ASSIGNMENTS_REGION)
    cache.apply_invalidation_patch(
        region=assignment.COMPUTED_ASSIGNMENTS_REGION,
        region_name=assignment.COMPUTED_ASSIGNMENTS_REGION.name)
    cache.configure_invalidation_region()

    # Ensure that the identity driver is created before the assignment manager
    # and that the assignment driver is created before the resource manager.

keystone/tests/unit/common/test_cache.py (new file)

@@ -0,0 +1,200 @@
# Licensed under the Apache License, Version 2.0 (the "License"); you may
# not use this file except in compliance with the License. You may obtain
# a copy of the License at
#
# http://www.apache.org/licenses/LICENSE-2.0
#
# Unless required by applicable law or agreed to in writing, software
# distributed under the License is distributed on an "AS IS" BASIS, WITHOUT
# WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. See the
# License for the specific language governing permissions and limitations
# under the License.

import uuid

from dogpile.cache import api as dogpile
from dogpile.cache.backends import memory
from oslo_config import cfg
from oslo_config import fixture as config_fixture

from keystone.common import cache
from keystone.tests import unit

CONF = cfg.CONF


class TestCacheRegion(unit.BaseTestCase):

    def setUp(self):
        super(TestCacheRegion, self).setUp()
        self.config_fixture = self.useFixture(config_fixture.Config(CONF))
        self.config_fixture.config(
            # TODO(morganfainberg): Make Cache Testing a separate test case
            # in tempest, and move it out of the base unit tests.
            group='cache',
            backend='dogpile.cache.memory')
        self.config_fixture.config(
            group='cache',
            enabled='true')

        # replace existing backend since this may already be configured
        cache.CACHE_INVALIDATION_REGION.configure(
            backend='dogpile.cache.memory',
            expiration_time=None,
            replace_existing_backend=True)

        self.region_name = uuid.uuid4().hex
        self.region0 = cache.create_region('test_region')
        self.region1 = cache.create_region('test_region')
        cache.configure_cache(region=self.region0)
        cache.configure_cache(region=self.region1)

        # TODO(dstanek): this should be a mock entrypoint
        self.cache_dict = {}
        self.backend = memory.MemoryBackend({'cache_dict': self.cache_dict})
        self.region0.backend = self.backend
        self.region1.backend = self.backend

    def _assert_has_no_value(self, values):
        for value in values:
            self.assertIsInstance(value, dogpile.NoValue)

    def test_singular_methods_when_invalidating_the_region(self):
        key = uuid.uuid4().hex
        value = uuid.uuid4().hex

        # key does not exist
        self.assertIsInstance(self.region0.get(key), dogpile.NoValue)
        # make it exist
        self.region0.set(key, value)
        # ensure it exists
        self.assertEqual(value, self.region0.get(key))
        # invalidating region1 should invalidate region0
        self.region1.invalidate()
        self.assertIsInstance(self.region0.get(key), dogpile.NoValue)

    def test_region_singular_methods_delete(self):
        key = uuid.uuid4().hex
        value = uuid.uuid4().hex

        # key does not exist
        self.assertIsInstance(self.region0.get(key), dogpile.NoValue)
        # make it exist
        self.region0.set(key, value)
        # ensure it exists
        self.assertEqual(value, self.region0.get(key))
        # delete it
        self.region1.delete(key)
        # ensure it's gone
        self.assertIsInstance(self.region0.get(key), dogpile.NoValue)

    def test_multi_methods_when_invalidating_the_region(self):
        mapping = {uuid.uuid4().hex: uuid.uuid4().hex for _ in range(4)}
        keys = list(mapping.keys())
        values = [mapping[k] for k in keys]

        # keys do not exist
        self._assert_has_no_value(self.region0.get_multi(keys))
        # make them exist
        self.region0.set_multi(mapping)
        # ensure they exist
        self.assertEqual(values, self.region0.get_multi(keys))
        # check using the singular get method for completeness
        self.assertEqual(mapping[keys[0]], self.region0.get(keys[0]))
        # invalidating region1 should invalidate region0
        self.region1.invalidate()
        # ensure they are gone
        self._assert_has_no_value(self.region0.get_multi(keys))

    def test_region_multi_methods_delete(self):
        mapping = {uuid.uuid4().hex: uuid.uuid4().hex for _ in range(4)}
        keys = list(mapping.keys())
        values = [mapping[k] for k in keys]

        # keys do not exist
        self._assert_has_no_value(self.region0.get_multi(keys))
        # make them exist
        self.region0.set_multi(mapping)
        # ensure they exist
        self.assertEqual(values, self.region0.get_multi(keys))
        # check using the singular get method for completeness
        self.assertEqual(mapping[keys[0]], self.region0.get(keys[0]))
        # delete them
        self.region1.delete_multi(mapping.keys())
        # ensure they are gone
        self._assert_has_no_value(self.region0.get_multi(keys))

    def test_memoize_decorator_when_invalidating_the_region(self):
        memoize = cache.get_memoization_decorator('cache',
                                                  region=self.region0)

        @memoize
        def func(value):
            return value + uuid.uuid4().hex

        key = uuid.uuid4().hex

        # test get/set
        return_value = func(key)
        # the values should be the same since it comes from the cache
        self.assertEqual(return_value, func(key))

        # invalidating region1 should invalidate region0
        self.region1.invalidate()
        new_value = func(key)
        self.assertNotEqual(return_value, new_value)

    def test_combination(self):
        memoize = cache.get_memoization_decorator('cache',
                                                  region=self.region0)

        @memoize
        def func(value):
            return value + uuid.uuid4().hex

        key = uuid.uuid4().hex
        simple_value = uuid.uuid4().hex

        # test get/set using the decorator
        return_value = func(key)
        self.assertEqual(return_value, func(key))

        # test get/set using the singular methods
        self.region0.set(key, simple_value)
        self.assertEqual(simple_value, self.region0.get(key))

        # invalidating region1 should invalidate region0
        self.region1.invalidate()

        # ensure that the decorated function returns a new value
        new_value = func(key)
        self.assertNotEqual(return_value, new_value)
        # ensure that a get doesn't have a value
        self.assertIsInstance(self.region0.get(key), dogpile.NoValue)

    def test_direct_region_key_invalidation(self):
        """Invalidate by manually clearing the region key's value.

        NOTE(dstanek): I normally don't like tests that repeat application
        logic, but in this case we need to. There are too many ways that
        the tests above can erroneously pass that we need this sanity
        check.
        """
        region_key = cache.RegionInvalidationManager(
            None, self.region0.name)._region_key
        key = uuid.uuid4().hex
        value = uuid.uuid4().hex

        # key does not exist
        self.assertIsInstance(self.region0.get(key), dogpile.NoValue)
        # make it exist
        self.region0.set(key, value)
        # ensure it exists
        self.assertEqual(value, self.region0.get(key))
        # test invalidation
        cache.CACHE_INVALIDATION_REGION.delete(region_key)
        self.assertIsInstance(self.region0.get(key), dogpile.NoValue)

keystone/tests/unit/test_v3_assignment.py

@@ -489,6 +489,9 @@ class AssignmentTestCase(test_v3.RestfulTestCase,
        user1 = unit.new_user_ref(domain_id=self.domain['id'])
        user1 = self.identity_api.create_user(user1)

        role = unit.new_role_ref()
        self.role_api.create_role(role['id'], role)

        collection_url = '/role_assignments'
        r = self.get(collection_url)
        self.assertValidRoleAssignmentListResponse(r,
@@ -499,7 +502,7 @@ class AssignmentTestCase(test_v3.RestfulTestCase,
        # that we get them all back.
        gd_entity = self.build_role_assignment_entity(domain_id=self.domain_id,
                                                      group_id=self.group_id,
                                                      role_id=self.role_id)
                                                      role_id=role['id'])
        self.put(gd_entity['links']['assignment'])
        r = self.get(collection_url)
        self.assertValidRoleAssignmentListResponse(
@@ -510,7 +513,7 @@ class AssignmentTestCase(test_v3.RestfulTestCase,
        ud_entity = self.build_role_assignment_entity(domain_id=self.domain_id,
                                                      user_id=user1['id'],
                                                      role_id=self.role_id)
                                                      role_id=role['id'])
        self.put(ud_entity['links']['assignment'])
        r = self.get(collection_url)
        self.assertValidRoleAssignmentListResponse(
@@ -521,7 +524,7 @@ class AssignmentTestCase(test_v3.RestfulTestCase,
        gp_entity = self.build_role_assignment_entity(
            project_id=self.project_id, group_id=self.group_id,
            role_id=self.role_id)
            role_id=role['id'])
        self.put(gp_entity['links']['assignment'])
        r = self.get(collection_url)
        self.assertValidRoleAssignmentListResponse(
@@ -532,7 +535,7 @@ class AssignmentTestCase(test_v3.RestfulTestCase,
        up_entity = self.build_role_assignment_entity(
            project_id=self.project_id, user_id=user1['id'],
            role_id=self.role_id)
            role_id=role['id'])
        self.put(up_entity['links']['assignment'])
        r = self.get(collection_url)
        self.assertValidRoleAssignmentListResponse(