author     Jamie Lennox <jamielennox@gmail.com>    2016-01-17 00:34:02 +1100
committer  Jaewoo Park <jp655p@att.com>            2017-12-01 16:36:40 -0800
commit     9d8e2836fe7fca186e0380d8a532540ff5cc5215 (patch)
tree       1bc083c068099306dbc6f4927a5e48b493943bd0
parent     4c08d725c29b1bd378c8acb9fc997f3a2d557fd4 (diff)
Use oslo_cache in auth_token middleware
Use the new oslo.cache library instead of using memcached directly. This keeps
the old options around and will continue to use them in preference to the
oslo.cache configuration, as there is no way to detect whether oslo.cache was
explicitly configured and should take precedence.

For now there are no deprecation messages for the old options; deprecation will
wait until the change has been exercised in production environments.

Closes-Bug: #1523375
Change-Id: Ifccacc5db311ad538ce60191cbe221644d1a5807
Co-Authored-By: Nicolas Helgeson <nh202b@att.com>
Notes (review):
    Code-Review+1: Tin Lam <tin@irrational.io>
    Code-Review+2: ayoung <ayoung@redhat.com>
    Workflow+1: ayoung <ayoung@redhat.com>
    Verified+2: Zuul
    Submitted-by: Zuul
    Submitted-at: Sat, 02 Dec 2017 16:24:36 +0000
    Reviewed-on: https://review.openstack.org/268664
    Project: openstack/keystonemiddleware
    Branch: refs/heads/master
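For context, the sketch below (not part of the patch) mirrors the oslo.cache calls that the new _create_oslo_cache() helper in this change is built on. The cached key/value pair is an illustrative assumption only; by default oslo.cache uses a null backend, so every lookup misses unless a real backend is configured under [cache].

    # Sketch only: the oslo.cache API used by this patch; the key/value pair
    # below is made up for illustration.
    import oslo_cache
    from oslo_config import cfg

    conf = cfg.CONF
    oslo_cache.configure(conf)                       # register the [cache] options

    region = oslo_cache.create_region()              # a dogpile.cache region
    oslo_cache.configure_cache_region(conf, region)  # apply [cache] settings

    region.set('token-abc123', {'user_id': 'demo'})  # no-op on the default null backend
    data = region.get('token-abc123')
    if data is oslo_cache.NO_VALUE:                  # sentinel returned on a cache miss
        data = None

Setting, for example, backend = dogpile.cache.memory in the [cache] group would make the set()/get() pair round-trip in memory, which is roughly what the FakeOsloCache fixture added to the tests emulates.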
-rw-r--r--  keystonemiddleware/auth_token/__init__.py                               11
-rw-r--r--  keystonemiddleware/auth_token/_cache.py                                   2
-rw-r--r--  keystonemiddleware/auth_token/_memcache_pool.py                         186
-rw-r--r--  keystonemiddleware/tests/unit/auth_token/test_auth_token_middleware.py  18
-rw-r--r--  keystonemiddleware/tests/unit/auth_token/test_connection_pool.py         4
-rw-r--r--  requirements.txt                                                          1
6 files changed, 33 insertions(+), 189 deletions(-)
diff --git a/keystonemiddleware/auth_token/__init__.py b/keystonemiddleware/auth_token/__init__.py
index 343bec2..9e84c95 100644
--- a/keystonemiddleware/auth_token/__init__.py
+++ b/keystonemiddleware/auth_token/__init__.py
@@ -225,6 +225,8 @@ from keystoneauth1 import loading
 from keystoneauth1.loading import session as session_loading
 from keystoneclient.common import cms
 from keystoneclient import exceptions as ksc_exceptions
+import oslo_cache
+from oslo_config import cfg
 from oslo_log import log as logging
 from oslo_serialization import jsonutils
 import webob.dec
@@ -245,6 +247,7 @@ from keystonemiddleware.i18n import _
 
 _LOG = logging.getLogger(__name__)
 _CACHE_INVALID_INDICATOR = 'invalid'
+oslo_cache.configure(cfg.CONF)
 
 
 AUTH_TOKEN_OPTS = [
@@ -952,7 +955,15 @@ class AuthProtocol(BaseAuthProtocol):
             include_service_catalog=self._include_service_catalog,
             requested_auth_version=auth_version)
 
+    def _create_oslo_cache(self):
+        # having this as a function makes test mocking easier
+        conf = cfg.CONF
+        region = oslo_cache.create_region()
+        oslo_cache.configure_cache_region(conf, region)
+        return region
+
     def _token_cache_factory(self):
+
         security_strategy = self._conf.get('memcache_security_strategy')
 
         cache_kwargs = dict(
diff --git a/keystonemiddleware/auth_token/_cache.py b/keystonemiddleware/auth_token/_cache.py
index 43ce923..2801077 100644
--- a/keystonemiddleware/auth_token/_cache.py
+++ b/keystonemiddleware/auth_token/_cache.py
@@ -13,13 +13,13 @@
 import contextlib
 import hashlib
 
+from oslo_cache import _memcache_pool as memcache_pool
 from oslo_serialization import jsonutils
 from oslo_utils import timeutils
 import six
 
 from keystonemiddleware.auth_token import _exceptions as exc
 from keystonemiddleware.auth_token import _memcache_crypt as memcache_crypt
-from keystonemiddleware.auth_token import _memcache_pool as memcache_pool
 from keystonemiddleware.i18n import _
 
 
diff --git a/keystonemiddleware/auth_token/_memcache_pool.py b/keystonemiddleware/auth_token/_memcache_pool.py
deleted file mode 100644
index 2b3853a..0000000
--- a/keystonemiddleware/auth_token/_memcache_pool.py
+++ /dev/null
@@ -1,186 +0,0 @@
-# Copyright 2014 Mirantis Inc
-# All Rights Reserved.
-#
-# Licensed under the Apache License, Version 2.0 (the "License"); you may
-# not use this file except in compliance with the License. You may obtain
-# a copy of the License at
-#
-#      http://www.apache.org/licenses/LICENSE-2.0
-#
-# Unless required by applicable law or agreed to in writing, software
-# distributed under the License is distributed on an "AS IS" BASIS, WITHOUT
-# WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. See the
-# License for the specific language governing permissions and limitations
-# under the License.
-
-"""Thread-safe connection pool for python-memcached."""
-
-# NOTE(yorik-sar): this file is copied between keystone and keystonemiddleware
-# and should be kept in sync until we can use external library for this.
-
-import collections
-import contextlib
-import itertools
-import time
-
-from oslo_log import log as logging
-from six.moves import queue
-
-_PoolItem = collections.namedtuple('_PoolItem', ['ttl', 'connection'])
-
-
-class ConnectionGetTimeoutException(Exception):
-    pass
-
-
-class ConnectionPool(queue.Queue):
-    """Base connection pool class.
-
-    This class implements the basic connection pool logic as an abstract base
-    class.
-    """
-
-    def __init__(self, maxsize, unused_timeout, conn_get_timeout=None):
-        """Initialize the connection pool.
-
-        :param maxsize: maximum number of client connections for the pool
-        :type maxsize: int
-        :param unused_timeout: idle time to live for unused clients (in
-                               seconds). If a client connection object has been
-                               in the pool and idle for longer than the
-                               unused_timeout, it will be reaped. This is to
-                               ensure resources are released as utilization
-                               goes down.
-        :type unused_timeout: int
-        :param conn_get_timeout: maximum time in seconds to wait for a
-                                 connection. If set to `None` timeout is
-                                 indefinite.
-        :type conn_get_timeout: int
-        """
-        queue.Queue.__init__(self, maxsize)
-        self._unused_timeout = unused_timeout
-        self._connection_get_timeout = conn_get_timeout
-        self._acquired = 0
-        self._LOG = logging.getLogger(__name__)
-
-    def _create_connection(self):
-        raise NotImplementedError
-
-    def _destroy_connection(self, conn):
-        raise NotImplementedError
-
-    @contextlib.contextmanager
-    def acquire(self):
-        try:
-            conn = self.get(timeout=self._connection_get_timeout)
-        except queue.Empty:
-            self._LOG.critical('Unable to get a connection from pool id '
-                               '%(id)s after %(seconds)s seconds.',
-                               {'id': id(self),
-                                'seconds': self._connection_get_timeout})
-            raise ConnectionGetTimeoutException()
-        try:
-            yield conn
-        finally:
-            self.put(conn)
-
-    def _qsize(self):
-        return self.maxsize - self._acquired
-
-    if not hasattr(queue.Queue, '_qsize'):
-        qsize = _qsize
-
-    def _get(self):
-        if self.queue:
-            conn = self.queue.pop().connection
-        else:
-            conn = self._create_connection()
-        self._acquired += 1
-        return conn
-
-    def _put(self, conn):
-        self.queue.append(_PoolItem(
-            ttl=time.time() + self._unused_timeout,
-            connection=conn,
-        ))
-        self._acquired -= 1
-        # Drop all expired connections from the right end of the queue
-        now = time.time()
-        while self.queue and self.queue[0].ttl < now:
-            conn = self.queue.popleft().connection
-            self._destroy_connection(conn)
-
-
-class MemcacheClientPool(ConnectionPool):
-    def __init__(self, urls, dead_retry=None, socket_timeout=None, **kwargs):
-        ConnectionPool.__init__(self, **kwargs)
-        self._urls = urls
-        self._dead_retry = dead_retry
-        self._socket_timeout = socket_timeout
-
-        # NOTE(morganfainberg): The host objects expect an int for the
-        # deaduntil value. Initialize this at 0 for each host with 0 indicating
-        # the host is not dead.
-        self._hosts_deaduntil = [0] * len(urls)
-
-        # NOTE(morganfainberg): Lazy import to allow middleware to work with
-        # python 3k even if memcache will not due to python 3k
-        # incompatibilities within the python-memcache library.
-        global memcache
-        import memcache
-
-        # This 'class' is taken from http://stackoverflow.com/a/22520633/238308
-        # Don't inherit client from threading.local so that we can reuse
-        # clients in different threads
-        MemcacheClient = type('_MemcacheClient', (object,),
-                              dict(memcache.Client.__dict__))
-
-        self._memcache_client_class = MemcacheClient
-
-    def _create_connection(self):
-        return self._memcache_client_class(self._urls,
-                                           dead_retry=self._dead_retry,
-                                           socket_timeout=self._socket_timeout)
-
-    def _destroy_connection(self, conn):
-        conn.disconnect_all()
-
-    def _get(self):
-        conn = ConnectionPool._get(self)
-        try:
-            # Propagate host state known to us to this client's list
-            now = time.time()
-            for deaduntil, host in zip(self._hosts_deaduntil, conn.servers):
-                if deaduntil > now and host.deaduntil <= now:
-                    host.mark_dead('propagating death mark from the pool')
-                host.deaduntil = deaduntil
-        except Exception:
-            # We need to be sure that connection doesn't leak from the pool.
-            # This code runs before we enter context manager's try-finally
-            # block, so we need to explicitly release it here
-            ConnectionPool._put(self, conn)
-            raise
-        return conn
-
-    def _put(self, conn):
-        try:
-            # If this client found that one of the hosts is dead, mark it as
-            # such in our internal list
-            now = time.time()
-            for i, deaduntil, host in zip(itertools.count(),
-                                          self._hosts_deaduntil,
-                                          conn.servers):
-                # Do nothing if we already know this host is dead
-                if deaduntil <= now:
-                    if host.deaduntil > now:
-                        self._hosts_deaduntil[i] = host.deaduntil
-                    else:
-                        self._hosts_deaduntil[i] = 0
-            # If all hosts are dead we should forget that they're dead. This
-            # way we won't get completely shut off until dead_retry seconds
-            # pass, but will be checking servers as frequent as we can (over
-            # way smaller socket_timeout)
-            if all(deaduntil > now for deaduntil in self._hosts_deaduntil):
-                self._hosts_deaduntil[:] = [0] * len(self._hosts_deaduntil)
-        finally:
-            ConnectionPool._put(self, conn)
diff --git a/keystonemiddleware/tests/unit/auth_token/test_auth_token_middleware.py b/keystonemiddleware/tests/unit/auth_token/test_auth_token_middleware.py
index a130b19..5c59d14 100644
--- a/keystonemiddleware/tests/unit/auth_token/test_auth_token_middleware.py
+++ b/keystonemiddleware/tests/unit/auth_token/test_auth_token_middleware.py
@@ -28,6 +28,7 @@ from keystoneauth1 import session
 from keystoneclient.common import cms
 from keystoneclient import exceptions as ksc_exceptions
 import mock
+import oslo_cache
 from oslo_log import log as logging
 from oslo_serialization import jsonutils
 from oslo_utils import timeutils
@@ -252,6 +253,17 @@ class v3CompositeFakeApp(CompositeBase, v3FakeApp):
             v3_default_service_env_additions)
 
 
+class FakeOsloCache(_cache._FakeClient):
+    """A fake oslo_cache object.
+
+    The memcache and oslo_cache interfaces are almost the same except we need
+    to return NO_VALUE when not found.
+    """
+
+    def get(self, key):
+        return super(FakeOsloCache, self).get(key) or oslo_cache.NO_VALUE
+
+
 class BaseAuthTokenMiddlewareTest(base.BaseAuthTokenTestCase):
     """Base test class for auth_token middleware.
 
@@ -270,6 +282,12 @@ class BaseAuthTokenMiddlewareTest(base.BaseAuthTokenTestCase):
         super(BaseAuthTokenMiddlewareTest, self).setUp()
 
         self.logger = self.useFixture(fixtures.FakeLogger(level=logging.DEBUG))
+
+        # the default oslo_cache is null cache, always use an in-mem cache
+        self.useFixture(fixtures.MockPatchObject(auth_token.AuthProtocol,
+                                                 '_create_oslo_cache',
+                                                 return_value=FakeOsloCache()))
+
         self.expected_env = expected_env or dict()
         self.fake_app = fake_app or FakeApp
         self.middleware = None
diff --git a/keystonemiddleware/tests/unit/auth_token/test_connection_pool.py b/keystonemiddleware/tests/unit/auth_token/test_connection_pool.py
index 074d1e5..699b5b5 100644
--- a/keystonemiddleware/tests/unit/auth_token/test_connection_pool.py
+++ b/keystonemiddleware/tests/unit/auth_token/test_connection_pool.py
@@ -13,11 +13,11 @@
 import time
 
 import mock
+from oslo_cache import _memcache_pool
 from six.moves import queue
 import testtools
 from testtools import matchers
 
-from keystonemiddleware.auth_token import _memcache_pool
 from keystonemiddleware.tests.unit import utils
 
 
@@ -109,7 +109,7 @@ class TestConnectionPool(utils.TestCase):
         # Make sure we've consumed the only available connection from the pool
         conn = connection_pool.get_nowait()
 
-        self.assertRaises(_memcache_pool.ConnectionGetTimeoutException,
+        self.assertRaises(_memcache_pool.exception.QueueEmpty,
                           _acquire_connection)
 
         # Put the connection back and ensure we can acquire the connection
diff --git a/requirements.txt b/requirements.txt
index 063f47c..7b839c7 100644
--- a/requirements.txt
+++ b/requirements.txt
@@ -3,6 +3,7 @@
 # process, which may cause wedges in the gate later.
 
 keystoneauth1>=3.2.0 # Apache-2.0
+oslo.cache>=1.26.0 # Apache-2.0
 oslo.config>=4.6.0 # Apache-2.0
 oslo.context>=2.19.2 # Apache-2.0
 oslo.i18n>=3.15.3 # Apache-2.0