Protect rootwrap daemon socket against multiple threads

Wrap the call in a semaphore (a threading.Lock, which eventlet
monkey-patches with a green equivalent). Simultaneous Client.execute
calls share one daemon connection and can fail badly. Alternatively,
a new rootwrap daemon connection could be opened every time
Client.execute is called, making the semaphore unnecessary.

Change-Id: Id9d38832c67f2d81d382cda797a48fee943a27f1
Closes-bug: #1654287
Author: IWAMOTO Toshihiro
Date:   2017-10-24 16:27:13 +09:00
Parent: 2f03cce711
Commit: 7711a6ce31

2 changed files with 69 additions and 12 deletions
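
To make the race concrete: Client keeps a single multiprocessing connection
to the rootwrap daemon, and each request is a send followed by a recv on that
connection. If two threads interleave those steps, each can consume the reply
meant for the other. A minimal sketch of the failure mode and the fix
(illustrative only; none of these names are from the commit):

    import multiprocessing
    import threading

    # A toy echo server stands in for the rootwrap daemon; the Pipe
    # stands in for the shared daemon connection.
    parent, child = multiprocessing.Pipe()

    def server():
        for _ in range(2):
            child.send('reply-to-%s' % child.recv())

    lock = threading.Lock()
    results = {}

    def execute(name):
        # Holding the lock across the send/recv pair is what keeps one
        # caller from stealing another caller's reply.
        with lock:
            parent.send(name)
            results[name] = parent.recv()

    threading.Thread(target=server).start()
    workers = [threading.Thread(target=execute, args=('req%d' % i,))
               for i in range(2)]
    for w in workers:
        w.start()
    for w in workers:
        w.join()
    assert results == {'req0': 'reply-to-req0', 'req1': 'reply-to-req1'}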

oslo_rootwrap/client.py

@@ -47,11 +47,16 @@ class Client(object):
     def __init__(self, rootwrap_daemon_cmd):
         self._start_command = rootwrap_daemon_cmd
         self._initialized = False
+        self._need_restart = False
         self._mutex = threading.Lock()
         self._manager = None
         self._proxy = None
         self._process = None
         self._finalize = None
+        # This is for eventlet compatibility. multiprocessing stores
+        # daemon connection in ForkAwareLocal, so this won't be
+        # needed with the threading module.
+        self._exec_sem = threading.Lock()
 
     def _initialize(self):
         if self._process is not None and self._process.poll() is not None:
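
Note that _exec_sem is a plain threading.Lock rather than an eventlet
primitive: the code relies on eventlet's monkey patching to substitute a
green lock when Client runs in a monkey-patched process. A small sketch of
that assumption (illustrative, not from the commit):

    import eventlet
    eventlet.monkey_patch()

    import threading

    # After monkey_patch(), threading.Lock() is a green lock, so it also
    # serializes eventlet greenthreads, not just OS threads.
    lock = threading.Lock()
    order = []

    def worker(name):
        with lock:
            order.append(name)
            eventlet.sleep(0)   # switch greenthreads while holding the lock
            order.append(name)

    for gt in [eventlet.spawn(worker, n) for n in 'ab']:
        gt.wait()
    # Each worker's two appends stay adjacent despite the switch.
    assert order in (['a', 'a', 'b', 'b'], ['b', 'b', 'a', 'a'])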
@@ -119,20 +124,40 @@ class Client(object):
         self._proxy = None
         self._initialized = False
         self._initialize()
+        self._need_restart = False
         return self._proxy
 
+    def _run_one_command(self, proxy, cmd, stdin):
+        """Wrap proxy.run_one_command, setting _need_restart on an exception.
+
+        Usually it should be enough to drain stale data on socket
+        rather than to restart, but we cannot do draining easily.
+        """
+        try:
+            _need_restart = True
+            res = proxy.run_one_command(cmd, stdin)
+            _need_restart = False
+            return res
+        finally:
+            if _need_restart:
+                self._need_restart = True
+
     def execute(self, cmd, stdin=None):
-        self._ensure_initialized()
-        proxy = self._proxy
-        retry = False
-        try:
-            res = proxy.run_one_command(cmd, stdin)
-        except (EOFError, IOError):
-            retry = True
-        # res can be None if we received final None sent by dying server thread
-        # instead of response to our request. Process is most likely to be dead
-        # at this point.
-        if retry or res is None:
-            proxy = self._restart(proxy)
-            res = proxy.run_one_command(cmd, stdin)
+        with self._exec_sem:
+            self._ensure_initialized()
+            proxy = self._proxy
+            retry = False
+            if self._need_restart:
+                proxy = self._restart(proxy)
+            try:
+                res = self._run_one_command(proxy, cmd, stdin)
+            except (EOFError, IOError):
+                retry = True
+            # res can be None if we received final None sent by dying
+            # server thread instead of response to our
+            # request. Process is most likely to be dead at this
+            # point.
+            if retry or res is None:
+                proxy = self._restart(proxy)
+                res = self._run_one_command(proxy, cmd, stdin)
         return res
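
The helper's try/finally flag, rather than a wider except clause, is
deliberate: under eventlet an exception such as eventlet.Timeout can be
raised at any yield point inside run_one_command, so no except list can
anticipate every exit, while finally runs on every exit path and reliably
marks the connection for restart. A generic sketch of the pattern (all names
here are illustrative):

    class Caller(object):
        def __init__(self):
            self._dirty = False        # analogous to _need_restart

        def call(self, func):
            ok = False
            try:
                res = func()
                ok = True
                return res
            finally:
                if not ok:
                    # Runs for *any* exception, including ones injected
                    # asynchronously (e.g. eventlet.Timeout), before it
                    # propagates to the caller.
                    self._dirty = True

    c = Caller()
    try:
        c.call(lambda: 1 // 0)
    except ZeroDivisionError:
        pass
    assert c._dirty    # the next call knows the channel may hold stale data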

oslo_rootwrap/tests/test_functional_eventlet.py

@@ -25,3 +25,35 @@ if os.environ.get('TEST_EVENTLET', False):
         def assert_unpatched(self):
             # This test case is specifically for eventlet testing
             pass
+
+        def _thread_worker(self, seconds, msg):
+            code, out, err = self.execute(
+                ['sh', '-c', 'sleep %d; echo %s' % (seconds, msg)])
+            # Ignore trailing newline
+            self.assertEqual(msg, out.rstrip())
+
+        def _thread_worker_timeout(self, seconds, msg, timeout):
+            with eventlet.Timeout(timeout):
+                try:
+                    self._thread_worker(seconds, msg)
+                except eventlet.Timeout:
+                    pass
+
+        def test_eventlet_threads(self):
+            """Check eventlet compatibility.
+
+            The multiprocessing module is not eventlet friendly and
+            must be protected against eventlet thread switching and its
+            timeout exceptions.
+            """
+            th = []
+            # 10 was not enough for some reason.
+            for i in range(15):
+                th.append(
+                    eventlet.spawn(self._thread_worker, i % 3, 'abc%d' % i))
+            for i in [5, 17, 20, 25]:
+                th.append(
+                    eventlet.spawn(self._thread_worker_timeout, 2,
+                                   'timeout%d' % i, i))
+            for thread in th:
+                thread.wait()
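
The workers with timeouts exercise the case where eventlet.Timeout fires
while a greenthread is inside execute's critical section; the with statement
around _exec_sem guarantees the lock is released on that exit path too. A
reduced sketch of what they check (illustrative, not from the commit):

    import eventlet
    eventlet.monkey_patch()

    import threading

    lock = threading.Lock()

    def interrupted_worker():
        try:
            with eventlet.Timeout(0.01):
                with lock:
                    eventlet.sleep(1)   # the timer fires mid-critical-section
        except eventlet.Timeout:
            pass

    eventlet.spawn(interrupted_worker).wait()
    # The with statement released the lock on the way out, so later
    # callers are not deadlocked.
    assert not lock.locked()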