cni health: Avoid capsh dependency
Procfs provides all the information we need, so we can avoid depending on external tools. In my testing, this takes 33µs (procfs) compared to 6.69ms (capsh). Partially-Implements: blueprint cni-daemon-readiness-liveness Change-Id: I766ccaa3e80b0bde7375cef858547d2e43ad4e9f Signed-off-by: Antoni Segura Puimedon <antonisp@celebdor.com>
This commit is contained in:
parent
d00b09ce05
commit
bdb394958f
|
@ -15,7 +15,6 @@ import os
|
|||
import psutil
|
||||
import requests
|
||||
from six.moves import http_client as httplib
|
||||
import subprocess
|
||||
|
||||
from flask import Flask
|
||||
from pyroute2 import IPDB
|
||||
|
@ -41,6 +40,16 @@ cni_health_server_opts = [
|
|||
CONF.register_opts(cni_health_server_opts, "cni_health_server")
|
||||
|
||||
BYTES_AMOUNT = 1048576
|
||||
CAP_NET_ADMIN = 12 # Taken from linux/capabilities.h
|
||||
EFFECTIVE_CAPS = 'CapEff:\t'
|
||||
|
||||
|
||||
def _has_cap(capability, entry):
|
||||
with open('/proc/self/status', 'r') as pstat:
|
||||
for line in pstat:
|
||||
if line.startswith(entry):
|
||||
caps = int(line[len(entry):], 16)
|
||||
return (caps & (1 << capability)) != 0
|
||||
|
||||
|
||||
class CNIHealthServer(object):
|
||||
|
@ -63,13 +72,10 @@ class CNIHealthServer(object):
|
|||
self.headers = {'Connection': 'close'}
|
||||
|
||||
def readiness_status(self):
|
||||
net_admin_command = 'capsh --print | grep "Current:" | ' \
|
||||
'cut -d" " -f3 | grep -q cap_net_admin'
|
||||
return_code = subprocess.call(net_admin_command, shell=True)
|
||||
data = 'ok'
|
||||
k8s_conn, k8s_status = self.verify_k8s_connection()
|
||||
|
||||
if return_code != 0:
|
||||
if not _has_cap(CAP_NET_ADMIN, EFFECTIVE_CAPS):
|
||||
error_message = 'NET_ADMIN capabilities not present.'
|
||||
LOG.error(error_message)
|
||||
return error_message, httplib.INTERNAL_SERVER_ERROR, self.headers
|
||||
|
|
|
@ -32,30 +32,30 @@ class TestCNIHealthServer(base.TestCase):
|
|||
self.srv.application.testing = True
|
||||
self.test_client = self.srv.application.test_client()
|
||||
|
||||
@mock.patch('subprocess.call')
|
||||
@mock.patch('kuryr_kubernetes.cni.health._has_cap')
|
||||
@mock.patch('kuryr_kubernetes.cni.health.CNIHealthServer.'
|
||||
'verify_k8s_connection')
|
||||
def test_readiness_status(self, m_verify_k8s_conn, m_subprocess):
|
||||
m_subprocess.return_value = 0
|
||||
def test_readiness_status(self, m_verify_k8s_conn, cap_tester):
|
||||
cap_tester.return_value = True
|
||||
m_verify_k8s_conn.return_value = True, 200
|
||||
resp = self.test_client.get('/ready')
|
||||
self.assertEqual(200, resp.status_code)
|
||||
|
||||
@mock.patch('subprocess.call')
|
||||
@mock.patch('kuryr_kubernetes.cni.health._has_cap')
|
||||
@mock.patch('kuryr_kubernetes.cni.health.CNIHealthServer.'
|
||||
'verify_k8s_connection')
|
||||
def test_readiness_status_net_admin_error(self, m_verify_k8s_conn,
|
||||
m_subprocess):
|
||||
m_subprocess.return_value = 1
|
||||
cap_tester):
|
||||
cap_tester.return_value = False
|
||||
m_verify_k8s_conn.return_value = True, 200
|
||||
resp = self.test_client.get('/ready')
|
||||
self.assertEqual(500, resp.status_code)
|
||||
|
||||
@mock.patch('subprocess.call')
|
||||
@mock.patch('kuryr_kubernetes.cni.health._has_cap')
|
||||
@mock.patch('kuryr_kubernetes.cni.health.CNIHealthServer.'
|
||||
'verify_k8s_connection')
|
||||
def test_readiness_status_k8s_error(self, m_verify_k8s_conn, m_subprocess):
|
||||
m_subprocess.return_value = 0
|
||||
def test_readiness_status_k8s_error(self, m_verify_k8s_conn, cap_tester):
|
||||
cap_tester.return_value = True
|
||||
m_verify_k8s_conn.return_value = False, 503
|
||||
resp = self.test_client.get('/ready')
|
||||
self.assertEqual(503, resp.status_code)
|
||||
|
@ -87,3 +87,8 @@ class TestCNIHealthServer(base.TestCase):
|
|||
m_resource.return_value = cls
|
||||
resp = self.test_client.get('/alive')
|
||||
self.assertEqual(500, resp.status_code)
|
||||
|
||||
|
||||
class TestCNIHealthUtils(base.TestCase):
|
||||
def test_has_cap(self):
|
||||
self.assertTrue(health._has_cap(health.CAP_NET_ADMIN, 'CapBnd:\t'))
|
||||
|
|
Loading…
Reference in New Issue