Add statsd support

Also correct the oversight of not documenting the SSL params of
the Server class.

Also cap sphinx below 1.2 due to issues with that release, and add
the hacking (H*) checks to the flake8 ignore list.

Change-Id: Ic4e8e942620d06a92696d5cf52bc2e9ce6e66bdc
James E. Blair 2013-12-19 09:51:43 -08:00
parent 9c110fb84e
commit a8951ef927
5 changed files with 94 additions and 7 deletions


@@ -29,6 +29,11 @@ try:
 except ImportError:
     import queue as queue
 
+try:
+    import statsd
+except ImportError:
+    statsd = None
+
 PRECEDENCE_NORMAL = 0
 PRECEDENCE_LOW = 1
 PRECEDENCE_HIGH = 2
@@ -2109,13 +2114,31 @@ class Server(BaseClientServer):
     (not for production use).
 
     :arg int port: The TCP port on which to listen.
+    :arg str ssl_key: Path to the SSL private key.
+    :arg str ssl_cert: Path to the SSL certificate.
+    :arg str ssl_ca: Path to the CA certificate.
+    :arg str statsd_host: statsd hostname. None means disabled
+        (the default).
+    :arg str statsd_port: statsd port (defaults to 8125).
+    :arg str statsd_prefix: statsd key prefix.
     """
 
-    def __init__(self, port=4730, ssl_key=None, ssl_cert=None, ssl_ca=None):
+    def __init__(self, port=4730, ssl_key=None, ssl_cert=None, ssl_ca=None,
+                 statsd_host=None, statsd_port=8125, statsd_prefix=None):
         self.port = port
         self.ssl_key = ssl_key
         self.ssl_cert = ssl_cert
         self.ssl_ca = ssl_ca
+        if statsd_host:
+            if not statsd:
+                self.log.error("Unable to import statsd module")
+                self.statsd = None
+            else:
+                self.statsd = statsd.StatsClient(statsd_host,
+                                                 statsd_port,
+                                                 statsd_prefix)
+        else:
+            self.statsd = None
         self.high_queue = []
         self.normal_queue = []
         self.low_queue = []
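For embedders, the new keyword arguments can be passed directly; a minimal
sketch (the host and prefix values below are placeholders, not gear defaults):

    # Sketch: start an embedded gear server that reports its queue gauges
    # to a statsd daemon.  Leaving statsd_host as None (the default) keeps
    # metrics disabled entirely.
    import gear

    server = gear.Server(port=4730,
                         statsd_host='127.0.0.1',  # placeholder statsd host
                         statsd_port=8125,
                         statsd_prefix='gear')

If the statsd module cannot be imported, the constructor above logs an error
and leaves self.statsd set to None, so the server keeps working without
metrics.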
@@ -2227,6 +2250,7 @@ class Server(BaseClientServer):
         if job.worker_connection:
             del job.worker_connection.related_jobs[job.handle]
         del self.jobs[job.handle]
+        self._updateStats()
 
     def getQueue(self):
         """Returns a copy of all internal queues in a flattened form.
@@ -2257,11 +2281,12 @@ class Server(BaseClientServer):
                 if handle == job.handle:
                     queue.remove(job)
                     del self.jobs[handle]
+                    self._updateStats()
                     request.connection.sendRaw(b'OK\n')
                     return
         request.connection.sendRaw(b'ERR UNKNOWN_JOB\n')
 
-    def handleStatus(self, request):
+    def _getFunctionStats(self):
         functions = {}
         for function in self.functions:
             # Total, running, workers
@@ -2273,6 +2298,10 @@ class Server(BaseClientServer):
         for connection in self.active_connections:
             for function in connection.functions:
                 functions[function][2] += 1
+        return functions
+
+    def handleStatus(self, request):
+        functions = self._getFunctionStats()
         for name, values in functions.items():
             request.connection.sendRaw(("%s\t%s\t%s\t%s\n" %
                                         (name, values[0], values[1],
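Since handleStatus() keeps emitting the tab-separated per-function lines shown
above, the refactor can be sanity-checked over the plain admin protocol. A
rough sketch, assuming a server on localhost:4730 without SSL and the usual
gearman convention of ending admin replies with a lone ".":

    # Illustration only: poll the text/admin "status" command, whose reply
    # lines handleStatus() formats as "<name>\t<total>\t<running>\t<workers>".
    import socket

    sock = socket.create_connection(('localhost', 4730))
    sock.sendall(b'status\n')
    buf = b''
    while not buf.endswith(b'.\n'):
        chunk = sock.recv(4096)
        if not chunk:
            break
        buf += chunk
    sock.close()

    for line in buf.decode('utf-8').splitlines():
        if line == '.':
            break
        name, total, running, workers = line.split('\t')
        print(name, total, running, workers)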
@@ -2297,6 +2326,47 @@ class Server(BaseClientServer):
             connection.changeState("AWAKE")
             connection.sendPacket(p)
 
+    def _updateStats(self):
+        if not self.statsd:
+            return
+
+        # prefix.queue.JOB.waiting
+        # prefix.queue.JOB.running
+        # prefix.queue.JOB.workers
+        # prefix.queue.waiting
+        # prefix.queue.running
+        # prefix.queue.workers
+
+        functions = self._getFunctionStats()
+        base_key = 'queue'
+        total_waiting = 0
+        total_running = 0
+        for name, values in functions.items():
+            (total, running, workers) = values
+            job_key = '.'.join([base_key, name])
+
+            key = '.'.join([job_key, 'waiting'])
+            self.statsd.gauge(key, total - running)
+            total_waiting += (total - running)
+
+            key = '.'.join([job_key, 'running'])
+            self.statsd.gauge(key, running)
+            total_running += running
+
+            key = '.'.join([job_key, 'workers'])
+            self.statsd.gauge(key, workers)
+
+        key = '.'.join([base_key, 'waiting'])
+        self.statsd.gauge(key, total_waiting)
+        key = '.'.join([base_key, 'running'])
+        self.statsd.gauge(key, total_running)
+
+        total_workers = 0
+        for connection in self.active_connections:
+            if connection.functions:
+                total_workers += 1
+        key = '.'.join([base_key, 'workers'])
+        self.statsd.gauge(key, total_workers)
+
     def _handleSubmitJob(self, packet, precedence):
         name = packet.getArgument(0)
         unique = packet.getArgument(1)
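With the key scheme from the comments above, a server created with
statsd_prefix='gear' and a registered function named 'build' would emit gauges
such as gear.queue.build.waiting, gear.queue.build.running and
gear.queue.build.workers, plus the aggregate gear.queue.waiting,
gear.queue.running and gear.queue.workers. For a quick local check, a
throwaway UDP listener can print the raw datagrams; this sketch assumes
statsd's plain text format (name:value|g) on the default port 8125:

    # Throwaway statsd "server" for eyeballing the gauges _updateStats()
    # emits, without running a real statsd/graphite stack.
    import socket

    sock = socket.socket(socket.AF_INET, socket.SOCK_DGRAM)
    sock.bind(('127.0.0.1', 8125))  # statsd's default UDP port
    print('listening for statsd datagrams on 127.0.0.1:8125')
    while True:
        data, _addr = sock.recvfrom(4096)
        for metric in data.decode('utf-8').splitlines():
            # gauges look like "gear.queue.<function>.waiting:3|g"
            print(metric)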
@@ -2317,6 +2387,7 @@ class Server(BaseClientServer):
             self.normal_queue.append(job)
         elif precedence == PRECEDENCE_LOW:
             self.low_queue.append(job)
+        self._updateStats()
         self.wakeConnections()
 
     def handleSubmitJob(self, packet):
@@ -2337,6 +2408,7 @@ class Server(BaseClientServer):
                         connection.related_jobs[job.handle] = job
                         job.worker_connection = connection
                         job.running = True
+                        self._updateStats()
                     return job
         return None
 
@@ -2399,6 +2471,7 @@ class Server(BaseClientServer):
         del self.jobs[handle]
         del job.client_connection.related_jobs[handle]
         del job.worker_connection.related_jobs[handle]
+        self._updateStats()
 
     def handleSetClientID(self, packet):
         name = packet.getArgument(0)


@@ -34,7 +34,13 @@ class Server(object):
         self.gear_server_pid = None
 
     def parse_arguments(self):
-        parser = argparse.ArgumentParser(description='Gearman server.')
+        parser = argparse.ArgumentParser(description="""
+Gearman server.
+
+If the statsd python module is available, set STATSD_HOST,
+STATSD_PORT, and STATSD_PREFIX environment variables for statsd
+support.
+""")
         parser.add_argument('-d', dest='nodaemon', action='store_true',
                             help='do not run as a daemon')
         parser.add_argument('-p', dest='port', default=4730,
@@ -69,10 +75,16 @@
     def main(self):
         self.setup_logging()
+        statsd_host = os.environ.get('STATSD_HOST')
+        statsd_port = int(os.environ.get('STATSD_PORT', 8125))
+        statsd_prefix = os.environ.get('STATSD_PREFIX')
         self.server = gear.Server(self.args.port,
                                   self.args.ssl_key,
                                   self.args.ssl_cert,
-                                  self.args.ssl_ca)
+                                  self.args.ssl_ca,
+                                  statsd_host,
+                                  statsd_port,
+                                  statsd_prefix)
 
         signal.pause()
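On the command-line side, the environment variables in the new help text are
the whole configuration surface. A hedged launch example, assuming the server
console script is installed as geard and using placeholder values:

    # Illustration: run the gear server with statsd reporting enabled via
    # the environment variables described in the new --help text.
    import os
    import subprocess

    env = dict(os.environ,
               STATSD_HOST='127.0.0.1',  # placeholder; unset means disabled
               STATSD_PORT='8125',       # optional, 8125 is the default
               STATSD_PREFIX='gear')     # optional key prefix
    subprocess.check_call(['geard', '-d'], env=env)  # -d: stay in the foreground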


@@ -1,3 +1,3 @@
 pbr>=0.5.21,<1.0
-python-daemon
 extras
+python-daemon


@@ -3,7 +3,8 @@ discover
 fixtures>=0.3.12
 hacking>=0.5.3,<0.6
 python-subunit
-sphinx>=1.1.2
+statsd>=1.0.0,<3.0
+sphinx>=1.1.2,<1.2
 testrepository>=0.0.13
 testresources
 testscenarios


@@ -33,4 +33,5 @@ commands = {posargs}
 [flake8]
 exclude = .venv,.tox,dist,doc,*.egg
 show-source = true
-ignore = E123,E125
+# E123, E125, and H ignored intentionally in this code-base
+ignore = E123,E125,H