diff --git a/etc/zuul.conf-sample b/etc/zuul.conf-sample
index f0e17658c0..17092af55f 100644
--- a/etc/zuul.conf-sample
+++ b/etc/zuul.conf-sample
@@ -38,6 +38,7 @@ trusted_rw_paths=/opt/zuul-logs
 listen_address=127.0.0.1
 port=9000
 static_cache_expiry=0
+;sql_connection_name=mydatabase
 
 [webapp]
 listen_address=0.0.0.0
diff --git a/zuul/cmd/web.py b/zuul/cmd/web.py
index 6e5489f74b..ad3062ff08 100755
--- a/zuul/cmd/web.py
+++ b/zuul/cmd/web.py
@@ -22,6 +22,7 @@ import threading
 
 import zuul.cmd
 import zuul.web
+from zuul.driver.sql import sqlconnection
 from zuul.lib.config import get_default
 
 
@@ -48,6 +49,30 @@ class WebServer(zuul.cmd.ZuulDaemonApp):
         params['ssl_cert'] = get_default(self.config, 'gearman', 'ssl_cert')
         params['ssl_ca'] = get_default(self.config, 'gearman', 'ssl_ca')
 
+        sql_conn_name = get_default(self.config, 'web',
+                                    'sql_connection_name')
+        sql_conn = None
+        if sql_conn_name:
+            # we want a specific sql connection
+            sql_conn = self.connections.connections.get(sql_conn_name)
+            if not sql_conn:
+                self.log.error("Couldn't find sql connection '%s'" %
+                               sql_conn_name)
+                sys.exit(1)
+        else:
+            # look for any sql connection
+            connections = [c for c in self.connections.connections.values()
+                           if isinstance(c, sqlconnection.SQLConnection)]
+            if len(connections) > 1:
+                self.log.error("Multiple sql connections found, "
+                               "set the sql_connection_name option "
+                               "in the zuul.conf [web] section")
+                sys.exit(1)
+            if connections:
+                # use this sql connection by default
+                sql_conn = connections[0]
+        params['sql_connection'] = sql_conn
+
         try:
             self.web = zuul.web.ZuulWeb(**params)
         except Exception as e:
@@ -79,6 +104,8 @@ class WebServer(zuul.cmd.ZuulDaemonApp):
         self.setup_logging('web', 'log_config')
         self.log = logging.getLogger("zuul.WebServer")
 
+        self.configure_connections()
+
         try:
             self._run()
         except Exception:
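Note that the commented sql_connection_name sample above only has an effect if it names a connection that is actually defined in zuul.conf. Below is a minimal sketch of the expected wiring, using plain configparser instead of Zuul's own config loading; the connection name matches the sample, while the dburi value and credentials are placeholders:

import configparser

# Hypothetical zuul.conf excerpt: the [web] option added by this change plus
# a matching sql connection section (the dburi below is only a placeholder).
ZUUL_CONF = """
[web]
listen_address=127.0.0.1
port=9000
sql_connection_name=mydatabase

[connection mydatabase]
driver=sql
dburi=mysql+pymysql://user:pass@localhost/zuul
"""

config = configparser.ConfigParser()
config.read_string(ZUUL_CONF)

# Rough equivalent of get_default(self.config, 'web', 'sql_connection_name')
name = config.get('web', 'sql_connection_name', fallback=None)
assert config.has_section('connection %s' % name)
print("zuul-web would look up the connection named:", name)

If sql_connection_name is left unset, the daemon falls back to the single SQLConnection it can find, and refuses to start when several are configured.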
diff --git a/zuul/web/__init__.py b/zuul/web/__init__.py
index db14343ed0..d400f60a1f 100755
--- a/zuul/web/__init__.py
+++ b/zuul/web/__init__.py
@@ -20,11 +20,14 @@ import json
 import logging
 import os
 import time
+import urllib.parse
 import uvloop
 
 import aiohttp
 from aiohttp import web
 
+from sqlalchemy.sql import select
+
 import zuul.rpcclient
 
 STATIC_DIR = os.path.join(os.path.dirname(__file__), 'static')
@@ -200,6 +203,92 @@ class GearmanHandler(object):
         return resp
 
 
+class SqlHandler(object):
+    log = logging.getLogger("zuul.web.SqlHandler")
+    filters = ("project", "pipeline", "change", "patchset", "ref",
+               "result", "uuid", "job_name", "voting", "node_name", "newrev")
+
+    def __init__(self, connection):
+        self.connection = connection
+
+    def query(self, args):
+        build = self.connection.zuul_build_table
+        buildset = self.connection.zuul_buildset_table
+        query = select([
+            buildset.c.project,
+            buildset.c.pipeline,
+            buildset.c.change,
+            buildset.c.patchset,
+            buildset.c.ref,
+            buildset.c.newrev,
+            buildset.c.ref_url,
+            build.c.result,
+            build.c.uuid,
+            build.c.job_name,
+            build.c.voting,
+            build.c.node_name,
+            build.c.start_time,
+            build.c.end_time,
+            build.c.log_url]).select_from(build.join(buildset))
+        for table in ('build', 'buildset'):
+            for k, v in args['%s_filters' % table].items():
+                if table == 'build':
+                    column = build.c
+                else:
+                    column = buildset.c
+                query = query.where(getattr(column, k).in_(v))
+        return query.limit(args['limit']).offset(args['skip']).order_by(
+            build.c.id.desc())
+
+    def get_builds(self, args):
+        """Return a list of builds"""
+        builds = []
+        with self.connection.engine.begin() as conn:
+            query = self.query(args)
+            for row in conn.execute(query):
+                build = dict(row)
+                # Convert dates to ISO format
+                if row.start_time:
+                    build['start_time'] = row.start_time.strftime(
+                        '%Y-%m-%dT%H:%M:%S')
+                if row.end_time:
+                    build['end_time'] = row.end_time.strftime(
+                        '%Y-%m-%dT%H:%M:%S')
+                # Compute run duration
+                if row.start_time and row.end_time:
+                    build['duration'] = (row.end_time -
+                                         row.start_time).total_seconds()
+                builds.append(build)
+        return builds
+
+    async def processRequest(self, request):
+        try:
+            args = {
+                'buildset_filters': {},
+                'build_filters': {},
+                'limit': 50,
+                'skip': 0,
+            }
+            for k, v in urllib.parse.parse_qsl(request.rel_url.query_string):
+                if k in ("tenant", "project", "pipeline", "change",
+                         "patchset", "ref", "newrev"):
+                    args['buildset_filters'].setdefault(k, []).append(v)
+                elif k in ("uuid", "job_name", "voting", "node_name",
+                           "result"):
+                    args['build_filters'].setdefault(k, []).append(v)
+                elif k in ("limit", "skip"):
+                    args[k] = int(v)
+                else:
+                    raise ValueError("Unknown parameter %s" % k)
+            data = self.get_builds(args)
+            resp = web.json_response(data)
+        except Exception as e:
+            self.log.exception("Builds exception:")
+            resp = web.json_response({'error_description': 'Internal error'},
+                                     status=500)
+        return resp
+
+
 class ZuulWeb(object):
     log = logging.getLogger("zuul.web.ZuulWeb")
 
@@ -207,7 +296,8 @@ class ZuulWeb(object):
     def __init__(self, listen_address, listen_port,
                  gear_server, gear_port,
                  ssl_key=None, ssl_cert=None, ssl_ca=None,
-                 static_cache_expiry=3600):
+                 static_cache_expiry=3600,
+                 sql_connection=None):
         self.listen_address = listen_address
         self.listen_port = listen_port
         self.event_loop = None
@@ -218,6 +308,10 @@
                                             ssl_key, ssl_cert, ssl_ca)
         self.log_streaming_handler = LogStreamingHandler(self.rpc)
         self.gearman_handler = GearmanHandler(self.rpc)
+        if sql_connection:
+            self.sql_handler = SqlHandler(sql_connection)
+        else:
+            self.sql_handler = None
 
     async def _handleWebsocket(self, request):
         return await self.log_streaming_handler.processRequest(
@@ -233,6 +327,9 @@
     async def _handleJobsRequest(self, request):
         return await self.gearman_handler.processRequest(request, 'job_list')
 
+    async def _handleSqlRequest(self, request):
+        return await self.sql_handler.processRequest(request)
+
     async def _handleStaticRequest(self, request):
         fp = None
         if request.path.endswith("tenants.html") or request.path.endswith("/"):
             fp = os.path.join(STATIC_DIR, "tenants.html")
         elif request.path.endswith("status.html"):
             fp = os.path.join(STATIC_DIR, "status.html")
         elif request.path.endswith("jobs.html"):
             fp = os.path.join(STATIC_DIR, "jobs.html")
+        elif request.path.endswith("builds.html"):
+            fp = os.path.join(STATIC_DIR, "builds.html")
         headers = {}
         if self.static_cache_expiry:
             headers['Cache-Control'] = "public, max-age=%d" % \
                 self.static_cache_expiry
@@ -269,6 +368,12 @@
             ('GET', '/', self._handleStaticRequest),
         ]
 
+        if self.sql_handler:
+            routes.append(('GET', '/{tenant}/builds.json',
+                           self._handleSqlRequest))
+            routes.append(('GET', '/{tenant}/builds.html',
+                           self._handleStaticRequest))
+
         self.log.debug("ZuulWeb starting")
         asyncio.set_event_loop_policy(uvloop.EventLoopPolicy())
         user_supplied_loop = loop is not None
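The core of SqlHandler.query() is the way it adds one IN clause per requested filter before applying limit, skip and ordering. Here is a standalone sketch of that pattern against an in-memory SQLite database; the two-table schema is illustrative rather than Zuul's real build/buildset schema, and it deliberately uses the same SQLAlchemy 1.x-style select([...]) API as the patch:

import sqlalchemy as sa
from sqlalchemy.sql import select

engine = sa.create_engine('sqlite://')
meta = sa.MetaData()
buildset = sa.Table('buildset', meta,
                    sa.Column('id', sa.Integer, primary_key=True),
                    sa.Column('project', sa.String(255)),
                    sa.Column('pipeline', sa.String(255)))
build = sa.Table('build', meta,
                 sa.Column('id', sa.Integer, primary_key=True),
                 sa.Column('buildset_id', sa.Integer,
                           sa.ForeignKey('buildset.id')),
                 sa.Column('job_name', sa.String(255)),
                 sa.Column('result', sa.String(255)))
meta.create_all(engine)

# One list of accepted values per column, like the *_filters dicts built by
# processRequest(); each entry becomes a WHERE ... IN (...) clause.
filters = {'project': ['org/project'], 'result': ['SUCCESS', 'FAILURE']}
query = select([buildset.c.project, build.c.job_name,
                build.c.result]).select_from(build.join(buildset))
for k, v in filters.items():
    column = buildset.c if k == 'project' else build.c
    query = query.where(getattr(column, k).in_(v))
query = query.limit(50).offset(0).order_by(build.c.id.desc())

with engine.begin() as conn:
    print([dict(row) for row in conn.execute(query)])

Because every filter is ANDed together while repeated query parameters land in the same list, a request like ?result=SUCCESS&result=FAILURE matches either result, whereas ?project=x&result=SUCCESS requires both conditions.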
diff --git a/zuul/web/static/builds.html b/zuul/web/static/builds.html
new file mode 100644
index 0000000000..921c9e2a50
--- /dev/null
+++ b/zuul/web/static/builds.html
@@ -0,0 +1,84 @@
+<!DOCTYPE html>
+<html>
+<head>
+    <title>Zuul Builds</title>
+    <link rel="stylesheet" href="/static/bootstrap/css/bootstrap.min.css">
+    <script src="/static/js/angular.min.js"></script>
+    <script src="/static/javascripts/zuul.angular.js"></script>
+</head>
+<body ng-app="zuulBuilds" ng-controller="mainController">
+  <form ng-submit="builds_fetch()">
+    <input name="pipeline" ng-model="pipeline" placeholder="pipeline" />
+    <input name="job_name" ng-model="job_name" placeholder="job_name" />
+    <input name="project" ng-model="project" placeholder="project" />
+    <input type="submit" value="Refresh" />
+  </form>
+  <table class="table">
+    <thead>
+      <tr>
+        <th>id</th>
+        <th>Job</th>
+        <th>Project</th>
+        <th>Pipeline</th>
+        <th>Change</th>
+        <th>Newrev</th>
+        <th>Duration</th>
+        <th>Log url</th>
+        <th>Node name</th>
+        <th>Start time</th>
+        <th>End time</th>
+        <th>Result</th>
+      </tr>
+    </thead>
+    <tbody>
+      <tr ng-repeat="build in builds" ng-class="rowClass(build)">
+        <td>{{ build.id }}</td>
+        <td>{{ build.job_name }}</td>
+        <td>{{ build.project }}</td>
+        <td>{{ build.pipeline }}</td>
+        <td><a href="{{ build.ref_url }}">change</a></td>
+        <td>{{ build.newrev }}</td>
+        <td>{{ build.duration }} seconds</td>
+        <td><a href="{{ build.log_url }}">logs</a></td>
+        <td>{{ build.node_name }}</td>
+        <td>{{ build.start_time }}</td>
+        <td>{{ build.end_time }}</td>
+        <td>{{ build.result }}</td>
+      </tr>
+    </tbody>
+  </table>
+</body>
+</html>
diff --git a/zuul/web/static/javascripts/zuul.angular.js b/zuul/web/static/javascripts/zuul.angular.js
index 27e1432982..87cbbdd0a1 100644
--- a/zuul/web/static/javascripts/zuul.angular.js
+++ b/zuul/web/static/javascripts/zuul.angular.js
@@ -43,3 +43,57 @@ angular.module('zuulJobs', []).controller(
     }
     $scope.jobs_fetch();
 });
+
+angular.module('zuulBuilds', [], function($locationProvider) {
+    $locationProvider.html5Mode({
+        enabled: true,
+        requireBase: false
+    });
+}).controller('mainController', function($scope, $http, $location)
+{
+    $scope.rowClass = function(build) {
+        if (build.result == "SUCCESS") {
+            return "success";
+        } else {
+            return "warning";
+        }
+    };
+    var query_args = $location.search();
+    var url = $location.url();
+    var tenant_start = url.lastIndexOf(
+        '/', url.lastIndexOf('/builds.html') - 1) + 1;
+    var tenant_length = url.lastIndexOf('/builds.html') - tenant_start;
+    $scope.tenant = url.substr(tenant_start, tenant_length);
+    $scope.builds = undefined;
+    if (query_args["pipeline"]) {$scope.pipeline = query_args["pipeline"];
+    } else {$scope.pipeline = "";}
+    if (query_args["job_name"]) {$scope.job_name = query_args["job_name"];
+    } else {$scope.job_name = "";}
+    if (query_args["project"]) {$scope.project = query_args["project"];
+    } else {$scope.project = "";}
+    $scope.builds_fetch = function() {
+        query_string = "";
+        if ($scope.tenant) {query_string += "&tenant="+$scope.tenant;}
+        if ($scope.pipeline) {query_string += "&pipeline="+$scope.pipeline;}
+        if ($scope.job_name) {query_string += "&job_name="+$scope.job_name;}
+        if ($scope.project) {query_string += "&project="+$scope.project;}
+        if (query_string != "") {query_string = "?" + query_string.substr(1);}
+        $http.get("builds.json" + query_string)
+            .then(function success(result) {
+                for (build_pos = 0;
+                     build_pos < result.data.length;
+                     build_pos += 1) {
+                    build = result.data[build_pos]
+                    if (build.node_name == null) {
+                        build.node_name = 'master'
+                    }
+                    /* Fix incorrect url for post_failure job */
+                    if (build.log_url == build.job_name) {
+                        build.log_url = undefined;
+                    }
+                }
+                $scope.builds = result.data;
+            });
+    }
+    $scope.builds_fetch()
+});
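For reference, the new endpoint can also be exercised directly, the same way the Angular controller does from builds.html. A sketch of such a client call follows; host, port, tenant and filter values are placeholders, and the accepted parameters are the ones processRequest() recognizes (the buildset and build filter names plus limit and skip). Filtering is driven entirely by the query string: the {tenant} path segment only selects the route, which is why the controller appends tenant=... itself.

import json
import urllib.parse
import urllib.request

# Placeholder host, port, tenant and filter values; adjust them to a real
# zuul-web instance that has been configured with a sql connection.
params = {
    'tenant': 'example-tenant',
    'project': 'org/project',
    'result': 'FAILURE',
    'limit': 10,
}
url = ('http://localhost:9000/example-tenant/builds.json?' +
       urllib.parse.urlencode(params))
with urllib.request.urlopen(url) as resp:
    builds = json.loads(resp.read().decode('utf-8'))
for b in builds:
    print(b['job_name'], b['result'], b.get('log_url'))

Each returned entry carries the columns selected in SqlHandler.query() plus the start_time/end_time strings and the computed duration added by get_builds().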