Add ability to restrict jobs to specific files.

Add an additional job parameter, 'files', that will cause that
job to only run if the change touches files that match the
specification.

Change-Id: I8c8fd3d029e02e338fd1dd266443b9ac56c0e5ac
Reviewed-on: https://review.openstack.org/23710
Reviewed-by: Clark Boylan <clark.boylan@gmail.com>
Reviewed-by: Jeremy Stanley <fungi@yuggoth.org>
Reviewed-by: Monty Taylor <mordred@inaugust.com>
Approved: James E. Blair <corvus@inaugust.com>
Tested-by: Jenkins
James E. Blair, 2013-03-06 08:50:50 -08:00, committed by Jenkins
parent e7ab86f0a7
commit 70c715813c
8 changed files with 77 additions and 10 deletions
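
In essence, the change adds an any-match test: a job that lists file patterns runs only when at least one file touched by the change matches at least one pattern, mirroring the compiled-regex matching added to Job.changeMatches below. A minimal standalone sketch of that rule (the function name and sample file names are illustrative, not taken from the diff):

import re

# Minimal sketch of the any-match rule this change introduces.
def job_matches_files(file_patterns, changed_files):
    if not file_patterns:
        return True  # no 'files' restriction configured; job is always eligible
    compiled = [re.compile(p) for p in file_patterns]
    # run only if at least one changed file matches at least one pattern
    return any(p.match(f) for p in compiled for f in changed_files)

print(job_matches_files(['.*-requires'], ['pip-requires', 'README']))  # True
print(job_matches_files(['.*-requires'], ['README', 'setup.py']))      # False
print(job_matches_files([], ['README', 'setup.py']))                   # True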

View File

@@ -432,6 +432,12 @@ each job as it builds a list from the project specification.
treated as a regular expression and multiple branches may be
listed.
**files (optional)**
This job should only be run if at least one of the files involved in
the change (added, deleted, or modified) matches at least one of the
file patterns listed here. Each pattern is treated as a regular
expression and multiple expressions may be listed.
**parameter-function (optional)**
Specifies a function that should be applied to the parameters before
the job is launched. The function should be defined in a python file
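
Regarding the **files** entry above: the set of files a change involves is taken from the Gerrit patchset's file list, and every entry counts toward matching regardless of whether it was added, deleted, or modified. A small illustrative sketch (the patchset dict below is hand-written, modeled on the fake Gerrit data used in the tests further down; it is not output from this change):

# Every file entry is collected; the 'type' field (ADDED/MODIFIED/DELETED)
# is not consulted when deciding whether a job's patterns match.
ps = {'number': '2',
      'ref': 'refs/changes/1/1/2',
      'files': [{'file': '/COMMIT_MSG',  'type': 'ADDED'},
                {'file': 'README',       'type': 'MODIFIED'},
                {'file': 'old-requires', 'type': 'DELETED'}]}

change_files = [f['file'] for f in ps.get('files', [])]
print(change_files)  # ['/COMMIT_MSG', 'README', 'old-requires']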

View File

@@ -43,6 +43,9 @@ jobs:
hold-following-changes: true
- name: nonvoting-project-test2
voting: false
- name: project-testfile
files:
- '.*-requires'
projects:
- name: org/project
@@ -51,10 +54,12 @@ projects:
- project-merge:
- project-test1
- project-test2
- project-testfile
gate:
- project-merge:
- project-test1
- project-test2
- project-testfile
post:
- project-post

View File

@@ -45,6 +45,9 @@ jobs:
- name: test-merge2
success-pattern: http://logs.example.com/{change.number}/{change.patchset}/{pipeline.name}/{job.name}/{build.number}/success
failure-pattern: http://logs.example.com/{change.number}/{change.patchset}/{pipeline.name}/{job.name}/{build.number}/fail
- name: project-testfile
files:
- 'tools/.*-requires'
projects:
- name: test-org/test
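
A quick illustration of how these fixture patterns behave: the scheduler compiles each entry with re.compile, and the job matches file paths with re.match, which anchors at the start of the path. The file names below are examples only:

import re

loose = re.compile('.*-requires')         # pattern from the first fixture
scoped = re.compile('tools/.*-requires')  # pattern from this fixture

for path in ('pip-requires', 'tools/pip-requires', 'doc/source/index.rst'):
    print(path, bool(loose.match(path)), bool(scoped.match(path)))
# pip-requires          True  False
# tools/pip-requires    True  True   ('.*' also spans the directory prefix)
# doc/source/index.rst  False False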

View File

@@ -192,7 +192,7 @@ class FakeChange(object):
self.addPatchset()
self.data['submitRecords'] = self.getSubmitRecords()
def addPatchset(self, files=None, large=False):
def addPatchset(self, files=[], large=False):
self.latest_patchset += 1
if files:
fn = files[0]
@@ -202,12 +202,15 @@ class FakeChange(object):
c = add_fake_change_to_repo(self.project, self.branch,
self.number, self.latest_patchset,
msg, fn, large)
ps_files = [{'file': '/COMMIT_MSG',
'type': 'ADDED'},
{'file': 'README',
'type': 'MODIFIED'}]
for f in files:
ps_files.append({'file': f, 'type': 'ADDED'})
d = {'approvals': [],
'createdOn': time.time(),
'files': [{'file': '/COMMIT_MSG',
'type': 'ADDED'},
{'file': 'README',
'type': 'MODIFIED'}],
'files': ps_files,
'number': str(self.latest_patchset),
'ref': 'refs/changes/1/%s/%s' % (self.number,
self.latest_patchset),
@@ -1966,3 +1969,28 @@ class testScheduler(unittest.TestCase):
assert D.data['status'] == 'MERGED'
assert D.reported == 2
self.assertEmptyQueues()
def test_file_jobs(self):
"Test that file jobs run only when appropriate"
A = self.fake_gerrit.addFakeChange('org/project', 'master', 'A')
A.addPatchset(['pip-requires'])
B = self.fake_gerrit.addFakeChange('org/project', 'master', 'B')
A.addApproval('CRVW', 2)
B.addApproval('CRVW', 2)
self.fake_gerrit.addEvent(A.addApproval('APRV', 1))
self.fake_gerrit.addEvent(B.addApproval('APRV', 1))
self.waitUntilSettled()
jobs = self.fake_jenkins.all_jobs
finished_jobs = self.fake_jenkins.job_history
testfile_jobs = [x for x in finished_jobs
if x.name == 'project-testfile']
assert len(testfile_jobs) == 1
assert testfile_jobs[0].changes == '1,2'
assert A.data['status'] == 'MERGED'
assert A.reported == 2
assert B.data['status'] == 'MERGED'
assert B.reported == 2
self.assertEmptyQueues()

View File

@@ -64,6 +64,7 @@ class LayoutSchema(object):
'voting': bool,
'parameter-function': str,
'branch': toList(str),
'files': toList(str),
}
jobs = [job]

View File

@@ -370,6 +370,8 @@ class Job(object):
self.voting = True
self.branches = []
self._branches = []
self.files = []
self._files = []
def __str__(self):
return self.name
@@ -387,16 +389,29 @@
self.voting = other.voting
self.branches = other.branches[:]
self._branches = other._branches[:]
self.files = other.files[:]
self._files = other._files[:]
def changeMatches(self, change):
if not self.branches:
return True
matches_branch = False
for branch in self.branches:
if hasattr(change, 'branch') and branch.match(change.branch):
return True
matches_branch = True
if hasattr(change, 'ref') and branch.match(change.ref):
return True
return False
matches_branch = True
if self.branches and not matches_branch:
return False
matches_file = False
for f in self.files:
if hasattr(change, 'files'):
for cf in change.files:
if f.match(cf):
matches_file = True
if self.files and not matches_file:
return False
return True
class JobTree(object):
@@ -532,6 +547,7 @@ class Change(Changeish):
self.patchset = None
self.refspec = None
self.files = []
self.reported = False
self.needs_change = None
self.needed_by_changes = []

View File

@@ -157,6 +157,10 @@ class Scheduler(threading.Thread):
if branches:
job._branches = branches
job.branches = [re.compile(x) for x in branches]
files = toList(config_job.get('files'))
if files:
job._files = files
job.files = [re.compile(x) for x in files]
def add_jobs(job_tree, config_jobs):
for job in config_jobs:
@@ -521,6 +525,8 @@ class BasePipelineManager(object):
efilters = ''
for b in tree.job._branches:
efilters += str(b)
for f in tree.job._files:
efilters += str(f)
if efilters:
efilters = ' ' + efilters
hold = ''

View File

@@ -296,6 +296,8 @@ class Gerrit(object):
for ps in data['patchSets']:
if ps['number'] == patchset:
change.refspec = ps['ref']
for f in ps.get('files', []):
change.files.append(f['file'])
if int(ps['number']) > int(max_ps):
max_ps = ps['number']
if max_ps == patchset: