Register RE2 syntax errors as warnings

This adds a configuration warning (viewable in the web UI) for any
regular expression found in in-repo configuration that cannot be
compiled and executed with RE2.

Change-Id: I092b47e9b43e9548cafdcb65d5d21712fc6cc3af
James E. Blair 2023-08-21 14:48:06 -07:00
parent 3d5f87359d
commit d4fac1a0e8
26 changed files with 560 additions and 120 deletions
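
To illustrate the class of pattern the new check catches, here is a minimal sketch (not part of the change itself), assuming the google-re2 Python bindings that this change imports as re2; the pattern is borrowed from the test fixtures below:

import re
import re2

pattern = '^(?!invalid/).*$'   # negative lookahead: valid PCRE, invalid RE2
re.compile(pattern)            # accepted by Python's re

o = re2.Options()
o.log_errors = False           # same option the change uses to quiet RE2
try:
    re2.compile(pattern, options=o)
except re2.error as e:
    # Zuul now records this case as a configuration warning rather than
    # silently continuing to rely on Python regex semantics.
    print('RE2 rejected the pattern:', e)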

View File

@@ -0,0 +1,15 @@
---
deprecations:
- |
Regular expressions using Python syntax are deprecated in favor of
RE2 syntax, for additional speed and safety. Negative
lookahead assertions may be replaced using the `negate` keyword.
See :ref:`regex` for more information.
If Zuul detects a regular expression using Python syntax that is not
supported by RE2, it will register a configuration syntax warning.
These may be viewed on the configuration errors page in the web
interface.
A future version of Zuul will remove Python regex support
completely and these warnings will become errors.
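
To make the suggested rewrite concrete, the following sketch (again assuming the re2 bindings) shows how a negative lookahead translates into a positive RE2 pattern plus negation, which is what the `negate` keyword expresses in configuration:

import re2

# Deprecated: ref: '^(?!invalid/).*$'   (match refs NOT starting with invalid/)
# RE2 form:   ref: {regex: '^invalid/.*', negate: true}
positive = re2.compile('^invalid/.*')

def ref_matches(ref):
    return not positive.match(ref)   # negate: true flips the match result

assert ref_matches('refs/heads/master')
assert not ref_matches('invalid/branch')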

View File

@@ -0,0 +1,89 @@
- pipeline:
name: check
manager: independent
trigger:
gerrit:
- event: patchset-created
success:
gerrit:
Verified: 1
failure:
gerrit:
Verified: -1
- pipeline:
name: gate
manager: dependent
success-message: Build succeeded (gate).
trigger:
gerrit:
- event: comment-added
approval:
- Approved: 1
require:
gerrit:
approval:
username: '(?!invalid)'
success:
gerrit:
Verified: 2
submit: true
failure:
gerrit:
Verified: -2
start:
gerrit:
Verified: 0
precedence: high
- pipeline:
name: post
manager: independent
trigger:
gerrit:
- event: ref-updated
ref: ^(?!invalid/).*$
- pipeline:
name: tag
manager: independent
trigger:
gerrit:
- event: ref-updated
ref: ^refs/tags/.*$
- job:
name: base
parent: null
run: playbooks/base.yaml
nodeset:
nodes:
- label: ubuntu-xenial
name: controller
- job:
name: check-job
run: playbooks/check.yaml
- job:
name: post-job
run: playbooks/post.yaml
- job:
name: tag-job
run: playbooks/tag.yaml
- project:
name: org/project
check:
jobs:
- check-job
gate:
jobs:
- check-job
post:
jobs:
- post-job
tag:
jobs:
- tag-job

View File

@@ -0,0 +1,7 @@
- pipeline:
name: check
manager: independent
trigger:
git:
- event: ref-updated
ref: '(?!invalid)'

View File

@@ -0,0 +1,15 @@
- pipeline:
name: check
manager: independent
trigger:
github:
- event: pull_request_review
action: submitted
state: approve
ref: '(?!invalid)'
start:
github: {}
success:
github: {}
failure:
github: {}

View File

@@ -0,0 +1,7 @@
- pipeline:
name: check
manager: independent
trigger:
gitlab:
- event: gl_push
ref: '(?!invalid)'

View File

@@ -0,0 +1,7 @@
- pipeline:
name: check
manager: independent
trigger:
pagure:
- event: pg_push
ref: '(?!invalid)'

View File

@@ -0,0 +1,7 @@
- pipeline:
name: check
manager: independent
trigger:
zuul:
- event: parent-change-enqueued
pipeline: '(?!invalid)'

View File

@@ -0,0 +1,93 @@
- pragma:
implied-branches:
- '^(?!invalid).*$'
- pipeline:
name: check
manager: independent
trigger:
gerrit:
- event: patchset-created
success:
gerrit:
Verified: 1
failure:
gerrit:
Verified: -1
- pipeline:
name: gate
manager: dependent
success-message: Build succeeded (gate).
trigger:
gerrit:
- event: comment-added
approval:
- Approved: 1
success:
gerrit:
Verified: 2
submit: true
failure:
gerrit:
Verified: -2
start:
gerrit:
Verified: 0
precedence: high
- pipeline:
name: post
manager: independent
trigger:
gerrit:
- event: ref-updated
ref:
regex: ^refs/.*$
negate: true
- pipeline:
name: tag
manager: independent
trigger:
gerrit:
- event: ref-updated
ref: ^refs/tags/.*$
- job:
name: base
parent: null
run: playbooks/base.yaml
nodeset:
nodes:
- label: ubuntu-xenial
name: controller
- job:
name: check-job
run: playbooks/check.yaml
branches: ^(?!invalid).*$
- job:
name: post-job
run: playbooks/post.yaml
- job:
name: tag-job
run: playbooks/tag.yaml
- project:
name: org/project
check:
jobs:
- check-job:
branches: ^(?!invalid).*$
gate:
jobs:
- check-job
post:
jobs:
- post-job
tag:
jobs:
- tag-job

View File

@@ -108,7 +108,7 @@ class TestJob(BaseTestCase):
'parent': None,
'irrelevant-files': [
'^docs/.*$'
]})
]}, None)
return job
def test_change_matches_returns_false_for_matched_skip_if(self):
@@ -194,7 +194,7 @@ class TestJob(BaseTestCase):
'name': 'base',
'parent': None,
'timeout': 30,
})
}, None)
self.layout.addJob(base)
python27 = self.pcontext.job_parser.fromYaml({
'_source_context': self.context,
@@ -202,7 +202,7 @@
'name': 'python27',
'parent': 'base',
'timeout': 40,
})
}, None)
self.layout.addJob(python27)
python27diablo = self.pcontext.job_parser.fromYaml({
'_source_context': self.context,
@@ -212,7 +212,7 @@
'stable/diablo'
],
'timeout': 50,
})
}, None)
self.layout.addJob(python27diablo)
project_config = self.pcontext.project_parser.fromYaml({
@@ -225,7 +225,7 @@
'run': 'playbooks/python27.yaml'}}
]
}
})
}, None)
self.layout.addProjectConfig(project_config)
change = model.Change(self.project)
@@ -269,7 +269,7 @@
'name': 'base',
'parent': None,
'timeout': 30,
})
}, None)
self.layout.addJob(base)
python27 = self.pcontext.job_parser.fromYaml({
'_source_context': self.context,
@@ -278,7 +278,7 @@
'parent': 'base',
'timeout': 40,
'irrelevant-files': ['^ignored-file$'],
})
}, None)
self.layout.addJob(python27)
project_config = self.pcontext.project_parser.fromYaml({
@@ -290,7 +290,7 @@
'python27',
]
}
})
}, None)
self.layout.addProjectConfig(project_config)
change = model.Change(self.project)
@@ -321,7 +321,7 @@
'_start_mark': self.start_mark,
'parent': None,
'name': 'base',
})
}, None)
self.layout.addJob(base)
other_project = model.Project('other_project', self.source)
@@ -334,7 +334,7 @@
'_source_context': other_context,
'_start_mark': self.start_mark,
'name': 'base',
})
}, None)
with testtools.ExpectedException(
Exception,
"Job base in other_project is not permitted "
@@ -350,22 +350,21 @@
'name': 'job',
'parent': None,
'post-review': True
})
}, None)
self.layout.addJob(job)
project_config = self.pcontext.project_parser.fromYaml(
{
'_source_context': self.context,
'_start_mark': self.start_mark,
'name': 'project',
'gate': {
'jobs': [
'job'
]
}
project_config = self.pcontext.project_parser.fromYaml({
'_source_context': self.context,
'_start_mark': self.start_mark,
'name': 'project',
'gate': {
'jobs': [
'job'
]
}
)
}, None)
self.layout.addProjectConfig(project_config)
change = model.Change(self.project)

View File

@@ -31,7 +31,7 @@ import paramiko
import zuul.configloader
from zuul.lib import yamlutil as yaml
from zuul.model import MergeRequest
from zuul.model import MergeRequest, SEVERITY_WARNING
from zuul.zk.blob_store import BlobStore
from tests.base import (
@@ -5217,6 +5217,132 @@ class TestValidateWarnings(ZuulTestCase):
pass
class TestPCREDeprecation(ZuulTestCase):
@simple_layout('layouts/pcre-deprecation.yaml')
def test_pcre_deprecation(self):
tenant = self.scheds.first.sched.abide.tenants.get("tenant-one")
errors = tenant.layout.loading_errors
self.assertEqual(len(errors), 3)
# Pragma implied-branches
idx = 0
self.assertEqual(errors[idx].severity, SEVERITY_WARNING)
self.assertEqual(errors[idx].name, 'Regex Deprecation')
self.assertIn('pragma stanza', errors[idx].error)
# Job branches
idx = 1
self.assertEqual(errors[idx].severity, SEVERITY_WARNING)
self.assertEqual(errors[idx].name, 'Regex Deprecation')
self.assertIn('job stanza', errors[idx].error)
# Project-pipeline job branches
idx = 2
self.assertEqual(errors[idx].severity, SEVERITY_WARNING)
self.assertEqual(errors[idx].name, 'Regex Deprecation')
self.assertIn('project stanza', errors[idx].error)
class TestPCREDeprecationGerrit(ZuulTestCase):
@simple_layout('layouts/pcre-deprecation-gerrit.yaml')
def test_pcre_deprecation_gerrit(self):
tenant = self.scheds.first.sched.abide.tenants.get("tenant-one")
errors = tenant.layout.loading_errors
self.assertEqual(len(errors), 2)
# Pipeline gerrit trigger ref
idx = 0
self.assertEqual(errors[idx].severity, SEVERITY_WARNING)
self.assertEqual(errors[idx].name, 'Regex Deprecation')
self.assertIn('pipeline stanza', errors[idx].error)
self.assertIn('name: gate', errors[idx].error)
# Pipeline gerrit require approval
idx = 1
self.assertEqual(errors[idx].severity, SEVERITY_WARNING)
self.assertEqual(errors[idx].name, 'Regex Deprecation')
self.assertIn('pipeline stanza', errors[idx].error)
self.assertIn('name: post', errors[idx].error)
class TestPCREDeprecationGit(ZuulTestCase):
config_file = 'zuul-git-driver.conf'
@simple_layout('layouts/pcre-deprecation-git.yaml')
def test_pcre_deprecation_git(self):
tenant = self.scheds.first.sched.abide.tenants.get("tenant-one")
errors = tenant.layout.loading_errors
self.assertEqual(len(errors), 1)
# Pipeline git trigger ref
idx = 0
self.assertEqual(errors[idx].severity, SEVERITY_WARNING)
self.assertEqual(errors[idx].name, 'Regex Deprecation')
self.assertIn('pipeline stanza', errors[idx].error)
class TestPCREDeprecationGithub(ZuulTestCase):
config_file = 'zuul-connections-gerrit-and-github.conf'
@simple_layout('layouts/pcre-deprecation-github.yaml')
def test_pcre_deprecation_github(self):
tenant = self.scheds.first.sched.abide.tenants.get("tenant-one")
errors = tenant.layout.loading_errors
self.assertEqual(len(errors), 1)
# Pipeline github trigger ref
idx = 0
self.assertEqual(errors[idx].severity, SEVERITY_WARNING)
self.assertEqual(errors[idx].name, 'Regex Deprecation')
self.assertIn('pipeline stanza', errors[idx].error)
class TestPCREDeprecationGitlab(ZuulTestCase):
config_file = 'zuul-gitlab-driver.conf'
@simple_layout('layouts/pcre-deprecation-gitlab.yaml', driver='gitlab')
def test_pcre_deprecation_gitlab(self):
tenant = self.scheds.first.sched.abide.tenants.get("tenant-one")
errors = tenant.layout.loading_errors
self.assertEqual(len(errors), 1)
# Pipeline gitlab trigger ref
idx = 0
self.assertEqual(errors[idx].severity, SEVERITY_WARNING)
self.assertEqual(errors[idx].name, 'Regex Deprecation')
self.assertIn('pipeline stanza', errors[idx].error)
class TestPCREDeprecationPagure(ZuulTestCase):
config_file = 'zuul-pagure-driver.conf'
@simple_layout('layouts/pcre-deprecation-pagure.yaml', driver='pagure')
def test_pcre_deprecation_pagure(self):
tenant = self.scheds.first.sched.abide.tenants.get("tenant-one")
errors = tenant.layout.loading_errors
self.assertEqual(len(errors), 1)
# Pipeline pagure trigger ref
idx = 0
self.assertEqual(errors[idx].severity, SEVERITY_WARNING)
self.assertEqual(errors[idx].name, 'Regex Deprecation')
self.assertIn('pipeline stanza', errors[idx].error)
class TestPCREDeprecationZuul(ZuulTestCase):
@simple_layout('layouts/pcre-deprecation-zuul.yaml')
def test_pcre_deprecation_zuul(self):
tenant = self.scheds.first.sched.abide.tenants.get("tenant-one")
errors = tenant.layout.loading_errors
self.assertEqual(len(errors), 1)
# Pipeline zuul trigger ref
idx = 0
self.assertEqual(errors[idx].severity, SEVERITY_WARNING)
self.assertEqual(errors[idx].name, 'Regex Deprecation')
self.assertIn('pipeline stanza', errors[idx].error)
class RoleTestCase(ZuulTestCase):
def _getRolesPaths(self, build, playbook):
path = os.path.join(self.jobdir_root, build.uuid,

View File

@@ -84,11 +84,19 @@ def check_config_path(path):
"allowed in extra-config-paths")
def make_regex(data):
def make_regex(data, error_accumulator=None):
if isinstance(data, dict):
return ZuulRegex(data['regex'],
negate=data.get('negate', False))
return ZuulRegex(data)
regex = ZuulRegex(data['regex'],
negate=data.get('negate', False))
else:
regex = ZuulRegex(data)
if error_accumulator and regex.re2_failure:
if regex.re2_failure_message:
error_accumulator.addError(RegexDeprecation(
regex.re2_failure_message))
else:
error_accumulator.addError(RegexDeprecation())
return regex
def indent(s):
@@ -338,6 +346,21 @@ class DeprecationWarning(ConfigurationSyntaxWarning):
name=self.zuul_error_name)
class RegexDeprecation(DeprecationWarning):
zuul_error_name = 'Regex Deprecation'
zuul_error_message = """\
All regular expressions must conform to RE2 syntax, but an
expression using the deprecated Perl-style syntax has been detected.
Adjust the configuration to conform to RE2 syntax."""
def __init__(self, msg=None):
super().__init__()
if msg:
self.zuul_error_message += f"""
The RE2 syntax error is: {msg}"""
@contextmanager
def project_configuration_exceptions(context, accumulator):
try:
@@ -395,13 +418,13 @@ def early_configuration_exceptions(context, accumulator):
@contextmanager
def configuration_exceptions(stanza, conf, accumulator):
def configuration_exceptions(local_accumulator):
try:
yield
except ConfigurationSyntaxError:
raise
except Exception as e:
conf = copy.deepcopy(conf)
conf = copy.deepcopy(local_accumulator.conf)
context = conf.pop('_source_context')
start_mark = conf.pop('_start_mark')
intro = textwrap.fill(textwrap.dedent("""\
@@ -424,10 +447,12 @@ def configuration_exceptions(stanza, conf, accumulator):
m = m.format(intro=intro,
error=indent(str(e)),
stanza=stanza,
stanza=local_accumulator.stanza,
content=indent(start_mark.snippet.rstrip()),
start_mark=str(start_mark))
# Get a LoadingErrors object
accumulator = local_accumulator.accumulator
accumulator.makeError(
context, start_mark, m,
short_error=str(e),
@@ -612,10 +637,9 @@ class PragmaParser(object):
self.log = logging.getLogger("zuul.PragmaParser")
self.pcontext = pcontext
def fromYaml(self, conf):
def fromYaml(self, conf, error_accumulator):
conf = copy_safe_config(conf)
self.schema(conf)
bm = conf.get('implied-branch-matchers')
source_context = conf['_source_context']
@@ -630,7 +654,7 @@ class PragmaParser(object):
# (automatically generated from source file branches) are
# ImpliedBranchMatchers.
source_context.implied_branches = [
change_matcher.BranchMatcher(make_regex(x))
change_matcher.BranchMatcher(make_regex(x, error_accumulator))
for x in as_list(branches)]
@@ -887,8 +911,8 @@ class JobParser(object):
self.log = logging.getLogger("zuul.JobParser")
self.pcontext = pcontext
def fromYaml(self, conf, project_pipeline=False, name=None,
validate=True):
def fromYaml(self, conf, error_accumulator,
project_pipeline=False, name=None, validate=True):
conf = copy_safe_config(conf)
if validate:
self.schema(conf)
@@ -1182,8 +1206,11 @@
branches = None
if 'branches' in conf:
branches = [change_matcher.BranchMatcher(make_regex(x))
for x in as_list(conf['branches'])]
branches = [
change_matcher.BranchMatcher(
make_regex(x, error_accumulator))
for x in as_list(conf['branches'])
]
elif not project_pipeline:
branches = self.pcontext.getImpliedBranches(job.source_context)
if branches:
@@ -1254,7 +1281,7 @@ class ProjectTemplateParser(object):
return vs.Schema(project)
def fromYaml(self, conf, validate=True, freeze=True):
def fromYaml(self, conf, error_accumulator, validate=True, freeze=True):
conf = copy_safe_config(conf)
if validate:
self.schema(conf)
@@ -1274,7 +1301,7 @@
project_pipeline.fail_fast = conf_pipeline.get(
'fail-fast')
self.parseJobList(
conf_pipeline.get('jobs', []),
conf_pipeline.get('jobs', []), error_accumulator,
source_context, start_mark, project_pipeline.job_list)
# If this project definition is in a place where it
@@ -1295,7 +1322,8 @@
project_template.freeze()
return project_template
def parseJobList(self, conf, source_context, start_mark, job_list):
def parseJobList(self, conf, error_accumulator, source_context,
start_mark, job_list):
for conf_job in conf:
if isinstance(conf_job, str):
jobname = conf_job
@@ -1309,7 +1337,7 @@
attrs['_start_mark'] = start_mark
job_list.addJob(self.pcontext.job_parser.fromYaml(
attrs, project_pipeline=True,
attrs, error_accumulator, project_pipeline=True,
name=jobname, validate=False))
@@ -1346,7 +1374,7 @@ class ProjectParser(object):
return vs.Schema(project)
def fromYaml(self, conf):
def fromYaml(self, conf, error_accumulator):
conf = copy_safe_config(conf)
self.schema(conf)
@@ -1366,7 +1394,7 @@
# Parse the project as a template since they're mostly the
# same.
project_config = self.pcontext.project_template_parser. \
fromYaml(conf, validate=False, freeze=False)
fromYaml(conf, error_accumulator, validate=False, freeze=False)
project_config.name = project_name
else:
@@ -1382,7 +1410,7 @@
# Parse the project as a template since they're mostly the
# same.
project_config = self.pcontext.project_template_parser.\
fromYaml(conf, validate=False, freeze=False)
fromYaml(conf, error_accumulator, validate=False, freeze=False)
project_config.name = project.canonical_name
@@ -1612,20 +1640,21 @@ class PipelineParser(object):
pipeline.setManager(manager)
local_accumulator = LocalAccumulator(self.pcontext.loading_errors,
'pipeline', conf)
for source_name, require_config in conf.get('require', {}).items():
source = self.pcontext.connections.getSource(source_name)
manager.ref_filters.extend(
source.getRequireFilters(require_config))
source.getRequireFilters(require_config, local_accumulator))
seen_connections.add(source_name)
for source_name, reject_config in conf.get('reject', {}).items():
source = self.pcontext.connections.getSource(source_name)
manager.ref_filters.extend(
source.getRejectFilters(reject_config))
source.getRejectFilters(reject_config, local_accumulator))
seen_connections.add(source_name)
local_accumulator = LocalAccumulator(self.pcontext.loading_errors,
'pipeline', conf)
for connection_name, trigger_config in conf.get('trigger').items():
if self.pcontext.tenant.allowed_triggers is not None and \
connection_name not in self.pcontext.tenant.allowed_triggers:
@@ -2539,8 +2568,9 @@ class TenantParser(object):
def filterUntrustedProjectYAML(self, data, loading_errors):
if data and data.pipelines:
with configuration_exceptions(
'pipeline', data.pipelines[0], loading_errors):
local_accumulator = LocalAccumulator(
loading_errors, 'pipeline', data.pipelines[0])
with configuration_exceptions(local_accumulator):
raise PipelineNotPermittedError()
return data.copy(trusted=False)
@@ -2556,16 +2586,19 @@
# Handle pragma items first since they modify the source context
# used by other classes.
for config_pragma in unparsed_config.pragmas:
with configuration_exceptions('pragma',
config_pragma, loading_errors):
pcontext.pragma_parser.fromYaml(config_pragma)
local_accumulator = LocalAccumulator(
loading_errors, 'pragma', config_pragma)
with configuration_exceptions(local_accumulator):
pcontext.pragma_parser.fromYaml(
config_pragma, local_accumulator)
for config_pipeline in unparsed_config.pipelines:
classes = self._getLoadClasses(tenant, config_pipeline)
if 'pipeline' not in classes:
continue
with configuration_exceptions('pipeline',
config_pipeline, loading_errors):
local_accumulator = LocalAccumulator(
loading_errors, 'pipeline', config_pipeline)
with configuration_exceptions(local_accumulator):
parsed_config.pipelines.append(
pcontext.pipeline_parser.fromYaml(config_pipeline))
@@ -2573,8 +2606,9 @@
classes = self._getLoadClasses(tenant, config_nodeset)
if 'nodeset' not in classes:
continue
with configuration_exceptions('nodeset',
config_nodeset, loading_errors):
local_accumulator = LocalAccumulator(
loading_errors, 'nodeset', config_nodeset)
with configuration_exceptions(local_accumulator):
parsed_config.nodesets.append(
pcontext.nodeset_parser.fromYaml(config_nodeset))
@@ -2582,8 +2616,9 @@
classes = self._getLoadClasses(tenant, config_secret)
if 'secret' not in classes:
continue
with configuration_exceptions('secret',
config_secret, loading_errors):
local_accumulator = LocalAccumulator(
loading_errors, 'secret', config_secret)
with configuration_exceptions(local_accumulator):
parsed_config.secrets.append(
pcontext.secret_parser.fromYaml(config_secret))
@@ -2591,17 +2626,20 @@
classes = self._getLoadClasses(tenant, config_job)
if 'job' not in classes:
continue
with configuration_exceptions('job',
config_job, loading_errors):
local_accumulator = LocalAccumulator(
loading_errors, 'job', config_job)
with configuration_exceptions(local_accumulator):
parsed_config.jobs.append(
pcontext.job_parser.fromYaml(config_job))
pcontext.job_parser.fromYaml(
config_job, local_accumulator))
for config_semaphore in unparsed_config.semaphores:
classes = self._getLoadClasses(tenant, config_semaphore)
if 'semaphore' not in classes:
continue
with configuration_exceptions('semaphore',
config_semaphore, loading_errors):
local_accumulator = LocalAccumulator(
loading_errors, 'semaphore', config_semaphore)
with configuration_exceptions(local_accumulator):
parsed_config.semaphores.append(
pcontext.semaphore_parser.fromYaml(config_semaphore))
@@ -2609,8 +2647,9 @@
classes = self._getLoadClasses(tenant, config_queue)
if 'queue' not in classes:
continue
with configuration_exceptions('queue',
config_queue, loading_errors):
local_accumulator = LocalAccumulator(
loading_errors, 'queue', config_queue)
with configuration_exceptions(local_accumulator):
parsed_config.queues.append(
pcontext.queue_parser.fromYaml(config_queue))
@@ -2618,23 +2657,25 @@
classes = self._getLoadClasses(tenant, config_template)
if 'project-template' not in classes:
continue
with configuration_exceptions(
'project-template', config_template, loading_errors):
local_accumulator = LocalAccumulator(
loading_errors, 'project-template', config_template)
with configuration_exceptions(local_accumulator):
parsed_config.project_templates.append(
pcontext.project_template_parser.fromYaml(
config_template))
config_template, local_accumulator))
for config_project in unparsed_config.projects:
classes = self._getLoadClasses(tenant, config_project)
if 'project' not in classes:
continue
with configuration_exceptions('project', config_project,
loading_errors):
local_accumulator = LocalAccumulator(
loading_errors, 'project', config_project)
with configuration_exceptions(local_accumulator):
# we need to separate the regex projects as they are
# processed differently later
name = config_project.get('name')
parsed_project = pcontext.project_parser.fromYaml(
config_project)
config_project, local_accumulator)
if name and name.startswith('^'):
parsed_config.projects_by_regex.setdefault(
name, []).append(parsed_project)
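
As a hedged summary of the accumulator threading above (class and attribute names are taken from this diff; the addError body is elided because it is not shown here):

class LocalAccumulator:
    # Pairs the tenant-wide LoadingErrors object with enough context
    # (stanza name and raw conf dict) to attribute a RegexDeprecation
    # warning to the configuration item currently being parsed.
    def __init__(self, accumulator, stanza, conf):
        self.accumulator = accumulator  # tenant-wide LoadingErrors
        self.stanza = stanza            # e.g. 'pipeline', 'job', 'pragma'
        self.conf = conf                # raw YAML dict with source marks

    def addError(self, error):
        # Elided; the configuration_exceptions handler above shows how
        # conf['_source_context'] and conf['_start_mark'] are used to
        # build the final error entry.
        ...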

View File

@@ -258,7 +258,7 @@ class GerritEventFilter(EventFilter):
refs=[], event_approvals={}, comments=[], emails=[],
usernames=[], required_approvals=[], reject_approvals=[],
uuid=None, scheme=None, ignore_deletes=True,
require=None, reject=None):
require=None, reject=None, error_accumulator=None):
EventFilter.__init__(self, connection_name, trigger)
@@ -270,13 +270,13 @@
if require:
self.require_filter = GerritRefFilter.requiresFromConfig(
connection_name, require)
connection_name, require, error_accumulator)
else:
self.require_filter = None
if reject:
self.reject_filter = GerritRefFilter.rejectFromConfig(
connection_name, reject)
connection_name, reject, error_accumulator)
else:
self.reject_filter = None
@@ -438,6 +438,7 @@ class GerritEventFilter(EventFilter):
class GerritRefFilter(RefFilter):
def __init__(self, connection_name,
error_accumulator,
open=None, reject_open=None,
current_patchset=None, reject_current_patchset=None,
wip=None, reject_wip=None,
@@ -447,9 +448,10 @@ class GerritRefFilter(RefFilter):
self._required_approvals = copy.deepcopy(required_approvals)
self.required_approvals = self._tidy_approvals(
self._required_approvals)
self._required_approvals, error_accumulator)
self._reject_approvals = copy.deepcopy(reject_approvals)
self.reject_approvals = self._tidy_approvals(self._reject_approvals)
self.reject_approvals = self._tidy_approvals(
self._reject_approvals, error_accumulator)
self.statuses = statuses
self.reject_statuses = reject_statuses
@@ -467,9 +469,10 @@
self.current_patchset = current_patchset
@classmethod
def requiresFromConfig(cls, connection_name, config):
def requiresFromConfig(cls, connection_name, config, error_accumulator):
return cls(
connection_name=connection_name,
error_accumulator=error_accumulator,
open=config.get('open'),
current_patchset=config.get('current-patchset'),
wip=config.get('wip'),
@@ -478,9 +481,10 @@
)
@classmethod
def rejectFromConfig(cls, connection_name, config):
def rejectFromConfig(cls, connection_name, config, error_accumulator):
return cls(
connection_name=connection_name,
error_accumulator=error_accumulator,
reject_open=config.get('open'),
reject_current_patchset=config.get('current-patchset'),
reject_wip=config.get('wip'),
@@ -561,13 +565,13 @@
return True
def _tidy_approvals(self, approvals):
def _tidy_approvals(self, approvals, error_accumulator):
for a in approvals:
for k, v in a.items():
if k == 'username':
a['username'] = make_regex(v)
a['username'] = make_regex(v, error_accumulator)
elif k == 'email':
a['email'] = make_regex(v)
a['email'] = make_regex(v, error_accumulator)
elif k == 'newer-than':
a[k] = time_to_seconds(v)
elif k == 'older-than':

View File

@@ -221,16 +221,16 @@ class GerritSource(BaseSource):
def _getGitwebUrl(self, project, sha=None):
return self.connection._getGitwebUrl(project, sha)
def getRequireFilters(self, config):
def getRequireFilters(self, config, error_accumulator):
f = GerritRefFilter.requiresFromConfig(
self.connection.connection_name,
config)
config, error_accumulator)
return [f]
def getRejectFilters(self, config):
def getRejectFilters(self, config, error_accumulator):
f = GerritRefFilter.rejectFromConfig(
self.connection.connection_name,
config)
config, error_accumulator)
return [f]
def getRefForChange(self, change):

View File

@@ -64,12 +64,15 @@ class GerritTrigger(BaseTrigger):
error_accumulator.addError(
GerritRejectApprovalDeprecation())
types = [make_regex(x) for x in to_list(trigger['event'])]
branches = [make_regex(x) for x in to_list(trigger.get('branch'))]
refs = [make_regex(x) for x in to_list(trigger.get('ref'))]
comments = [make_regex(x) for x in comments]
emails = [make_regex(x) for x in emails]
usernames = [make_regex(x) for x in usernames]
types = [make_regex(x, error_accumulator)
for x in to_list(trigger['event'])]
branches = [make_regex(x, error_accumulator)
for x in to_list(trigger.get('branch'))]
refs = [make_regex(x, error_accumulator)
for x in to_list(trigger.get('ref'))]
comments = [make_regex(x, error_accumulator) for x in comments]
emails = [make_regex(x, error_accumulator) for x in emails]
usernames = [make_regex(x, error_accumulator) for x in usernames]
f = GerritEventFilter(
connection_name=connection_name,
@@ -92,6 +95,7 @@
ignore_deletes=ignore_deletes,
require=trigger.get('require'),
reject=trigger.get('reject'),
error_accumulator=error_accumulator,
)
efilters.append(f)

View File

@@ -80,10 +80,10 @@ class GitSource(BaseSource):
def getProjectOpenChanges(self, project):
raise NotImplementedError()
def getRequireFilters(self, config):
def getRequireFilters(self, config, error_accumulator):
return []
def getRejectFilters(self, config):
def getRejectFilters(self, config, error_accumulator):
return []
def getRefForChange(self, change):

View File

@@ -29,7 +29,8 @@ class GitTrigger(BaseTrigger):
efilters = []
for trigger in to_list(trigger_conf):
refs = [make_regex(x) for x in to_list(trigger.get('ref'))]
refs = [make_regex(x, error_accumulator)
for x in to_list(trigger.get('ref'))]
f = GitEventFilter(
connection_name=connection_name,

View File

@@ -177,13 +177,13 @@ class GithubSource(BaseSource):
def _ghTimestampToDate(self, timestamp):
return time.strptime(timestamp, '%Y-%m-%dT%H:%M:%SZ')
def getRequireFilters(self, config):
def getRequireFilters(self, config, error_accumulator):
f = GithubRefFilter.requiresFromConfig(
self.connection.connection_name,
config)
return [f]
def getRejectFilters(self, config):
def getRejectFilters(self, config, error_accumulator):
f = GithubRefFilter.rejectFromConfig(
self.connection.connection_name,
config)

View File

@@ -41,10 +41,14 @@ class GithubTrigger(BaseTrigger):
efilters = []
for trigger in to_list(trigger_config):
types = [make_regex(x) for x in to_list(trigger['event'])]
branches = [make_regex(x) for x in to_list(trigger.get('branch'))]
refs = [make_regex(x) for x in to_list(trigger.get('ref'))]
comments = [make_regex(x) for x in to_list(trigger.get('comment'))]
types = [make_regex(x, error_accumulator)
for x in to_list(trigger['event'])]
branches = [make_regex(x, error_accumulator)
for x in to_list(trigger.get('branch'))]
refs = [make_regex(x, error_accumulator)
for x in to_list(trigger.get('ref'))]
comments = [make_regex(x, error_accumulator)
for x in to_list(trigger.get('comment'))]
f = GithubEventFilter(
connection_name=connection_name,

View File

@@ -140,7 +140,7 @@ class GitlabSource(BaseSource):
"""Get the git-web url for a project."""
raise NotImplementedError()
def getRequireFilters(self, config):
def getRequireFilters(self, config, error_accumulator):
f = GitlabRefFilter(
connection_name=self.connection.connection_name,
open=config.get('open'),
@@ -150,7 +150,7 @@
)
return [f]
def getRejectFilters(self, config):
def getRejectFilters(self, config, error_accumulator):
raise NotImplementedError()
def getRefForChange(self, change):

View File

@@ -28,10 +28,12 @@ class GitlabTrigger(BaseTrigger):
error_accumulator):
efilters = []
for trigger in to_list(trigger_config):
types = [make_regex(x) for x in to_list(trigger['event'])]
refs = [make_regex(x) for x in to_list(trigger.get('ref'))]
comments = [make_regex(x) for x in
to_list(trigger.get('comment'))]
types = [make_regex(x, error_accumulator)
for x in to_list(trigger['event'])]
refs = [make_regex(x, error_accumulator)
for x in to_list(trigger.get('ref'))]
comments = [make_regex(x, error_accumulator)
for x in to_list(trigger.get('comment'))]
f = GitlabEventFilter(
connection_name=connection_name,

View File

@@ -144,7 +144,7 @@ class PagureSource(BaseSource):
"""Get the git-web url for a project."""
raise NotImplementedError()
def getRequireFilters(self, config):
def getRequireFilters(self, config, error_accumulator):
f = PagureRefFilter(
connection_name=self.connection.connection_name,
score=config.get('score'),
@@ -155,7 +155,7 @@
)
return [f]
def getRejectFilters(self, config):
def getRejectFilters(self, config, error_accumulator):
raise NotImplementedError()
def getRefForChange(self, change):

View File

@@ -28,9 +28,12 @@ class PagureTrigger(BaseTrigger):
error_accumulator):
efilters = []
for trigger in to_list(trigger_config):
types = [make_regex(x) for x in to_list(trigger['event'])]
refs = [make_regex(x) for x in to_list(trigger.get('ref'))]
comments = [make_regex(x) for x in to_list(trigger.get('comment'))]
types = [make_regex(x, error_accumulator)
for x in to_list(trigger['event'])]
refs = [make_regex(x, error_accumulator)
for x in to_list(trigger.get('ref'))]
comments = [make_regex(x, error_accumulator)
for x in to_list(trigger.get('comment'))]
f = PagureEventFilter(
connection_name=connection_name,

View File

@@ -28,7 +28,7 @@ class TimerTrigger(BaseTrigger):
error_accumulator):
efilters = []
for trigger in to_list(trigger_conf):
types = [make_regex('timer')]
types = [make_regex('timer', error_accumulator)]
f = TimerEventFilter(connection_name=connection_name,
trigger=self,
types=types,

View File

@@ -33,9 +33,10 @@ class ZuulTrigger(BaseTrigger):
error_accumulator):
efilters = []
for trigger in to_list(trigger_conf):
types = [make_regex(x) for x in to_list(trigger['event'])]
pipelines = [make_regex(x) for x in
to_list(trigger.get('pipeline'))]
types = [make_regex(x, error_accumulator)
for x in to_list(trigger['event'])]
pipelines = [make_regex(x, error_accumulator)
for x in to_list(trigger.get('pipeline'))]
f = ZuulEventFilter(
connection_name=connection_name,
trigger=self,

View File

@@ -52,8 +52,23 @@ class ZuulRegex:
def __init__(self, pattern, negate=False):
self.pattern = pattern
self.negate = negate
# TODO: switch this to re2
self.re = re.compile(pattern)
self.re2_failure = False
self.re2_failure_message = None
try:
o = re2.Options()
o.log_errors = False
self.re = re2.compile(pattern, options=o)
except re2.error as e:
# Compile under re first to find out if this is also a
# PCRE error, which should take precedence.
self.re = re.compile(pattern)
# If it compiled okay, then the problem is re2 vs pcre
self.re2_failure = True
if e.args and len(e.args) == 1:
if isinstance(e.args[0], bytes):
self.re2_failure_message = e.args[0].decode('utf8')
elif isinstance(e.args[0], str):
self.re2_failure_message = e.args[0]
def __eq__(self, other):
return (isinstance(other, ZuulRegex) and
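
A brief usage sketch of the fallback behavior above (attribute names from this diff): a PCRE-only pattern still compiles via Python's re, but the RE2 failure is recorded so that make_regex() can register a RegexDeprecation warning.

zr = ZuulRegex('(?!invalid)')      # negative lookahead: PCRE-only
assert zr.re2_failure              # compiled via the re fallback
print(zr.re2_failure_message)      # RE2's complaint, when available

zr2 = ZuulRegex('^refs/tags/.*$')  # valid under RE2
assert not zr2.re2_failure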

View File

@@ -219,12 +219,12 @@ class BaseSource(object, metaclass=abc.ABCMeta):
"""Return the current ltime of the project branch cache."""
@abc.abstractmethod
def getRequireFilters(self, config):
def getRequireFilters(self, config, error_accumulator):
"""Return a list of ChangeFilters for the scheduler to match against.
"""
@abc.abstractmethod
def getRejectFilters(self, config):
def getRejectFilters(self, config, error_accumulator):
"""Return a list of ChangeFilters for the scheduler to match against.
"""