Merge "Merger: ensure_cloned() now looks for '.git'"
diff --git a/doc/source/launchers.rst b/doc/source/launchers.rst
index c799291..b95354f 100644
--- a/doc/source/launchers.rst
+++ b/doc/source/launchers.rst
@@ -78,6 +78,10 @@
The target branch for the change that triggered this build.
**ZUUL_CHANGE**
The Gerrit change ID for the change that triggered this build.
+**ZUUL_CHANGES**
+ A caret character separated list of the changes upon which this build
+ depends, in the form of a colon character separated list
+ consisting of project name, target branch, and revision ref.
**ZUUL_CHANGE_IDS**
All of the Gerrit change IDs that are included in this build (useful
when the DependentPipelineManager combines changes for testing).
diff --git a/doc/source/zuul.rst b/doc/source/zuul.rst
index 7a10ca9..6c77477 100644
--- a/doc/source/zuul.rst
+++ b/doc/source/zuul.rst
@@ -809,6 +809,47 @@
file patterns listed here. This field is treated as a regular
expression and multiple expressions may be listed.
+**skip-if (optional)**
+
+ This job should not be run if all the patterns specified by the
+ optional fields listed below match on their targets. When multiple
+ sets of parameters are provided, this job will be skipped if any set
+ matches. For example: ::
+
+ jobs:
+ - name: check-tempest-dsvm-neutron
+ skip-if:
+ - project: ^openstack/neutron$
+ branch: ^stable/juno$
+ all-files-match-any:
+ - ^neutron/tests/.*$
+ - ^tools/.*$
+ - all-files-match-any:
+ - ^doc/.*$
+ - ^.*\.rst$
+
+ With this configuration, the job would be skipped for a neutron
+ patchset for the stable/juno branch provided that every file in the
+ change matched at least one of the specified file regexes. The job
+ will also be skipped for any patchset that modified only the doc
+ tree or rst files.
+
+ *project* (optional)
+ The regular expression to match against the project of the change.
+
+ *branch* (optional)
+ The regular expression to match against the branch or ref of the
+ change.
+
+ *all-files-match-any* (optional)
+ A list of regular expressions intended to match the files involved
+ in the change. This parameter will be considered to match a
+ change only if all files in a change match at least one of these
+ expressions.
+
+ The pattern for '/COMMIT_MSG' is always considered to match and
+ does not have to be included.
+
**voting (optional)**
Boolean value (``true`` or ``false``) that indicates whatever
a job is voting or not. Default: ``true``.
@@ -1051,13 +1092,11 @@
If you need to abort Zuul and intend to manually requeue changes for
jobs which were running in its pipelines, prior to terminating you can
use the zuul-changes.py tool script to simplify the process. For
-example, this would give you a list of Gerrit commands to reverify or
-recheck changes for the gate and check pipelines respectively::
+example, this would give you a list of zuul-enqueue commands to requeue
+changes for the gate and check pipelines respectively::
- ./tools/zuul-changes.py --review-host=review.openstack.org \
- http://zuul.openstack.org/ gate 'reverify'
- ./tools/zuul-changes.py --review-host=review.openstack.org \
- http://zuul.openstack.org/ check 'recheck'
+ ./tools/zuul-changes.py http://zuul.openstack.org/ gate
+ ./tools/zuul-changes.py http://zuul.openstack.org/ check
If you send a SIGUSR2 to the zuul-server process, or the forked process
that runs the Gearman daemon, Zuul will dump a stack trace for each
diff --git a/etc/status/.gitignore b/etc/status/.gitignore
index 1ecdbed..218f297 100644
--- a/etc/status/.gitignore
+++ b/etc/status/.gitignore
@@ -1,4 +1 @@
-public_html/jquery.min.js
-public_html/jquery-visibility.js
-public_html/bootstrap
-public_html/jquery.graphite.js
+public_html/lib
diff --git a/etc/status/fetch-dependencies.sh b/etc/status/fetch-dependencies.sh
index b31d0de..ccaf74c 100755
--- a/etc/status/fetch-dependencies.sh
+++ b/etc/status/fetch-dependencies.sh
@@ -1,21 +1,23 @@
#!/bin/bash
BASE_DIR=$(cd $(dirname $0); pwd)
-echo "Destination: $BASE_DIR/public_html"
+DEST_DIR=$BASE_DIR/public_html/lib
+mkdir -p $DEST_DIR
+echo "Destination: $DEST_DIR"
echo "Fetching jquery.min.js..."
-curl -L --silent http://code.jquery.com/jquery.min.js > $BASE_DIR/public_html/jquery.min.js
+curl -L --silent http://code.jquery.com/jquery.min.js > $DEST_DIR/jquery.min.js
echo "Fetching jquery-visibility.min.js..."
-curl -L --silent https://raw.githubusercontent.com/mathiasbynens/jquery-visibility/master/jquery-visibility.js > $BASE_DIR/public_html/jquery-visibility.js
+curl -L --silent https://raw.githubusercontent.com/mathiasbynens/jquery-visibility/master/jquery-visibility.js > $DEST_DIR/jquery-visibility.js
echo "Fetching jquery.graphite.js..."
curl -L --silent https://github.com/prestontimmons/graphitejs/archive/master.zip > jquery-graphite.zip
-unzip -q -o jquery-graphite.zip -d $BASE_DIR/public_html/
-mv $BASE_DIR/public_html/graphitejs-master/jquery.graphite.js $BASE_DIR/public_html/
-rm -R jquery-graphite.zip $BASE_DIR/public_html/graphitejs-master
+unzip -q -o jquery-graphite.zip -d $DEST_DIR/
+mv $DEST_DIR/graphitejs-master/jquery.graphite.js $DEST_DIR/
+rm -R jquery-graphite.zip $DEST_DIR/graphitejs-master
echo "Fetching bootstrap..."
curl -L --silent https://github.com/twbs/bootstrap/releases/download/v3.1.1/bootstrap-3.1.1-dist.zip > bootstrap.zip
-unzip -q -o bootstrap.zip -d $BASE_DIR/public_html/
-mv $BASE_DIR/public_html/bootstrap-3.1.1-dist $BASE_DIR/public_html/bootstrap
+unzip -q -o bootstrap.zip -d $DEST_DIR/
+mv $DEST_DIR/bootstrap-3.1.1-dist $DEST_DIR/bootstrap
rm bootstrap.zip
diff --git a/etc/status/public_html/index.html b/etc/status/public_html/index.html
index 3bd7a12..97025a6 100644
--- a/etc/status/public_html/index.html
+++ b/etc/status/public_html/index.html
@@ -19,16 +19,14 @@
<html dir="ltr" lang="en">
<head>
<title>Zuul Status</title>
- <link rel="stylesheet" href="bootstrap/css/bootstrap.min.css">
+ <link rel="stylesheet" href="lib/bootstrap/css/bootstrap.min.css">
<link rel="stylesheet" href="styles/zuul.css" />
</head>
<body>
-
<div id="zuul_container"></div>
-
- <script src="jquery.min.js"></script>
- <script src="jquery-visibility.js"></script>
- <script src="jquery.graphite.js"></script>
+ <script src="lib/jquery.min.js"></script>
+ <script src="lib/jquery-visibility.js"></script>
+ <script src="lib/jquery.graphite.js"></script>
<script src="jquery.zuul.js"></script>
<script src="zuul.app.js"></script>
<script>
diff --git a/etc/status/public_html/jquery.zuul.js b/etc/status/public_html/jquery.zuul.js
index c13e48c..40a5d4d 100644
--- a/etc/status/public_html/jquery.zuul.js
+++ b/etc/status/public_html/jquery.zuul.js
@@ -316,9 +316,11 @@
var $enqueue_time = $('<small />').addClass('time')
.attr('title', 'Elapsed Time').html(enqueue_time);
- var $right = $('<div />')
- .addClass('col-xs-4 text-right')
- .append($remaining_time, $('<br />'), $enqueue_time);
+ var $right = $('<div />');
+ if (change.live === true) {
+ $right.addClass('col-xs-4 text-right')
+ .append($remaining_time, $('<br />'), $enqueue_time);
+ }
var $header = $('<div />')
.addClass('row')
@@ -373,7 +375,7 @@
else if (change.live !== true) {
// Grey icon
icon_name = 'grey.png';
- icon_title = 'Dependent change independently tested';
+ icon_title = 'Dependent change required for testing';
}
else if (change.failing_reasons &&
change.failing_reasons.length > 0) {
@@ -840,7 +842,9 @@
});
$.each(change_queue.heads, function(head_i, head) {
$.each(head, function(change_i, change) {
- count += 1;
+ if (change.live === true) {
+ count += 1;
+ }
var idx = tree.indexOf(change.id);
if (idx > -1) {
change._tree_index = idx;
diff --git a/etc/status/public_html/zuul.app.js b/etc/status/public_html/zuul.app.js
index 6f87a92..640437b 100644
--- a/etc/status/public_html/zuul.app.js
+++ b/etc/status/public_html/zuul.app.js
@@ -39,6 +39,9 @@
});
}
+/**
+ * @return The $.zuul instance
+ */
function zuul_start($) {
// Start the zuul app (expects default dom)
@@ -94,4 +97,6 @@
}
});
});
-}
\ No newline at end of file
+
+ return zuul;
+}
diff --git a/requirements.txt b/requirements.txt
index b24d171..c682999 100644
--- a/requirements.txt
+++ b/requirements.txt
@@ -5,14 +5,13 @@
Paste
WebOb>=1.2.3,<1.3
paramiko>=1.8.0
-GitPython>=0.3.2.1
-lockfile>=0.8
+GitPython>=0.3.3
ordereddict
python-daemon>=2.0.4
extras
statsd>=1.0.0,<3.0
voluptuous>=0.7
-gear>=0.5.4,<1.0.0
+gear>=0.5.7,<1.0.0
apscheduler>=2.1.1,<3.0
PrettyTable>=0.6,<0.8
babel>=1.0
diff --git a/tests/base.py b/tests/base.py
index 5ae0d3e..8c96d18 100755
--- a/tests/base.py
+++ b/tests/base.py
@@ -378,6 +378,8 @@
class FakeGerrit(object):
+ log = logging.getLogger("zuul.test.FakeGerrit")
+
def __init__(self, *args, **kw):
self.event_queue = Queue.Queue()
self.fixture_dir = os.path.join(FIXTURE_DIR, 'gerrit')
@@ -394,7 +396,7 @@
return c
def addEvent(self, data):
- return self.event_queue.put(data)
+ return self.event_queue.put((time.time(), data))
def getEvent(self):
return self.event_queue.get()
@@ -418,12 +420,18 @@
return {}
def simpleQuery(self, query):
+ self.log.debug("simpleQuery: %s" % query)
self.queries.append(query)
if query.startswith('change:'):
# Query a specific changeid
changeid = query[len('change:'):]
l = [change.query() for change in self.changes.values()
if change.data['id'] == changeid]
+ elif query.startswith('message:'):
+ # Query the content of a commit message
+ msg = query[len('message:'):].strip()
+ l = [change.query() for change in self.changes.values()
+ if msg in change.data['commitMessage']]
else:
# Query all open changes
l = [change.query() for change in self.changes.values()]
@@ -471,6 +479,7 @@
def __init__(self, upstream_root, *args):
super(FakeGerritTrigger, self).__init__(*args)
self.upstream_root = upstream_root
+ self.gerrit_connector.delay = 0.0
def getGitUrl(self, project):
return os.path.join(self.upstream_root, project.name)
@@ -811,11 +820,11 @@
return endpoint, ''
-class ZuulTestCase(testtools.TestCase):
+class BaseTestCase(testtools.TestCase):
log = logging.getLogger("zuul.test")
def setUp(self):
- super(ZuulTestCase, self).setUp()
+ super(BaseTestCase, self).setUp()
test_timeout = os.environ.get('OS_TEST_TIMEOUT', 0)
try:
test_timeout = int(test_timeout)
@@ -839,6 +848,12 @@
level=logging.DEBUG,
format='%(asctime)s %(name)-32s '
'%(levelname)-8s %(message)s'))
+
+
+class ZuulTestCase(BaseTestCase):
+
+ def setUp(self):
+ super(ZuulTestCase, self).setUp()
if USE_TEMPDIR:
tmp_root = self.useFixture(fixtures.TempDir(
rootdir=os.environ.get("ZUUL_TEST_ROOT"))
@@ -1101,6 +1116,12 @@
while len(self.gearman_server.functions) < count:
time.sleep(0)
+ def orderedRelease(self):
+ # Run one build at a time to ensure non-race order:
+ while len(self.builds):
+ self.release(self.builds[0])
+ self.waitUntilSettled()
+
def release(self, job):
if isinstance(job, FakeBuild):
job.release()
@@ -1153,8 +1174,6 @@
return True
def areAllBuildsWaiting(self):
- ret = True
-
builds = self.launcher.builds.values()
for build in builds:
client_job = None
@@ -1166,35 +1185,34 @@
if not client_job:
self.log.debug("%s is not known to the gearman client" %
build)
- ret = False
- continue
+ return False
if not client_job.handle:
self.log.debug("%s has no handle" % client_job)
- ret = False
- continue
+ return False
server_job = self.gearman_server.jobs.get(client_job.handle)
if not server_job:
self.log.debug("%s is not known to the gearman server" %
client_job)
- ret = False
- continue
+ return False
if not hasattr(server_job, 'waiting'):
self.log.debug("%s is being enqueued" % server_job)
- ret = False
- continue
+ return False
if server_job.waiting:
continue
worker_job = self.worker.gearman_jobs.get(server_job.unique)
if worker_job:
+ if build.number is None:
+ self.log.debug("%s has not reported start" % worker_job)
+ return False
if worker_job.build.isWaiting():
continue
else:
self.log.debug("%s is running" % worker_job)
- ret = False
+ return False
else:
self.log.debug("%s is unassigned" % server_job)
- ret = False
- return ret
+ return False
+ return True
def waitUntilSettled(self):
self.log.debug("Waiting until settled...")
diff --git a/tests/fixtures/layout-no-timer.yaml b/tests/fixtures/layout-no-timer.yaml
index 9436821..ca40d13 100644
--- a/tests/fixtures/layout-no-timer.yaml
+++ b/tests/fixtures/layout-no-timer.yaml
@@ -1,14 +1,28 @@
pipelines:
+ - name: check
+ manager: IndependentPipelineManager
+ trigger:
+ gerrit:
+ - event: patchset-created
+ success:
+ gerrit:
+ verified: 1
+ failure:
+ gerrit:
+ verified: -1
+
- name: periodic
manager: IndependentPipelineManager
# Trigger is required, set it to one that is a noop
# during tests that check the timer trigger.
trigger:
gerrit:
- - event: patchset-created
+ - event: ref-updated
projects:
- name: org/project
+ check:
+ - project-test1
periodic:
- project-bitrot-stable-old
- project-bitrot-stable-older
diff --git a/tests/fixtures/layout-skip-if.yaml b/tests/fixtures/layout-skip-if.yaml
new file mode 100644
index 0000000..0cfb445
--- /dev/null
+++ b/tests/fixtures/layout-skip-if.yaml
@@ -0,0 +1,29 @@
+pipelines:
+ - name: check
+ manager: IndependentPipelineManager
+ trigger:
+ gerrit:
+ - event: patchset-created
+ success:
+ gerrit:
+ verified: 1
+ failure:
+ gerrit:
+ verified: -1
+
+
+jobs:
+ # Defining a metajob will validate that the skip-if attribute of the
+ # metajob is correctly copied to the job.
+ - name: ^.*skip-if$
+ skip-if:
+ - project: ^org/project$
+ branch: ^master$
+ all-files-match-any:
+ - ^README$
+ - name: project-test-skip-if
+
+projects:
+ - name: org/project
+ check:
+ - project-test-skip-if
diff --git a/tests/test_change_matcher.py b/tests/test_change_matcher.py
new file mode 100644
index 0000000..1f4ab93
--- /dev/null
+++ b/tests/test_change_matcher.py
@@ -0,0 +1,154 @@
+# Copyright 2015 Red Hat, Inc.
+#
+# Licensed under the Apache License, Version 2.0 (the "License"); you may
+# not use this file except in compliance with the License. You may obtain
+# a copy of the License at
+#
+# http://www.apache.org/licenses/LICENSE-2.0
+#
+# Unless required by applicable law or agreed to in writing, software
+# distributed under the License is distributed on an "AS IS" BASIS, WITHOUT
+# WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. See the
+# License for the specific language governing permissions and limitations
+# under the License.
+
+from zuul import change_matcher as cm
+from zuul import model
+
+from tests.base import BaseTestCase
+
+
+class BaseTestMatcher(BaseTestCase):
+
+ project = 'project'
+
+ def setUp(self):
+ super(BaseTestMatcher, self).setUp()
+ self.change = model.Change(self.project)
+
+
+class TestAbstractChangeMatcher(BaseTestMatcher):
+
+ def test_str(self):
+ matcher = cm.ProjectMatcher(self.project)
+ self.assertEqual(str(matcher), '{ProjectMatcher:project}')
+
+ def test_repr(self):
+ matcher = cm.ProjectMatcher(self.project)
+ self.assertEqual(repr(matcher), '<ProjectMatcher project>')
+
+
+class TestProjectMatcher(BaseTestMatcher):
+
+ def test_matches_returns_true(self):
+ matcher = cm.ProjectMatcher(self.project)
+ self.assertTrue(matcher.matches(self.change))
+
+ def test_matches_returns_false(self):
+ matcher = cm.ProjectMatcher('not_project')
+ self.assertFalse(matcher.matches(self.change))
+
+
+class TestBranchMatcher(BaseTestMatcher):
+
+ def setUp(self):
+ super(TestBranchMatcher, self).setUp()
+ self.matcher = cm.BranchMatcher('foo')
+
+ def test_matches_returns_true_on_matching_branch(self):
+ self.change.branch = 'foo'
+ self.assertTrue(self.matcher.matches(self.change))
+
+ def test_matches_returns_true_on_matching_ref(self):
+ self.change.branch = 'bar'
+ self.change.ref = 'foo'
+ self.assertTrue(self.matcher.matches(self.change))
+
+ def test_matches_returns_false_for_no_match(self):
+ self.change.branch = 'bar'
+ self.change.ref = 'baz'
+ self.assertFalse(self.matcher.matches(self.change))
+
+ def test_matches_returns_false_for_missing_attrs(self):
+ delattr(self.change, 'branch')
+ # ref is by default not an attribute
+ self.assertFalse(self.matcher.matches(self.change))
+
+
+class TestFileMatcher(BaseTestMatcher):
+
+ def setUp(self):
+ super(TestFileMatcher, self).setUp()
+ self.matcher = cm.FileMatcher('filename')
+
+ def test_matches_returns_true(self):
+ self.change.files = ['filename']
+ self.assertTrue(self.matcher.matches(self.change))
+
+ def test_matches_returns_false_when_no_files(self):
+ self.assertFalse(self.matcher.matches(self.change))
+
+ def test_matches_returns_false_when_files_attr_missing(self):
+ delattr(self.change, 'files')
+ self.assertFalse(self.matcher.matches(self.change))
+
+
+class TestAbstractMatcherCollection(BaseTestMatcher):
+
+ def test_str(self):
+ matcher = cm.MatchAll([cm.FileMatcher('foo')])
+ self.assertEqual(str(matcher), '{MatchAll:{FileMatcher:foo}}')
+
+ def test_repr(self):
+ matcher = cm.MatchAll([])
+ self.assertEqual(repr(matcher), '<MatchAll>')
+
+
+class TestMatchAllFiles(BaseTestMatcher):
+
+ def setUp(self):
+ super(TestMatchAllFiles, self).setUp()
+ self.matcher = cm.MatchAllFiles([cm.FileMatcher('^docs/.*$')])
+
+ def _test_matches(self, expected, files=None):
+ if files is not None:
+ self.change.files = files
+ self.assertEqual(expected, self.matcher.matches(self.change))
+
+ def test_matches_returns_false_when_files_attr_missing(self):
+ delattr(self.change, 'files')
+ self._test_matches(False)
+
+ def test_matches_returns_false_when_no_files(self):
+ self._test_matches(False)
+
+ def test_matches_returns_false_when_not_all_files_match(self):
+ self._test_matches(False, files=['docs/foo', 'foo/bar'])
+
+ def test_matches_returns_true_when_commit_message_matches(self):
+ self._test_matches(True, files=['/COMMIT_MSG'])
+
+ def test_matches_returns_true_when_all_files_match(self):
+ self._test_matches(True, files=['docs/foo'])
+
+
+class TestMatchAll(BaseTestMatcher):
+
+ def test_matches_returns_true(self):
+ matcher = cm.MatchAll([cm.ProjectMatcher(self.project)])
+ self.assertTrue(matcher.matches(self.change))
+
+ def test_matches_returns_false_for_missing_matcher(self):
+ matcher = cm.MatchAll([cm.ProjectMatcher('not_project')])
+ self.assertFalse(matcher.matches(self.change))
+
+
+class TestMatchAny(BaseTestMatcher):
+
+ def test_matches_returns_true(self):
+ matcher = cm.MatchAny([cm.ProjectMatcher(self.project)])
+ self.assertTrue(matcher.matches(self.change))
+
+ def test_matches_returns_false(self):
+ matcher = cm.MatchAny([cm.ProjectMatcher('not_project')])
+ self.assertFalse(matcher.matches(self.change))
diff --git a/tests/test_model.py b/tests/test_model.py
new file mode 100644
index 0000000..2711618
--- /dev/null
+++ b/tests/test_model.py
@@ -0,0 +1,64 @@
+# Copyright 2015 Red Hat, Inc.
+#
+# Licensed under the Apache License, Version 2.0 (the "License"); you may
+# not use this file except in compliance with the License. You may obtain
+# a copy of the License at
+#
+# http://www.apache.org/licenses/LICENSE-2.0
+#
+# Unless required by applicable law or agreed to in writing, software
+# distributed under the License is distributed on an "AS IS" BASIS, WITHOUT
+# WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. See the
+# License for the specific language governing permissions and limitations
+# under the License.
+
+from zuul import change_matcher as cm
+from zuul import model
+
+from tests.base import BaseTestCase
+
+
+class TestJob(BaseTestCase):
+
+ @property
+ def job(self):
+ job = model.Job('job')
+ job.skip_if_matcher = cm.MatchAll([
+ cm.ProjectMatcher('^project$'),
+ cm.MatchAllFiles([cm.FileMatcher('^docs/.*$')]),
+ ])
+ return job
+
+ def test_change_matches_returns_false_for_matched_skip_if(self):
+ change = model.Change('project')
+ change.files = ['docs/foo']
+ self.assertFalse(self.job.changeMatches(change))
+
+ def test_change_matches_returns_true_for_unmatched_skip_if(self):
+ change = model.Change('project')
+ change.files = ['foo']
+ self.assertTrue(self.job.changeMatches(change))
+
+ def test_copy_retains_skip_if(self):
+ job = model.Job('job')
+ job.copy(self.job)
+ self.assertTrue(job.skip_if_matcher)
+
+ def _assert_job_booleans_are_not_none(self, job):
+ self.assertIsNotNone(job.voting)
+ self.assertIsNotNone(job.hold_following_changes)
+
+ def test_job_sets_defaults_for_boolean_attributes(self):
+ job = model.Job('job')
+ self._assert_job_booleans_are_not_none(job)
+
+ def test_metajob_does_not_set_defaults_for_boolean_attributes(self):
+ job = model.Job('^job')
+ self.assertIsNone(job.voting)
+ self.assertIsNone(job.hold_following_changes)
+
+ def test_metajob_copy_does_not_set_undefined_boolean_attributes(self):
+ job = model.Job('job')
+ metajob = model.Job('^job')
+ job.copy(metajob)
+ self._assert_job_booleans_are_not_none(job)
diff --git a/tests/test_scheduler.py b/tests/test_scheduler.py
index dcf7a8b..3b59e3e 100755
--- a/tests/test_scheduler.py
+++ b/tests/test_scheduler.py
@@ -22,22 +22,62 @@
import time
import urllib
import urllib2
+import yaml
import git
import testtools
+import zuul.change_matcher
import zuul.scheduler
import zuul.rpcclient
import zuul.reporter.gerrit
import zuul.reporter.smtp
-from tests.base import ZuulTestCase, repack_repo
+from tests.base import (
+ BaseTestCase,
+ ZuulTestCase,
+ repack_repo,
+)
logging.basicConfig(level=logging.DEBUG,
format='%(asctime)s %(name)-32s '
'%(levelname)-8s %(message)s')
+class TestSchedulerConfigParsing(BaseTestCase):
+
+ def test_parse_skip_if(self):
+ job_yaml = """
+jobs:
+ - name: job_name
+ skip-if:
+ - project: ^project_name$
+ branch: ^stable/icehouse$
+ all-files-match-any:
+ - ^filename$
+ - project: ^project2_name$
+ all-files-match-any:
+ - ^filename2$
+ """.strip()
+ data = yaml.load(job_yaml)
+ config_job = data.get('jobs')[0]
+ sched = zuul.scheduler.Scheduler()
+ cm = zuul.change_matcher
+ expected = cm.MatchAny([
+ cm.MatchAll([
+ cm.ProjectMatcher('^project_name$'),
+ cm.BranchMatcher('^stable/icehouse$'),
+ cm.MatchAllFiles([cm.FileMatcher('^filename$')]),
+ ]),
+ cm.MatchAll([
+ cm.ProjectMatcher('^project2_name$'),
+ cm.MatchAllFiles([cm.FileMatcher('^filename2$')]),
+ ]),
+ ])
+ matcher = sched._parseSkipIf(config_job)
+ self.assertEqual(expected, matcher)
+
+
class TestScheduler(ZuulTestCase):
def test_jobs_launched(self):
@@ -1670,6 +1710,41 @@
self.assertEqual(A.reported, 0, "Abandoned change should not report")
self.assertEqual(B.reported, 1, "Change should report")
+ def test_abandoned_not_timer(self):
+ "Test that an abandoned change does not cancel timer jobs"
+
+ self.worker.hold_jobs_in_build = True
+
+ # Start timer trigger - also org/project
+ self.config.set('zuul', 'layout_config',
+ 'tests/fixtures/layout-idle.yaml')
+ self.sched.reconfigure(self.config)
+ self.registerJobs()
+ # The pipeline triggers every second, so we should have seen
+ # several by now.
+ time.sleep(5)
+ self.waitUntilSettled()
+ # Stop queuing timer triggered jobs so that the assertions
+ # below don't race against more jobs being queued.
+ self.config.set('zuul', 'layout_config',
+ 'tests/fixtures/layout-no-timer.yaml')
+ self.sched.reconfigure(self.config)
+ self.registerJobs()
+ self.assertEqual(len(self.builds), 2, "Two timer jobs")
+
+ A = self.fake_gerrit.addFakeChange('org/project', 'master', 'A')
+ self.fake_gerrit.addEvent(A.getPatchsetCreatedEvent(1))
+ self.waitUntilSettled()
+ self.assertEqual(len(self.builds), 3, "One change plus two timer jobs")
+
+ self.fake_gerrit.addEvent(A.getChangeAbandonedEvent())
+ self.waitUntilSettled()
+
+ self.assertEqual(len(self.builds), 2, "Two timer jobs remain")
+
+ self.worker.release()
+ self.waitUntilSettled()
+
def test_zuul_url_return(self):
"Test if ZUUL_URL is returning when zuul_url is set in zuul.conf"
self.assertTrue(self.sched.config.has_option('merger', 'zuul_url'))
@@ -1965,6 +2040,33 @@
self.assertEqual(B.data['status'], 'MERGED')
self.assertEqual(B.reported, 2)
+ def _test_skip_if_jobs(self, branch, should_skip):
+ "Test that jobs with a skip-if filter run only when appropriate"
+ self.config.set('zuul', 'layout_config',
+ 'tests/fixtures/layout-skip-if.yaml')
+ self.sched.reconfigure(self.config)
+ self.registerJobs()
+
+ change = self.fake_gerrit.addFakeChange('org/project',
+ branch,
+ 'test skip-if')
+ self.fake_gerrit.addEvent(change.getPatchsetCreatedEvent(1))
+ self.waitUntilSettled()
+
+ tested_change_ids = [x.changes[0] for x in self.history
+ if x.name == 'project-test-skip-if']
+
+ if should_skip:
+ self.assertEqual([], tested_change_ids)
+ else:
+ self.assertIn(change.data['number'], tested_change_ids)
+
+ def test_skip_if_match_skips_job(self):
+ self._test_skip_if_jobs(branch='master', should_skip=True)
+
+ def test_skip_if_no_match_runs_job(self):
+ self._test_skip_if_jobs(branch='mp', should_skip=False)
+
def test_test_config(self):
"Test that we can test the config"
sched = zuul.scheduler.Scheduler()
@@ -2022,9 +2124,7 @@
self.waitUntilSettled()
# Run one build at a time to ensure non-race order:
- for x in range(6):
- self.release(self.builds[0])
- self.waitUntilSettled()
+ self.orderedRelease()
self.worker.hold_jobs_in_build = False
self.waitUntilSettled()
@@ -2053,7 +2153,10 @@
self.assertIn('Content-Type', headers)
self.assertEqual(headers['Content-Type'],
'application/json; charset=UTF-8')
+ self.assertIn('Access-Control-Allow-Origin', headers)
+ self.assertIn('Cache-Control', headers)
self.assertIn('Last-Modified', headers)
+ self.assertIn('Expires', headers)
data = f.read()
self.worker.hold_jobs_in_build = False
@@ -3005,7 +3108,6 @@
self.assertEqual('project-merge', job['name'])
self.assertEqual('gate', job['pipeline'])
self.assertEqual(False, job['retry'])
- self.assertEqual(13, len(job['parameters']))
self.assertEqual('https://server/job/project-merge/0/',
job['url'])
self.assertEqual(7, len(job['worker']))
@@ -3216,6 +3318,45 @@
self.assertEqual(A.data['status'], 'MERGED')
self.assertEqual(A.reported, 2)
+ def test_crd_gate_reverse(self):
+ "Test reverse cross-repo dependencies"
+ A = self.fake_gerrit.addFakeChange('org/project1', 'master', 'A')
+ B = self.fake_gerrit.addFakeChange('org/project2', 'master', 'B')
+ A.addApproval('CRVW', 2)
+ B.addApproval('CRVW', 2)
+
+ # A Depends-On: B
+
+ A.data['commitMessage'] = '%s\n\nDepends-On: %s\n' % (
+ A.subject, B.data['id'])
+
+ self.fake_gerrit.addEvent(A.addApproval('APRV', 1))
+ self.waitUntilSettled()
+
+ self.assertEqual(A.data['status'], 'NEW')
+ self.assertEqual(B.data['status'], 'NEW')
+
+ self.worker.hold_jobs_in_build = True
+ A.addApproval('APRV', 1)
+ self.fake_gerrit.addEvent(B.addApproval('APRV', 1))
+ self.waitUntilSettled()
+
+ self.worker.release('.*-merge')
+ self.waitUntilSettled()
+ self.worker.release('.*-merge')
+ self.waitUntilSettled()
+ self.worker.hold_jobs_in_build = False
+ self.worker.release()
+ self.waitUntilSettled()
+
+ self.assertEqual(A.data['status'], 'MERGED')
+ self.assertEqual(B.data['status'], 'MERGED')
+ self.assertEqual(A.reported, 2)
+ self.assertEqual(B.reported, 2)
+
+ self.assertEqual(self.getJobFromHistory('project1-merge').changes,
+ '2,1 1,1')
+
def test_crd_cycle(self):
"Test cross-repo dependency cycles"
A = self.fake_gerrit.addFakeChange('org/project1', 'master', 'A')
@@ -3282,7 +3423,7 @@
def test_crd_check_git_depends(self):
"Test single-repo dependencies in independent pipelines"
- self.gearman_server.hold_jobs_in_queue = True
+ self.gearman_server.hold_jobs_in_build = True
A = self.fake_gerrit.addFakeChange('org/project1', 'master', 'A')
B = self.fake_gerrit.addFakeChange('org/project1', 'master', 'B')
@@ -3294,8 +3435,8 @@
self.fake_gerrit.addEvent(B.getPatchsetCreatedEvent(1))
self.waitUntilSettled()
- self.gearman_server.hold_jobs_in_queue = False
- self.gearman_server.release()
+ self.orderedRelease()
+ self.gearman_server.hold_jobs_in_build = False
self.waitUntilSettled()
self.assertEqual(A.data['status'], 'NEW')
@@ -3312,7 +3453,7 @@
def test_crd_check_duplicate(self):
"Test duplicate check in independent pipelines"
- self.gearman_server.hold_jobs_in_queue = True
+ self.worker.hold_jobs_in_build = True
A = self.fake_gerrit.addFakeChange('org/project1', 'master', 'A')
B = self.fake_gerrit.addFakeChange('org/project1', 'master', 'B')
check_pipeline = self.sched.layout.pipelines['check']
@@ -3333,12 +3474,11 @@
self.waitUntilSettled()
self.assertEqual(len(check_pipeline.getAllItems()), 3)
- self.gearman_server.hold_jobs_in_queue = False
- self.gearman_server.release('.*-merge')
- self.waitUntilSettled()
- self.gearman_server.release('.*-merge')
- self.waitUntilSettled()
- self.gearman_server.release()
+ # Release jobs in order to avoid races with change A jobs
+ # finishing before change B jobs.
+ self.orderedRelease()
+ self.worker.hold_jobs_in_build = False
+ self.worker.release()
self.waitUntilSettled()
self.assertEqual(A.data['status'], 'NEW')
@@ -3435,3 +3575,43 @@
# Each job should have tested exactly one change
for job in self.history:
self.assertEqual(len(job.changes.split()), 1)
+
+ def test_crd_check_transitive(self):
+ "Test transitive cross-repo dependencies"
+ # Specifically, if A -> B -> C, and C gets a new patchset and
+ # A gets a new patchset, ensure the test of A,2 includes B,1
+ # and C,2 (not C,1 which would indicate stale data in the
+ # cache for B).
+ A = self.fake_gerrit.addFakeChange('org/project1', 'master', 'A')
+ B = self.fake_gerrit.addFakeChange('org/project2', 'master', 'B')
+ C = self.fake_gerrit.addFakeChange('org/project3', 'master', 'C')
+
+ # A Depends-On: B
+ A.data['commitMessage'] = '%s\n\nDepends-On: %s\n' % (
+ A.subject, B.data['id'])
+
+ # B Depends-On: C
+ B.data['commitMessage'] = '%s\n\nDepends-On: %s\n' % (
+ B.subject, C.data['id'])
+
+ self.fake_gerrit.addEvent(A.getPatchsetCreatedEvent(1))
+ self.waitUntilSettled()
+ self.assertEqual(self.history[-1].changes, '3,1 2,1 1,1')
+
+ self.fake_gerrit.addEvent(B.getPatchsetCreatedEvent(1))
+ self.waitUntilSettled()
+ self.assertEqual(self.history[-1].changes, '3,1 2,1')
+
+ self.fake_gerrit.addEvent(C.getPatchsetCreatedEvent(1))
+ self.waitUntilSettled()
+ self.assertEqual(self.history[-1].changes, '3,1')
+
+ C.addPatchset()
+ self.fake_gerrit.addEvent(C.getPatchsetCreatedEvent(2))
+ self.waitUntilSettled()
+ self.assertEqual(self.history[-1].changes, '3,2')
+
+ A.addPatchset()
+ self.fake_gerrit.addEvent(A.getPatchsetCreatedEvent(2))
+ self.waitUntilSettled()
+ self.assertEqual(self.history[-1].changes, '3,2 2,1 1,2')
diff --git a/tests/test_zuultrigger.py b/tests/test_zuultrigger.py
index a26fa86..2f0e4f0 100644
--- a/tests/test_zuultrigger.py
+++ b/tests/test_zuultrigger.py
@@ -111,8 +111,8 @@
"merged with the current state of the repository. Please rebase "
"your change and upload a new patchset.")
- self.assertEqual(self.fake_gerrit.queries[0],
- "project:org/project status:open")
+ self.assertTrue("project:org/project status:open" in
+ self.fake_gerrit.queries)
# Reconfigure and run the test again. This is a regression
# check to make sure that we don't end up with a stale trigger
diff --git a/tools/zuul-changes.py b/tools/zuul-changes.py
index 7fc541b..9dbf504 100755
--- a/tools/zuul-changes.py
+++ b/tools/zuul-changes.py
@@ -1,5 +1,6 @@
#!/usr/bin/env python
# Copyright 2013 OpenStack Foundation
+# Copyright 2015 Hewlett-Packard Development Company, L.P.
#
# Licensed under the Apache License, Version 2.0 (the "License"); you may
# not use this file except in compliance with the License. You may obtain
@@ -13,9 +14,6 @@
# License for the specific language governing permissions and limitations
# under the License.
-# Print commands to leave gerrit comments for every change in one of
-# Zuul's pipelines.
-
import urllib2
import json
import argparse
@@ -23,9 +21,6 @@
parser = argparse.ArgumentParser()
parser.add_argument('url', help='The URL of the running Zuul instance')
parser.add_argument('pipeline_name', help='The name of the Zuul pipeline')
-parser.add_argument('comment', help='The text of the Gerrit comment')
-parser.add_argument('--review-host', default='review',
- help='The Gerrit hostname')
options = parser.parse_args()
data = urllib2.urlopen('%s/status.json' % options.url).read()
@@ -37,7 +32,13 @@
for queue in pipeline['change_queues']:
for head in queue['heads']:
for change in head:
- print 'ssh %s gerrit review %s --message \\"%s\\"' % (
- options.review_host,
- change['id'],
- options.comment)
+ if not change['live']:
+ continue
+ cid, cps = change['id'].split(',')
+ print (
+ "zuul enqueue --trigger gerrit --pipeline %s "
+ "--project %s --change %s,%s" % (
+ options.pipeline_name,
+ change['project'],
+ cid, cps)
+ )
diff --git a/zuul/change_matcher.py b/zuul/change_matcher.py
new file mode 100644
index 0000000..ed380f0
--- /dev/null
+++ b/zuul/change_matcher.py
@@ -0,0 +1,132 @@
+# Copyright 2015 Red Hat, Inc.
+#
+# Licensed under the Apache License, Version 2.0 (the "License"); you may
+# not use this file except in compliance with the License. You may obtain
+# a copy of the License at
+#
+# http://www.apache.org/licenses/LICENSE-2.0
+#
+# Unless required by applicable law or agreed to in writing, software
+# distributed under the License is distributed on an "AS IS" BASIS, WITHOUT
+# WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. See the
+# License for the specific language governing permissions and limitations
+# under the License.
+
+"""
+This module defines classes used in matching changes based on job
+configuration.
+"""
+
+import re
+
+
+class AbstractChangeMatcher(object):
+
+ def __init__(self, regex):
+ self._regex = regex
+ self.regex = re.compile(regex)
+
+ def matches(self, change):
+ """Return a boolean indication of whether change matches
+ implementation-specific criteria.
+ """
+ raise NotImplementedError()
+
+ def copy(self):
+ return self.__class__(self._regex)
+
+ def __eq__(self, other):
+ return str(self) == str(other)
+
+ def __str__(self):
+ return '{%s:%s}' % (self.__class__.__name__, self._regex)
+
+ def __repr__(self):
+ return '<%s %s>' % (self.__class__.__name__, self._regex)
+
+
+class ProjectMatcher(AbstractChangeMatcher):
+
+ def matches(self, change):
+ return self.regex.match(str(change.project))
+
+
+class BranchMatcher(AbstractChangeMatcher):
+
+ def matches(self, change):
+ return (
+ (hasattr(change, 'branch') and self.regex.match(change.branch)) or
+ (hasattr(change, 'ref') and self.regex.match(change.ref))
+ )
+
+
+class FileMatcher(AbstractChangeMatcher):
+
+ def matches(self, change):
+ if not hasattr(change, 'files'):
+ return False
+ for file_ in change.files:
+ if self.regex.match(file_):
+ return True
+ return False
+
+
+class AbstractMatcherCollection(AbstractChangeMatcher):
+
+ def __init__(self, matchers):
+ self.matchers = matchers
+
+ def __eq__(self, other):
+ return str(self) == str(other)
+
+ def __str__(self):
+ return '{%s:%s}' % (self.__class__.__name__,
+ ','.join([str(x) for x in self.matchers]))
+
+ def __repr__(self):
+ return '<%s>' % self.__class__.__name__
+
+ def copy(self):
+ return self.__class__(self.matchers[:])
+
+
+class MatchAllFiles(AbstractMatcherCollection):
+
+ commit_regex = re.compile('^/COMMIT_MSG$')
+
+ @property
+ def regexes(self):
+ for matcher in self.matchers:
+ yield matcher.regex
+ yield self.commit_regex
+
+ def matches(self, change):
+ if not (hasattr(change, 'files') and change.files):
+ return False
+ for file_ in change.files:
+ matched_file = False
+ for regex in self.regexes:
+ if regex.match(file_):
+ matched_file = True
+ break
+ if not matched_file:
+ return False
+ return True
+
+
+class MatchAll(AbstractMatcherCollection):
+
+ def matches(self, change):
+ for matcher in self.matchers:
+ if not matcher.matches(change):
+ return False
+ return True
+
+
+class MatchAny(AbstractMatcherCollection):
+
+ def matches(self, change):
+ for matcher in self.matchers:
+ if matcher.matches(change):
+ return True
+ return False
diff --git a/zuul/cmd/client.py b/zuul/cmd/client.py
index 766a4ef..bc2c152 100644
--- a/zuul/cmd/client.py
+++ b/zuul/cmd/client.py
@@ -232,9 +232,6 @@
'number': {
'title': 'Number'
},
- 'parameters': {
- 'title': 'Parameters'
- },
'worker.name': {
'title': 'Worker'
},
diff --git a/zuul/launcher/gearman.py b/zuul/launcher/gearman.py
index 564a554..653678a 100644
--- a/zuul/launcher/gearman.py
+++ b/zuul/launcher/gearman.py
@@ -231,6 +231,8 @@
s_config = {}
s_config.update((k, v.format(item=item, job=job,
change=item.change))
+ if isinstance(v, basestring)
+ else (k, v)
for k, v in s.items())
(swift_instructions['URL'],
@@ -402,14 +404,15 @@
self.log.debug("Removed build %s from queue" % build)
return
+ time.sleep(1)
+
self.log.debug("Still unable to find build %s to cancel" % build)
if build.number:
self.log.debug("Build %s has just started" % build)
- else:
- self.log.error("Build %s has not started but was not"
- "found in queue; canceling anyway" % build)
- self.cancelRunningBuild(build)
- self.log.debug("Canceled possibly running build %s" % build)
+ self.log.debug("Canceled running build %s" % build)
+ self.cancelRunningBuild(build)
+ return
+ self.log.debug("Unable to cancel build %s" % build)
def onBuildCompleted(self, job, result=None):
if job.unique in self.meta_jobs:
diff --git a/zuul/layoutvalidator.py b/zuul/layoutvalidator.py
index cc7080c..88d10e2 100644
--- a/zuul/layoutvalidator.py
+++ b/zuul/layoutvalidator.py
@@ -135,6 +135,11 @@
'logserver-prefix': str,
}
+ skip_if = {'project': str,
+ 'branch': str,
+ 'all-files-match-any': toList(str),
+ }
+
job = {v.Required('name'): str,
'queue-name': str,
'failure-message': str,
@@ -147,6 +152,7 @@
'branch': toList(str),
'files': toList(str),
'swift': toList(swift),
+ 'skip-if': toList(skip_if),
}
jobs = [job]
diff --git a/zuul/lib/cloner.py b/zuul/lib/cloner.py
index 2b35e41..67e238a 100644
--- a/zuul/lib/cloner.py
+++ b/zuul/lib/cloner.py
@@ -125,7 +125,7 @@
repo = self.cloneUpstream(project, dest)
- repo.update()
+ repo.reset()
# Ensure that we don't have stale remotes around
repo.prune()
diff --git a/zuul/lib/gerrit.py b/zuul/lib/gerrit.py
index 9aeff3d..6c7906b 100644
--- a/zuul/lib/gerrit.py
+++ b/zuul/lib/gerrit.py
@@ -39,7 +39,7 @@
data = json.loads(l)
self.log.debug("Received data from Gerrit event stream: \n%s" %
pprint.pformat(data))
- self.gerrit.addEvent(data)
+ self.gerrit.addEvent((time.time(), data))
def _listen(self, stdout, stderr):
poll = select.poll()
diff --git a/zuul/lib/swift.py b/zuul/lib/swift.py
index 9b9bea3..3c411d3 100644
--- a/zuul/lib/swift.py
+++ b/zuul/lib/swift.py
@@ -147,8 +147,10 @@
settings[key] = kwargs[altkey]
elif self.config.has_option('swift', 'default_' + key):
settings[key] = self.config.get('swift', 'default_' + key)
+ # TODO: these are always strings; some should be converted
+ # to ints.
- expires = int(time() + settings['expiry'])
+ expires = int(time() + int(settings['expiry']))
redirect = ''
url = os.path.join(self.storage_url, settings['container'],
diff --git a/zuul/merger/client.py b/zuul/merger/client.py
index 8d8f7ee..950c385 100644
--- a/zuul/merger/client.py
+++ b/zuul/merger/client.py
@@ -84,10 +84,10 @@
def submitJob(self, name, data, build_set,
precedence=zuul.model.PRECEDENCE_NORMAL):
uuid = str(uuid4().hex)
- self.log.debug("Submitting job %s with data %s" % (name, data))
job = gear.Job(name,
json.dumps(data),
unique=uuid)
+ self.log.debug("Submitting job %s with data %s" % (job, data))
self.build_sets[uuid] = build_set
self.gearman.submitJob(job, precedence=precedence,
timeout=300)
diff --git a/zuul/merger/server.py b/zuul/merger/server.py
index 0d105f6..1a02322 100644
--- a/zuul/merger/server.py
+++ b/zuul/merger/server.py
@@ -90,10 +90,10 @@
job = self.worker.getJob()
try:
if job.name == 'merger:merge':
- self.log.debug("Got merge job.")
+ self.log.debug("Got merge job: %s" % job.unique)
self.merge(job)
elif job.name == 'merger:update':
- self.log.debug("Got update job.")
+ self.log.debug("Got update job: %s" % job.unique)
self.update(job)
else:
self.log.error("Unable to handle job %s" % job.name)
diff --git a/zuul/model.py b/zuul/model.py
index 4514e7d..4d402ff 100644
--- a/zuul/model.py
+++ b/zuul/model.py
@@ -452,12 +452,19 @@
self.failure_pattern = None
self.success_pattern = None
self.parameter_function = None
- self.hold_following_changes = False
- self.voting = True
+ # A metajob should only supply values for attributes that have
+ # been explicitly provided, so avoid setting boolean defaults.
+ if self.is_metajob:
+ self.hold_following_changes = None
+ self.voting = None
+ else:
+ self.hold_following_changes = False
+ self.voting = True
self.branches = []
self._branches = []
self.files = []
self._files = []
+ self.skip_if_matcher = None
self.swift = {}
def __str__(self):
@@ -466,6 +473,10 @@
def __repr__(self):
return '<Job %s>' % (self.name)
+ @property
+ def is_metajob(self):
+ return self.name.startswith('^')
+
def copy(self, other):
if other.failure_message:
self.failure_message = other.failure_message
@@ -483,10 +494,15 @@
if other.files:
self.files = other.files[:]
self._files = other._files[:]
+ if other.skip_if_matcher:
+ self.skip_if_matcher = other.skip_if_matcher.copy()
if other.swift:
self.swift.update(other.swift)
- self.hold_following_changes = other.hold_following_changes
- self.voting = other.voting
+ # Only non-None values should be copied for boolean attributes.
+ if other.hold_following_changes is not None:
+ self.hold_following_changes = other.hold_following_changes
+ if other.voting is not None:
+ self.voting = other.voting
def changeMatches(self, change):
matches_branch = False
@@ -507,6 +523,9 @@
if self.files and not matches_file:
return False
+ if self.skip_if_matcher and self.skip_if_matcher.matches(change):
+ return False
+
return True
@@ -774,7 +793,6 @@
'canceled': build.canceled if build else None,
'retry': build.retry if build else None,
'number': build.number if build else None,
- 'parameters': build.parameters if build else None,
'worker': worker
})
@@ -947,7 +965,8 @@
return None
def equals(self, other):
- if (self.project == other.project):
+ if (self.project == other.project
+ and other._id() is None):
return True
return False
@@ -1263,8 +1282,7 @@
if name in self.jobs:
return self.jobs[name]
job = Job(name)
- if name.startswith('^'):
- # This is a meta-job
+ if job.is_metajob:
regex = re.compile(name)
self.metajobs.append((regex, job))
else:
diff --git a/zuul/scheduler.py b/zuul/scheduler.py
index 08a9147..131ad62 100644
--- a/zuul/scheduler.py
+++ b/zuul/scheduler.py
@@ -31,6 +31,7 @@
import model
from model import ActionReporter, Pipeline, Project, ChangeQueue
from model import EventFilter, ChangeishFilter
+from zuul import change_matcher
from zuul import version as zuul_version
statsd = extras.try_import('statsd.statsd')
@@ -166,6 +167,14 @@
self.commit = commit
+def toList(item):
+ if not item:
+ return []
+ if isinstance(item, list):
+ return item
+ return [item]
+
+
class Scheduler(threading.Thread):
log = logging.getLogger("zuul.Scheduler")
@@ -199,17 +208,38 @@
def testConfig(self, config_path):
return self._parseConfig(config_path)
+ def _parseSkipIf(self, config_job):
+ cm = change_matcher
+ skip_matchers = []
+
+ for config_skip in config_job.get('skip-if', []):
+ nested_matchers = []
+
+ project_regex = config_skip.get('project')
+ if project_regex:
+ nested_matchers.append(cm.ProjectMatcher(project_regex))
+
+ branch_regex = config_skip.get('branch')
+ if branch_regex:
+ nested_matchers.append(cm.BranchMatcher(branch_regex))
+
+ file_regexes = toList(config_skip.get('all-files-match-any'))
+ if file_regexes:
+ file_matchers = [cm.FileMatcher(x) for x in file_regexes]
+ all_files_matcher = cm.MatchAllFiles(file_matchers)
+ nested_matchers.append(all_files_matcher)
+
+ # All patterns need to match a given skip-if predicate
+ skip_matchers.append(cm.MatchAll(nested_matchers))
+
+ if skip_matchers:
+ # Any skip-if predicate can be matched to trigger a skip
+ return cm.MatchAny(skip_matchers)
+
def _parseConfig(self, config_path):
layout = model.Layout()
project_templates = {}
- def toList(item):
- if not item:
- return []
- if isinstance(item, list):
- return item
- return [item]
-
if config_path:
config_path = os.path.expanduser(config_path)
if not os.path.exists(config_path):
@@ -397,6 +427,9 @@
if files:
job._files = files
job.files = [re.compile(x) for x in files]
+ skip_if_matcher = self._parseSkipIf(config_job)
+ if skip_if_matcher:
+ job.skip_if_matcher = skip_if_matcher
swift = toList(config_job.get('swift'))
if swift:
for s in swift:
@@ -979,6 +1012,8 @@
efilters += str(b)
for f in tree.job._files:
efilters += str(f)
+ if tree.job.skip_if_matcher:
+ efilters += str(tree.job.skip_if_matcher)
if efilters:
efilters = ' ' + efilters
hold = ''
@@ -1330,10 +1365,11 @@
self.cancelJobs(item)
self.dequeueItem(item)
self.pipeline.setDequeuedNeedingChange(item)
- try:
- self.reportItem(item)
- except MergeFailure:
- pass
+ if item.live:
+ try:
+ self.reportItem(item)
+ except MergeFailure:
+ pass
return (True, nnfi, ready_ahead)
dep_items = self.getFailingDependentItems(item)
actionable = change_queue.isActionable(item)
diff --git a/zuul/trigger/gerrit.py b/zuul/trigger/gerrit.py
index c5fdf9a..c28401c 100644
--- a/zuul/trigger/gerrit.py
+++ b/zuul/trigger/gerrit.py
@@ -26,6 +26,7 @@
"""Move events from Gerrit to the scheduler."""
log = logging.getLogger("zuul.GerritEventConnector")
+ delay = 5.0
def __init__(self, gerrit, sched, trigger):
super(GerritEventConnector, self).__init__()
@@ -37,12 +38,20 @@
def stop(self):
self._stopped = True
- self.gerrit.addEvent(None)
+ self.gerrit.addEvent((None, None))
def _handleEvent(self):
- data = self.gerrit.getEvent()
+ ts, data = self.gerrit.getEvent()
if self._stopped:
return
+ # Gerrit can produce inconsistent data immediately after an
+ # event, so ensure that we do not deliver the event to Zuul
+ # until at least a certain amount of time has passed. Note
+ # that if we receive several events in succession, we will
+ # only need to delay for the first event. In essence, Zuul
+ # should always be a constant number of seconds behind Gerrit.
+ now = time.time()
+ time.sleep(max((ts + self.delay) - now, 0.0))
event = TriggerEvent()
event.type = data.get('type')
event.trigger_name = self.trigger.name
@@ -362,6 +371,27 @@
records.extend(self.gerrit.simpleQuery(query))
return records
+ def _getNeededByFromCommit(self, change_id):
+ records = []
+ seen = set()
+ query = 'message:%s' % change_id
+ self.log.debug("Running query %s to find changes needed-by" %
+ (query,))
+ results = self.gerrit.simpleQuery(query)
+ for result in results:
+ for match in self.depends_on_re.findall(
+ result['commitMessage']):
+ if match != change_id:
+ continue
+ key = (result['number'], result['currentPatchSet']['number'])
+ if key in seen:
+ continue
+ self.log.debug("Found change %s,%s needs %s from commit" %
+ (key[0], key[1], change_id))
+ seen.add(key)
+ records.append(result)
+ return records
+
def updateChange(self, change, history=None):
self.log.info("Updating information for %s,%s" %
(change.number, change.patchset))
@@ -378,18 +408,19 @@
change.branch = data['branch']
change.url = data['url']
max_ps = 0
- change.files = []
+ files = []
for ps in data['patchSets']:
if ps['number'] == change.patchset:
change.refspec = ps['ref']
for f in ps.get('files', []):
- change.files.append(f['file'])
+ files.append(f['file'])
if int(ps['number']) > int(max_ps):
max_ps = ps['number']
if max_ps == change.patchset:
change.is_current_patchset = True
else:
change.is_current_patchset = False
+ change.files = files
change.is_merged = self._isMerged(change)
change.approvals = data['currentPatchSet'].get('approvals', [])
@@ -408,7 +439,7 @@
history = history[:]
history.append(change.number)
- change.needs_changes = []
+ needs_changes = []
if 'dependsOn' in data:
parts = data['dependsOn'][0]['ref'].split('/')
dep_num, dep_ps = parts[3], parts[4]
@@ -418,8 +449,8 @@
self.log.debug("Getting git-dependent change %s,%s" %
(dep_num, dep_ps))
dep = self._getChange(dep_num, dep_ps, history=history)
- if (not dep.is_merged) and dep not in change.needs_changes:
- change.needs_changes.append(dep)
+ if (not dep.is_merged) and dep not in needs_changes:
+ needs_changes.append(dep)
for record in self._getDependsOnFromCommit(data['commitMessage']):
dep_num = record['number']
@@ -430,17 +461,32 @@
self.log.debug("Getting commit-dependent change %s,%s" %
(dep_num, dep_ps))
dep = self._getChange(dep_num, dep_ps, history=history)
- if (not dep.is_merged) and dep not in change.needs_changes:
- change.needs_changes.append(dep)
+ if (not dep.is_merged) and dep not in needs_changes:
+ needs_changes.append(dep)
+ change.needs_changes = needs_changes
- change.needed_by_changes = []
+ needed_by_changes = []
if 'neededBy' in data:
for needed in data['neededBy']:
parts = needed['ref'].split('/')
dep_num, dep_ps = parts[3], parts[4]
dep = self._getChange(dep_num, dep_ps)
if (not dep.is_merged) and dep.is_current_patchset:
- change.needed_by_changes.append(dep)
+ needed_by_changes.append(dep)
+
+ for record in self._getNeededByFromCommit(data['id']):
+ dep_num = record['number']
+ dep_ps = record['currentPatchSet']['number']
+ self.log.debug("Getting commit-needed change %s,%s" %
+ (dep_num, dep_ps))
+ # Because a commit needed-by may be a cross-repo
+ # dependency, cause that change to refresh so that it will
+ # reference the latest patchset of its Depends-On (this
+ # change).
+ dep = self._getChange(dep_num, dep_ps, refresh=True)
+ if (not dep.is_merged) and dep.is_current_patchset:
+ needed_by_changes.append(dep)
+ change.needed_by_changes = needed_by_changes
return change
diff --git a/zuul/webapp.py b/zuul/webapp.py
index e289398..44c333b 100644
--- a/zuul/webapp.py
+++ b/zuul/webapp.py
@@ -121,5 +121,10 @@
raise webob.exc.HTTPNotFound()
response.headers['Access-Control-Allow-Origin'] = '*'
+
+ response.cache_control.public = True
+ response.cache_control.max_age = self.cache_expiry
response.last_modified = self.cache_time
- return response
+ response.expires = self.cache_time + self.cache_expiry
+
+ return response.conditional_response_app