Merge "Changes for Ansible 2.4" into feature/zuulv3
diff --git a/doc/source/user/jobs.rst b/doc/source/user/jobs.rst
index 0639b8b..989338a 100644
--- a/doc/source/user/jobs.rst
+++ b/doc/source/user/jobs.rst
@@ -175,8 +175,8 @@
.. var:: override_checkout
If the job was configured to override the branch or tag checked
- out, this will contain the specified value. Otherwise, it will
- be null.
+ out, this will contain the specified value. Otherwise, this
+ variable will be undefined.
.. var:: pipeline
diff --git a/etc/zuul.conf-sample b/etc/zuul.conf-sample
index f0e1765..17092af 100644
--- a/etc/zuul.conf-sample
+++ b/etc/zuul.conf-sample
@@ -38,6 +38,7 @@
listen_address=127.0.0.1
port=9000
static_cache_expiry=0
+;sql_connection_name=mydatabase
[webapp]
listen_address=0.0.0.0
diff --git a/tests/base.py b/tests/base.py
index 036515d..f274ed6 100755
--- a/tests/base.py
+++ b/tests/base.py
@@ -486,6 +486,29 @@
self.changes[self.change_number] = c
return c
+ def addFakeTag(self, project, branch, tag):
+ path = os.path.join(self.upstream_root, project)
+ repo = git.Repo(path)
+ commit = repo.heads[branch].commit
+ newrev = commit.hexsha
+ ref = 'refs/tags/' + tag
+
+ git.Tag.create(repo, tag, commit)
+
+ event = {
+ "type": "ref-updated",
+ "submitter": {
+ "name": "User Name",
+ },
+ "refUpdate": {
+ "oldRev": 40 * '0',
+ "newRev": newrev,
+ "refName": ref,
+ "project": project,
+ }
+ }
+ return event
+
def getFakeBranchCreatedEvent(self, project, branch):
path = os.path.join(self.upstream_root, project)
repo = git.Repo(path)
diff --git a/tests/fixtures/config/branch-negative/git/org_project/.zuul.yaml b/tests/fixtures/config/branch-negative/git/org_project/.zuul.yaml
new file mode 100644
index 0000000..f02f449
--- /dev/null
+++ b/tests/fixtures/config/branch-negative/git/org_project/.zuul.yaml
@@ -0,0 +1,10 @@
+- job:
+ name: test-job
+ run: playbooks/test-job.yaml
+
+- project:
+ name: org/project
+ check:
+ jobs:
+ - test-job:
+ branches: ^(?!stable)
diff --git a/tests/fixtures/config/branch-negative/git/org_project/README b/tests/fixtures/config/branch-negative/git/org_project/README
new file mode 100644
index 0000000..9daeafb
--- /dev/null
+++ b/tests/fixtures/config/branch-negative/git/org_project/README
@@ -0,0 +1 @@
+test
diff --git a/tests/fixtures/config/branch-negative/git/org_project/playbooks/test-job.yaml b/tests/fixtures/config/branch-negative/git/org_project/playbooks/test-job.yaml
new file mode 100644
index 0000000..f679dce
--- /dev/null
+++ b/tests/fixtures/config/branch-negative/git/org_project/playbooks/test-job.yaml
@@ -0,0 +1,2 @@
+- hosts: all
+ tasks: []
diff --git a/tests/fixtures/config/branch-negative/git/project-config/zuul.yaml b/tests/fixtures/config/branch-negative/git/project-config/zuul.yaml
new file mode 100644
index 0000000..dc4a182
--- /dev/null
+++ b/tests/fixtures/config/branch-negative/git/project-config/zuul.yaml
@@ -0,0 +1,26 @@
+- pipeline:
+ name: check
+ manager: independent
+ trigger:
+ gerrit:
+ - event: patchset-created
+ success:
+ gerrit:
+ Verified: 1
+ failure:
+ gerrit:
+ Verified: -1
+
+- job:
+ name: base
+ parent: null
+
+- project:
+ name: project-config
+ check:
+ jobs: []
+
+- project:
+ name: org/project
+ check:
+ jobs: []
diff --git a/tests/fixtures/config/branch-negative/main.yaml b/tests/fixtures/config/branch-negative/main.yaml
new file mode 100644
index 0000000..0ac232f
--- /dev/null
+++ b/tests/fixtures/config/branch-negative/main.yaml
@@ -0,0 +1,8 @@
+- tenant:
+ name: tenant-one
+ source:
+ gerrit:
+ config-projects:
+ - project-config
+ untrusted-projects:
+ - org/project
diff --git a/tests/fixtures/config/branch-tag/git/org_project/.zuul.yaml b/tests/fixtures/config/branch-tag/git/org_project/.zuul.yaml
new file mode 100644
index 0000000..acbba6c
--- /dev/null
+++ b/tests/fixtures/config/branch-tag/git/org_project/.zuul.yaml
@@ -0,0 +1,9 @@
+- job:
+ name: test-job
+ run: playbooks/test-job.yaml
+
+- project:
+ name: org/project
+ tag:
+ jobs:
+ - test-job
diff --git a/tests/fixtures/config/branch-tag/git/org_project/README b/tests/fixtures/config/branch-tag/git/org_project/README
new file mode 100644
index 0000000..9daeafb
--- /dev/null
+++ b/tests/fixtures/config/branch-tag/git/org_project/README
@@ -0,0 +1 @@
+test
diff --git a/tests/fixtures/config/branch-tag/git/org_project/playbooks/test-job.yaml b/tests/fixtures/config/branch-tag/git/org_project/playbooks/test-job.yaml
new file mode 100644
index 0000000..f679dce
--- /dev/null
+++ b/tests/fixtures/config/branch-tag/git/org_project/playbooks/test-job.yaml
@@ -0,0 +1,2 @@
+- hosts: all
+ tasks: []
diff --git a/tests/fixtures/config/branch-tag/git/project-config/zuul.yaml b/tests/fixtures/config/branch-tag/git/project-config/zuul.yaml
new file mode 100644
index 0000000..0ae6396
--- /dev/null
+++ b/tests/fixtures/config/branch-tag/git/project-config/zuul.yaml
@@ -0,0 +1,21 @@
+- pipeline:
+ name: tag
+ manager: independent
+ trigger:
+ gerrit:
+ - event: ref-updated
+ ref: ^refs/tags/.*$
+
+- job:
+ name: base
+ parent: null
+
+- project:
+ name: project-config
+ tag:
+ jobs: []
+
+- project:
+ name: org/project
+ tag:
+ jobs: []
diff --git a/tests/fixtures/config/branch-tag/main.yaml b/tests/fixtures/config/branch-tag/main.yaml
new file mode 100644
index 0000000..0ac232f
--- /dev/null
+++ b/tests/fixtures/config/branch-tag/main.yaml
@@ -0,0 +1,8 @@
+- tenant:
+ name: tenant-one
+ source:
+ gerrit:
+ config-projects:
+ - project-config
+ untrusted-projects:
+ - org/project
diff --git a/tests/fixtures/config/tenant-parser/git/common-config/zuul.yaml b/tests/fixtures/config/tenant-parser/git/common-config/zuul.yaml
index e21f967..a28ef54 100644
--- a/tests/fixtures/config/tenant-parser/git/common-config/zuul.yaml
+++ b/tests/fixtures/config/tenant-parser/git/common-config/zuul.yaml
@@ -18,8 +18,10 @@
- job:
name: common-config-job
+# Use the canonical name here. This should be merged with the org/project1 in
+# the other repo.
- project:
- name: org/project1
+ name: review.example.com/org/project1
check:
jobs:
- common-config-job
diff --git a/tests/fixtures/layouts/reconfigure-remove-add.yaml b/tests/fixtures/layouts/reconfigure-remove-add.yaml
new file mode 100644
index 0000000..c9bccd3
--- /dev/null
+++ b/tests/fixtures/layouts/reconfigure-remove-add.yaml
@@ -0,0 +1,41 @@
+- pipeline:
+ name: gate
+ manager: dependent
+ trigger:
+ gerrit:
+ - event: comment-added
+ approval:
+ - Approved: 1
+ start:
+ gerrit:
+ Verified: 0
+ success:
+ gerrit:
+ Verified: 2
+ submit: true
+ failure:
+ gerrit:
+ Verified: -2
+
+- job:
+ name: base
+ parent: null
+ nodeset:
+ nodes:
+ - label: ubuntu-xenial
+ name: controller
+
+- job:
+ name: job1
+ run: playbooks/job1.yaml
+
+- job:
+ name: job2
+ run: playbooks/job2.yaml
+
+- project:
+ name: org/project
+ gate:
+ jobs:
+ - job1
+ - job2
diff --git a/tests/fixtures/layouts/reconfigure-remove-add2.yaml b/tests/fixtures/layouts/reconfigure-remove-add2.yaml
new file mode 100644
index 0000000..33c169e
--- /dev/null
+++ b/tests/fixtures/layouts/reconfigure-remove-add2.yaml
@@ -0,0 +1,40 @@
+- pipeline:
+ name: gate
+ manager: dependent
+ trigger:
+ gerrit:
+ - event: comment-added
+ approval:
+ - Approved: 1
+ start:
+ gerrit:
+ Verified: 0
+ success:
+ gerrit:
+ Verified: 2
+ submit: true
+ failure:
+ gerrit:
+ Verified: -2
+
+- job:
+ name: base
+ parent: null
+ nodeset:
+ nodes:
+ - label: ubuntu-xenial
+ name: controller
+
+- job:
+ name: job1
+ run: playbooks/job1.yaml
+
+- job:
+ name: job2
+ run: playbooks/job2.yaml
+
+- project:
+ name: org/project
+ gate:
+ jobs:
+ - job1
diff --git a/tests/fixtures/layouts/reconfigure-window-fixed.yaml b/tests/fixtures/layouts/reconfigure-window-fixed.yaml
new file mode 100644
index 0000000..9aa1a97
--- /dev/null
+++ b/tests/fixtures/layouts/reconfigure-window-fixed.yaml
@@ -0,0 +1,46 @@
+- pipeline:
+ name: gate
+ manager: dependent
+ trigger:
+ gerrit:
+ - event: comment-added
+ approval:
+ - Approved: 1
+ start:
+ gerrit:
+ Verified: 0
+ success:
+ gerrit:
+ Verified: 2
+ submit: true
+ failure:
+ gerrit:
+ Verified: -2
+ window: 2
+ window-increase-type: exponential
+ window-increase-factor: 1
+ window-decrease-type: exponential
+ window-decrease-factor: 1
+
+- job:
+ name: base
+ parent: null
+ nodeset:
+ nodes:
+ - label: ubuntu-xenial
+ name: controller
+
+- job:
+ name: job1
+ run: playbooks/job1.yaml
+
+- job:
+ name: job2
+ run: playbooks/job2.yaml
+
+- project:
+ name: org/project
+ gate:
+ jobs:
+ - job1
+ - job2
diff --git a/tests/fixtures/layouts/reconfigure-window-fixed2.yaml b/tests/fixtures/layouts/reconfigure-window-fixed2.yaml
new file mode 100644
index 0000000..13382c5
--- /dev/null
+++ b/tests/fixtures/layouts/reconfigure-window-fixed2.yaml
@@ -0,0 +1,46 @@
+- pipeline:
+ name: gate
+ manager: dependent
+ trigger:
+ gerrit:
+ - event: comment-added
+ approval:
+ - Approved: 1
+ start:
+ gerrit:
+ Verified: 0
+ success:
+ gerrit:
+ Verified: 2
+ submit: true
+ failure:
+ gerrit:
+ Verified: -2
+ window: 1
+ window-increase-type: exponential
+ window-increase-factor: 1
+ window-decrease-type: exponential
+ window-decrease-factor: 1
+
+- job:
+ name: base
+ parent: null
+ nodeset:
+ nodes:
+ - label: ubuntu-xenial
+ name: controller
+
+- job:
+ name: job1
+ run: playbooks/job1.yaml
+
+- job:
+ name: job2
+ run: playbooks/job2.yaml
+
+- project:
+ name: org/project
+ gate:
+ jobs:
+ - job1
+ - job2
diff --git a/tests/fixtures/layouts/reconfigure-window.yaml b/tests/fixtures/layouts/reconfigure-window.yaml
new file mode 100644
index 0000000..c9bccd3
--- /dev/null
+++ b/tests/fixtures/layouts/reconfigure-window.yaml
@@ -0,0 +1,41 @@
+- pipeline:
+ name: gate
+ manager: dependent
+ trigger:
+ gerrit:
+ - event: comment-added
+ approval:
+ - Approved: 1
+ start:
+ gerrit:
+ Verified: 0
+ success:
+ gerrit:
+ Verified: 2
+ submit: true
+ failure:
+ gerrit:
+ Verified: -2
+
+- job:
+ name: base
+ parent: null
+ nodeset:
+ nodes:
+ - label: ubuntu-xenial
+ name: controller
+
+- job:
+ name: job1
+ run: playbooks/job1.yaml
+
+- job:
+ name: job2
+ run: playbooks/job2.yaml
+
+- project:
+ name: org/project
+ gate:
+ jobs:
+ - job1
+ - job2
diff --git a/tests/fixtures/layouts/reconfigure-window2.yaml b/tests/fixtures/layouts/reconfigure-window2.yaml
new file mode 100644
index 0000000..8949f7d
--- /dev/null
+++ b/tests/fixtures/layouts/reconfigure-window2.yaml
@@ -0,0 +1,47 @@
+- pipeline:
+ name: gate
+ manager: dependent
+ trigger:
+ gerrit:
+ - event: comment-added
+ approval:
+ - Approved: 1
+ start:
+ gerrit:
+ Verified: 0
+ success:
+ gerrit:
+ Verified: 2
+ submit: true
+ failure:
+ gerrit:
+ Verified: -2
+ window: 1
+ window-floor: 1
+ window-increase-type: linear
+ window-increase-factor: 1
+ window-decrease-type: linear
+ window-decrease-factor: 1
+
+- job:
+ name: base
+ parent: null
+ nodeset:
+ nodes:
+ - label: ubuntu-xenial
+ name: controller
+
+- job:
+ name: job1
+ run: playbooks/job1.yaml
+
+- job:
+ name: job2
+ run: playbooks/job2.yaml
+
+- project:
+ name: org/project
+ gate:
+ jobs:
+ - job1
+ - job2
diff --git a/tests/unit/test_log_streamer.py b/tests/unit/test_log_streamer.py
index c808540..27368e3 100644
--- a/tests/unit/test_log_streamer.py
+++ b/tests/unit/test_log_streamer.py
@@ -158,7 +158,7 @@
def runWSClient(self, build_uuid, event):
async def client(loop, build_uuid, event):
- uri = 'http://[::1]:9000/console-stream'
+ uri = 'http://[::1]:9000/tenant-one/console-stream'
try:
session = aiohttp.ClientSession(loop=loop)
async with session.ws_connect(uri) as ws:
diff --git a/tests/unit/test_scheduler.py b/tests/unit/test_scheduler.py
index 53a20ff..aacc81e 100755
--- a/tests/unit/test_scheduler.py
+++ b/tests/unit/test_scheduler.py
@@ -2581,7 +2581,7 @@
self.assertEqual('project-merge', status_jobs[0]['name'])
# TODO(mordred) pull uuids from self.builds
self.assertEqual(
- 'static/stream.html?uuid={uuid}&logfile=console.log'.format(
+ 'stream.html?uuid={uuid}&logfile=console.log'.format(
uuid=status_jobs[0]['uuid']),
status_jobs[0]['url'])
self.assertEqual(
@@ -2597,7 +2597,7 @@
status_jobs[0]['report_url'])
self.assertEqual('project-test1', status_jobs[1]['name'])
self.assertEqual(
- 'static/stream.html?uuid={uuid}&logfile=console.log'.format(
+ 'stream.html?uuid={uuid}&logfile=console.log'.format(
uuid=status_jobs[1]['uuid']),
status_jobs[1]['url'])
self.assertEqual(
@@ -2613,7 +2613,7 @@
self.assertEqual('project-test2', status_jobs[2]['name'])
self.assertEqual(
- 'static/stream.html?uuid={uuid}&logfile=console.log'.format(
+ 'stream.html?uuid={uuid}&logfile=console.log'.format(
uuid=status_jobs[2]['uuid']),
status_jobs[2]['url'])
self.assertEqual(
@@ -3853,6 +3853,145 @@
self.assertEqual(queue.window_floor, 1)
self.assertEqual(C.data['status'], 'MERGED')
+ @simple_layout('layouts/reconfigure-window.yaml')
+ def test_reconfigure_window_shrink(self):
+ # Test the active window shrinking during reconfiguration
+ self.executor_server.hold_jobs_in_build = True
+
+ A = self.fake_gerrit.addFakeChange('org/project', 'master', 'A')
+ A.addApproval('Code-Review', 2)
+ self.fake_gerrit.addEvent(A.addApproval('Approved', 1))
+ self.waitUntilSettled()
+ B = self.fake_gerrit.addFakeChange('org/project', 'master', 'B')
+ B.addApproval('Code-Review', 2)
+ self.fake_gerrit.addEvent(B.addApproval('Approved', 1))
+ self.waitUntilSettled()
+
+ tenant = self.sched.abide.tenants.get('tenant-one')
+ queue = tenant.layout.pipelines['gate'].queues[0]
+ self.assertEqual(queue.window, 20)
+ self.assertTrue(len(self.builds), 4)
+
+ self.executor_server.release('job1')
+ self.waitUntilSettled()
+ self.commitConfigUpdate('org/common-config',
+ 'layouts/reconfigure-window2.yaml')
+ self.sched.reconfigure(self.config)
+ tenant = self.sched.abide.tenants.get('tenant-one')
+ queue = tenant.layout.pipelines['gate'].queues[0]
+ # Even though we have configured a smaller window, the value
+ # on the existing shared queue should be used.
+ self.assertEqual(queue.window, 20)
+ self.assertTrue(len(self.builds), 4)
+
+ self.sched.reconfigure(self.config)
+ tenant = self.sched.abide.tenants.get('tenant-one')
+ queue = tenant.layout.pipelines['gate'].queues[0]
+ self.assertEqual(queue.window, 20)
+ self.assertTrue(len(self.builds), 4)
+
+ self.executor_server.hold_jobs_in_build = False
+ self.executor_server.release()
+
+ self.waitUntilSettled()
+ self.assertHistory([
+ dict(name='job1', result='SUCCESS', changes='1,1'),
+ dict(name='job1', result='SUCCESS', changes='1,1 2,1'),
+ dict(name='job2', result='SUCCESS', changes='1,1'),
+ dict(name='job2', result='SUCCESS', changes='1,1 2,1'),
+ ], ordered=False)
+
+ @simple_layout('layouts/reconfigure-window-fixed.yaml')
+ def test_reconfigure_window_fixed(self):
+ # Test the active window shrinking during reconfiguration
+ self.executor_server.hold_jobs_in_build = True
+
+ A = self.fake_gerrit.addFakeChange('org/project', 'master', 'A')
+ A.addApproval('Code-Review', 2)
+ self.fake_gerrit.addEvent(A.addApproval('Approved', 1))
+ self.waitUntilSettled()
+ B = self.fake_gerrit.addFakeChange('org/project', 'master', 'B')
+ B.addApproval('Code-Review', 2)
+ self.fake_gerrit.addEvent(B.addApproval('Approved', 1))
+ self.waitUntilSettled()
+
+ tenant = self.sched.abide.tenants.get('tenant-one')
+ queue = tenant.layout.pipelines['gate'].queues[0]
+ self.assertEqual(queue.window, 2)
+ self.assertTrue(len(self.builds), 4)
+
+ self.executor_server.release('job1')
+ self.waitUntilSettled()
+ self.commitConfigUpdate('org/common-config',
+ 'layouts/reconfigure-window-fixed2.yaml')
+ self.sched.reconfigure(self.config)
+ tenant = self.sched.abide.tenants.get('tenant-one')
+ queue = tenant.layout.pipelines['gate'].queues[0]
+ # Because we have configured a static window, it should
+ # be allowed to shrink on reconfiguration.
+ self.assertEqual(queue.window, 1)
+ # B is outside the window, but still marked active until the
+ # next pass through the queue processor, so its builds haven't
+ # been canceled.
+ self.assertTrue(len(self.builds), 4)
+
+ self.sched.reconfigure(self.config)
+ tenant = self.sched.abide.tenants.get('tenant-one')
+ queue = tenant.layout.pipelines['gate'].queues[0]
+ self.assertEqual(queue.window, 1)
+ # B's builds have been canceled now
+ self.assertTrue(len(self.builds), 2)
+
+ self.executor_server.hold_jobs_in_build = False
+ self.executor_server.release()
+
+ # B's builds will be restarted and will show up in our history
+ # twice.
+ self.waitUntilSettled()
+ self.assertHistory([
+ dict(name='job1', result='SUCCESS', changes='1,1'),
+ dict(name='job1', result='SUCCESS', changes='1,1 2,1'),
+ dict(name='job2', result='SUCCESS', changes='1,1'),
+ dict(name='job2', result='SUCCESS', changes='1,1 2,1'),
+ dict(name='job1', result='SUCCESS', changes='1,1 2,1'),
+ dict(name='job2', result='SUCCESS', changes='1,1 2,1'),
+ ], ordered=False)
+
+ @simple_layout('layouts/reconfigure-remove-add.yaml')
+ def test_reconfigure_remove_add(self):
+ # Test removing, then adding a job while in queue
+ self.executor_server.hold_jobs_in_build = True
+
+ A = self.fake_gerrit.addFakeChange('org/project', 'master', 'A')
+ A.addApproval('Code-Review', 2)
+ self.fake_gerrit.addEvent(A.addApproval('Approved', 1))
+ self.waitUntilSettled()
+ self.assertTrue(len(self.builds), 2)
+ self.executor_server.release('job2')
+ self.assertTrue(len(self.builds), 1)
+
+ # Remove job2
+ self.commitConfigUpdate('org/common-config',
+ 'layouts/reconfigure-remove-add2.yaml')
+ self.sched.reconfigure(self.config)
+ self.assertTrue(len(self.builds), 1)
+
+ # Add job2 back
+ self.commitConfigUpdate('org/common-config',
+ 'layouts/reconfigure-remove-add.yaml')
+ self.sched.reconfigure(self.config)
+ self.assertTrue(len(self.builds), 2)
+
+ self.executor_server.hold_jobs_in_build = False
+ self.executor_server.release()
+ # This will run new builds for B
+ self.waitUntilSettled()
+ self.assertHistory([
+ dict(name='job2', result='SUCCESS', changes='1,1'),
+ dict(name='job1', result='SUCCESS', changes='1,1'),
+ dict(name='job2', result='SUCCESS', changes='1,1'),
+ ], ordered=False)
+
def test_worker_update_metadata(self):
"Test if a worker can send back metadata about itself"
self.executor_server.hold_jobs_in_build = True
@@ -4071,7 +4210,7 @@
self.assertEqual('gate', job['pipeline'])
self.assertEqual(False, job['retry'])
self.assertEqual(
- 'static/stream.html?uuid={uuid}&logfile=console.log'
+ 'stream.html?uuid={uuid}&logfile=console.log'
.format(uuid=job['uuid']), job['url'])
self.assertEqual(
'finger://{hostname}/{uuid}'.format(
diff --git a/tests/unit/test_v3.py b/tests/unit/test_v3.py
index e2da808..54cf111 100755
--- a/tests/unit/test_v3.py
+++ b/tests/unit/test_v3.py
@@ -157,6 +157,39 @@
self.assertIn('Unable to modify final job', A.messages[0])
+class TestBranchTag(ZuulTestCase):
+ tenant_config_file = 'config/branch-tag/main.yaml'
+
+ def test_negative_branch_match(self):
+ # Test that a negative branch matcher works with implied branches.
+ event = self.fake_gerrit.addFakeTag('org/project', 'master', 'foo')
+ self.fake_gerrit.addEvent(event)
+ self.waitUntilSettled()
+ self.assertHistory([
+ dict(name='test-job', result='SUCCESS', ref='refs/tags/foo')])
+
+
+class TestBranchNegative(ZuulTestCase):
+ tenant_config_file = 'config/branch-negative/main.yaml'
+
+ def test_negative_branch_match(self):
+ # Test that a negative branch matcher works with implied branches.
+ self.create_branch('org/project', 'stable/pike')
+ self.fake_gerrit.addEvent(
+ self.fake_gerrit.getFakeBranchCreatedEvent(
+ 'org/project', 'stable/pike'))
+ self.waitUntilSettled()
+
+ A = self.fake_gerrit.addFakeChange('org/project', 'master', 'A')
+ self.fake_gerrit.addEvent(A.getPatchsetCreatedEvent(1))
+ self.waitUntilSettled()
+ B = self.fake_gerrit.addFakeChange('org/project', 'stable/pike', 'A')
+ self.fake_gerrit.addEvent(B.getPatchsetCreatedEvent(1))
+ self.waitUntilSettled()
+ self.assertHistory([
+ dict(name='test-job', result='SUCCESS', changes='1,1')])
+
+
class TestBranchTemplates(ZuulTestCase):
tenant_config_file = 'config/branch-templates/main.yaml'
diff --git a/zuul/change_matcher.py b/zuul/change_matcher.py
index 7f6673d..eb12f9b 100644
--- a/zuul/change_matcher.py
+++ b/zuul/change_matcher.py
@@ -69,6 +69,20 @@
return False
+class ImpliedBranchMatcher(AbstractChangeMatcher):
+ """
+ A branch matcher that only considers branch refs, and always
+ succeeds on other types (e.g., tags).
+ """
+
+ def matches(self, change):
+ if hasattr(change, 'branch'):
+ if self.regex.match(change.branch):
+ return True
+ return False
+ return True
+
+
class FileMatcher(AbstractChangeMatcher):
def matches(self, change):
diff --git a/zuul/cmd/scheduler.py b/zuul/cmd/scheduler.py
index bfcbef8..539d55b 100755
--- a/zuul/cmd/scheduler.py
+++ b/zuul/cmd/scheduler.py
@@ -40,7 +40,7 @@
def reconfigure_handler(self, signum, frame):
signal.signal(signal.SIGHUP, signal.SIG_IGN)
self.log.debug("Reconfiguration triggered")
- self.read_config()
+ self.readConfig()
self.setup_logging('scheduler', 'log_config')
try:
self.sched.reconfigure(self.config)
diff --git a/zuul/cmd/web.py b/zuul/cmd/web.py
index 6e5489f..ad3062f 100755
--- a/zuul/cmd/web.py
+++ b/zuul/cmd/web.py
@@ -22,6 +22,7 @@
import zuul.cmd
import zuul.web
+from zuul.driver.sql import sqlconnection
from zuul.lib.config import get_default
@@ -48,6 +49,30 @@
params['ssl_cert'] = get_default(self.config, 'gearman', 'ssl_cert')
params['ssl_ca'] = get_default(self.config, 'gearman', 'ssl_ca')
+ sql_conn_name = get_default(self.config, 'web',
+ 'sql_connection_name')
+ sql_conn = None
+ if sql_conn_name:
+ # we want a specific sql connection
+ sql_conn = self.connections.connections.get(sql_conn_name)
+ if not sql_conn:
+ self.log.error("Couldn't find sql connection '%s'" %
+ sql_conn_name)
+ sys.exit(1)
+ else:
+ # look for any sql connection
+ connections = [c for c in self.connections.connections.values()
+ if isinstance(c, sqlconnection.SQLConnection)]
+ if len(connections) > 1:
+ self.log.error("Multiple sql connection found, "
+ "set the sql_connection_name option "
+ "in zuul.conf [web] section")
+ sys.exit(1)
+ if connections:
+ # use this sql connection by default
+ sql_conn = connections[0]
+ params['sql_connection'] = sql_conn
+
try:
self.web = zuul.web.ZuulWeb(**params)
except Exception as e:
@@ -79,6 +104,8 @@
self.setup_logging('web', 'log_config')
self.log = logging.getLogger("zuul.WebServer")
+ self.configure_connections()
+
try:
self._run()
except Exception:
diff --git a/zuul/configloader.py b/zuul/configloader.py
index 99f10f6..bcb3e49 100644
--- a/zuul/configloader.py
+++ b/zuul/configloader.py
@@ -517,6 +517,7 @@
# "job.run.append(...)").
job = model.Job(name)
+ job.description = conf.get('description')
job.source_context = conf.get('_source_context')
job.source_line = conf.get('_start_mark').line + 1
@@ -1161,8 +1162,8 @@
tenant.config_projects,
tenant.untrusted_projects,
cached, tenant)
- unparsed_config.extend(tenant.config_projects_config)
- unparsed_config.extend(tenant.untrusted_projects_config)
+ unparsed_config.extend(tenant.config_projects_config, tenant=tenant)
+ unparsed_config.extend(tenant.untrusted_projects_config, tenant=tenant)
tenant.layout = TenantParser._parseLayout(base, tenant,
unparsed_config,
scheduler,
diff --git a/zuul/executor/client.py b/zuul/executor/client.py
index 09321e4..a8b94f0 100644
--- a/zuul/executor/client.py
+++ b/zuul/executor/client.py
@@ -137,16 +137,12 @@
merger_items=[]):
tenant = pipeline.layout.tenant
uuid = str(uuid4().hex)
+ nodeset = item.current_build_set.getJobNodeSet(job.name)
self.log.info(
"Execute job %s (uuid: %s) on nodes %s for change %s "
"with dependent changes %s" % (
- job, uuid,
- item.current_build_set.getJobNodeSet(job.name),
- item.change,
- dependent_changes))
+ job, uuid, nodeset, item.change, dependent_changes))
- # TODOv3(jeblair): This ansible vars data structure will
- # replace the environment variables below.
project = dict(
name=item.change.project.name,
short_name=item.change.project.name.split('/')[-1],
@@ -165,8 +161,9 @@
tenant=tenant.name,
timeout=job.timeout,
jobtags=sorted(job.tags),
- override_checkout=job.override_checkout,
_inheritance_path=list(job.inheritance_path))
+ if job.override_checkout:
+ zuul_params['override_checkout'] = job.override_checkout
if hasattr(item.change, 'branch'):
zuul_params['branch'] = item.change.branch
if hasattr(item.change, 'tag'):
@@ -205,7 +202,6 @@
params['pre_playbooks'] = [x.toDict() for x in job.pre_run]
params['post_playbooks'] = [x.toDict() for x in job.post_run]
- nodeset = item.current_build_set.getJobNodeSet(job.name)
nodes = []
for node in nodeset.getNodes():
n = node.toDict()
@@ -278,6 +274,7 @@
build = Build(job, uuid)
build.parameters = params
+ build.nodeset = nodeset
if job.name == 'noop':
self.sched.onBuildStarted(build)
diff --git a/zuul/executor/server.py b/zuul/executor/server.py
index 00377ec..6969201 100644
--- a/zuul/executor/server.py
+++ b/zuul/executor/server.py
@@ -358,8 +358,10 @@
# there is a period of time where the user can click on the live log
# link on the status page but the log streaming fails because the file
# is not there yet.
- with open(self.job_output_file, 'w'):
- pass
+ with open(self.job_output_file, 'w') as job_output:
+ job_output.write("{now} | Job console starting...\n".format(
+ now=datetime.datetime.now()
+ ))
self.trusted_projects = []
self.trusted_project_index = {}
@@ -768,22 +770,22 @@
branches = repo.getBranches()
refs = [r.name for r in repo.getRefs()]
selected_ref = None
- if project_override_branch in branches:
- selected_ref = project_override_branch
- self.log.info("Checking out %s project override branch %s",
- project_name, selected_ref)
if project_override_checkout in refs:
selected_ref = project_override_checkout
self.log.info("Checking out %s project override ref %s",
project_name, selected_ref)
- elif job_override_branch in branches:
- selected_ref = job_override_branch
- self.log.info("Checking out %s job override branch %s",
+ elif project_override_branch in branches:
+ selected_ref = project_override_branch
+ self.log.info("Checking out %s project override branch %s",
project_name, selected_ref)
elif job_override_checkout in refs:
selected_ref = job_override_checkout
self.log.info("Checking out %s job override ref %s",
project_name, selected_ref)
+ elif job_override_branch in branches:
+ selected_ref = job_override_branch
+ self.log.info("Checking out %s job override branch %s",
+ project_name, selected_ref)
elif ref and ref.startswith('refs/heads/'):
selected_ref = ref[len('refs/heads/'):]
self.log.info("Checking out %s branch ref %s",
diff --git a/zuul/manager/__init__.py b/zuul/manager/__init__.py
index 6c72c2d..d205afc 100644
--- a/zuul/manager/__init__.py
+++ b/zuul/manager/__init__.py
@@ -853,20 +853,22 @@
if dt:
self.sched.statsd.timing(key + '.resident_time', dt)
self.sched.statsd.incr(key + '.total_changes')
-
- hostname = (item.change.project.canonical_hostname.
- replace('.', '_'))
- projectname = (item.change.project.name.
- replace('.', '_').replace('/', '.'))
- projectname = projectname.replace('.', '_').replace('/', '.')
- branchname = item.change.branch.replace('.', '_').replace('/', '.')
- # stats.timers.zuul.tenant.<tenant>.pipeline.<pipeline>.
- # project.<host>.<project>.<branch>.resident_time
- # stats_counts.zuul.tenant.<tenant>.pipeline.<pipeline>.
- # project.<host>.<project>.<branch>.total_changes
- key += '.project.%s.%s.%s' % (hostname, projectname, branchname)
- if dt:
- self.sched.statsd.timing(key + '.resident_time', dt)
- self.sched.statsd.incr(key + '.total_changes')
+ if hasattr(item.change, 'branch'):
+ hostname = (item.change.project.canonical_hostname.
+ replace('.', '_'))
+ projectname = (item.change.project.name.
+ replace('.', '_').replace('/', '.'))
+ projectname = projectname.replace('.', '_').replace('/', '.')
+ branchname = item.change.branch.replace('.', '_').replace(
+ '/', '.')
+ # stats.timers.zuul.tenant.<tenant>.pipeline.<pipeline>.
+ # project.<host>.<project>.<branch>.resident_time
+ # stats_counts.zuul.tenant.<tenant>.pipeline.<pipeline>.
+ # project.<host>.<project>.<branch>.total_changes
+ key += '.project.%s.%s.%s' % (hostname, projectname,
+ branchname)
+ if dt:
+ self.sched.statsd.timing(key + '.resident_time', dt)
+ self.sched.statsd.incr(key + '.total_changes')
except Exception:
self.log.exception("Exception reporting pipeline stats")
diff --git a/zuul/model.py b/zuul/model.py
index 081d165..f3f358b 100644
--- a/zuul/model.py
+++ b/zuul/model.py
@@ -858,6 +858,7 @@
source_line=None,
inheritance_path=(),
parent_data=None,
+ description=None,
)
self.inheritable_attributes = {}
@@ -961,12 +962,12 @@
m = m.matchers[0]
if not isinstance(m, change_matcher.BranchMatcher):
return None
- return m._regex
+ return m
- def addBranchMatcher(self, branch):
+ def addImpliedBranchMatcher(self, branch):
# Add a branch matcher that combines as a boolean *and* with
# existing branch matchers, if any.
- matchers = [change_matcher.BranchMatcher(branch)]
+ matchers = [change_matcher.ImpliedBranchMatcher(branch)]
if self.branch_matcher:
matchers.append(self.branch_matcher)
self.branch_matcher = change_matcher.MatchAll(matchers)
@@ -1121,25 +1122,8 @@
joblist = self.jobs.setdefault(jobname, [])
for job in jobs:
if implied_branch:
- # If setting an implied branch and the current
- # branch matcher is a simple match for a different
- # branch, then simply do not add this job. If it
- # is absent, set it to the implied branch.
- # Otherwise, combine it with the implied branch to
- # ensure that it still only affects this branch
- # (whatever else it may do).
- simple_branch = job.getSimpleBranchMatcher()
- if simple_branch and simple_branch != implied_branch:
- # Job is for a different branch, don't add it.
- continue
- if not simple_branch:
- # The branch matcher could be complex, or
- # missing. Add our implied matcher.
- job = job.copy()
- job.addBranchMatcher(implied_branch)
- # Otherwise we have a simple branch matcher which
- # is the same as our implied branch, the job can
- # be added as-is.
+ job = job.copy()
+ job.addImpliedBranchMatcher(implied_branch)
if job not in joblist:
joblist.append(job)
@@ -1212,8 +1196,8 @@
if soft:
current_parent_jobs = set()
else:
- raise Exception("Dependent job %s not found: " %
- (dependent_job,))
+ raise Exception("Job %s depends on %s which was not run." %
+ (dependent_job, current_job))
new_parent_jobs = current_parent_jobs - all_parent_jobs
jobs_to_iterate |= new_parent_jobs
all_parent_jobs |= new_parent_jobs
@@ -1246,6 +1230,7 @@
self.worker = Worker()
self.node_labels = []
self.node_name = None
+ self.nodeset = None
def __repr__(self):
return ('<Build %s of %s on %s>' %
@@ -1878,7 +1863,7 @@
result = build.result
finger_url = build.url
# TODO(tobiash): add support for custom web root
- urlformat = 'static/stream.html?' \
+ urlformat = 'stream.html?' \
'uuid={build.uuid}&' \
'logfile=console.log'
if websocket_url:
@@ -2400,14 +2385,25 @@
r.semaphores = copy.deepcopy(self.semaphores)
return r
- def extend(self, conf):
+ def extend(self, conf, tenant=None):
if isinstance(conf, UnparsedTenantConfig):
self.pragmas.extend(conf.pragmas)
self.pipelines.extend(conf.pipelines)
self.jobs.extend(conf.jobs)
self.project_templates.extend(conf.project_templates)
for k, v in conf.projects.items():
- self.projects.setdefault(k, []).extend(v)
+ name = k
+ # If we have the tenant add the projects to
+ # the according canonical name instead of the given project
+ # name. If it is not found, it's ok to add this to the given
+ # name. We also don't need to throw the
+ # ProjectNotFoundException here as semantic validation occurs
+ # later where it will fail then.
+ if tenant is not None:
+ trusted, project = tenant.getProject(k)
+ if project is not None:
+ name = project.canonical_name
+ self.projects.setdefault(name, []).extend(v)
self.nodesets.extend(conf.nodesets)
self.secrets.extend(conf.secrets)
self.semaphores.extend(conf.semaphores)
diff --git a/zuul/rpclistener.py b/zuul/rpclistener.py
index 8c8c783..d40505e 100644
--- a/zuul/rpclistener.py
+++ b/zuul/rpclistener.py
@@ -21,6 +21,7 @@
import gear
from zuul import model
+from zuul.lib import encryption
from zuul.lib.config import get_default
@@ -58,6 +59,8 @@
self.worker.registerFunction("zuul:get_job_log_stream_address")
self.worker.registerFunction("zuul:tenant_list")
self.worker.registerFunction("zuul:status_get")
+ self.worker.registerFunction("zuul:job_list")
+ self.worker.registerFunction("zuul:key_get")
def getFunctions(self):
functions = {}
@@ -283,3 +286,25 @@
args = json.loads(job.arguments)
output = self.sched.formatStatusJSON(args.get("tenant"))
job.sendWorkComplete(output)
+
+ def handle_job_list(self, job):
+ args = json.loads(job.arguments)
+ tenant = self.sched.abide.tenants.get(args.get("tenant"))
+ output = []
+ for job_name in sorted(tenant.layout.jobs):
+ desc = None
+ for tenant_job in tenant.layout.jobs[job_name]:
+ if tenant_job.description:
+ desc = tenant_job.description.split('\n')[0]
+ break
+ output.append({"name": job_name,
+ "description": desc})
+ job.sendWorkComplete(json.dumps(output))
+
+ def handle_key_get(self, job):
+ args = json.loads(job.arguments)
+ source_name, project_name = args.get("source"), args.get("project")
+ source = self.sched.connections.getSource(source_name)
+ project = source.getProject(project_name)
+ job.sendWorkComplete(
+ encryption.serialize_rsa_public_key(project.public_key))
diff --git a/zuul/scheduler.py b/zuul/scheduler.py
index ed7d64b..7dee00d 100644
--- a/zuul/scheduler.py
+++ b/zuul/scheduler.py
@@ -595,10 +595,32 @@
"when reconfiguring" % name)
continue
self.log.debug("Re-enqueueing changes for pipeline %s" % name)
+            # TODO(jeblair): This supports an undocumented and
+ # unanticipated hack to create a static window. If we
+ # really want to support this, maybe we should have a
+ # 'static' type? But it may be in use in the wild, so we
+ # should allow this at least until there's an upgrade
+ # path.
+ if (new_pipeline.window and
+ new_pipeline.window_increase_type == 'exponential' and
+ new_pipeline.window_decrease_type == 'exponential' and
+ new_pipeline.window_increase_factor == 1 and
+ new_pipeline.window_decrease_factor == 1):
+ static_window = True
+ else:
+ static_window = False
+ if old_pipeline.window and (not static_window):
+ new_pipeline.window = max(old_pipeline.window,
+ new_pipeline.window_floor)
items_to_remove = []
builds_to_cancel = []
last_head = None
for shared_queue in old_pipeline.queues:
+ # Attempt to keep window sizes from shrinking where possible
+ new_queue = new_pipeline.getQueue(shared_queue.projects[0])
+ if new_queue and shared_queue.window and (not static_window):
+ new_queue.window = max(shared_queue.window,
+ new_queue.window_floor)
for item in shared_queue.queue:
if not item.item_ahead:
last_head = item
@@ -650,6 +672,15 @@
self.log.exception(
"Exception while canceling build %s "
"for change %s" % (build, build.build_set.item.change))
+ # In the unlikely case that a build is removed and
+ # later added back, make sure we clear out the nodeset
+ # so it gets requested again.
+ try:
+ build.build_set.removeJobNodeSet(build.job.name)
+ except Exception:
+ self.log.exception(
+ "Exception while removing nodeset from build %s "
+ "for change %s" % (build, build.build_set.item.change))
finally:
tenant.semaphore_handler.release(
build.build_set.item, build.job)
@@ -920,7 +951,7 @@
# to pass this on to the pipeline manager, make sure we return
# the nodes to nodepool.
try:
- nodeset = build.build_set.getJobNodeSet(build.job.name)
+ nodeset = build.nodeset
autohold_key = (build.pipeline.layout.tenant.name,
build.build_set.item.change.project.canonical_name,
build.job.name)
diff --git a/zuul/web/__init__.py b/zuul/web/__init__.py
index 766a21d..e4a3612 100755
--- a/zuul/web/__init__.py
+++ b/zuul/web/__init__.py
@@ -20,11 +20,14 @@
import logging
import os
import time
+import urllib.parse
import uvloop
import aiohttp
from aiohttp import web
+from sqlalchemy.sql import select
+
import zuul.rpcclient
STATIC_DIR = os.path.join(os.path.dirname(__file__), 'static')
@@ -162,6 +165,8 @@
self.controllers = {
'tenant_list': self.tenant_list,
'status_get': self.status_get,
+ 'job_list': self.job_list,
+ 'key_get': self.key_get,
}
def tenant_list(self, request):
@@ -182,6 +187,18 @@
resp.last_modified = self.cache_time[tenant]
return resp
+ def job_list(self, request):
+ tenant = request.match_info["tenant"]
+ job = self.rpc.submitJob('zuul:job_list', {'tenant': tenant})
+ return web.json_response(json.loads(job.data[0]))
+
+ def key_get(self, request):
+ source = request.match_info["source"]
+ project = request.match_info["project"]
+ job = self.rpc.submitJob('zuul:key_get', {'source': source,
+ 'project': project})
+ return web.Response(body=job.data[0])
+
async def processRequest(self, request, action):
try:
resp = self.controllers[action](request)
@@ -194,6 +211,92 @@
return resp
+class SqlHandler(object):
+ log = logging.getLogger("zuul.web.SqlHandler")
+ filters = ("project", "pipeline", "change", "patchset", "ref",
+ "result", "uuid", "job_name", "voting", "node_name", "newrev")
+
+ def __init__(self, connection):
+ self.connection = connection
+
+ def query(self, args):
+ build = self.connection.zuul_build_table
+ buildset = self.connection.zuul_buildset_table
+ query = select([
+ buildset.c.project,
+ buildset.c.pipeline,
+ buildset.c.change,
+ buildset.c.patchset,
+ buildset.c.ref,
+ buildset.c.newrev,
+ buildset.c.ref_url,
+ build.c.result,
+ build.c.uuid,
+ build.c.job_name,
+ build.c.voting,
+ build.c.node_name,
+ build.c.start_time,
+ build.c.end_time,
+ build.c.log_url]).select_from(build.join(buildset))
+ for table in ('build', 'buildset'):
+ for k, v in args['%s_filters' % table].items():
+ if table == 'build':
+ column = build.c
+ else:
+ column = buildset.c
+ query = query.where(getattr(column, k).in_(v))
+ return query.limit(args['limit']).offset(args['skip']).order_by(
+ build.c.id.desc())
+
+ def get_builds(self, args):
+ """Return a list of build"""
+ builds = []
+ with self.connection.engine.begin() as conn:
+ query = self.query(args)
+ for row in conn.execute(query):
+ build = dict(row)
+ # Convert date to iso format
+ if row.start_time:
+ build['start_time'] = row.start_time.strftime(
+ '%Y-%m-%dT%H:%M:%S')
+ if row.end_time:
+ build['end_time'] = row.end_time.strftime(
+ '%Y-%m-%dT%H:%M:%S')
+ # Compute run duration
+ if row.start_time and row.end_time:
+ build['duration'] = (row.end_time -
+ row.start_time).total_seconds()
+ builds.append(build)
+ return builds
+
+ async def processRequest(self, request):
+ try:
+ args = {
+ 'buildset_filters': {},
+ 'build_filters': {},
+ 'limit': 50,
+ 'skip': 0,
+ }
+ for k, v in urllib.parse.parse_qsl(request.rel_url.query_string):
+ if k in ("tenant", "project", "pipeline", "change",
+ "patchset", "ref", "newrev"):
+ args['buildset_filters'].setdefault(k, []).append(v)
+ elif k in ("uuid", "job_name", "voting", "node_name",
+ "result"):
+ args['build_filters'].setdefault(k, []).append(v)
+ elif k in ("limit", "skip"):
+ args[k] = int(v)
+ else:
+ raise ValueError("Unknown parameter %s" % k)
+ data = self.get_builds(args)
+ resp = web.json_response(data)
+ except Exception as e:
+ self.log.exception("Jobs exception:")
+ resp = web.json_response({'error_description': 'Internal error'},
+ status=500)
+ return resp
+
+
class ZuulWeb(object):
log = logging.getLogger("zuul.web.ZuulWeb")
@@ -201,7 +304,8 @@
def __init__(self, listen_address, listen_port,
gear_server, gear_port,
ssl_key=None, ssl_cert=None, ssl_ca=None,
- static_cache_expiry=3600):
+ static_cache_expiry=3600,
+ sql_connection=None):
self.listen_address = listen_address
self.listen_port = listen_port
self.event_loop = None
@@ -212,6 +316,10 @@
ssl_key, ssl_cert, ssl_ca)
self.log_streaming_handler = LogStreamingHandler(self.rpc)
self.gearman_handler = GearmanHandler(self.rpc)
+ if sql_connection:
+ self.sql_handler = SqlHandler(sql_connection)
+ else:
+ self.sql_handler = None
async def _handleWebsocket(self, request):
return await self.log_streaming_handler.processRequest(
@@ -224,12 +332,27 @@
async def _handleStatusRequest(self, request):
return await self.gearman_handler.processRequest(request, 'status_get')
+ async def _handleJobsRequest(self, request):
+ return await self.gearman_handler.processRequest(request, 'job_list')
+
+ async def _handleSqlRequest(self, request):
+ return await self.sql_handler.processRequest(request)
+
+ async def _handleKeyRequest(self, request):
+ return await self.gearman_handler.processRequest(request, 'key_get')
+
async def _handleStaticRequest(self, request):
fp = None
if request.path.endswith("tenants.html") or request.path.endswith("/"):
fp = os.path.join(STATIC_DIR, "index.html")
elif request.path.endswith("status.html"):
fp = os.path.join(STATIC_DIR, "status.html")
+ elif request.path.endswith("jobs.html"):
+ fp = os.path.join(STATIC_DIR, "jobs.html")
+ elif request.path.endswith("builds.html"):
+ fp = os.path.join(STATIC_DIR, "builds.html")
+ elif request.path.endswith("stream.html"):
+ fp = os.path.join(STATIC_DIR, "stream.html")
headers = {}
if self.static_cache_expiry:
headers['Cache-Control'] = "public, max-age=%d" % \
@@ -248,14 +371,24 @@
is run within a separate (non-main) thread.
"""
routes = [
- ('GET', '/console-stream', self._handleWebsocket),
('GET', '/tenants.json', self._handleTenantsRequest),
('GET', '/{tenant}/status.json', self._handleStatusRequest),
+ ('GET', '/{tenant}/jobs.json', self._handleJobsRequest),
+ ('GET', '/{tenant}/console-stream', self._handleWebsocket),
+ ('GET', '/{source}/{project}.pub', self._handleKeyRequest),
('GET', '/{tenant}/status.html', self._handleStaticRequest),
+ ('GET', '/{tenant}/jobs.html', self._handleStaticRequest),
+ ('GET', '/{tenant}/stream.html', self._handleStaticRequest),
('GET', '/tenants.html', self._handleStaticRequest),
('GET', '/', self._handleStaticRequest),
]
+ if self.sql_handler:
+ routes.append(('GET', '/{tenant}/builds.json',
+ self._handleSqlRequest))
+ routes.append(('GET', '/{tenant}/builds.html',
+ self._handleStaticRequest))
+
self.log.debug("ZuulWeb starting")
asyncio.set_event_loop_policy(uvloop.EventLoopPolicy())
user_supplied_loop = loop is not None
diff --git a/zuul/web/static/README b/zuul/web/static/README
index f17ea5f..e924dc7 100644
--- a/zuul/web/static/README
+++ b/zuul/web/static/README
@@ -50,8 +50,7 @@
</Directory>
# Console-stream needs a special proxy-pass for websocket
- ProxyPass /console-stream ws://localhost:9000/console-stream nocanon retry=0
- ProxyPassReverse /console-stream ws://localhost:9000/console-stream
+ ProxyPassMatch /(.*)/console-stream ws://localhost:9000/$1/console-stream nocanon retry=0
# Then only the json calls are sent to the zuul-web endpoints
ProxyPassMatch ^/(.*.json)$ http://localhost:9000/$1 nocanon retry=0
diff --git a/zuul/web/static/builds.html b/zuul/web/static/builds.html
new file mode 100644
index 0000000..921c9e2
--- /dev/null
+++ b/zuul/web/static/builds.html
@@ -0,0 +1,84 @@
+<!--
+Copyright 2017 Red Hat
+
+Licensed under the Apache License, Version 2.0 (the "License"); you may
+not use this file except in compliance with the License. You may obtain
+a copy of the License at
+
+ http://www.apache.org/licenses/LICENSE-2.0
+
+Unless required by applicable law or agreed to in writing, software
+distributed under the License is distributed on an "AS IS" BASIS, WITHOUT
+WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. See the
+License for the specific language governing permissions and limitations
+under the License.
+-->
+<!DOCTYPE html>
+<html>
+<head>
+ <title>Zuul Builds</title>
+ <link rel="stylesheet" href="/static/bootstrap/css/bootstrap.min.css">
+ <link rel="stylesheet" href="../static/styles/zuul.css" />
+ <script src="/static/js/jquery.min.js"></script>
+ <script src="/static/js/angular.min.js"></script>
+ <script src="../static/javascripts/zuul.angular.js"></script>
+</head>
+<body ng-app="zuulBuilds" ng-controller="mainController"><div class="container-fluid">
+ <nav class="navbar navbar-default">
+ <div class="container-fluid">
+ <div class="navbar-header">
+ <a class="navbar-brand" href="../" target="_self">Zuul Dashboard</a>
+ </div>
+ <ul class="nav navbar-nav">
+ <li><a href="status.html" target="_self">Status</a></li>
+ <li><a href="jobs.html" target="_self">Jobs</a></li>
+ <li class="active"><a href="builds.html" target="_self">Builds</a></li>
+ </ul>
+ <span style="float: right; margin-top: 7px;">
+ <form ng-submit="builds_fetch()">
+ <label>Pipeline:</label>
+ <input name="pipeline" ng-model="pipeline" />
+ <label>Job:</label>
+ <input name="job_name" ng-model="job_name" />
+ <label>Project:</label>
+ <input name="project" ng-model="project" />
+ <input type="submit" value="Refresh" />
+ </form>
+ </span>
+ </div>
+ </nav>
+ <table class="table table-hover table-condensed">
+ <thead>
+ <tr>
+ <th width="20px">id</th>
+ <th>Job</th>
+ <th>Project</th>
+ <th>Pipeline</th>
+ <th>Change</th>
+ <th>Newrev</th>
+ <th>Duration</th>
+ <th>Log url</th>
+ <th>Node name</th>
+ <th>Start time</th>
+ <th>End time</th>
+ <th>Result</th>
+ </tr>
+ </thead>
+ <tbody>
+ <tr ng-repeat="build in builds" ng-class="rowClass(build)">
+ <td>{{ build.id }}</td>
+ <td>{{ build.job_name }}</td>
+ <td>{{ build.project }}</td>
+ <td>{{ build.pipeline }}</td>
+ <td><a href="{{ build.ref_url }}" target="_self">change</a></td>
+ <td>{{ build.newrev }}</td>
+ <td>{{ build.duration }} seconds</td>
+ <td><a ng-if="build.log_url" href="{{ build.log_url }}" target="_self">logs</a></td>
+ <td>{{ build.node_name }}</td>
+ <td>{{ build.start_time }}</td>
+ <td>{{ build.end_time }}</td>
+ <td>{{ build.result }}</td>
+ </tr>
+ </tbody>
+ </table>
+</div></body></html>
diff --git a/zuul/web/static/javascripts/zuul.angular.js b/zuul/web/static/javascripts/zuul.angular.js
index 3152fc0..87cbbdd 100644
--- a/zuul/web/static/javascripts/zuul.angular.js
+++ b/zuul/web/static/javascripts/zuul.angular.js
@@ -30,3 +30,70 @@
}
$scope.tenants_fetch();
});
+
+angular.module('zuulJobs', []).controller(
+ 'mainController', function($scope, $http)
+{
+ $scope.jobs = undefined;
+ $scope.jobs_fetch = function() {
+ $http.get("jobs.json")
+ .then(function success(result) {
+ $scope.jobs = result.data;
+ });
+ }
+ $scope.jobs_fetch();
+});
+
+angular.module('zuulBuilds', [], function($locationProvider) {
+ $locationProvider.html5Mode({
+ enabled: true,
+ requireBase: false
+ });
+}).controller('mainController', function($scope, $http, $location)
+{
+ $scope.rowClass = function(build) {
+ if (build.result == "SUCCESS") {
+ return "success";
+ } else {
+ return "warning";
+ }
+ };
+ var query_args = $location.search();
+ var url = $location.url();
+ var tenant_start = url.lastIndexOf(
+ '/', url.lastIndexOf('/builds.html') - 1) + 1;
+ var tenant_length = url.lastIndexOf('/builds.html') - tenant_start;
+ $scope.tenant = url.substr(tenant_start, tenant_length);
+ $scope.builds = undefined;
+ if (query_args["pipeline"]) {$scope.pipeline = query_args["pipeline"];
+ } else {$scope.pipeline = "";}
+ if (query_args["job_name"]) {$scope.job_name = query_args["job_name"];
+ } else {$scope.job_name = "";}
+ if (query_args["project"]) {$scope.project = query_args["project"];
+ } else {$scope.project = "";}
+ $scope.builds_fetch = function() {
+ query_string = "";
+ if ($scope.tenant) {query_string += "&tenant="+$scope.tenant;}
+ if ($scope.pipeline) {query_string += "&pipeline="+$scope.pipeline;}
+ if ($scope.job_name) {query_string += "&job_name="+$scope.job_name;}
+ if ($scope.project) {query_string += "&project="+$scope.project;}
+ if (query_string != "") {query_string = "?" + query_string.substr(1);}
+ $http.get("builds.json" + query_string)
+ .then(function success(result) {
+ for (build_pos = 0;
+ build_pos < result.data.length;
+ build_pos += 1) {
+ build = result.data[build_pos]
+ if (build.node_name == null) {
+ build.node_name = 'master'
+ }
+                    /* Fix incorrect URL for post_failure job */
+ if (build.log_url == build.job_name) {
+ build.log_url = undefined;
+ }
+ }
+ $scope.builds = result.data;
+ });
+ }
+ $scope.builds_fetch()
+});
diff --git a/zuul/web/static/jobs.html b/zuul/web/static/jobs.html
new file mode 100644
index 0000000..6946723
--- /dev/null
+++ b/zuul/web/static/jobs.html
@@ -0,0 +1,55 @@
+<!--
+Copyright 2017 Red Hat
+
+Licensed under the Apache License, Version 2.0 (the "License"); you may
+not use this file except in compliance with the License. You may obtain
+a copy of the License at
+
+ http://www.apache.org/licenses/LICENSE-2.0
+
+Unless required by applicable law or agreed to in writing, software
+distributed under the License is distributed on an "AS IS" BASIS, WITHOUT
+WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. See the
+License for the specific language governing permissions and limitations
+under the License.
+-->
+<!DOCTYPE html>
+<html>
+<head>
+    <title>Zuul Jobs</title>
+ <link rel="stylesheet" href="/static/bootstrap/css/bootstrap.min.css">
+ <link rel="stylesheet" href="../static/styles/zuul.css" />
+ <script src="/static/js/jquery.min.js"></script>
+ <script src="/static/js/angular.min.js"></script>
+ <script src="../static/javascripts/zuul.angular.js"></script>
+</head>
+<body ng-app="zuulJobs" ng-controller="mainController"><div class="container-fluid">
+ <nav class="navbar navbar-default">
+ <div class="container-fluid">
+ <div class="navbar-header">
+ <a class="navbar-brand" href="../" target="_self">Zuul Dashboard</a>
+ </div>
+ <ul class="nav navbar-nav">
+ <li><a href="status.html" target="_self">Status</a></li>
+ <li class="active"><a href="jobs.html" target="_self">Jobs</a></li>
+ <li><a href="builds.html" target="_self">Builds</a></li>
+ </ul>
+ </div>
+ </nav>
+ <table class="table table-hover table-condensed">
+ <thead>
+ <tr>
+ <th>Name</th>
+ <th>Description</th>
+ <th>Last builds</th>
+ </tr>
+ </thead>
+ <tbody>
+ <tr ng-repeat="job in jobs">
+ <td>{{ job.name }}</td>
+ <td>{{ job.description }}</td>
+ <td><a href="builds.html?job_name={{ job.name }}">builds</a></td>
+ </tr>
+ </tbody>
+ </table>
+</div></body></html>
diff --git a/zuul/web/static/stream.html b/zuul/web/static/stream.html
index dbeb66b..f2e7081 100644
--- a/zuul/web/static/stream.html
+++ b/zuul/web/static/stream.html
@@ -73,7 +73,7 @@
} else {
protocol = 'ws://';
}
- path = url['pathname'].replace(/static\/.*$/g, '') + 'console-stream';
+ path = url['pathname'].replace(/stream.html.*$/g, '') + 'console-stream';
params['websocket_url'] = protocol + url['host'] + path;
}
var ws = new WebSocket(params['websocket_url']);