Merge "Add support for shared ansible_host in inventory" into feature/zuulv3
diff --git a/doc/source/admin/components.rst b/doc/source/admin/components.rst
index b20aba7..b3c2e44 100644
--- a/doc/source/admin/components.rst
+++ b/doc/source/admin/components.rst
@@ -601,6 +601,12 @@
Base URL on which the websocket service is exposed, if different
than the base URL of the web app.
+ .. attr:: static_cache_expiry
+ :default: 3600
+
+ The Cache-Control max-age response header value for static files served
+ by the zuul-web. Set to 0 during development to disable Cache-Control.
+
Operation
~~~~~~~~~
diff --git a/doc/source/user/config.rst b/doc/source/user/config.rst
index edd3222..96e55a8 100644
--- a/doc/source/user/config.rst
+++ b/doc/source/user/config.rst
@@ -658,8 +658,17 @@
* In the case of a job variant defined within a
:ref:`project-template`, if no branch specifier appears, the
- implied branch specifier for the :ref:`project` definition which
- uses the project-template will be used.
+ branch containing the project-template definition is used
+ as an implied branch specifier.  This means that
+ definitions of the same project-template on different branches
+ may run different jobs.
+
+ When that project-template is used by a :ref:`project`
+ definition within an :term:`untrusted-project`, the branch
+ containing that project definition is combined with the branch
+ specifier of the project-template. This means it is possible
+ for a project to use a template on one branch, but not on
+ another.
This allows for the very simple and expected workflow where if a
project defines a job on the ``master`` branch with no branch
diff --git a/doc/source/user/jobs.rst b/doc/source/user/jobs.rst
index f65ee19..ae720d8 100644
--- a/doc/source/user/jobs.rst
+++ b/doc/source/user/jobs.rst
@@ -172,6 +172,12 @@
The git ref of the item. This will be the full path (e.g.,
`refs/heads/master` or `refs/changes/...`).
+ .. var:: override_checkout
+
+ If the job was configured to override the branch or tag checked
+ out, this will contain the specified value. Otherwise, this
+ variable will be undefined.
+
.. var:: pipeline
The name of the pipeline in which the job is being run.
@@ -214,14 +220,15 @@
`src/git.example.com/org/project`.
.. var:: projects
- :type: list
+ :type: dict
A list of all projects prepared by Zuul for the item. It
includes, at least, the item's own project. It also includes
the projects of any items this item depends on, as well as the
projects that appear in :attr:`job.required-projects`.
- This is a list of dictionaries, with each element consisting of:
+ This is a dictionary of dictionaries. Each value has a key of
+ the `canonical_name`, then each entry consists of:
.. var:: name
@@ -252,6 +259,26 @@
A boolean indicating whether this project appears in the
:attr:`job.required-projects` list for this job.
+ .. var:: checkout
+
+ The branch or tag that Zuul checked out for this project.
+ This may be influenced by the branch or tag associated with
+ the item as well as the job configuration.
+
+ For example, to access the source directory of a single known
+ project, you might use::
+
+ {{ zuul.projects['git.example.com/org/project'].src_dir }}
+
+ To iterate over the project list, you might write a task
+ something like::
+
+ - name: Sample project iteration
+ debug:
+ msg: "Project {{ item.name }} is at {{ item.src_dir }}"
+ with_items: "{{ zuul.projects.values() | list }}"
+
+
.. var:: _projects
:type: dict
diff --git a/etc/zuul.conf-sample b/etc/zuul.conf-sample
index 76494ad..17092af 100644
--- a/etc/zuul.conf-sample
+++ b/etc/zuul.conf-sample
@@ -37,6 +37,8 @@
[web]
listen_address=127.0.0.1
port=9000
+static_cache_expiry=0
+;sql_connection_name=mydatabase
[webapp]
listen_address=0.0.0.0
diff --git a/playbooks/zuul-stream/templates/ansible.cfg.j2 b/playbooks/zuul-stream/templates/ansible.cfg.j2
index 24f459e..41ffc0c 100644
--- a/playbooks/zuul-stream/templates/ansible.cfg.j2
+++ b/playbooks/zuul-stream/templates/ansible.cfg.j2
@@ -1,5 +1,5 @@
[defaults]
-hostfile = {{ ansible_user_dir }}/inventory.yaml
+inventory = {{ ansible_user_dir }}/inventory.yaml
gathering = smart
gather_subset = !all
lookup_plugins = {{ ansible_user_dir }}/src/git.openstack.org/openstack-infra/zuul/zuul/ansible/lookup
diff --git a/tests/base.py b/tests/base.py
index a683426..210f03b 100755
--- a/tests/base.py
+++ b/tests/base.py
@@ -486,6 +486,29 @@
self.changes[self.change_number] = c
return c
+ def addFakeTag(self, project, branch, tag):
+ path = os.path.join(self.upstream_root, project)
+ repo = git.Repo(path)
+ commit = repo.heads[branch].commit
+ newrev = commit.hexsha
+ ref = 'refs/tags/' + tag
+
+ git.Tag.create(repo, tag, commit)
+
+ event = {
+ "type": "ref-updated",
+ "submitter": {
+ "name": "User Name",
+ },
+ "refUpdate": {
+ "oldRev": 40 * '0',
+ "newRev": newrev,
+ "refName": ref,
+ "project": project,
+ }
+ }
+ return event
+
def getFakeBranchCreatedEvent(self, project, branch):
path = os.path.join(self.upstream_root, project)
repo = git.Repo(path)
@@ -1713,6 +1736,8 @@
image_id=None,
host_keys=["fake-key1", "fake-key2"],
executor='fake-nodepool')
+ if 'fakeuser' in node_type:
+ data['username'] = 'fakeuser'
data = json.dumps(data).encode('utf8')
path = self.client.create(path, data,
makepath=True,
diff --git a/tests/fixtures/config/branch-negative/git/org_project/.zuul.yaml b/tests/fixtures/config/branch-negative/git/org_project/.zuul.yaml
new file mode 100644
index 0000000..f02f449
--- /dev/null
+++ b/tests/fixtures/config/branch-negative/git/org_project/.zuul.yaml
@@ -0,0 +1,10 @@
+- job:
+ name: test-job
+ run: playbooks/test-job.yaml
+
+- project:
+ name: org/project
+ check:
+ jobs:
+ - test-job:
+ branches: ^(?!stable)
diff --git a/tests/fixtures/config/branch-negative/git/org_project/README b/tests/fixtures/config/branch-negative/git/org_project/README
new file mode 100644
index 0000000..9daeafb
--- /dev/null
+++ b/tests/fixtures/config/branch-negative/git/org_project/README
@@ -0,0 +1 @@
+test
diff --git a/tests/fixtures/config/branch-negative/git/org_project/playbooks/test-job.yaml b/tests/fixtures/config/branch-negative/git/org_project/playbooks/test-job.yaml
new file mode 100644
index 0000000..f679dce
--- /dev/null
+++ b/tests/fixtures/config/branch-negative/git/org_project/playbooks/test-job.yaml
@@ -0,0 +1,2 @@
+- hosts: all
+ tasks: []
diff --git a/tests/fixtures/config/branch-negative/git/project-config/zuul.yaml b/tests/fixtures/config/branch-negative/git/project-config/zuul.yaml
new file mode 100644
index 0000000..dc4a182
--- /dev/null
+++ b/tests/fixtures/config/branch-negative/git/project-config/zuul.yaml
@@ -0,0 +1,26 @@
+- pipeline:
+ name: check
+ manager: independent
+ trigger:
+ gerrit:
+ - event: patchset-created
+ success:
+ gerrit:
+ Verified: 1
+ failure:
+ gerrit:
+ Verified: -1
+
+- job:
+ name: base
+ parent: null
+
+- project:
+ name: project-config
+ check:
+ jobs: []
+
+- project:
+ name: org/project
+ check:
+ jobs: []
diff --git a/tests/fixtures/config/branch-negative/main.yaml b/tests/fixtures/config/branch-negative/main.yaml
new file mode 100644
index 0000000..0ac232f
--- /dev/null
+++ b/tests/fixtures/config/branch-negative/main.yaml
@@ -0,0 +1,8 @@
+- tenant:
+ name: tenant-one
+ source:
+ gerrit:
+ config-projects:
+ - project-config
+ untrusted-projects:
+ - org/project
diff --git a/tests/fixtures/config/branch-tag/git/org_project/.zuul.yaml b/tests/fixtures/config/branch-tag/git/org_project/.zuul.yaml
new file mode 100644
index 0000000..acbba6c
--- /dev/null
+++ b/tests/fixtures/config/branch-tag/git/org_project/.zuul.yaml
@@ -0,0 +1,9 @@
+- job:
+ name: test-job
+ run: playbooks/test-job.yaml
+
+- project:
+ name: org/project
+ tag:
+ jobs:
+ - test-job
diff --git a/tests/fixtures/config/branch-tag/git/org_project/README b/tests/fixtures/config/branch-tag/git/org_project/README
new file mode 100644
index 0000000..9daeafb
--- /dev/null
+++ b/tests/fixtures/config/branch-tag/git/org_project/README
@@ -0,0 +1 @@
+test
diff --git a/tests/fixtures/config/branch-tag/git/org_project/playbooks/test-job.yaml b/tests/fixtures/config/branch-tag/git/org_project/playbooks/test-job.yaml
new file mode 100644
index 0000000..f679dce
--- /dev/null
+++ b/tests/fixtures/config/branch-tag/git/org_project/playbooks/test-job.yaml
@@ -0,0 +1,2 @@
+- hosts: all
+ tasks: []
diff --git a/tests/fixtures/config/branch-tag/git/project-config/zuul.yaml b/tests/fixtures/config/branch-tag/git/project-config/zuul.yaml
new file mode 100644
index 0000000..0ae6396
--- /dev/null
+++ b/tests/fixtures/config/branch-tag/git/project-config/zuul.yaml
@@ -0,0 +1,21 @@
+- pipeline:
+ name: tag
+ manager: independent
+ trigger:
+ gerrit:
+ - event: ref-updated
+ ref: ^refs/tags/.*$
+
+- job:
+ name: base
+ parent: null
+
+- project:
+ name: project-config
+ tag:
+ jobs: []
+
+- project:
+ name: org/project
+ tag:
+ jobs: []
diff --git a/tests/fixtures/config/branch-tag/main.yaml b/tests/fixtures/config/branch-tag/main.yaml
new file mode 100644
index 0000000..0ac232f
--- /dev/null
+++ b/tests/fixtures/config/branch-tag/main.yaml
@@ -0,0 +1,8 @@
+- tenant:
+ name: tenant-one
+ source:
+ gerrit:
+ config-projects:
+ - project-config
+ untrusted-projects:
+ - org/project
diff --git a/tests/fixtures/config/branch-templates/git/project-config/zuul.yaml b/tests/fixtures/config/branch-templates/git/project-config/zuul.yaml
new file mode 100644
index 0000000..ce08877
--- /dev/null
+++ b/tests/fixtures/config/branch-templates/git/project-config/zuul.yaml
@@ -0,0 +1,26 @@
+- pipeline:
+ name: check
+ manager: independent
+ trigger:
+ gerrit:
+ - event: patchset-created
+ success:
+ gerrit:
+ Verified: 1
+ failure:
+ gerrit:
+ Verified: -1
+
+- job:
+ name: base
+ parent: null
+
+- project:
+ name: project-config
+ check:
+ jobs: []
+
+- project:
+ name: puppet-integration
+ check:
+ jobs: []
diff --git a/tests/fixtures/config/branch-templates/git/puppet-integration/.zuul.yaml b/tests/fixtures/config/branch-templates/git/puppet-integration/.zuul.yaml
new file mode 100644
index 0000000..dfea632
--- /dev/null
+++ b/tests/fixtures/config/branch-templates/git/puppet-integration/.zuul.yaml
@@ -0,0 +1,25 @@
+- job:
+ name: puppet-unit-base
+ run: playbooks/run-unit-tests.yaml
+
+- job:
+ name: puppet-unit-3.8
+ parent: puppet-unit-base
+ branches: ^(stable/(newton|ocata)).*$
+ vars:
+ puppet_gem_version: 3.8
+
+- job:
+ name: puppet-something
+ run: playbooks/run-unit-tests.yaml
+
+- project-template:
+ name: puppet-unit
+ check:
+ jobs:
+ - puppet-unit-3.8
+
+- project:
+ name: puppet-integration
+ templates:
+ - puppet-unit
diff --git a/tests/fixtures/config/branch-templates/git/puppet-integration/README b/tests/fixtures/config/branch-templates/git/puppet-integration/README
new file mode 100644
index 0000000..9daeafb
--- /dev/null
+++ b/tests/fixtures/config/branch-templates/git/puppet-integration/README
@@ -0,0 +1 @@
+test
diff --git a/tests/fixtures/config/branch-templates/git/puppet-integration/playbooks/run-unit-tests.yaml b/tests/fixtures/config/branch-templates/git/puppet-integration/playbooks/run-unit-tests.yaml
new file mode 100644
index 0000000..f679dce
--- /dev/null
+++ b/tests/fixtures/config/branch-templates/git/puppet-integration/playbooks/run-unit-tests.yaml
@@ -0,0 +1,2 @@
+- hosts: all
+ tasks: []
diff --git a/tests/fixtures/config/branch-templates/git/puppet-tripleo/.zuul.yaml b/tests/fixtures/config/branch-templates/git/puppet-tripleo/.zuul.yaml
new file mode 100644
index 0000000..4be8146
--- /dev/null
+++ b/tests/fixtures/config/branch-templates/git/puppet-tripleo/.zuul.yaml
@@ -0,0 +1,4 @@
+- project:
+ name: puppet-tripleo
+ templates:
+ - puppet-unit
diff --git a/tests/fixtures/config/branch-templates/git/puppet-tripleo/README b/tests/fixtures/config/branch-templates/git/puppet-tripleo/README
new file mode 100644
index 0000000..9daeafb
--- /dev/null
+++ b/tests/fixtures/config/branch-templates/git/puppet-tripleo/README
@@ -0,0 +1 @@
+test
diff --git a/tests/fixtures/config/branch-templates/main.yaml b/tests/fixtures/config/branch-templates/main.yaml
new file mode 100644
index 0000000..f7677a3
--- /dev/null
+++ b/tests/fixtures/config/branch-templates/main.yaml
@@ -0,0 +1,9 @@
+- tenant:
+ name: tenant-one
+ source:
+ gerrit:
+ config-projects:
+ - project-config
+ untrusted-projects:
+ - puppet-integration
+ - puppet-tripleo
diff --git a/tests/fixtures/config/inventory/git/common-config/playbooks/hostvars-inventory.yaml b/tests/fixtures/config/inventory/git/common-config/playbooks/hostvars-inventory.yaml
new file mode 100644
index 0000000..f679dce
--- /dev/null
+++ b/tests/fixtures/config/inventory/git/common-config/playbooks/hostvars-inventory.yaml
@@ -0,0 +1,2 @@
+- hosts: all
+ tasks: []
diff --git a/tests/fixtures/config/inventory/git/common-config/zuul.yaml b/tests/fixtures/config/inventory/git/common-config/zuul.yaml
index e5e2ba0..ad530a7 100644
--- a/tests/fixtures/config/inventory/git/common-config/zuul.yaml
+++ b/tests/fixtures/config/inventory/git/common-config/zuul.yaml
@@ -31,6 +31,14 @@
- compute1
- compute2
+- nodeset:
+ name: nodeset2
+ nodes:
+ - name: default
+ label: default-label
+ - name: fakeuser
+ label: fakeuser-label
+
- job:
name: base
parent: null
@@ -57,3 +65,8 @@
name: group-inventory
nodeset: nodeset1
run: playbooks/group-inventory.yaml
+
+- job:
+ name: hostvars-inventory
+ run: playbooks/hostvars-inventory.yaml
+ nodeset: nodeset2
diff --git a/tests/fixtures/config/inventory/git/org_project/.zuul.yaml b/tests/fixtures/config/inventory/git/org_project/.zuul.yaml
index 689d771..6a29049 100644
--- a/tests/fixtures/config/inventory/git/org_project/.zuul.yaml
+++ b/tests/fixtures/config/inventory/git/org_project/.zuul.yaml
@@ -5,3 +5,4 @@
- single-inventory
- single-inventory-list
- group-inventory
+ - hostvars-inventory
diff --git a/tests/fixtures/config/tenant-parser/git/common-config/zuul.yaml b/tests/fixtures/config/tenant-parser/git/common-config/zuul.yaml
index e21f967..a28ef54 100644
--- a/tests/fixtures/config/tenant-parser/git/common-config/zuul.yaml
+++ b/tests/fixtures/config/tenant-parser/git/common-config/zuul.yaml
@@ -18,8 +18,10 @@
- job:
name: common-config-job
+# Use the canonical name here. This should be merged with the org/project1 in
+# the other repo.
- project:
- name: org/project1
+ name: review.example.com/org/project1
check:
jobs:
- common-config-job
diff --git a/tests/fixtures/config/zuultrigger/project-change-merged/main.yaml b/tests/fixtures/config/zuultrigger/project-change-merged/main.yaml
index 9d01f54..208e274 100644
--- a/tests/fixtures/config/zuultrigger/project-change-merged/main.yaml
+++ b/tests/fixtures/config/zuultrigger/project-change-merged/main.yaml
@@ -4,3 +4,5 @@
gerrit:
config-projects:
- common-config
+ untrusted-projects:
+ - org/project
diff --git a/tests/fixtures/layouts/reconfigure-remove-add.yaml b/tests/fixtures/layouts/reconfigure-remove-add.yaml
new file mode 100644
index 0000000..c9bccd3
--- /dev/null
+++ b/tests/fixtures/layouts/reconfigure-remove-add.yaml
@@ -0,0 +1,41 @@
+- pipeline:
+ name: gate
+ manager: dependent
+ trigger:
+ gerrit:
+ - event: comment-added
+ approval:
+ - Approved: 1
+ start:
+ gerrit:
+ Verified: 0
+ success:
+ gerrit:
+ Verified: 2
+ submit: true
+ failure:
+ gerrit:
+ Verified: -2
+
+- job:
+ name: base
+ parent: null
+ nodeset:
+ nodes:
+ - label: ubuntu-xenial
+ name: controller
+
+- job:
+ name: job1
+ run: playbooks/job1.yaml
+
+- job:
+ name: job2
+ run: playbooks/job2.yaml
+
+- project:
+ name: org/project
+ gate:
+ jobs:
+ - job1
+ - job2
diff --git a/tests/fixtures/layouts/reconfigure-remove-add2.yaml b/tests/fixtures/layouts/reconfigure-remove-add2.yaml
new file mode 100644
index 0000000..33c169e
--- /dev/null
+++ b/tests/fixtures/layouts/reconfigure-remove-add2.yaml
@@ -0,0 +1,40 @@
+- pipeline:
+ name: gate
+ manager: dependent
+ trigger:
+ gerrit:
+ - event: comment-added
+ approval:
+ - Approved: 1
+ start:
+ gerrit:
+ Verified: 0
+ success:
+ gerrit:
+ Verified: 2
+ submit: true
+ failure:
+ gerrit:
+ Verified: -2
+
+- job:
+ name: base
+ parent: null
+ nodeset:
+ nodes:
+ - label: ubuntu-xenial
+ name: controller
+
+- job:
+ name: job1
+ run: playbooks/job1.yaml
+
+- job:
+ name: job2
+ run: playbooks/job2.yaml
+
+- project:
+ name: org/project
+ gate:
+ jobs:
+ - job1
diff --git a/tests/fixtures/layouts/reconfigure-window-fixed.yaml b/tests/fixtures/layouts/reconfigure-window-fixed.yaml
new file mode 100644
index 0000000..9aa1a97
--- /dev/null
+++ b/tests/fixtures/layouts/reconfigure-window-fixed.yaml
@@ -0,0 +1,46 @@
+- pipeline:
+ name: gate
+ manager: dependent
+ trigger:
+ gerrit:
+ - event: comment-added
+ approval:
+ - Approved: 1
+ start:
+ gerrit:
+ Verified: 0
+ success:
+ gerrit:
+ Verified: 2
+ submit: true
+ failure:
+ gerrit:
+ Verified: -2
+ window: 2
+ window-increase-type: exponential
+ window-increase-factor: 1
+ window-decrease-type: exponential
+ window-decrease-factor: 1
+
+- job:
+ name: base
+ parent: null
+ nodeset:
+ nodes:
+ - label: ubuntu-xenial
+ name: controller
+
+- job:
+ name: job1
+ run: playbooks/job1.yaml
+
+- job:
+ name: job2
+ run: playbooks/job2.yaml
+
+- project:
+ name: org/project
+ gate:
+ jobs:
+ - job1
+ - job2
diff --git a/tests/fixtures/layouts/reconfigure-window-fixed2.yaml b/tests/fixtures/layouts/reconfigure-window-fixed2.yaml
new file mode 100644
index 0000000..13382c5
--- /dev/null
+++ b/tests/fixtures/layouts/reconfigure-window-fixed2.yaml
@@ -0,0 +1,46 @@
+- pipeline:
+ name: gate
+ manager: dependent
+ trigger:
+ gerrit:
+ - event: comment-added
+ approval:
+ - Approved: 1
+ start:
+ gerrit:
+ Verified: 0
+ success:
+ gerrit:
+ Verified: 2
+ submit: true
+ failure:
+ gerrit:
+ Verified: -2
+ window: 1
+ window-increase-type: exponential
+ window-increase-factor: 1
+ window-decrease-type: exponential
+ window-decrease-factor: 1
+
+- job:
+ name: base
+ parent: null
+ nodeset:
+ nodes:
+ - label: ubuntu-xenial
+ name: controller
+
+- job:
+ name: job1
+ run: playbooks/job1.yaml
+
+- job:
+ name: job2
+ run: playbooks/job2.yaml
+
+- project:
+ name: org/project
+ gate:
+ jobs:
+ - job1
+ - job2
diff --git a/tests/fixtures/layouts/reconfigure-window.yaml b/tests/fixtures/layouts/reconfigure-window.yaml
new file mode 100644
index 0000000..c9bccd3
--- /dev/null
+++ b/tests/fixtures/layouts/reconfigure-window.yaml
@@ -0,0 +1,41 @@
+- pipeline:
+ name: gate
+ manager: dependent
+ trigger:
+ gerrit:
+ - event: comment-added
+ approval:
+ - Approved: 1
+ start:
+ gerrit:
+ Verified: 0
+ success:
+ gerrit:
+ Verified: 2
+ submit: true
+ failure:
+ gerrit:
+ Verified: -2
+
+- job:
+ name: base
+ parent: null
+ nodeset:
+ nodes:
+ - label: ubuntu-xenial
+ name: controller
+
+- job:
+ name: job1
+ run: playbooks/job1.yaml
+
+- job:
+ name: job2
+ run: playbooks/job2.yaml
+
+- project:
+ name: org/project
+ gate:
+ jobs:
+ - job1
+ - job2
diff --git a/tests/fixtures/layouts/reconfigure-window2.yaml b/tests/fixtures/layouts/reconfigure-window2.yaml
new file mode 100644
index 0000000..8949f7d
--- /dev/null
+++ b/tests/fixtures/layouts/reconfigure-window2.yaml
@@ -0,0 +1,47 @@
+- pipeline:
+ name: gate
+ manager: dependent
+ trigger:
+ gerrit:
+ - event: comment-added
+ approval:
+ - Approved: 1
+ start:
+ gerrit:
+ Verified: 0
+ success:
+ gerrit:
+ Verified: 2
+ submit: true
+ failure:
+ gerrit:
+ Verified: -2
+ window: 1
+ window-floor: 1
+ window-increase-type: linear
+ window-increase-factor: 1
+ window-decrease-type: linear
+ window-decrease-factor: 1
+
+- job:
+ name: base
+ parent: null
+ nodeset:
+ nodes:
+ - label: ubuntu-xenial
+ name: controller
+
+- job:
+ name: job1
+ run: playbooks/job1.yaml
+
+- job:
+ name: job2
+ run: playbooks/job2.yaml
+
+- project:
+ name: org/project
+ gate:
+ jobs:
+ - job1
+ - job2
diff --git a/tests/unit/test_inventory.py b/tests/unit/test_inventory.py
index 71cb05e..1c41f5f 100644
--- a/tests/unit/test_inventory.py
+++ b/tests/unit/test_inventory.py
@@ -100,3 +100,24 @@
self.executor_server.release()
self.waitUntilSettled()
+
+ def test_hostvars_inventory(self):
+
+ inventory = self._get_build_inventory('hostvars-inventory')
+
+ all_nodes = ('default', 'fakeuser')
+ self.assertIn('all', inventory)
+ self.assertIn('hosts', inventory['all'])
+ self.assertIn('vars', inventory['all'])
+ for node_name in all_nodes:
+ self.assertIn(node_name, inventory['all']['hosts'])
+ # check if the nodes use the correct username
+ if node_name == 'fakeuser':
+ username = 'fakeuser'
+ else:
+ username = 'zuul'
+ self.assertEqual(
+ inventory['all']['hosts'][node_name]['ansible_user'], username)
+
+ self.executor_server.release()
+ self.waitUntilSettled()
diff --git a/tests/unit/test_log_streamer.py b/tests/unit/test_log_streamer.py
index c808540..27368e3 100644
--- a/tests/unit/test_log_streamer.py
+++ b/tests/unit/test_log_streamer.py
@@ -158,7 +158,7 @@
def runWSClient(self, build_uuid, event):
async def client(loop, build_uuid, event):
- uri = 'http://[::1]:9000/console-stream'
+ uri = 'http://[::1]:9000/tenant-one/console-stream'
try:
session = aiohttp.ClientSession(loop=loop)
async with session.ws_connect(uri) as ws:
diff --git a/tests/unit/test_scheduler.py b/tests/unit/test_scheduler.py
index 53a20ff..aacc81e 100755
--- a/tests/unit/test_scheduler.py
+++ b/tests/unit/test_scheduler.py
@@ -2581,7 +2581,7 @@
self.assertEqual('project-merge', status_jobs[0]['name'])
# TODO(mordred) pull uuids from self.builds
self.assertEqual(
- 'static/stream.html?uuid={uuid}&logfile=console.log'.format(
+ 'stream.html?uuid={uuid}&logfile=console.log'.format(
uuid=status_jobs[0]['uuid']),
status_jobs[0]['url'])
self.assertEqual(
@@ -2597,7 +2597,7 @@
status_jobs[0]['report_url'])
self.assertEqual('project-test1', status_jobs[1]['name'])
self.assertEqual(
- 'static/stream.html?uuid={uuid}&logfile=console.log'.format(
+ 'stream.html?uuid={uuid}&logfile=console.log'.format(
uuid=status_jobs[1]['uuid']),
status_jobs[1]['url'])
self.assertEqual(
@@ -2613,7 +2613,7 @@
self.assertEqual('project-test2', status_jobs[2]['name'])
self.assertEqual(
- 'static/stream.html?uuid={uuid}&logfile=console.log'.format(
+ 'stream.html?uuid={uuid}&logfile=console.log'.format(
uuid=status_jobs[2]['uuid']),
status_jobs[2]['url'])
self.assertEqual(
@@ -3853,6 +3853,145 @@
self.assertEqual(queue.window_floor, 1)
self.assertEqual(C.data['status'], 'MERGED')
+ @simple_layout('layouts/reconfigure-window.yaml')
+ def test_reconfigure_window_shrink(self):
+ # Test the active window shrinking during reconfiguration
+ self.executor_server.hold_jobs_in_build = True
+
+ A = self.fake_gerrit.addFakeChange('org/project', 'master', 'A')
+ A.addApproval('Code-Review', 2)
+ self.fake_gerrit.addEvent(A.addApproval('Approved', 1))
+ self.waitUntilSettled()
+ B = self.fake_gerrit.addFakeChange('org/project', 'master', 'B')
+ B.addApproval('Code-Review', 2)
+ self.fake_gerrit.addEvent(B.addApproval('Approved', 1))
+ self.waitUntilSettled()
+
+ tenant = self.sched.abide.tenants.get('tenant-one')
+ queue = tenant.layout.pipelines['gate'].queues[0]
+ self.assertEqual(queue.window, 20)
+ self.assertTrue(len(self.builds), 4)
+
+ self.executor_server.release('job1')
+ self.waitUntilSettled()
+ self.commitConfigUpdate('org/common-config',
+ 'layouts/reconfigure-window2.yaml')
+ self.sched.reconfigure(self.config)
+ tenant = self.sched.abide.tenants.get('tenant-one')
+ queue = tenant.layout.pipelines['gate'].queues[0]
+ # Even though we have configured a smaller window, the value
+ # on the existing shared queue should be used.
+ self.assertEqual(queue.window, 20)
+ self.assertTrue(len(self.builds), 4)
+
+ self.sched.reconfigure(self.config)
+ tenant = self.sched.abide.tenants.get('tenant-one')
+ queue = tenant.layout.pipelines['gate'].queues[0]
+ self.assertEqual(queue.window, 20)
+ self.assertTrue(len(self.builds), 4)
+
+ self.executor_server.hold_jobs_in_build = False
+ self.executor_server.release()
+
+ self.waitUntilSettled()
+ self.assertHistory([
+ dict(name='job1', result='SUCCESS', changes='1,1'),
+ dict(name='job1', result='SUCCESS', changes='1,1 2,1'),
+ dict(name='job2', result='SUCCESS', changes='1,1'),
+ dict(name='job2', result='SUCCESS', changes='1,1 2,1'),
+ ], ordered=False)
+
+ @simple_layout('layouts/reconfigure-window-fixed.yaml')
+ def test_reconfigure_window_fixed(self):
+ # Test that a fixed-size window does not grow, and may shrink,
+ self.executor_server.hold_jobs_in_build = True
+
+ A = self.fake_gerrit.addFakeChange('org/project', 'master', 'A')
+ A.addApproval('Code-Review', 2)
+ self.fake_gerrit.addEvent(A.addApproval('Approved', 1))
+ self.waitUntilSettled()
+ B = self.fake_gerrit.addFakeChange('org/project', 'master', 'B')
+ B.addApproval('Code-Review', 2)
+ self.fake_gerrit.addEvent(B.addApproval('Approved', 1))
+ self.waitUntilSettled()
+
+ tenant = self.sched.abide.tenants.get('tenant-one')
+ queue = tenant.layout.pipelines['gate'].queues[0]
+ self.assertEqual(queue.window, 2)
+ self.assertTrue(len(self.builds), 4)
+
+ self.executor_server.release('job1')
+ self.waitUntilSettled()
+ self.commitConfigUpdate('org/common-config',
+ 'layouts/reconfigure-window-fixed2.yaml')
+ self.sched.reconfigure(self.config)
+ tenant = self.sched.abide.tenants.get('tenant-one')
+ queue = tenant.layout.pipelines['gate'].queues[0]
+ # Because we have configured a static window, it should
+ # be allowed to shrink on reconfiguration.
+ self.assertEqual(queue.window, 1)
+ # B is outside the window, but still marked active until the
+ # next pass through the queue processor, so its builds haven't
+ # been canceled.
+ self.assertTrue(len(self.builds), 4)
+
+ self.sched.reconfigure(self.config)
+ tenant = self.sched.abide.tenants.get('tenant-one')
+ queue = tenant.layout.pipelines['gate'].queues[0]
+ self.assertEqual(queue.window, 1)
+ # B's builds have been canceled now
+ self.assertTrue(len(self.builds), 2)
+
+ self.executor_server.hold_jobs_in_build = False
+ self.executor_server.release()
+
+ # B's builds will be restarted and will show up in our history
+ # twice.
+ self.waitUntilSettled()
+ self.assertHistory([
+ dict(name='job1', result='SUCCESS', changes='1,1'),
+ dict(name='job1', result='SUCCESS', changes='1,1 2,1'),
+ dict(name='job2', result='SUCCESS', changes='1,1'),
+ dict(name='job2', result='SUCCESS', changes='1,1 2,1'),
+ dict(name='job1', result='SUCCESS', changes='1,1 2,1'),
+ dict(name='job2', result='SUCCESS', changes='1,1 2,1'),
+ ], ordered=False)
+
+ @simple_layout('layouts/reconfigure-remove-add.yaml')
+ def test_reconfigure_remove_add(self):
+ # Test removing, then adding a job while in queue
+ self.executor_server.hold_jobs_in_build = True
+
+ A = self.fake_gerrit.addFakeChange('org/project', 'master', 'A')
+ A.addApproval('Code-Review', 2)
+ self.fake_gerrit.addEvent(A.addApproval('Approved', 1))
+ self.waitUntilSettled()
+ self.assertTrue(len(self.builds), 2)
+ self.executor_server.release('job2')
+ self.assertTrue(len(self.builds), 1)
+
+ # Remove job2
+ self.commitConfigUpdate('org/common-config',
+ 'layouts/reconfigure-remove-add2.yaml')
+ self.sched.reconfigure(self.config)
+ self.assertTrue(len(self.builds), 1)
+
+ # Add job2 back
+ self.commitConfigUpdate('org/common-config',
+ 'layouts/reconfigure-remove-add.yaml')
+ self.sched.reconfigure(self.config)
+ self.assertTrue(len(self.builds), 2)
+
+ self.executor_server.hold_jobs_in_build = False
+ self.executor_server.release()
+ # This will run a new build of the re-added job2 for A
+ self.waitUntilSettled()
+ self.assertHistory([
+ dict(name='job2', result='SUCCESS', changes='1,1'),
+ dict(name='job1', result='SUCCESS', changes='1,1'),
+ dict(name='job2', result='SUCCESS', changes='1,1'),
+ ], ordered=False)
+
def test_worker_update_metadata(self):
"Test if a worker can send back metadata about itself"
self.executor_server.hold_jobs_in_build = True
@@ -4071,7 +4210,7 @@
self.assertEqual('gate', job['pipeline'])
self.assertEqual(False, job['retry'])
self.assertEqual(
- 'static/stream.html?uuid={uuid}&logfile=console.log'
+ 'stream.html?uuid={uuid}&logfile=console.log'
.format(uuid=job['uuid']), job['url'])
self.assertEqual(
'finger://{hostname}/{uuid}'.format(
diff --git a/tests/unit/test_scheduler_cmd.py b/tests/unit/test_scheduler_cmd.py
deleted file mode 100644
index ee6200f..0000000
--- a/tests/unit/test_scheduler_cmd.py
+++ /dev/null
@@ -1,34 +0,0 @@
-#!/usr/bin/env python
-
-# Licensed under the Apache License, Version 2.0 (the "License"); you may
-# not use this file except in compliance with the License. You may obtain
-# a copy of the License at
-#
-# http://www.apache.org/licenses/LICENSE-2.0
-#
-# Unless required by applicable law or agreed to in writing, software
-# distributed under the License is distributed on an "AS IS" BASIS, WITHOUT
-# WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. See the
-# License for the specific language governing permissions and limitations
-# under the License.
-
-import os
-
-import testtools
-import zuul.cmd.scheduler
-
-from tests import base
-
-
-class TestSchedulerCmdArguments(testtools.TestCase):
-
- def setUp(self):
- super(TestSchedulerCmdArguments, self).setUp()
- self.app = zuul.cmd.scheduler.Scheduler()
-
- def test_test_config(self):
- conf_path = os.path.join(base.FIXTURE_DIR, 'zuul.conf')
- self.app.parse_arguments(['-t', '-c', conf_path])
- self.assertTrue(self.app.args.validate)
- self.app.read_config()
- self.assertEqual(0, self.app.test_config())
diff --git a/tests/unit/test_v3.py b/tests/unit/test_v3.py
index c04604d..54cf111 100755
--- a/tests/unit/test_v3.py
+++ b/tests/unit/test_v3.py
@@ -157,6 +157,148 @@
self.assertIn('Unable to modify final job', A.messages[0])
+class TestBranchTag(ZuulTestCase):
+ tenant_config_file = 'config/branch-tag/main.yaml'
+
+ def test_negative_branch_match(self):
+ # Test that a negative branch matcher works with implied branches.
+ event = self.fake_gerrit.addFakeTag('org/project', 'master', 'foo')
+ self.fake_gerrit.addEvent(event)
+ self.waitUntilSettled()
+ self.assertHistory([
+ dict(name='test-job', result='SUCCESS', ref='refs/tags/foo')])
+
+
+class TestBranchNegative(ZuulTestCase):
+ tenant_config_file = 'config/branch-negative/main.yaml'
+
+ def test_negative_branch_match(self):
+ # Test that a negative branch matcher works with implied branches.
+ self.create_branch('org/project', 'stable/pike')
+ self.fake_gerrit.addEvent(
+ self.fake_gerrit.getFakeBranchCreatedEvent(
+ 'org/project', 'stable/pike'))
+ self.waitUntilSettled()
+
+ A = self.fake_gerrit.addFakeChange('org/project', 'master', 'A')
+ self.fake_gerrit.addEvent(A.getPatchsetCreatedEvent(1))
+ self.waitUntilSettled()
+ B = self.fake_gerrit.addFakeChange('org/project', 'stable/pike', 'A')
+ self.fake_gerrit.addEvent(B.getPatchsetCreatedEvent(1))
+ self.waitUntilSettled()
+ self.assertHistory([
+ dict(name='test-job', result='SUCCESS', changes='1,1')])
+
+
+class TestBranchTemplates(ZuulTestCase):
+ tenant_config_file = 'config/branch-templates/main.yaml'
+
+ def test_template_removal_from_branch(self):
+ # Test that a template can be removed from one branch but not
+ # another.
+ # This creates a new branch with a copy of the config in master
+ self.create_branch('puppet-integration', 'stable/newton')
+ self.create_branch('puppet-integration', 'stable/ocata')
+ self.create_branch('puppet-tripleo', 'stable/newton')
+ self.create_branch('puppet-tripleo', 'stable/ocata')
+ self.fake_gerrit.addEvent(
+ self.fake_gerrit.getFakeBranchCreatedEvent(
+ 'puppet-integration', 'stable/newton'))
+ self.fake_gerrit.addEvent(
+ self.fake_gerrit.getFakeBranchCreatedEvent(
+ 'puppet-integration', 'stable/ocata'))
+ self.fake_gerrit.addEvent(
+ self.fake_gerrit.getFakeBranchCreatedEvent(
+ 'puppet-tripleo', 'stable/newton'))
+ self.fake_gerrit.addEvent(
+ self.fake_gerrit.getFakeBranchCreatedEvent(
+ 'puppet-tripleo', 'stable/ocata'))
+ self.waitUntilSettled()
+
+ in_repo_conf = textwrap.dedent(
+ """
+ - project:
+ name: puppet-tripleo
+ check:
+ jobs:
+ - puppet-something
+ """)
+
+ file_dict = {'.zuul.yaml': in_repo_conf}
+ A = self.fake_gerrit.addFakeChange('puppet-tripleo', 'stable/newton',
+ 'A', files=file_dict)
+ self.fake_gerrit.addEvent(A.getPatchsetCreatedEvent(1))
+ self.waitUntilSettled()
+ self.assertHistory([
+ dict(name='puppet-something', result='SUCCESS', changes='1,1')])
+
+ def test_template_change_on_branch(self):
+ # Test that the contents of a template can be changed on one
+ # branch without affecting another.
+
+ # This creates a new branch with a copy of the config in master
+ self.create_branch('puppet-integration', 'stable/newton')
+ self.create_branch('puppet-integration', 'stable/ocata')
+ self.create_branch('puppet-tripleo', 'stable/newton')
+ self.create_branch('puppet-tripleo', 'stable/ocata')
+ self.fake_gerrit.addEvent(
+ self.fake_gerrit.getFakeBranchCreatedEvent(
+ 'puppet-integration', 'stable/newton'))
+ self.fake_gerrit.addEvent(
+ self.fake_gerrit.getFakeBranchCreatedEvent(
+ 'puppet-integration', 'stable/ocata'))
+ self.fake_gerrit.addEvent(
+ self.fake_gerrit.getFakeBranchCreatedEvent(
+ 'puppet-tripleo', 'stable/newton'))
+ self.fake_gerrit.addEvent(
+ self.fake_gerrit.getFakeBranchCreatedEvent(
+ 'puppet-tripleo', 'stable/ocata'))
+ self.waitUntilSettled()
+
+ in_repo_conf = textwrap.dedent("""
+ - job:
+ name: puppet-unit-base
+ run: playbooks/run-unit-tests.yaml
+
+ - job:
+ name: puppet-unit-3.8
+ parent: puppet-unit-base
+ branches: ^(stable/(newton|ocata)).*$
+ vars:
+ puppet_gem_version: 3.8
+
+ - job:
+ name: puppet-something
+ run: playbooks/run-unit-tests.yaml
+
+ - project-template:
+ name: puppet-unit
+ check:
+ jobs:
+ - puppet-something
+
+ - project:
+ name: puppet-integration
+ templates:
+ - puppet-unit
+ """)
+
+ file_dict = {'.zuul.yaml': in_repo_conf}
+ A = self.fake_gerrit.addFakeChange('puppet-integration',
+ 'stable/newton',
+ 'A', files=file_dict)
+ B = self.fake_gerrit.addFakeChange('puppet-tripleo',
+ 'stable/newton',
+ 'B')
+ B.data['commitMessage'] = '%s\n\nDepends-On: %s\n' % (
+ B.subject, A.data['id'])
+ self.fake_gerrit.addEvent(B.getPatchsetCreatedEvent(1))
+ self.waitUntilSettled()
+ self.assertHistory([
+ dict(name='puppet-something', result='SUCCESS',
+ changes='1,1 2,1')])
+
+
class TestBranchVariants(ZuulTestCase):
tenant_config_file = 'config/branch-variants/main.yaml'
diff --git a/tests/unit/test_zuultrigger.py b/tests/unit/test_zuultrigger.py
index 3c4dead..3954a21 100644
--- a/tests/unit/test_zuultrigger.py
+++ b/tests/unit/test_zuultrigger.py
@@ -64,9 +64,6 @@
class TestZuulTriggerProjectChangeMerged(ZuulTestCase):
- def setUp(self):
- self.skip("Disabled because v3 noop job does not perform merge")
-
tenant_config_file = 'config/zuultrigger/project-change-merged/main.yaml'
def test_zuul_trigger_project_change_merged(self):
diff --git a/tools/test-logs.sh b/tools/test-logs.sh
index bf2147d..a514dd8 100644
--- a/tools/test-logs.sh
+++ b/tools/test-logs.sh
@@ -42,7 +42,7 @@
cat >$WORK_DIR/ansible.cfg <<EOF
[defaults]
-hostfile = $INVENTORY
+inventory = $INVENTORY
gathering = smart
gather_subset = !all
fact_caching = jsonfile
diff --git a/zuul/ansible/library/zuul_return.py b/zuul/ansible/library/zuul_return.py
index 9f3332b..4935226 100644
--- a/zuul/ansible/library/zuul_return.py
+++ b/zuul/ansible/library/zuul_return.py
@@ -63,7 +63,7 @@
path = os.path.join(os.environ['ZUUL_JOBDIR'], 'work',
'results.json')
set_value(path, p['data'], p['file'])
- module.exit_json(changed=True, e=os.environ)
+ module.exit_json(changed=True, e=os.environ.copy())
from ansible.module_utils.basic import * # noqa
from ansible.module_utils.basic import AnsibleModule
diff --git a/zuul/change_matcher.py b/zuul/change_matcher.py
index 7f6673d..eb12f9b 100644
--- a/zuul/change_matcher.py
+++ b/zuul/change_matcher.py
@@ -69,6 +69,20 @@
return False
+class ImpliedBranchMatcher(AbstractChangeMatcher):
+ """
+ A branch matcher that only considers branch refs, and always
+ succeeds on other types (e.g., tags).
+ """
+
+ def matches(self, change):
+ if hasattr(change, 'branch'):
+ if self.regex.match(change.branch):
+ return True
+ return False
+ return True
+
+
class FileMatcher(AbstractChangeMatcher):
def matches(self, change):
diff --git a/zuul/cmd/__init__.py b/zuul/cmd/__init__.py
index 86f7f12..e150f9c 100755
--- a/zuul/cmd/__init__.py
+++ b/zuul/cmd/__init__.py
@@ -14,7 +14,9 @@
# License for the specific language governing permissions and limitations
# under the License.
+import argparse
import configparser
+import daemon
import extras
import io
import logging
@@ -28,8 +30,13 @@
yappi = extras.try_import('yappi')
objgraph = extras.try_import('objgraph')
+# as of python-daemon 1.6 it doesn't bundle pidlockfile anymore
+# instead it depends on lockfile-0.9.1 which uses pidfile.
+pid_file_module = extras.try_imports(['daemon.pidlockfile', 'daemon.pidfile'])
+
from zuul.ansible import logconfig
import zuul.lib.connections
+from zuul.lib.config import get_default
# Do not import modules that will pull in paramiko which must not be
# imported until after the daemonization.
@@ -87,6 +94,8 @@
class ZuulApp(object):
+ app_name = None # type: str
+ app_description = None # type: str
def __init__(self):
self.args = None
@@ -97,7 +106,21 @@
from zuul.version import version_info as zuul_version_info
return "Zuul version: %s" % zuul_version_info.release_string()
- def read_config(self):
+ def createParser(self):
+ parser = argparse.ArgumentParser(description=self.app_description)
+ parser.add_argument('-c', dest='config',
+ help='specify the config file')
+ parser.add_argument('--version', dest='version', action='version',
+ version=self._get_version(),
+ help='show zuul version')
+ return parser
+
+ def parseArguments(self, args=None):
+ parser = self.createParser()
+ self.args = parser.parse_args(args)
+ return parser
+
+ def readConfig(self):
self.config = configparser.ConfigParser()
if self.args.config:
locations = [self.args.config]
@@ -130,3 +153,34 @@
def configure_connections(self, source_only=False):
self.connections = zuul.lib.connections.ConnectionRegistry()
self.connections.configure(self.config, source_only)
+
+
+class ZuulDaemonApp(ZuulApp):
+ def createParser(self):
+ parser = super(ZuulDaemonApp, self).createParser()
+ parser.add_argument('-d', dest='nodaemon', action='store_true',
+ help='do not run as a daemon')
+ return parser
+
+ def getPidFile(self):
+ pid_fn = get_default(self.config, self.app_name, 'pidfile',
+ '/var/run/zuul/%s.pid' % self.app_name,
+ expand_user=True)
+ return pid_fn
+
+ def main(self):
+ self.parseArguments()
+ self.readConfig()
+
+ pid_fn = self.getPidFile()
+ pid = pid_file_module.TimeoutPIDLockFile(pid_fn, 10)
+
+ if self.args.nodaemon:
+ self.run()
+ else:
+ # Exercise the pidfile before we do anything else (including
+ # logging or daemonizing)
+ with daemon.DaemonContext(pidfile=pid):
+ pass
+ with daemon.DaemonContext(pidfile=pid):
+ self.run()
diff --git a/zuul/cmd/client.py b/zuul/cmd/client.py
index 7a26a62..ebf59b9 100755
--- a/zuul/cmd/client.py
+++ b/zuul/cmd/client.py
@@ -30,18 +30,14 @@
class Client(zuul.cmd.ZuulApp):
+ app_name = 'zuul'
+ app_description = 'Zuul RPC client.'
log = logging.getLogger("zuul.Client")
- def parse_arguments(self):
- parser = argparse.ArgumentParser(
- description='Zuul Project Gating System Client.')
- parser.add_argument('-c', dest='config',
- help='specify the config file')
+ def createParser(self):
+ parser = super(Client, self).createParser()
parser.add_argument('-v', dest='verbose', action='store_true',
help='verbose output')
- parser.add_argument('--version', dest='version', action='version',
- version=self._get_version(),
- help='show zuul version')
subparsers = parser.add_subparsers(title='commands',
description='valid commands',
@@ -133,7 +129,10 @@
# TODO: add filters such as queue, project, changeid etc
show_running_jobs.set_defaults(func=self.show_running_jobs)
- self.args = parser.parse_args()
+ return parser
+
+ def parseArguments(self, args=None):
+ parser = super(Client, self).parseArguments()
if not getattr(self.args, 'func', None):
parser.print_help()
sys.exit(1)
@@ -156,8 +155,8 @@
logging.basicConfig(level=logging.DEBUG)
def main(self):
- self.parse_arguments()
- self.read_config()
+ self.parseArguments()
+ self.readConfig()
self.setup_logging()
self.server = self.config.get('gearman', 'server')
@@ -363,10 +362,8 @@
def main():
- client = Client()
- client.main()
+ Client().main()
if __name__ == "__main__":
- sys.path.insert(0, '.')
main()
diff --git a/zuul/cmd/executor.py b/zuul/cmd/executor.py
index 979989d..aef8c95 100755
--- a/zuul/cmd/executor.py
+++ b/zuul/cmd/executor.py
@@ -14,14 +14,6 @@
# License for the specific language governing permissions and limitations
# under the License.
-import argparse
-import daemon
-import extras
-
-# as of python-daemon 1.6 it doesn't bundle pidlockfile anymore
-# instead it depends on lockfile-0.9.1 which uses pidfile.
-pid_file_module = extras.try_imports(['daemon.pidlockfile', 'daemon.pidfile'])
-
import grp
import logging
import os
@@ -41,25 +33,24 @@
# Similar situation with gear and statsd.
-class Executor(zuul.cmd.ZuulApp):
+class Executor(zuul.cmd.ZuulDaemonApp):
+ app_name = 'executor'
+ app_description = 'A standalone Zuul executor.'
- def parse_arguments(self):
- parser = argparse.ArgumentParser(description='Zuul executor.')
- parser.add_argument('-c', dest='config',
- help='specify the config file')
- parser.add_argument('-d', dest='nodaemon', action='store_true',
- help='do not run as a daemon')
- parser.add_argument('--version', dest='version', action='version',
- version=self._get_version(),
- help='show zuul version')
+ def createParser(self):
+ parser = super(Executor, self).createParser()
parser.add_argument('--keep-jobdir', dest='keep_jobdir',
action='store_true',
help='keep local jobdirs after run completes')
parser.add_argument('command',
choices=zuul.executor.server.COMMANDS,
nargs='?')
+ return parser
- self.args = parser.parse_args()
+ def parseArguments(self, args=None):
+ super(Executor, self).parseArguments()
+ if self.args.command:
+ self.args.nodaemon = True
def send_command(self, cmd):
state_dir = get_default(self.config, 'executor', 'state_dir',
@@ -111,8 +102,12 @@
os.chdir(pw.pw_dir)
os.umask(0o022)
- def main(self, daemon=True):
- # See comment at top of file about zuul imports
+ def run(self):
+ if self.args.command in zuul.executor.server.COMMANDS:
+ self.send_command(self.args.command)
+ sys.exit(0)
+
+ self.configure_connections(source_only=True)
self.user = get_default(self.config, 'executor', 'user', 'zuul')
@@ -145,9 +140,8 @@
self.executor.start()
signal.signal(signal.SIGUSR2, zuul.cmd.stack_dump_handler)
- if daemon:
- self.executor.join()
- else:
+
+ if self.args.nodaemon:
while True:
try:
signal.pause()
@@ -155,31 +149,13 @@
print("Ctrl + C: asking executor to exit nicely...\n")
self.exit_handler()
sys.exit(0)
+ else:
+ self.executor.join()
def main():
- server = Executor()
- server.parse_arguments()
- server.read_config()
-
- if server.args.command in zuul.executor.server.COMMANDS:
- server.send_command(server.args.command)
- sys.exit(0)
-
- server.configure_connections(source_only=True)
-
- pid_fn = get_default(server.config, 'executor', 'pidfile',
- '/var/run/zuul-executor/zuul-executor.pid',
- expand_user=True)
- pid = pid_file_module.TimeoutPIDLockFile(pid_fn, 10)
-
- if server.args.nodaemon:
- server.main(False)
- else:
- with daemon.DaemonContext(pidfile=pid):
- server.main(True)
+ Executor().main()
if __name__ == "__main__":
- sys.path.insert(0, '.')
main()
diff --git a/zuul/cmd/merger.py b/zuul/cmd/merger.py
index 9771fff..56b6b44 100755
--- a/zuul/cmd/merger.py
+++ b/zuul/cmd/merger.py
@@ -14,19 +14,9 @@
# License for the specific language governing permissions and limitations
# under the License.
-import argparse
-import daemon
-import extras
-
-# as of python-daemon 1.6 it doesn't bundle pidlockfile anymore
-# instead it depends on lockfile-0.9.1 which uses pidfile.
-pid_file_module = extras.try_imports(['daemon.pidlockfile', 'daemon.pidfile'])
-
-import sys
import signal
import zuul.cmd
-from zuul.lib.config import get_default
# No zuul imports here because they pull in paramiko which must not be
# imported until after the daemonization.
@@ -34,28 +24,21 @@
# Similar situation with gear and statsd.
-class Merger(zuul.cmd.ZuulApp):
-
- def parse_arguments(self):
- parser = argparse.ArgumentParser(description='Zuul merge worker.')
- parser.add_argument('-c', dest='config',
- help='specify the config file')
- parser.add_argument('-d', dest='nodaemon', action='store_true',
- help='do not run as a daemon')
- parser.add_argument('--version', dest='version', action='version',
- version=self._get_version(),
- help='show zuul version')
- self.args = parser.parse_args()
+class Merger(zuul.cmd.ZuulDaemonApp):
+ app_name = 'merger'
+ app_description = 'A standalone Zuul merger.'
def exit_handler(self, signum, frame):
signal.signal(signal.SIGUSR1, signal.SIG_IGN)
self.merger.stop()
self.merger.join()
- def main(self):
+ def run(self):
# See comment at top of file about zuul imports
import zuul.merger.server
+ self.configure_connections(source_only=True)
+
self.setup_logging('merger', 'log_config')
self.merger = zuul.merger.server.MergeServer(self.config,
@@ -73,24 +56,8 @@
def main():
- server = Merger()
- server.parse_arguments()
-
- server.read_config()
- server.configure_connections(source_only=True)
-
- pid_fn = get_default(server.config, 'merger', 'pidfile',
- '/var/run/zuul-merger/zuul-merger.pid',
- expand_user=True)
- pid = pid_file_module.TimeoutPIDLockFile(pid_fn, 10)
-
- if server.args.nodaemon:
- server.main()
- else:
- with daemon.DaemonContext(pidfile=pid):
- server.main()
+ Merger().main()
if __name__ == "__main__":
- sys.path.insert(0, '.')
main()
diff --git a/zuul/cmd/scheduler.py b/zuul/cmd/scheduler.py
index 2d71f4d..539d55b 100755
--- a/zuul/cmd/scheduler.py
+++ b/zuul/cmd/scheduler.py
@@ -14,14 +14,6 @@
# License for the specific language governing permissions and limitations
# under the License.
-import argparse
-import daemon
-import extras
-
-# as of python-daemon 1.6 it doesn't bundle pidlockfile anymore
-# instead it depends on lockfile-0.9.1 which uses pidfile.
-pid_file_module = extras.try_imports(['daemon.pidlockfile', 'daemon.pidfile'])
-
import logging
import os
import sys
@@ -37,29 +29,18 @@
# Similar situation with gear and statsd.
-class Scheduler(zuul.cmd.ZuulApp):
+class Scheduler(zuul.cmd.ZuulDaemonApp):
+ app_name = 'scheduler'
+ app_description = 'The main zuul process.'
+
def __init__(self):
super(Scheduler, self).__init__()
self.gear_server_pid = None
- def parse_arguments(self, args=None):
- parser = argparse.ArgumentParser(description='Project gating system.')
- parser.add_argument('-c', dest='config',
- help='specify the config file')
- parser.add_argument('-d', dest='nodaemon', action='store_true',
- help='do not run as a daemon')
- parser.add_argument('-t', dest='validate', action='store_true',
- help='validate config file syntax (Does not'
- 'validate config repo validity)')
- parser.add_argument('--version', dest='version', action='version',
- version=self._get_version(),
- help='show zuul version')
- self.args = parser.parse_args(args)
-
def reconfigure_handler(self, signum, frame):
signal.signal(signal.SIGHUP, signal.SIG_IGN)
self.log.debug("Reconfiguration triggered")
- self.read_config()
+ self.readConfig()
self.setup_logging('scheduler', 'log_config')
try:
self.sched.reconfigure(self.config)
@@ -77,20 +58,6 @@
self.stop_gear_server()
os._exit(0)
- def test_config(self):
- # See comment at top of file about zuul imports
- import zuul.scheduler
- import zuul.executor.client
-
- logging.basicConfig(level=logging.DEBUG)
- try:
- self.sched = zuul.scheduler.Scheduler(self.config,
- testonly=True)
- except Exception as e:
- self.log.error("%s" % e)
- return -1
- return 0
-
def start_gear_server(self):
pipe_read, pipe_write = os.pipe()
child_pid = os.fork()
@@ -134,7 +101,7 @@
if self.gear_server_pid:
os.kill(self.gear_server_pid, signal.SIGKILL)
- def main(self):
+ def run(self):
# See comment at top of file about zuul imports
import zuul.scheduler
import zuul.executor.client
@@ -206,26 +173,8 @@
def main():
- scheduler = Scheduler()
- scheduler.parse_arguments()
-
- scheduler.read_config()
-
- if scheduler.args.validate:
- sys.exit(scheduler.test_config())
-
- pid_fn = get_default(scheduler.config, 'scheduler', 'pidfile',
- '/var/run/zuul-scheduler/zuul-scheduler.pid',
- expand_user=True)
- pid = pid_file_module.TimeoutPIDLockFile(pid_fn, 10)
-
- if scheduler.args.nodaemon:
- scheduler.main()
- else:
- with daemon.DaemonContext(pidfile=pid):
- scheduler.main()
+ Scheduler().main()
if __name__ == "__main__":
- sys.path.insert(0, '.')
main()
diff --git a/zuul/cmd/web.py b/zuul/cmd/web.py
index 9869a2c..ad3062f 100755
--- a/zuul/cmd/web.py
+++ b/zuul/cmd/web.py
@@ -13,10 +13,7 @@
# License for the specific language governing permissions and limitations
# under the License.
-import argparse
import asyncio
-import daemon
-import extras
import logging
import signal
import sys
@@ -25,42 +22,57 @@
import zuul.cmd
import zuul.web
+from zuul.driver.sql import sqlconnection
from zuul.lib.config import get_default
-# as of python-daemon 1.6 it doesn't bundle pidlockfile anymore
-# instead it depends on lockfile-0.9.1 which uses pidfile.
-pid_file_module = extras.try_imports(['daemon.pidlockfile', 'daemon.pidfile'])
-
-class WebServer(zuul.cmd.ZuulApp):
-
- def parse_arguments(self):
- parser = argparse.ArgumentParser(description='Zuul Web Server.')
- parser.add_argument('-c', dest='config',
- help='specify the config file')
- parser.add_argument('-d', dest='nodaemon', action='store_true',
- help='do not run as a daemon')
- parser.add_argument('--version', dest='version', action='version',
- version=self._get_version(),
- help='show zuul version')
- self.args = parser.parse_args()
+class WebServer(zuul.cmd.ZuulDaemonApp):
+ app_name = 'web'
+ app_description = 'A standalone Zuul web server.'
def exit_handler(self, signum, frame):
self.web.stop()
- def _main(self):
+ def _run(self):
params = dict()
params['listen_address'] = get_default(self.config,
'web', 'listen_address',
'127.0.0.1')
params['listen_port'] = get_default(self.config, 'web', 'port', 9000)
+ params['static_cache_expiry'] = get_default(self.config, 'web',
+ 'static_cache_expiry',
+ 3600)
params['gear_server'] = get_default(self.config, 'gearman', 'server')
params['gear_port'] = get_default(self.config, 'gearman', 'port', 4730)
params['ssl_key'] = get_default(self.config, 'gearman', 'ssl_key')
params['ssl_cert'] = get_default(self.config, 'gearman', 'ssl_cert')
params['ssl_ca'] = get_default(self.config, 'gearman', 'ssl_ca')
+ sql_conn_name = get_default(self.config, 'web',
+ 'sql_connection_name')
+ sql_conn = None
+ if sql_conn_name:
+ # we want a specific sql connection
+ sql_conn = self.connections.connections.get(sql_conn_name)
+ if not sql_conn:
+ self.log.error("Couldn't find sql connection '%s'" %
+ sql_conn_name)
+ sys.exit(1)
+ else:
+ # look for any sql connection
+ connections = [c for c in self.connections.connections.values()
+ if isinstance(c, sqlconnection.SQLConnection)]
+ if len(connections) > 1:
+ self.log.error("Multiple sql connection found, "
+ "set the sql_connection_name option "
+ "in zuul.conf [web] section")
+ sys.exit(1)
+ if connections:
+ # use this sql connection by default
+ sql_conn = connections[0]
+ params['sql_connection'] = sql_conn
+
try:
self.web = zuul.web.ZuulWeb(**params)
except Exception as e:
@@ -88,28 +100,21 @@
loop.close()
self.log.info("Zuul Web Server stopped")
- def main(self):
+ def run(self):
self.setup_logging('web', 'log_config')
self.log = logging.getLogger("zuul.WebServer")
+ self.configure_connections()
+
try:
- self._main()
+ self._run()
except Exception:
self.log.exception("Exception from WebServer:")
def main():
- server = WebServer()
- server.parse_arguments()
- server.read_config()
+ WebServer().main()
- pid_fn = get_default(server.config, 'web', 'pidfile',
- '/var/run/zuul-web/zuul-web.pid', expand_user=True)
- pid = pid_file_module.TimeoutPIDLockFile(pid_fn, 10)
-
- if server.args.nodaemon:
- server.main()
- else:
- with daemon.DaemonContext(pidfile=pid):
- server.main()
+if __name__ == "__main__":
+ main()
diff --git a/zuul/configloader.py b/zuul/configloader.py
index 98bb23f..fb1695c 100644
--- a/zuul/configloader.py
+++ b/zuul/configloader.py
@@ -519,6 +519,7 @@
# "job.run.append(...)").
job = model.Job(name)
+ job.description = conf.get('description')
job.source_context = conf.get('_source_context')
job.source_line = conf.get('_start_mark').line + 1
@@ -1163,8 +1164,8 @@
tenant.config_projects,
tenant.untrusted_projects,
cached, tenant)
- unparsed_config.extend(tenant.config_projects_config)
- unparsed_config.extend(tenant.untrusted_projects_config)
+ unparsed_config.extend(tenant.config_projects_config, tenant=tenant)
+ unparsed_config.extend(tenant.untrusted_projects_config, tenant=tenant)
tenant.layout = TenantParser._parseLayout(base, tenant,
unparsed_config,
scheduler,
diff --git a/zuul/executor/client.py b/zuul/executor/client.py
index fba472f..06c2087 100644
--- a/zuul/executor/client.py
+++ b/zuul/executor/client.py
@@ -137,16 +137,12 @@
merger_items=[]):
tenant = pipeline.layout.tenant
uuid = str(uuid4().hex)
+ nodeset = item.current_build_set.getJobNodeSet(job.name)
self.log.info(
"Execute job %s (uuid: %s) on nodes %s for change %s "
"with dependent changes %s" % (
- job, uuid,
- item.current_build_set.getJobNodeSet(job.name),
- item.change,
- dependent_changes))
+ job, uuid, nodeset, item.change, dependent_changes))
- # TODOv3(jeblair): This ansible vars data structure will
- # replace the environment variables below.
project = dict(
name=item.change.project.name,
short_name=item.change.project.name.split('/')[-1],
@@ -166,6 +162,8 @@
timeout=job.timeout,
jobtags=sorted(job.tags),
_inheritance_path=list(job.inheritance_path))
+ if job.override_checkout:
+ zuul_params['override_checkout'] = job.override_checkout
if hasattr(item.change, 'branch'):
zuul_params['branch'] = item.change.branch
if hasattr(item.change, 'tag'):
@@ -182,8 +180,7 @@
if (hasattr(item.change, 'newrev') and item.change.newrev
and item.change.newrev != '0' * 40):
zuul_params['newrev'] = item.change.newrev
- zuul_params['projects'] = [] # Set below
- zuul_params['_projects'] = {} # transitional to convert to dict
+ zuul_params['projects'] = {} # Set below
zuul_params['items'] = dependent_changes
params = dict()
@@ -204,7 +201,6 @@
params['pre_playbooks'] = [x.toDict() for x in job.pre_run]
params['post_playbooks'] = [x.toDict() for x in job.post_run]
- nodeset = item.current_build_set.getJobNodeSet(job.name)
nodes = []
for node in nodeset.getNodes():
n = node.toDict()
@@ -256,7 +252,7 @@
params['projects'].append(make_project_dict(project))
projects.add(project)
for p in projects:
- zuul_params['_projects'][p.canonical_name] = (dict(
+ zuul_params['projects'][p.canonical_name] = (dict(
name=p.name,
short_name=p.name.split('/')[-1],
# Duplicate this into the dict too, so that iterating
@@ -268,15 +264,14 @@
))
# We are transitioning "projects" from a list to a dict
# indexed by canonical name, as it is much easier to access
- # values in ansible. Existing callers are converted to
- # "_projects", then once "projects" is unused we switch it,
- # then convert callers back. Finally when "_projects" is
- # unused it will be removed.
- for cn, p in zuul_params['_projects'].items():
- zuul_params['projects'].append(p)
+ # values in ansible. Existing callers have been converted to
+ # "_projects" and "projects" is swapped; we will convert users
+ # back to "projects" and remove this soon.
+ zuul_params['_projects'] = zuul_params['projects']
build = Build(job, uuid)
build.parameters = params
+ build.nodeset = nodeset
if job.name == 'noop':
self.sched.onBuildStarted(build)
diff --git a/zuul/executor/server.py b/zuul/executor/server.py
index 928d802..83fdc3c 100644
--- a/zuul/executor/server.py
+++ b/zuul/executor/server.py
@@ -358,8 +358,10 @@
# there is a period of time where the user can click on the live log
# link on the status page but the log streaming fails because the file
# is not there yet.
- with open(self.job_output_file, 'w'):
- pass
+ with open(self.job_output_file, 'w') as job_output:
+ job_output.write("{now} | Job console starting...\n".format(
+ now=datetime.datetime.now()
+ ))
self.trusted_projects = []
self.trusted_project_index = {}
@@ -677,16 +679,19 @@
ref = args['zuul']['ref']
else:
ref = None
- self.checkoutBranch(repo,
- project['name'],
- ref,
- args['branch'],
- args['override_branch'],
- args['override_checkout'],
- project['override_branch'],
- project['override_checkout'],
- project['default_branch'])
-
+ selected = self.checkoutBranch(repo,
+ project['name'],
+ ref,
+ args['branch'],
+ args['override_branch'],
+ args['override_checkout'],
+ project['override_branch'],
+ project['override_checkout'],
+ project['default_branch'])
+ # Update the inventory variables to indicate the ref we
+ # checked out
+ p = args['zuul']['_projects'][project['canonical_name']]
+ p['checkout'] = selected
# Delete the origin remote from each repo we set up since
# it will not be valid within the jobs.
for repo in repos.values():
@@ -765,44 +770,45 @@
project_default_branch):
branches = repo.getBranches()
refs = [r.name for r in repo.getRefs()]
- if project_override_branch in branches:
- self.log.info("Checking out %s project override branch %s",
- project_name, project_override_branch)
- repo.checkout(project_override_branch)
+ selected_ref = None
if project_override_checkout in refs:
+ selected_ref = project_override_checkout
self.log.info("Checking out %s project override ref %s",
- project_name, project_override_checkout)
- repo.checkout(project_override_checkout)
- elif job_override_branch in branches:
- self.log.info("Checking out %s job override branch %s",
- project_name, job_override_branch)
- repo.checkout(job_override_branch)
+ project_name, selected_ref)
+ elif project_override_branch in branches:
+ selected_ref = project_override_branch
+ self.log.info("Checking out %s project override branch %s",
+ project_name, selected_ref)
elif job_override_checkout in refs:
+ selected_ref = job_override_checkout
self.log.info("Checking out %s job override ref %s",
- project_name, job_override_checkout)
- repo.checkout(job_override_checkout)
+ project_name, selected_ref)
+ elif job_override_branch in branches:
+ selected_ref = job_override_branch
+ self.log.info("Checking out %s job override branch %s",
+ project_name, selected_ref)
elif ref and ref.startswith('refs/heads/'):
- b = ref[len('refs/heads/'):]
+ selected_ref = ref[len('refs/heads/'):]
self.log.info("Checking out %s branch ref %s",
- project_name, b)
- repo.checkout(b)
+ project_name, selected_ref)
elif ref and ref.startswith('refs/tags/'):
- t = ref[len('refs/tags/'):]
+ selected_ref = ref[len('refs/tags/'):]
self.log.info("Checking out %s tag ref %s",
- project_name, t)
- repo.checkout(t)
+ project_name, selected_ref)
elif zuul_branch and zuul_branch in branches:
+ selected_ref = zuul_branch
self.log.info("Checking out %s zuul branch %s",
- project_name, zuul_branch)
- repo.checkout(zuul_branch)
+ project_name, selected_ref)
elif project_default_branch in branches:
+ selected_ref = project_default_branch
self.log.info("Checking out %s project default branch %s",
- project_name, project_default_branch)
- repo.checkout(project_default_branch)
+ project_name, selected_ref)
else:
raise ExecutorError("Project %s does not have the "
"default branch %s" %
(project_name, project_default_branch))
+ repo.checkout(selected_ref)
+ return selected_ref
def runPlaybooks(self, args):
result = None
@@ -921,6 +927,10 @@
private_ipv4=node.get('private_ipv4'),
public_ipv6=node.get('public_ipv6')))
+ username = node.get('username')
+ if username:
+ host_vars['ansible_user'] = username
+
host_keys = []
for key in node.get('host_keys'):
if port != 22:
@@ -1178,7 +1188,7 @@
callback_path = self.executor_server.callback_dir
with open(jobdir_playbook.ansible_config, 'w') as config:
config.write('[defaults]\n')
- config.write('hostfile = %s\n' % self.jobdir.inventory)
+ config.write('inventory = %s\n' % self.jobdir.inventory)
config.write('local_tmp = %s/local_tmp\n' %
self.jobdir.ansible_cache_root)
config.write('retry_files_enabled = False\n')
diff --git a/zuul/manager/__init__.py b/zuul/manager/__init__.py
index 6c72c2d..d205afc 100644
--- a/zuul/manager/__init__.py
+++ b/zuul/manager/__init__.py
@@ -853,20 +853,22 @@
if dt:
self.sched.statsd.timing(key + '.resident_time', dt)
self.sched.statsd.incr(key + '.total_changes')
-
- hostname = (item.change.project.canonical_hostname.
- replace('.', '_'))
- projectname = (item.change.project.name.
- replace('.', '_').replace('/', '.'))
- projectname = projectname.replace('.', '_').replace('/', '.')
- branchname = item.change.branch.replace('.', '_').replace('/', '.')
- # stats.timers.zuul.tenant.<tenant>.pipeline.<pipeline>.
- # project.<host>.<project>.<branch>.resident_time
- # stats_counts.zuul.tenant.<tenant>.pipeline.<pipeline>.
- # project.<host>.<project>.<branch>.total_changes
- key += '.project.%s.%s.%s' % (hostname, projectname, branchname)
- if dt:
- self.sched.statsd.timing(key + '.resident_time', dt)
- self.sched.statsd.incr(key + '.total_changes')
+ if hasattr(item.change, 'branch'):
+ hostname = (item.change.project.canonical_hostname.
+ replace('.', '_'))
+ projectname = (item.change.project.name.
+ replace('.', '_').replace('/', '.'))
+ projectname = projectname.replace('.', '_').replace('/', '.')
+ branchname = item.change.branch.replace('.', '_').replace(
+ '/', '.')
+ # stats.timers.zuul.tenant.<tenant>.pipeline.<pipeline>.
+ # project.<host>.<project>.<branch>.resident_time
+ # stats_counts.zuul.tenant.<tenant>.pipeline.<pipeline>.
+ # project.<host>.<project>.<branch>.total_changes
+ key += '.project.%s.%s.%s' % (hostname, projectname,
+ branchname)
+ if dt:
+ self.sched.statsd.timing(key + '.resident_time', dt)
+ self.sched.statsd.incr(key + '.total_changes')
except Exception:
self.log.exception("Exception reporting pipeline stats")
diff --git a/zuul/model.py b/zuul/model.py
index 695212c..3b49591 100644
--- a/zuul/model.py
+++ b/zuul/model.py
@@ -388,6 +388,7 @@
self.az = None
self.provider = None
self.region = None
+ self.username = None
@property
def state(self):
@@ -858,6 +859,7 @@
source_line=None,
inheritance_path=(),
parent_data=None,
+ description=None,
)
self.inheritable_attributes = {}
@@ -949,6 +951,28 @@
matchers.append(change_matcher.BranchMatcher(branch))
self.branch_matcher = change_matcher.MatchAny(matchers)
+ def getSimpleBranchMatcher(self):
+ # If the job has a simple branch matcher, return it; otherwise None.
+ if not self.branch_matcher:
+ return None
+ m = self.branch_matcher
+ if not isinstance(m, change_matcher.AbstractMatcherCollection):
+ return None
+ if len(m.matchers) != 1:
+ return None
+ m = m.matchers[0]
+ if not isinstance(m, change_matcher.BranchMatcher):
+ return None
+ return m
+
+ def addImpliedBranchMatcher(self, branch):
+ # Add a branch matcher that combines as a boolean *and* with
+ # existing branch matchers, if any.
+ matchers = [change_matcher.ImpliedBranchMatcher(branch)]
+ if self.branch_matcher:
+ matchers.append(self.branch_matcher)
+ self.branch_matcher = change_matcher.MatchAll(matchers)
+
def updateVariables(self, other_vars):
v = copy.deepcopy(self.variables)
Job._deepUpdate(v, other_vars)
@@ -1098,9 +1122,9 @@
for jobname, jobs in other.jobs.items():
joblist = self.jobs.setdefault(jobname, [])
for job in jobs:
- if not job.branch_matcher and implied_branch:
+ if implied_branch:
job = job.copy()
- job.setBranchMatcher([implied_branch])
+ job.addImpliedBranchMatcher(implied_branch)
if job not in joblist:
joblist.append(job)
@@ -1173,8 +1197,8 @@
if soft:
current_parent_jobs = set()
else:
- raise Exception("Dependent job %s not found: " %
- (dependent_job,))
+ raise Exception("Job %s depends on %s which was not run." %
+ (dependent_job, current_job))
new_parent_jobs = current_parent_jobs - all_parent_jobs
jobs_to_iterate |= new_parent_jobs
all_parent_jobs |= new_parent_jobs
@@ -1207,6 +1231,7 @@
self.worker = Worker()
self.node_labels = []
self.node_name = None
+ self.nodeset = None
def __repr__(self):
return ('<Build %s of %s on %s>' %
@@ -1839,7 +1864,7 @@
result = build.result
finger_url = build.url
# TODO(tobiash): add support for custom web root
- urlformat = 'static/stream.html?' \
+ urlformat = 'stream.html?' \
'uuid={build.uuid}&' \
'logfile=console.log'
if websocket_url:
@@ -2361,14 +2386,25 @@
r.semaphores = copy.deepcopy(self.semaphores)
return r
- def extend(self, conf):
+ def extend(self, conf, tenant=None):
if isinstance(conf, UnparsedTenantConfig):
self.pragmas.extend(conf.pragmas)
self.pipelines.extend(conf.pipelines)
self.jobs.extend(conf.jobs)
self.project_templates.extend(conf.project_templates)
for k, v in conf.projects.items():
- self.projects.setdefault(k, []).extend(v)
+ name = k
+            # If we have the tenant, add the projects under
+            # the corresponding canonical name instead of the given
+            # project name. If it is not found, it's ok to add this
+            # to the given name. We also don't need to raise a
+            # ProjectNotFoundException here, as semantic validation
+            # occurs later and will fail there instead.
+ if tenant is not None:
+ trusted, project = tenant.getProject(k)
+ if project is not None:
+ name = project.canonical_name
+ self.projects.setdefault(name, []).extend(v)
self.nodesets.extend(conf.nodesets)
self.secrets.extend(conf.secrets)
self.semaphores.extend(conf.semaphores)
diff --git a/zuul/rpclistener.py b/zuul/rpclistener.py
index 8c8c783..d40505e 100644
--- a/zuul/rpclistener.py
+++ b/zuul/rpclistener.py
@@ -21,6 +21,7 @@
import gear
from zuul import model
+from zuul.lib import encryption
from zuul.lib.config import get_default
@@ -58,6 +59,8 @@
self.worker.registerFunction("zuul:get_job_log_stream_address")
self.worker.registerFunction("zuul:tenant_list")
self.worker.registerFunction("zuul:status_get")
+ self.worker.registerFunction("zuul:job_list")
+ self.worker.registerFunction("zuul:key_get")
def getFunctions(self):
functions = {}
@@ -283,3 +286,25 @@
args = json.loads(job.arguments)
output = self.sched.formatStatusJSON(args.get("tenant"))
job.sendWorkComplete(output)
+
+ def handle_job_list(self, job):
+ args = json.loads(job.arguments)
+ tenant = self.sched.abide.tenants.get(args.get("tenant"))
+ output = []
+ for job_name in sorted(tenant.layout.jobs):
+ desc = None
+ for tenant_job in tenant.layout.jobs[job_name]:
+ if tenant_job.description:
+ desc = tenant_job.description.split('\n')[0]
+ break
+ output.append({"name": job_name,
+ "description": desc})
+ job.sendWorkComplete(json.dumps(output))
+
+ def handle_key_get(self, job):
+ args = json.loads(job.arguments)
+ source_name, project_name = args.get("source"), args.get("project")
+ source = self.sched.connections.getSource(source_name)
+ project = source.getProject(project_name)
+ job.sendWorkComplete(
+ encryption.serialize_rsa_public_key(project.public_key))
diff --git a/zuul/scheduler.py b/zuul/scheduler.py
index a725fcd..7dee00d 100644
--- a/zuul/scheduler.py
+++ b/zuul/scheduler.py
@@ -184,14 +184,6 @@
self.request_id = request.id
-def toList(item):
- if not item:
- return []
- if isinstance(item, list):
- return item
- return [item]
-
-
class Scheduler(threading.Thread):
"""The engine of Zuul.
@@ -603,10 +595,32 @@
"when reconfiguring" % name)
continue
self.log.debug("Re-enqueueing changes for pipeline %s" % name)
+            # TODO(jeblair): This supports an undocumented and
+ # unanticipated hack to create a static window. If we
+ # really want to support this, maybe we should have a
+ # 'static' type? But it may be in use in the wild, so we
+ # should allow this at least until there's an upgrade
+ # path.
+ if (new_pipeline.window and
+ new_pipeline.window_increase_type == 'exponential' and
+ new_pipeline.window_decrease_type == 'exponential' and
+ new_pipeline.window_increase_factor == 1 and
+ new_pipeline.window_decrease_factor == 1):
+ static_window = True
+ else:
+ static_window = False
+ if old_pipeline.window and (not static_window):
+ new_pipeline.window = max(old_pipeline.window,
+ new_pipeline.window_floor)
items_to_remove = []
builds_to_cancel = []
last_head = None
for shared_queue in old_pipeline.queues:
+ # Attempt to keep window sizes from shrinking where possible
+ new_queue = new_pipeline.getQueue(shared_queue.projects[0])
+ if new_queue and shared_queue.window and (not static_window):
+ new_queue.window = max(shared_queue.window,
+ new_queue.window_floor)
for item in shared_queue.queue:
if not item.item_ahead:
last_head = item
@@ -658,6 +672,15 @@
self.log.exception(
"Exception while canceling build %s "
"for change %s" % (build, build.build_set.item.change))
+ # In the unlikely case that a build is removed and
+ # later added back, make sure we clear out the nodeset
+ # so it gets requested again.
+ try:
+ build.build_set.removeJobNodeSet(build.job.name)
+ except Exception:
+ self.log.exception(
+ "Exception while removing nodeset from build %s "
+ "for change %s" % (build, build.build_set.item.change))
finally:
tenant.semaphore_handler.release(
build.build_set.item, build.job)
@@ -928,7 +951,7 @@
# to pass this on to the pipeline manager, make sure we return
# the nodes to nodepool.
try:
- nodeset = build.build_set.getJobNodeSet(build.job.name)
+ nodeset = build.nodeset
autohold_key = (build.pipeline.layout.tenant.name,
build.build_set.item.change.project.canonical_name,
build.job.name)
diff --git a/zuul/web/__init__.py b/zuul/web/__init__.py
index 61a1cee..e4a3612 100755
--- a/zuul/web/__init__.py
+++ b/zuul/web/__init__.py
@@ -20,11 +20,14 @@
import logging
import os
import time
+import urllib.parse
import uvloop
import aiohttp
from aiohttp import web
+from sqlalchemy.sql import select
+
import zuul.rpcclient
STATIC_DIR = os.path.join(os.path.dirname(__file__), 'static')
@@ -162,6 +165,8 @@
self.controllers = {
'tenant_list': self.tenant_list,
'status_get': self.status_get,
+ 'job_list': self.job_list,
+ 'key_get': self.key_get,
}
def tenant_list(self, request):
@@ -182,6 +187,18 @@
resp.last_modified = self.cache_time[tenant]
return resp
+ def job_list(self, request):
+ tenant = request.match_info["tenant"]
+ job = self.rpc.submitJob('zuul:job_list', {'tenant': tenant})
+ return web.json_response(json.loads(job.data[0]))
+
+ def key_get(self, request):
+ source = request.match_info["source"]
+ project = request.match_info["project"]
+ job = self.rpc.submitJob('zuul:key_get', {'source': source,
+ 'project': project})
+ return web.Response(body=job.data[0])
+
async def processRequest(self, request, action):
try:
resp = self.controllers[action](request)
@@ -194,22 +211,115 @@
return resp
+class SqlHandler(object):
+ log = logging.getLogger("zuul.web.SqlHandler")
+ filters = ("project", "pipeline", "change", "patchset", "ref",
+ "result", "uuid", "job_name", "voting", "node_name", "newrev")
+
+ def __init__(self, connection):
+ self.connection = connection
+
+ def query(self, args):
+ build = self.connection.zuul_build_table
+ buildset = self.connection.zuul_buildset_table
+ query = select([
+ buildset.c.project,
+ buildset.c.pipeline,
+ buildset.c.change,
+ buildset.c.patchset,
+ buildset.c.ref,
+ buildset.c.newrev,
+ buildset.c.ref_url,
+ build.c.result,
+ build.c.uuid,
+ build.c.job_name,
+ build.c.voting,
+ build.c.node_name,
+ build.c.start_time,
+ build.c.end_time,
+ build.c.log_url]).select_from(build.join(buildset))
+ for table in ('build', 'buildset'):
+ for k, v in args['%s_filters' % table].items():
+ if table == 'build':
+ column = build.c
+ else:
+ column = buildset.c
+ query = query.where(getattr(column, k).in_(v))
+ return query.limit(args['limit']).offset(args['skip']).order_by(
+ build.c.id.desc())
+
+ def get_builds(self, args):
+        """Return a list of builds."""
+ builds = []
+ with self.connection.engine.begin() as conn:
+ query = self.query(args)
+ for row in conn.execute(query):
+ build = dict(row)
+ # Convert date to iso format
+ if row.start_time:
+ build['start_time'] = row.start_time.strftime(
+ '%Y-%m-%dT%H:%M:%S')
+ if row.end_time:
+ build['end_time'] = row.end_time.strftime(
+ '%Y-%m-%dT%H:%M:%S')
+ # Compute run duration
+ if row.start_time and row.end_time:
+ build['duration'] = (row.end_time -
+ row.start_time).total_seconds()
+ builds.append(build)
+ return builds
+
+ async def processRequest(self, request):
+ try:
+ args = {
+ 'buildset_filters': {},
+ 'build_filters': {},
+ 'limit': 50,
+ 'skip': 0,
+ }
+ for k, v in urllib.parse.parse_qsl(request.rel_url.query_string):
+ if k in ("tenant", "project", "pipeline", "change",
+ "patchset", "ref", "newrev"):
+ args['buildset_filters'].setdefault(k, []).append(v)
+ elif k in ("uuid", "job_name", "voting", "node_name",
+ "result"):
+ args['build_filters'].setdefault(k, []).append(v)
+ elif k in ("limit", "skip"):
+ args[k] = int(v)
+ else:
+ raise ValueError("Unknown parameter %s" % k)
+ data = self.get_builds(args)
+ resp = web.json_response(data)
+ except Exception as e:
+ self.log.exception("Jobs exception:")
+ resp = web.json_response({'error_description': 'Internal error'},
+ status=500)
+ return resp
+
+
class ZuulWeb(object):
log = logging.getLogger("zuul.web.ZuulWeb")
def __init__(self, listen_address, listen_port,
gear_server, gear_port,
- ssl_key=None, ssl_cert=None, ssl_ca=None):
+ ssl_key=None, ssl_cert=None, ssl_ca=None,
+ static_cache_expiry=3600,
+ sql_connection=None):
self.listen_address = listen_address
self.listen_port = listen_port
self.event_loop = None
self.term = None
+ self.static_cache_expiry = static_cache_expiry
# instanciate handlers
self.rpc = zuul.rpcclient.RPCClient(gear_server, gear_port,
ssl_key, ssl_cert, ssl_ca)
self.log_streaming_handler = LogStreamingHandler(self.rpc)
self.gearman_handler = GearmanHandler(self.rpc)
+ if sql_connection:
+ self.sql_handler = SqlHandler(sql_connection)
+ else:
+ self.sql_handler = None
async def _handleWebsocket(self, request):
return await self.log_streaming_handler.processRequest(
@@ -222,13 +332,32 @@
async def _handleStatusRequest(self, request):
return await self.gearman_handler.processRequest(request, 'status_get')
+ async def _handleJobsRequest(self, request):
+ return await self.gearman_handler.processRequest(request, 'job_list')
+
+ async def _handleSqlRequest(self, request):
+ return await self.sql_handler.processRequest(request)
+
+ async def _handleKeyRequest(self, request):
+ return await self.gearman_handler.processRequest(request, 'key_get')
+
async def _handleStaticRequest(self, request):
fp = None
if request.path.endswith("tenants.html") or request.path.endswith("/"):
fp = os.path.join(STATIC_DIR, "index.html")
elif request.path.endswith("status.html"):
fp = os.path.join(STATIC_DIR, "status.html")
- return web.FileResponse(fp)
+ elif request.path.endswith("jobs.html"):
+ fp = os.path.join(STATIC_DIR, "jobs.html")
+ elif request.path.endswith("builds.html"):
+ fp = os.path.join(STATIC_DIR, "builds.html")
+ elif request.path.endswith("stream.html"):
+ fp = os.path.join(STATIC_DIR, "stream.html")
+ headers = {}
+ if self.static_cache_expiry:
+ headers['Cache-Control'] = "public, max-age=%d" % \
+ self.static_cache_expiry
+ return web.FileResponse(fp, headers=headers)
def run(self, loop=None):
"""
@@ -242,14 +371,24 @@
is run within a separate (non-main) thread.
"""
routes = [
- ('GET', '/console-stream', self._handleWebsocket),
('GET', '/tenants.json', self._handleTenantsRequest),
('GET', '/{tenant}/status.json', self._handleStatusRequest),
+ ('GET', '/{tenant}/jobs.json', self._handleJobsRequest),
+ ('GET', '/{tenant}/console-stream', self._handleWebsocket),
+ ('GET', '/{source}/{project}.pub', self._handleKeyRequest),
('GET', '/{tenant}/status.html', self._handleStaticRequest),
+ ('GET', '/{tenant}/jobs.html', self._handleStaticRequest),
+ ('GET', '/{tenant}/stream.html', self._handleStaticRequest),
('GET', '/tenants.html', self._handleStaticRequest),
('GET', '/', self._handleStaticRequest),
]
+ if self.sql_handler:
+ routes.append(('GET', '/{tenant}/builds.json',
+ self._handleSqlRequest))
+ routes.append(('GET', '/{tenant}/builds.html',
+ self._handleStaticRequest))
+
self.log.debug("ZuulWeb starting")
asyncio.set_event_loop_policy(uvloop.EventLoopPolicy())
user_supplied_loop = loop is not None
diff --git a/zuul/web/static/README b/zuul/web/static/README
index 487c3ee..e924dc7 100644
--- a/zuul/web/static/README
+++ b/zuul/web/static/README
@@ -5,6 +5,35 @@
* /static/js/jquery.graphite.min.js
* /static/bootstrap/css/bootstrap.min.css
+
+Use python2-rjsmin or another js minifier:
+```
+DEST_DIR=/var/www/html/static/
+mkdir -p $DEST_DIR/js
+echo "Fetching angular..."
+curl -L --silent https://ajax.googleapis.com/ajax/libs/angularjs/1.5.6/angular.min.js > $DEST_DIR/js/angular.min.js
+
+echo "Fetching jquery..."
+curl -L --silent http://code.jquery.com/jquery.min.js > $DEST_DIR/js/jquery.min.js
+
+echo "Fetching jquery-visibility..."
+curl -L --silent https://raw.githubusercontent.com/mathiasbynens/jquery-visibility/master/jquery-visibility.js > $DEST_DIR/js/jquery-visibility.js
+python2 -mrjsmin < $DEST_DIR/js/jquery-visibility.js > $DEST_DIR/js/jquery-visibility.min.js
+
+echo "Fetching bootstrap..."
+curl -L --silent https://github.com/twbs/bootstrap/releases/download/v3.1.1/bootstrap-3.1.1-dist.zip > bootstrap.zip
+unzip -q -o bootstrap.zip -d $DEST_DIR/
+mv $DEST_DIR/bootstrap-3.1.1-dist $DEST_DIR/bootstrap
+rm -f bootstrap.zip
+
+echo "Fetching jquery-graphite..."
+curl -L --silent https://github.com/prestontimmons/graphitejs/archive/master.zip > jquery-graphite.zip
+unzip -q -o jquery-graphite.zip -d $DEST_DIR/
+python2 -mrjsmin < $DEST_DIR/graphitejs-master/jquery.graphite.js > $DEST_DIR/js/jquery.graphite.min.js
+rm -Rf jquery-graphite.zip $DEST_DIR/graphitejs-master
+```
+
+
Here is an example apache vhost configuration:
<VirtualHost zuul-web.example.com:80>
DocumentRoot /var/www/zuul-web
@@ -21,8 +50,7 @@
</Directory>
# Console-stream needs a special proxy-pass for websocket
- ProxyPass /console-stream ws://localhost:9000/console-stream nocanon retry=0
- ProxyPassReverse /console-stream ws://localhost:9000/console-stream
+ ProxyPassMatch /(.*)/console-stream ws://localhost:9000/$1/console-stream nocanon retry=0
# Then only the json calls are sent to the zuul-web endpoints
ProxyPassMatch ^/(.*.json)$ http://localhost:9000/$1 nocanon retry=0
diff --git a/zuul/web/static/builds.html b/zuul/web/static/builds.html
new file mode 100644
index 0000000..921c9e2
--- /dev/null
+++ b/zuul/web/static/builds.html
@@ -0,0 +1,84 @@
+<!--
+Copyright 2017 Red Hat
+
+Licensed under the Apache License, Version 2.0 (the "License"); you may
+not use this file except in compliance with the License. You may obtain
+a copy of the License at
+
+ http://www.apache.org/licenses/LICENSE-2.0
+
+Unless required by applicable law or agreed to in writing, software
+distributed under the License is distributed on an "AS IS" BASIS, WITHOUT
+WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. See the
+License for the specific language governing permissions and limitations
+under the License.
+-->
+<!DOCTYPE html>
+<html>
+<head>
+ <title>Zuul Builds</title>
+ <link rel="stylesheet" href="/static/bootstrap/css/bootstrap.min.css">
+ <link rel="stylesheet" href="../static/styles/zuul.css" />
+ <script src="/static/js/jquery.min.js"></script>
+ <script src="/static/js/angular.min.js"></script>
+ <script src="../static/javascripts/zuul.angular.js"></script>
+</head>
+<body ng-app="zuulBuilds" ng-controller="mainController"><div class="container-fluid">
+ <nav class="navbar navbar-default">
+ <div class="container-fluid">
+ <div class="navbar-header">
+ <a class="navbar-brand" href="../" target="_self">Zuul Dashboard</a>
+ </div>
+ <ul class="nav navbar-nav">
+ <li><a href="status.html" target="_self">Status</a></li>
+ <li><a href="jobs.html" target="_self">Jobs</a></li>
+ <li class="active"><a href="builds.html" target="_self">Builds</a></li>
+ </ul>
+ <span style="float: right; margin-top: 7px;">
+ <form ng-submit="builds_fetch()">
+ <label>Pipeline:</label>
+ <input name="pipeline" ng-model="pipeline" />
+ <label>Job:</label>
+ <input name="job_name" ng-model="job_name" />
+ <label>Project:</label>
+ <input name="project" ng-model="project" />
+ <input type="submit" value="Refresh" />
+ </form>
+ </span>
+ </div>
+ </nav>
+ <table class="table table-hover table-condensed">
+ <thead>
+ <tr>
+ <th width="20px">id</th>
+ <th>Job</th>
+ <th>Project</th>
+ <th>Pipeline</th>
+ <th>Change</th>
+ <th>Newrev</th>
+ <th>Duration</th>
+ <th>Log url</th>
+ <th>Node name</th>
+ <th>Start time</th>
+ <th>End time</th>
+ <th>Result</th>
+ </tr>
+ </thead>
+ <tbody>
+ <tr ng-repeat="build in builds" ng-class="rowClass(build)">
+ <td>{{ build.id }}</td>
+ <td>{{ build.job_name }}</td>
+ <td>{{ build.project }}</td>
+ <td>{{ build.pipeline }}</td>
+ <td><a href="{{ build.ref_url }}" target="_self">change</a></td>
+ <td>{{ build.newrev }}</td>
+ <td>{{ build.duration }} seconds</td>
+ <td><a ng-if="build.log_url" href="{{ build.log_url }}" target="_self">logs</a></td>
+ <td>{{ build.node_name }}</td>
+ <td>{{ build.start_time }}</td>
+ <td>{{ build.end_time }}</td>
+ <td>{{ build.result }}</td>
+ </tr>
+ </tbody>
+ </table>
+</div></body></html>
diff --git a/zuul/web/static/javascripts/zuul.angular.js b/zuul/web/static/javascripts/zuul.angular.js
index 3152fc0..87cbbdd 100644
--- a/zuul/web/static/javascripts/zuul.angular.js
+++ b/zuul/web/static/javascripts/zuul.angular.js
@@ -30,3 +30,70 @@
}
$scope.tenants_fetch();
});
+
+angular.module('zuulJobs', []).controller(
+ 'mainController', function($scope, $http)
+{
+ $scope.jobs = undefined;
+ $scope.jobs_fetch = function() {
+ $http.get("jobs.json")
+ .then(function success(result) {
+ $scope.jobs = result.data;
+ });
+ }
+ $scope.jobs_fetch();
+});
+
+angular.module('zuulBuilds', [], function($locationProvider) {
+ $locationProvider.html5Mode({
+ enabled: true,
+ requireBase: false
+ });
+}).controller('mainController', function($scope, $http, $location)
+{
+ $scope.rowClass = function(build) {
+ if (build.result == "SUCCESS") {
+ return "success";
+ } else {
+ return "warning";
+ }
+ };
+ var query_args = $location.search();
+ var url = $location.url();
+ var tenant_start = url.lastIndexOf(
+ '/', url.lastIndexOf('/builds.html') - 1) + 1;
+ var tenant_length = url.lastIndexOf('/builds.html') - tenant_start;
+ $scope.tenant = url.substr(tenant_start, tenant_length);
+ $scope.builds = undefined;
+ if (query_args["pipeline"]) {$scope.pipeline = query_args["pipeline"];
+ } else {$scope.pipeline = "";}
+ if (query_args["job_name"]) {$scope.job_name = query_args["job_name"];
+ } else {$scope.job_name = "";}
+ if (query_args["project"]) {$scope.project = query_args["project"];
+ } else {$scope.project = "";}
+ $scope.builds_fetch = function() {
+ query_string = "";
+ if ($scope.tenant) {query_string += "&tenant="+$scope.tenant;}
+ if ($scope.pipeline) {query_string += "&pipeline="+$scope.pipeline;}
+ if ($scope.job_name) {query_string += "&job_name="+$scope.job_name;}
+ if ($scope.project) {query_string += "&project="+$scope.project;}
+ if (query_string != "") {query_string = "?" + query_string.substr(1);}
+ $http.get("builds.json" + query_string)
+ .then(function success(result) {
+ for (build_pos = 0;
+ build_pos < result.data.length;
+ build_pos += 1) {
+ build = result.data[build_pos]
+ if (build.node_name == null) {
+ build.node_name = 'master'
+ }
+                    /* Fix incorrect url for post_failure job */
+ if (build.log_url == build.job_name) {
+ build.log_url = undefined;
+ }
+ }
+ $scope.builds = result.data;
+ });
+ }
+ $scope.builds_fetch()
+});
diff --git a/zuul/web/static/jobs.html b/zuul/web/static/jobs.html
new file mode 100644
index 0000000..6946723
--- /dev/null
+++ b/zuul/web/static/jobs.html
@@ -0,0 +1,55 @@
+<!--
+Copyright 2017 Red Hat
+
+Licensed under the Apache License, Version 2.0 (the "License"); you may
+not use this file except in compliance with the License. You may obtain
+a copy of the License at
+
+ http://www.apache.org/licenses/LICENSE-2.0
+
+Unless required by applicable law or agreed to in writing, software
+distributed under the License is distributed on an "AS IS" BASIS, WITHOUT
+WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. See the
+License for the specific language governing permissions and limitations
+under the License.
+-->
+<!DOCTYPE html>
+<html>
+<head>
+    <title>Zuul Jobs</title>
+ <link rel="stylesheet" href="/static/bootstrap/css/bootstrap.min.css">
+ <link rel="stylesheet" href="../static/styles/zuul.css" />
+ <script src="/static/js/jquery.min.js"></script>
+ <script src="/static/js/angular.min.js"></script>
+ <script src="../static/javascripts/zuul.angular.js"></script>
+</head>
+<body ng-app="zuulJobs" ng-controller="mainController"><div class="container-fluid">
+ <nav class="navbar navbar-default">
+ <div class="container-fluid">
+ <div class="navbar-header">
+ <a class="navbar-brand" href="../" target="_self">Zuul Dashboard</a>
+ </div>
+ <ul class="nav navbar-nav">
+ <li><a href="status.html" target="_self">Status</a></li>
+ <li class="active"><a href="jobs.html" target="_self">Jobs</a></li>
+ <li><a href="builds.html" target="_self">Builds</a></li>
+ </ul>
+ </div>
+ </nav>
+ <table class="table table-hover table-condensed">
+ <thead>
+ <tr>
+ <th>Name</th>
+ <th>Description</th>
+ <th>Last builds</th>
+ </tr>
+ </thead>
+ <tbody>
+ <tr ng-repeat="job in jobs">
+ <td>{{ job.name }}</td>
+ <td>{{ job.description }}</td>
+ <td><a href="builds.html?job_name={{ job.name }}">builds</a></td>
+ </tr>
+ </tbody>
+ </table>
+</div></body></html>
diff --git a/zuul/web/static/stream.html b/zuul/web/static/stream.html
index dbeb66b..f2e7081 100644
--- a/zuul/web/static/stream.html
+++ b/zuul/web/static/stream.html
@@ -73,7 +73,7 @@
} else {
protocol = 'ws://';
}
- path = url['pathname'].replace(/static\/.*$/g, '') + 'console-stream';
+ path = url['pathname'].replace(/stream.html.*$/g, '') + 'console-stream';
params['websocket_url'] = protocol + url['host'] + path;
}
var ws = new WebSocket(params['websocket_url']);