Merge "Re-add the ability to set username on zuul-executor" into feature/zuulv3
diff --git a/.zuul.yaml b/.zuul.yaml
index 50223fa..c21b30f 100644
--- a/.zuul.yaml
+++ b/.zuul.yaml
@@ -1,49 +1,3 @@
-- job:
- name: base
- pre-run: base/pre
- post-run: base/post
- success-url: http://zuulv3-dev.openstack.org/logs/{build.uuid}/
- failure-url: http://zuulv3-dev.openstack.org/logs/{build.uuid}/
- timeout: 1800
- vars:
- zuul_workspace_root: /home/zuul
- nodes:
- - name: ubuntu-xenial
- image: ubuntu-xenial
-
-- job:
- name: tox
- parent: base
- pre-run: tox/pre
- post-run: tox/post
-
-- job:
- name: tox-cover
- parent: tox
- run: tox/cover
- voting: false
-
-- job:
- name: tox-docs
- parent: tox
- run: tox/docs
-
-- job:
- name: tox-linters
- parent: tox
- run: tox/linters
-
-- job:
- name: tox-py27
- parent: tox
- run: tox/py27
-
-- job:
- name: tox-tarball
- parent: tox
- run: tox/tarball
- post-run: tox/tarball-post
-
- project:
name: openstack-infra/zuul
check:
@@ -52,4 +6,5 @@
- tox-cover
- tox-linters
- tox-py27
+ - tox-py35
- tox-tarball
diff --git a/bindep.txt b/bindep.txt
index 6895444..5db144b 100644
--- a/bindep.txt
+++ b/bindep.txt
@@ -14,3 +14,4 @@
libffi-devel [platform:rpm]
python-dev [platform:dpkg]
python-devel [platform:rpm]
+bubblewrap [platform:rpm]
diff --git a/doc/source/connections.rst b/doc/source/connections.rst
index 614b44a..120d529 100644
--- a/doc/source/connections.rst
+++ b/doc/source/connections.rst
@@ -65,6 +65,33 @@
be added to Gerrit. Zuul is very flexible and can take advantage of
those.
+GitHub
+------
+
+Create a connection with GitHub.
+
+**driver=github**
+
+**api_token**
+ API token for accessing GitHub.
+ See `Creating an access token for command-line use
+ <https://help.github.com/articles/creating-an-access-token-for-command-line-use/>`_.
+
+**webhook_token**
+ Optional: Token for validating the webhook event payloads.
+ If not specified, payloads are not validated.
+ See `Securing your webhooks
+ <https://developer.github.com/webhooks/securing/>`_.
+
+**sshkey**
+ Path to SSH key to use when cloning github repositories.
+ ``sshkey=/home/zuul/.ssh/id_rsa``
+
+**git_host**
+ Optional: Hostname of the github install (such as a GitHub Enterprise)
+ If not specified, defaults to ``github.com``
+ ``git_host=github.myenterprise.com``
+
SMTP
----
diff --git a/doc/source/developer/datamodel.rst b/doc/source/developer/datamodel.rst
index 2996ff4..acb8612 100644
--- a/doc/source/developer/datamodel.rst
+++ b/doc/source/developer/datamodel.rst
@@ -54,7 +54,7 @@
Filters
~~~~~~~
-.. autoclass:: zuul.model.ChangeishFilter
+.. autoclass:: zuul.model.RefFilter
.. autoclass:: zuul.model.EventFilter
diff --git a/doc/source/developer/testing.rst b/doc/source/developer/testing.rst
index 4a813d0..057ab7e 100644
--- a/doc/source/developer/testing.rst
+++ b/doc/source/developer/testing.rst
@@ -9,6 +9,8 @@
access to a number of attributes useful for manipulating or inspecting
the environment being simulated in the test:
+.. autofunction:: tests.base.simple_layout
+
.. autoclass:: tests.base.ZuulTestCase
:members:
diff --git a/doc/source/reporters.rst b/doc/source/reporters.rst
index b01c8d1..dd053fa 100644
--- a/doc/source/reporters.rst
+++ b/doc/source/reporters.rst
@@ -28,6 +28,47 @@
A :ref:`connection` that uses the gerrit driver must be supplied to the
trigger.
+GitHub
+------
+
+Zuul reports back to GitHub pull requests via GitHub API.
+On success and failure, it creates a comment containing the build results.
+It also sets the status on start, success and failure. Status name and
+description is taken from the pipeline.
+
+A :ref:`connection` that uses the github driver must be supplied to the
+reporter. It has the following options:
+
+ **status**
+ String value (``pending``, ``success``, ``failure``) that the reporter should
+ set as the commit status on github.
+ ``status: 'success'``
+
+ **status-url**
+ String value for a link url to set in the github status. Defaults to the zuul
+ server status_url, or the empty string if that is unset.
+
+ **comment**
+ Boolean value (``true`` or ``false``) that determines if the reporter should
+ add a comment to the pipeline status to the github pull request. Defaults
+ to ``true``.
+ ``comment: false``
+
+ **merge**
+ Boolean value (``true`` or ``false``) that determines if the reporter should
+ merge the pull request. Defaults to ``false``.
+ ``merge: true``
+
+ **label**
+ List of strings each representing an exact label name which should be added
+ to the pull request by reporter.
+ ``label: 'test successful'``
+
+ **unlabel**
+ List of strings each representing an exact label name which should be removed
+ from the pull request by reporter.
+ ``unlabel: 'test failed'``
+
SMTP
----
diff --git a/doc/source/triggers.rst b/doc/source/triggers.rst
index 263f280..41a56a0 100644
--- a/doc/source/triggers.rst
+++ b/doc/source/triggers.rst
@@ -4,7 +4,7 @@
========
The process of merging a change starts with proposing a change to be
-merged. Primarily, Zuul supports Gerrit as a triggering system.
+merged. Zuul supports Gerrit and GitHub as triggering systems.
Zuul's design is modular, so alternate triggering and reporting
systems can be supported.
@@ -100,6 +100,96 @@
*require-approval* but will fail to enter the pipeline if there is
a matching approval.
+GitHub
+------
+
+Github webhook events can be configured as triggers.
+
+A connection name with the github driver can take multiple events with the
+following options.
+
+ **event**
+ The event from github. Supported events are ``pull_request``,
+ ``pull_request_review``, and ``push``.
+
+ A ``pull_request`` event will
+ have associated action(s) to trigger from. The supported actions are:
+
+ *opened* - pull request opened
+
+ *changed* - pull request synchronized
+
+ *closed* - pull request closed
+
+ *reopened* - pull request reopened
+
+ *comment* - comment added on pull request
+
+ *labeled* - label added on pull request
+
+ *unlabeled* - label removed from pull request
+
+ *review* - review added on pull request
+
+ *push* - head reference updated (pushed to branch)
+
+ *status* - status set on commit
+
+ A ``pull_request_review`` event will
+ have associated action(s) to trigger from. The supported actions are:
+
+ *submitted* - pull request review added
+
+ *dismissed* - pull request review removed
+
+ **branch**
+ The branch associated with the event. Example: ``master``. This
+ field is treated as a regular expression, and multiple branches may
+ be listed. Used for ``pull_request`` and ``pull_request_review`` events.
+
+ **comment**
+ This is only used for ``pull_request`` ``comment`` actions. It accepts a
+ list of regexes that are searched for in the comment string. If any of these
+ regexes matches a portion of the comment string the trigger is matched.
+ ``comment: retrigger`` will match when comments containing 'retrigger'
+ somewhere in the comment text are added to a pull request.
+
+ **label**
+ This is only used for ``labeled`` and ``unlabeled`` ``pull_request`` actions.
+ It accepts a list of strings each of which matches the label name in the
+ event literally. ``label: recheck`` will match a ``labeled`` action when
+ pull request is labeled with a ``recheck`` label. ``label: 'do not test'``
+ will match an ``unlabeled`` action when a label with name ``do not test`` is
+ removed from the pull request.
+
+ **state**
+ This is only used for ``pull_request_review`` events. It accepts a list of
+ strings each of which is matched to the review state, which can be one of
+ ``approved``, ``comment``, or ``request_changes``.
+
+ **status**
+ This is only used for ``status`` actions. It accepts a list of strings each of
+ which matches the user setting the status, the status context, and the status
+ itself in the format of ``user:context:status``. For example,
+ ``zuul_github_ci_bot:check_pipeline:success``.
+
+ **ref**
+ This is only used for ``push`` events. This field is treated as a regular
+ expression and multiple refs may be listed. Github always sends full ref
+ name, eg. ``refs/tags/bar`` and this string is matched against the regexp.
+
+GitHub Configuration
+~~~~~~~~~~~~~~~~~~~~
+
+Configure GitHub `webhook events
+<https://developer.github.com/webhooks/creating/>`_.
+
+Set *Payload URL* to
+``http://<zuul-hostname>/connection/<connection-name>/payload``.
+
+Set *Content Type* to ``application/json``.
+
+Select *Events* you are interested in. See above for the supported events.
Timer
-----
@@ -154,4 +244,4 @@
*reject-approval*
This takes a list of approvals in the same format as
*require-approval* but will fail to enter the pipeline if there is
- a matching approval.
\ No newline at end of file
+ a matching approval.
diff --git a/doc/source/zuul.rst b/doc/source/zuul.rst
index 56cc6a8..a7dfb44 100644
--- a/doc/source/zuul.rst
+++ b/doc/source/zuul.rst
@@ -108,6 +108,10 @@
commands.
``state_dir=/var/lib/zuul``
+**jobroot_dir**
+ Path to directory that Zuul should store temporary job files.
+ ``jobroot_dir=/tmp``
+
**report_times**
Boolean value (``true`` or ``false``) that determines if Zuul should
include elapsed times for each job in the textual report. Used by
@@ -165,6 +169,33 @@
Path to PID lock file for the merger process.
``pidfile=/var/run/zuul-merger/merger.pid``
+executor
+""""""""
+
+The zuul-executor process configuration.
+
+**finger_port**
+ Port to use for finger log streamer.
+ ``finger_port=79``
+
+**git_dir**
+ Directory that Zuul should clone local git repositories to.
+ ``git_dir=/var/lib/zuul/git``
+
+**log_config**
+ Path to log config file for the executor process.
+ ``log_config=/etc/zuul/logging.yaml``
+
+**private_key_file**
+ SSH private key file to be used when logging into worker nodes.
+ ``private_key_file=~/.ssh/id_rsa``
+
+**user**
+ User ID for the zuul-executor process. In normal operation as a daemon,
+ the executor should be started as the ``root`` user, but it will drop
+ privileges to this user during startup.
+ ``user=zuul``
+
.. _connection:
connection ArbitraryName
diff --git a/etc/status/public_html/index.html b/etc/status/public_html/index.html
index 97025a6..cc3d40a 100644
--- a/etc/status/public_html/index.html
+++ b/etc/status/public_html/index.html
@@ -30,8 +30,10 @@
<script src="jquery.zuul.js"></script>
<script src="zuul.app.js"></script>
<script>
+ // @license magnet:?xt=urn:btih:8e4f440f4c65981c5bf93c76d35135ba5064d8b7&dn=apache-2.0.txt Apache 2.0
zuul_build_dom(jQuery, '#zuul_container');
zuul_start(jQuery);
+ // @license-end
</script>
</body>
</html>
diff --git a/etc/status/public_html/jquery.zuul.js b/etc/status/public_html/jquery.zuul.js
index d973948..aec7a46 100644
--- a/etc/status/public_html/jquery.zuul.js
+++ b/etc/status/public_html/jquery.zuul.js
@@ -1,5 +1,8 @@
// jquery plugin for Zuul status page
//
+// @licstart The following is the entire license notice for the
+// JavaScript code in this page.
+//
// Copyright 2012 OpenStack Foundation
// Copyright 2013 Timo Tijhof
// Copyright 2013 Wikimedia Foundation
@@ -16,6 +19,9 @@
// WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. See the
// License for the specific language governing permissions and limitations
// under the License.
+//
+// @licend The above is the entire license notice
+// for the JavaScript code in this page.
(function ($) {
'use strict';
@@ -52,6 +58,10 @@
var collapsed_exceptions = [];
var current_filter = read_cookie('zuul_filter_string', '');
+ var change_set_in_url = window.location.href.split('#')[1];
+ if (change_set_in_url) {
+ current_filter = change_set_in_url;
+ }
var $jq;
var xhr,
@@ -269,7 +279,16 @@
var $change_link = $('<small />');
if (change.url !== null) {
- if (/^[0-9a-f]{40}$/.test(change.id)) {
+ var github_id = change.id.match(/^([0-9]+),([0-9a-f]{40})$/);
+ if (github_id) {
+ $change_link.append(
+ $('<a />').attr('href', change.url).append(
+ $('<abbr />')
+ .attr('title', change.id)
+ .text('#' + github_id[1])
+ )
+ );
+ } else if (/^[0-9a-f]{40}$/.test(change.id)) {
var change_id_short = change.id.slice(0, 7);
$change_link.append(
$('<a />').attr('href', change.url).append(
diff --git a/etc/status/public_html/zuul.app.js b/etc/status/public_html/zuul.app.js
index 6321af8..ae950e8 100644
--- a/etc/status/public_html/zuul.app.js
+++ b/etc/status/public_html/zuul.app.js
@@ -1,5 +1,8 @@
// Client script for Zuul status page
//
+// @licstart The following is the entire license notice for the
+// JavaScript code in this page.
+//
// Copyright 2013 OpenStack Foundation
// Copyright 2013 Timo Tijhof
// Copyright 2013 Wikimedia Foundation
@@ -16,6 +19,9 @@
// WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. See the
// License for the specific language governing permissions and limitations
// under the License.
+//
+// @licend The above is the entire license notice
+// for the JavaScript code in this page.
/*exported zuul_build_dom, zuul_start */
diff --git a/playbooks/base/post.yaml b/playbooks/base/post.yaml
deleted file mode 100644
index ed3f7b8..0000000
--- a/playbooks/base/post.yaml
+++ /dev/null
@@ -1,13 +0,0 @@
-- hosts: all
- tasks:
- - name: Collect console log.
- synchronize:
- dest: "{{ zuul.executor.log_root }}"
- mode: pull
- src: "/tmp/console.log"
-
- - name: Publish logs.
- copy:
- dest: "/opt/zuul-logs/{{ zuul.uuid}}"
- src: "{{ zuul.executor.log_root }}/"
- delegate_to: 127.0.0.1
diff --git a/playbooks/base/pre.yaml b/playbooks/base/pre.yaml
deleted file mode 100644
index 1a2e699..0000000
--- a/playbooks/base/pre.yaml
+++ /dev/null
@@ -1,3 +0,0 @@
-- hosts: all
- roles:
- - prepare-workspace
diff --git a/playbooks/base/roles b/playbooks/base/roles
deleted file mode 120000
index 7b9ade8..0000000
--- a/playbooks/base/roles
+++ /dev/null
@@ -1 +0,0 @@
-../roles/
\ No newline at end of file
diff --git a/playbooks/roles/extra-test-setup/tasks/main.yaml b/playbooks/roles/extra-test-setup/tasks/main.yaml
deleted file mode 100644
index da4259e..0000000
--- a/playbooks/roles/extra-test-setup/tasks/main.yaml
+++ /dev/null
@@ -1,13 +0,0 @@
----
-- name: Check if projects tools/test-setup.sh exists.
- stat:
- path: "{{ zuul_workspace_root }}/src/{{ zuul.project }}/tools/test-setup.sh"
- register: p
-
-- name: Run tools/test-setup.sh.
- shell: tools/test-setup.sh
- args:
- chdir: "{{ zuul_workspace_root }}/src/{{ zuul.project }}"
- when:
- - p.stat.exists
- - p.stat.executable
diff --git a/playbooks/roles/prepare-workspace/tasks/main.yaml b/playbooks/roles/prepare-workspace/tasks/main.yaml
deleted file mode 100644
index 4d42b2d..0000000
--- a/playbooks/roles/prepare-workspace/tasks/main.yaml
+++ /dev/null
@@ -1,22 +0,0 @@
-- name: Ensure console.log does not exist.
- file:
- path: /tmp/console.log
- state: absent
-
-- name: Start zuul_console daemon.
- zuul_console:
- path: /tmp/console.log
- port: 19885
-
-- name: Create workspace directory.
- file:
- path: "{{ zuul_workspace_root }}"
- owner: zuul
- group: zuul
- state: directory
-
-- name: Synchronize src repos to workspace directory.
- synchronize:
- dest: "{{ zuul_workspace_root }}"
- src: "{{ zuul.executor.src_root }}"
- no_log: true
diff --git a/playbooks/roles/revoke-sudo/tasks/main.yaml b/playbooks/roles/revoke-sudo/tasks/main.yaml
deleted file mode 100644
index 1c18187..0000000
--- a/playbooks/roles/revoke-sudo/tasks/main.yaml
+++ /dev/null
@@ -1,8 +0,0 @@
-- name: Remove sudo access for zuul user.
- become: yes
- file:
- path: /etc/sudoers.d/zuul-sudo
- state: absent
-
-- name: Prove that general sudo access is actually revoked.
- shell: ! sudo -n true
diff --git a/playbooks/roles/run-bindep/tasks/main.yaml b/playbooks/roles/run-bindep/tasks/main.yaml
deleted file mode 100644
index 5a9d33e..0000000
--- a/playbooks/roles/run-bindep/tasks/main.yaml
+++ /dev/null
@@ -1,5 +0,0 @@
----
-- name: Run install-distro-packages.sh
- shell: /usr/local/jenkins/slave_scripts/install-distro-packages.sh
- args:
- chdir: "{{ zuul_workspace_root }}/src/{{ zuul.project }}"
diff --git a/playbooks/roles/run-cover/defaults/main.yaml b/playbooks/roles/run-cover/defaults/main.yaml
deleted file mode 100644
index 2e32efe..0000000
--- a/playbooks/roles/run-cover/defaults/main.yaml
+++ /dev/null
@@ -1,2 +0,0 @@
----
-run_cover_envlist: cover
diff --git a/playbooks/roles/run-cover/tasks/main.yaml b/playbooks/roles/run-cover/tasks/main.yaml
deleted file mode 100644
index caed13c..0000000
--- a/playbooks/roles/run-cover/tasks/main.yaml
+++ /dev/null
@@ -1,4 +0,0 @@
-- name: Execute run-cover.sh.
- shell: "/usr/local/jenkins/slave_scripts/run-cover.sh {{ run_cover_envlist }}"
- args:
- chdir: "{{ zuul_workspace_root }}/src/{{ zuul.project }}"
diff --git a/playbooks/roles/run-docs/defaults/main.yaml b/playbooks/roles/run-docs/defaults/main.yaml
deleted file mode 100644
index 5855a3d..0000000
--- a/playbooks/roles/run-docs/defaults/main.yaml
+++ /dev/null
@@ -1,2 +0,0 @@
----
-run_docs_envlist: venv
diff --git a/playbooks/roles/run-docs/tasks/main.yaml b/playbooks/roles/run-docs/tasks/main.yaml
deleted file mode 100644
index 2250593..0000000
--- a/playbooks/roles/run-docs/tasks/main.yaml
+++ /dev/null
@@ -1,4 +0,0 @@
-- name: Execute run-docs.sh.
- shell: "/usr/local/jenkins/slave_scripts/run-docs.sh {{ run_docs_envlist }}"
- args:
- chdir: "{{ zuul_workspace_root }}/src/{{ zuul.project }}"
diff --git a/playbooks/roles/run-tarball/defaults/main.yaml b/playbooks/roles/run-tarball/defaults/main.yaml
deleted file mode 100644
index 072828a..0000000
--- a/playbooks/roles/run-tarball/defaults/main.yaml
+++ /dev/null
@@ -1,2 +0,0 @@
----
-run_tarball_envlist: venv
diff --git a/playbooks/roles/run-tarball/tasks/main.yaml b/playbooks/roles/run-tarball/tasks/main.yaml
deleted file mode 100644
index e21c4c8..0000000
--- a/playbooks/roles/run-tarball/tasks/main.yaml
+++ /dev/null
@@ -1,4 +0,0 @@
-- name: Execute run-tarball.sh.
- shell: "/usr/local/jenkins/slave_scripts/run-tarball.sh {{ run_tarball_envlist }}"
- args:
- chdir: "{{ zuul_workspace_root }}/src/{{ zuul.project }}"
diff --git a/playbooks/roles/run-tox/defaults/main.yaml b/playbooks/roles/run-tox/defaults/main.yaml
deleted file mode 100644
index 9cb1477..0000000
--- a/playbooks/roles/run-tox/defaults/main.yaml
+++ /dev/null
@@ -1,2 +0,0 @@
----
-run_tox_envlist:
diff --git a/playbooks/roles/run-tox/tasks/main.yaml b/playbooks/roles/run-tox/tasks/main.yaml
deleted file mode 100644
index 29a4cc4..0000000
--- a/playbooks/roles/run-tox/tasks/main.yaml
+++ /dev/null
@@ -1,4 +0,0 @@
-- name: Run tox
- shell: "/usr/local/jenkins/slave_scripts/run-tox.sh {{ run_tox_envlist }}"
- args:
- chdir: "{{ zuul_workspace_root }}/src/{{ zuul.project }}"
diff --git a/playbooks/roles/run-wheel/defaults/main.yaml b/playbooks/roles/run-wheel/defaults/main.yaml
deleted file mode 100644
index 8645d33..0000000
--- a/playbooks/roles/run-wheel/defaults/main.yaml
+++ /dev/null
@@ -1,2 +0,0 @@
----
-run_wheel_envlist: venv
diff --git a/playbooks/roles/run-wheel/tasks/main.yaml b/playbooks/roles/run-wheel/tasks/main.yaml
deleted file mode 100644
index f5aaf54..0000000
--- a/playbooks/roles/run-wheel/tasks/main.yaml
+++ /dev/null
@@ -1,4 +0,0 @@
-- name: Execute run-wheel.sh.
- shell: "/usr/local/jenkins/slave_scripts/run-wheel.sh {{ run_wheel_envlist }}"
- args:
- chdir: "{{ zuul_workspace_root }}/src/{{ zuul.project }}"
diff --git a/playbooks/tox/cover.yaml b/playbooks/tox/cover.yaml
deleted file mode 100644
index 642eb4e..0000000
--- a/playbooks/tox/cover.yaml
+++ /dev/null
@@ -1,5 +0,0 @@
-- hosts: all
- roles:
- - extra-test-setup
- - revoke-sudo
- - run-cover
diff --git a/playbooks/tox/docs.yaml b/playbooks/tox/docs.yaml
deleted file mode 100644
index 028e1c5..0000000
--- a/playbooks/tox/docs.yaml
+++ /dev/null
@@ -1,4 +0,0 @@
-- hosts: all
- roles:
- - revoke-sudo
- - run-docs
diff --git a/playbooks/tox/linters.yaml b/playbooks/tox/linters.yaml
deleted file mode 100644
index d1e7f13..0000000
--- a/playbooks/tox/linters.yaml
+++ /dev/null
@@ -1,6 +0,0 @@
-- hosts: all
- vars:
- run_tox_envlist: pep8
- roles:
- - revoke-sudo
- - run-tox
diff --git a/playbooks/tox/post.yaml b/playbooks/tox/post.yaml
deleted file mode 100644
index 3b035f8..0000000
--- a/playbooks/tox/post.yaml
+++ /dev/null
@@ -1,17 +0,0 @@
-- hosts: all
- tasks:
- - name: Find tox directories to synchrionize.
- find:
- file_type: directory
- paths: "{{ zuul_workspace_root }}/src/{{ zuul.project }}/.tox"
- # NOTE(pabelanger): The .tox/log folder is empty, ignore it.
- patterns: ^(?!log).*$
- use_regex: yes
- register: result
-
- - name: Collect tox logs.
- synchronize:
- dest: "{{ zuul.executor.log_root }}/tox"
- mode: pull
- src: "{{ item.path }}/log/"
- with_items: "{{ result.files }}"
diff --git a/playbooks/tox/pre.yaml b/playbooks/tox/pre.yaml
deleted file mode 100644
index 0bf9b3c..0000000
--- a/playbooks/tox/pre.yaml
+++ /dev/null
@@ -1,3 +0,0 @@
-- hosts: all
- roles:
- - run-bindep
diff --git a/playbooks/tox/py27.yaml b/playbooks/tox/py27.yaml
deleted file mode 100644
index fd45f27..0000000
--- a/playbooks/tox/py27.yaml
+++ /dev/null
@@ -1,7 +0,0 @@
-- hosts: all
- vars:
- run_tox_envlist: py27
- roles:
- - extra-test-setup
- - revoke-sudo
- - run-tox
diff --git a/playbooks/tox/roles b/playbooks/tox/roles
deleted file mode 120000
index 7b9ade8..0000000
--- a/playbooks/tox/roles
+++ /dev/null
@@ -1 +0,0 @@
-../roles/
\ No newline at end of file
diff --git a/playbooks/tox/tarball-post.yaml b/playbooks/tox/tarball-post.yaml
deleted file mode 100644
index fb41707..0000000
--- a/playbooks/tox/tarball-post.yaml
+++ /dev/null
@@ -1,10 +0,0 @@
-- hosts: all
- tasks:
- - name: Collect tarball artifacts.
- synchronize:
- dest: "{{ zuul.executor.src_root }}/tarballs"
- mode: pull
- src: "{{ zuul_workspace_root }}/src/{{ zuul.project }}/dist/{{ item }}"
- with_items:
- - "*.tar.gz"
- - "*.whl"
diff --git a/playbooks/tox/tarball.yaml b/playbooks/tox/tarball.yaml
deleted file mode 100644
index 4d5a8f6..0000000
--- a/playbooks/tox/tarball.yaml
+++ /dev/null
@@ -1,5 +0,0 @@
-- hosts: all
- roles:
- - revoke-sudo
- - run-tarball
- - run-wheel
diff --git a/requirements.txt b/requirements.txt
index 974b77f..746bbcb 100644
--- a/requirements.txt
+++ b/requirements.txt
@@ -1,5 +1,8 @@
pbr>=1.1.0
+# pull from master until https://github.com/sigmavirus24/github3.py/pull/671
+# is in a release
+-e git+https://github.com/sigmavirus24/github3.py.git@develop#egg=Github3.py
PyYAML>=3.1.0
Paste
WebOb>=1.2.3
@@ -10,7 +13,7 @@
extras
statsd>=1.0.0,<3.0
voluptuous>=0.10.2
-gear>=0.5.7,<1.0.0
+gear>=0.9.0,<1.0.0
apscheduler>=3.0
PrettyTable>=0.6,<0.8
babel>=1.0
@@ -20,3 +23,6 @@
sqlalchemy
alembic
cryptography>=1.6
+cachecontrol
+pyjwt
+iso8601
diff --git a/setup.cfg b/setup.cfg
index 9ee64f3..5ae0903 100644
--- a/setup.cfg
+++ b/setup.cfg
@@ -26,6 +26,7 @@
zuul = zuul.cmd.client:main
zuul-cloner = zuul.cmd.cloner:main
zuul-executor = zuul.cmd.executor:main
+ zuul-bwrap = zuul.driver.bubblewrap:main
[build_sphinx]
source-dir = doc/source
diff --git a/test-requirements.txt b/test-requirements.txt
index 6262a02..baf6cad 100644
--- a/test-requirements.txt
+++ b/test-requirements.txt
@@ -1,7 +1,9 @@
-hacking>=0.12.0,!=0.13.0,<0.14 # Apache-2.0
+pep8
+pyflakes
+flake8
coverage>=3.6
-sphinx>=1.5.1
+sphinx>=1.5.1,<1.6
sphinxcontrib-blockdiag>=1.1.0
fixtures>=0.3.14
python-keystoneclient>=0.4.2
diff --git a/tests/base.py b/tests/base.py
index a0a986a..48716e2 100755
--- a/tests/base.py
+++ b/tests/base.py
@@ -16,6 +16,7 @@
# under the License.
from six.moves import configparser as ConfigParser
+import datetime
import gc
import hashlib
import json
@@ -28,13 +29,17 @@
import select
import shutil
from six.moves import reload_module
-from six import StringIO
+try:
+ from cStringIO import StringIO
+except Exception:
+ from six import StringIO
import socket
import string
import subprocess
import sys
import tempfile
import threading
+import traceback
import time
import uuid
@@ -54,6 +59,7 @@
import zuul.driver.gerrit.gerritsource as gerritsource
import zuul.driver.gerrit.gerritconnection as gerritconnection
+import zuul.driver.github.githubconnection as githubconnection
import zuul.scheduler
import zuul.webapp
import zuul.rpclistener
@@ -65,6 +71,7 @@
import zuul.merger.server
import zuul.nodepool
import zuul.zk
+from zuul.exceptions import MergeFailure
FIXTURE_DIR = os.path.join(os.path.dirname(__file__),
'fixtures')
@@ -84,7 +91,7 @@
def random_sha1():
- return hashlib.sha1(str(random.random())).hexdigest()
+ return hashlib.sha1(str(random.random()).encode('ascii')).hexdigest()
def iterate_timeout(max_seconds, purpose):
@@ -97,12 +104,37 @@
raise Exception("Timeout waiting for %s" % purpose)
-class ChangeReference(git.Reference):
+def simple_layout(path, driver='gerrit'):
+ """Specify a layout file for use by a test method.
+
+ :arg str path: The path to the layout file.
+ :arg str driver: The source driver to use, defaults to gerrit.
+
+ Some tests require only a very simple configuration. For those,
+ establishing a complete config directory hierachy is too much
+ work. In those cases, you can add a simple zuul.yaml file to the
+ test fixtures directory (in fixtures/layouts/foo.yaml) and use
+ this decorator to indicate the test method should use that rather
+ than the tenant config file specified by the test class.
+
+ The decorator will cause that layout file to be added to a
+ config-project called "common-config" and each "project" instance
+ referenced in the layout file will have a git repo automatically
+ initialized.
+ """
+
+ def decorator(test):
+ test.__simple_layout__ = (path, driver)
+ return test
+ return decorator
+
+
+class GerritChangeReference(git.Reference):
_common_path_default = "refs/changes"
_points_to_commits_only = True
-class FakeChange(object):
+class FakeGerritChange(object):
categories = {'approved': ('Approved', -1, 1),
'code-review': ('Code-Review', -2, 2),
'verified': ('Verified', -2, 2)}
@@ -110,6 +142,7 @@
def __init__(self, gerrit, number, project, branch, subject,
status='NEW', upstream_root=None, files={}):
self.gerrit = gerrit
+ self.source = gerrit
self.reported = 0
self.queried = 0
self.patchsets = []
@@ -149,9 +182,9 @@
def addFakeChangeToRepo(self, msg, files, large):
path = os.path.join(self.upstream_root, self.project)
repo = git.Repo(path)
- ref = ChangeReference.create(repo, '1/%s/%s' % (self.number,
- self.latest_patchset),
- 'refs/tags/init')
+ ref = GerritChangeReference.create(
+ repo, '1/%s/%s' % (self.number, self.latest_patchset),
+ 'refs/tags/init')
repo.head.reference = ref
zuul.merger.merger.reset_repo_to_head(repo)
repo.git.clean('-x', '-f', '-d')
@@ -440,9 +473,9 @@
files=None):
"""Add a change to the fake Gerrit."""
self.change_number += 1
- c = FakeChange(self, self.change_number, project, branch, subject,
- upstream_root=self.upstream_root,
- status=status, files=files)
+ c = FakeGerritChange(self, self.change_number, project, branch,
+ subject, upstream_root=self.upstream_root,
+ status=status, files=files)
self.changes[self.change_number] = c
return c
@@ -464,11 +497,6 @@
if cat != 'submit':
change.addApproval(cat, action[cat], username=self.user)
- # TODOv3(jeblair): can this be removed?
- if 'label' in action:
- parts = action['label'].split('=')
- change.addApproval(parts[0], parts[2], username=self.user)
-
change.messages.append(message)
if 'submit' in action:
@@ -507,6 +535,508 @@
return os.path.join(self.upstream_root, project.name)
+class GithubChangeReference(git.Reference):
+ _common_path_default = "refs/pull"
+ _points_to_commits_only = True
+
+
+class FakeGithubPullRequest(object):
+
+ def __init__(self, github, number, project, branch,
+ subject, upstream_root, files=[], number_of_commits=1,
+ writers=[]):
+ """Creates a new PR with several commits.
+ Sends an event about opened PR."""
+ self.github = github
+ self.source = github
+ self.number = number
+ self.project = project
+ self.branch = branch
+ self.subject = subject
+ self.number_of_commits = 0
+ self.upstream_root = upstream_root
+ self.files = []
+ self.comments = []
+ self.labels = []
+ self.statuses = {}
+ self.reviews = []
+ self.writers = []
+ self.updated_at = None
+ self.head_sha = None
+ self.is_merged = False
+ self.merge_message = None
+ self.state = 'open'
+ self._createPRRef()
+ self._addCommitToRepo(files=files)
+ self._updateTimeStamp()
+
+ def addCommit(self, files=[]):
+ """Adds a commit on top of the actual PR head."""
+ self._addCommitToRepo(files=files)
+ self._updateTimeStamp()
+
+ def forcePush(self, files=[]):
+ """Clears actual commits and add a commit on top of the base."""
+ self._addCommitToRepo(files=files, reset=True)
+ self._updateTimeStamp()
+
+ def getPullRequestOpenedEvent(self):
+ return self._getPullRequestEvent('opened')
+
+ def getPullRequestSynchronizeEvent(self):
+ return self._getPullRequestEvent('synchronize')
+
+ def getPullRequestReopenedEvent(self):
+ return self._getPullRequestEvent('reopened')
+
+ def getPullRequestClosedEvent(self):
+ return self._getPullRequestEvent('closed')
+
+ def getPushEvent(self, old_sha, ref='refs/heads/master'):
+ name = 'push'
+ data = {
+ 'ref': ref,
+ 'before': old_sha,
+ 'after': self.head_sha,
+ 'repository': {
+ 'full_name': self.project
+ },
+ 'sender': {
+ 'login': 'ghuser'
+ }
+ }
+ return (name, data)
+
+ def addComment(self, message):
+ self.comments.append(message)
+ self._updateTimeStamp()
+
+ def getCommentAddedEvent(self, text):
+ name = 'issue_comment'
+ data = {
+ 'action': 'created',
+ 'issue': {
+ 'number': self.number
+ },
+ 'comment': {
+ 'body': text
+ },
+ 'repository': {
+ 'full_name': self.project
+ },
+ 'sender': {
+ 'login': 'ghuser'
+ }
+ }
+ return (name, data)
+
+ def getReviewAddedEvent(self, review):
+ name = 'pull_request_review'
+ data = {
+ 'action': 'submitted',
+ 'pull_request': {
+ 'number': self.number,
+ 'title': self.subject,
+ 'updated_at': self.updated_at,
+ 'base': {
+ 'ref': self.branch,
+ 'repo': {
+ 'full_name': self.project
+ }
+ },
+ 'head': {
+ 'sha': self.head_sha
+ }
+ },
+ 'review': {
+ 'state': review
+ },
+ 'repository': {
+ 'full_name': self.project
+ },
+ 'sender': {
+ 'login': 'ghuser'
+ }
+ }
+ return (name, data)
+
+ def addLabel(self, name):
+ if name not in self.labels:
+ self.labels.append(name)
+ self._updateTimeStamp()
+ return self._getLabelEvent(name)
+
+ def removeLabel(self, name):
+ if name in self.labels:
+ self.labels.remove(name)
+ self._updateTimeStamp()
+ return self._getUnlabelEvent(name)
+
+ def _getLabelEvent(self, label):
+ name = 'pull_request'
+ data = {
+ 'action': 'labeled',
+ 'pull_request': {
+ 'number': self.number,
+ 'updated_at': self.updated_at,
+ 'base': {
+ 'ref': self.branch,
+ 'repo': {
+ 'full_name': self.project
+ }
+ },
+ 'head': {
+ 'sha': self.head_sha
+ }
+ },
+ 'label': {
+ 'name': label
+ },
+ 'sender': {
+ 'login': 'ghuser'
+ }
+ }
+ return (name, data)
+
+ def _getUnlabelEvent(self, label):
+ name = 'pull_request'
+ data = {
+ 'action': 'unlabeled',
+ 'pull_request': {
+ 'number': self.number,
+ 'title': self.subject,
+ 'updated_at': self.updated_at,
+ 'base': {
+ 'ref': self.branch,
+ 'repo': {
+ 'full_name': self.project
+ }
+ },
+ 'head': {
+ 'sha': self.head_sha,
+ 'repo': {
+ 'full_name': self.project
+ }
+ }
+ },
+ 'label': {
+ 'name': label
+ },
+ 'sender': {
+ 'login': 'ghuser'
+ }
+ }
+ return (name, data)
+
+ def _getRepo(self):
+ repo_path = os.path.join(self.upstream_root, self.project)
+ return git.Repo(repo_path)
+
+ def _createPRRef(self):
+ repo = self._getRepo()
+ GithubChangeReference.create(
+ repo, self._getPRReference(), 'refs/tags/init')
+
+ def _addCommitToRepo(self, files=[], reset=False):
+ repo = self._getRepo()
+ ref = repo.references[self._getPRReference()]
+ if reset:
+ self.number_of_commits = 0
+ ref.set_object('refs/tags/init')
+ self.number_of_commits += 1
+ repo.head.reference = ref
+ zuul.merger.merger.reset_repo_to_head(repo)
+ repo.git.clean('-x', '-f', '-d')
+
+ if files:
+ fn = files[0]
+ self.files = files
+ else:
+ fn = '%s-%s' % (self.branch.replace('/', '_'), self.number)
+ self.files = [fn]
+ msg = self.subject + '-' + str(self.number_of_commits)
+ fn = os.path.join(repo.working_dir, fn)
+ # Use a context manager so the file handle is closed deterministically
+ with open(fn, 'w') as f:
+ f.write("test %s %s\n" %
+ (self.branch, self.number))
+ repo.index.add([fn])
+
+ self.head_sha = repo.index.commit(msg).hexsha
+ # Create an empty set of statuses for the given sha,
+ # each sha on a PR may have a status set on it
+ self.statuses[self.head_sha] = []
+ repo.head.reference = 'master'
+ zuul.merger.merger.reset_repo_to_head(repo)
+ repo.git.clean('-x', '-f', '-d')
+ repo.heads['master'].checkout()
+
+ def _updateTimeStamp(self):
+ self.updated_at = time.strftime('%Y-%m-%dT%H:%M:%SZ', time.localtime())
+
+ def getPRHeadSha(self):
+ repo = self._getRepo()
+ return repo.references[self._getPRReference()].commit.hexsha
+
+ def setStatus(self, sha, state, url, description, context, user='zuul'):
+ # Since we're bypassing the github API, which would require a real
+ # user, default the reporting user to 'zuul'.
+ # insert the status at the top of the list, to simulate that it
+ # is the most recent set status
+ self.statuses[sha].insert(0, ({
+ 'state': state,
+ 'url': url,
+ 'description': description,
+ 'context': context,
+ 'creator': {
+ 'login': user
+ }
+ }))
+
+ def addReview(self, user, state, granted_on=None):
+ gh_time_format = '%Y-%m-%dT%H:%M:%SZ'
+ # convert the timestamp to a str format that would be returned
+ # from github as 'submitted_at' in the API response
+
+ if granted_on:
+ granted_on = datetime.datetime.utcfromtimestamp(granted_on)
+ submitted_at = time.strftime(
+ gh_time_format, granted_on.timetuple())
+ else:
+ # github timestamps only down to the second, so we need to make
+ # sure reviews that tests add appear to be added over a period of
+ # time in the past and not all at once.
+ if not self.reviews:
+ # the first review happens 10 mins ago
+ offset = 600
+ else:
+ # subsequent reviews happen 1 minute closer to now
+ offset = 600 - (len(self.reviews) * 60)
+
+ granted_on = datetime.datetime.utcfromtimestamp(
+ time.time() - offset)
+ submitted_at = time.strftime(
+ gh_time_format, granted_on.timetuple())
+
+ self.reviews.append({
+ 'state': state,
+ 'user': {
+ 'login': user,
+ 'email': user + "@derp.com",
+ },
+ 'submitted_at': submitted_at,
+ })
+
+ def _getPRReference(self):
+ return '%s/head' % self.number
+
+ def _getPullRequestEvent(self, action):
+ name = 'pull_request'
+ data = {
+ 'action': action,
+ 'number': self.number,
+ 'pull_request': {
+ 'number': self.number,
+ 'title': self.subject,
+ 'updated_at': self.updated_at,
+ 'base': {
+ 'ref': self.branch,
+ 'repo': {
+ 'full_name': self.project
+ }
+ },
+ 'head': {
+ 'sha': self.head_sha,
+ 'repo': {
+ 'full_name': self.project
+ }
+ }
+ },
+ 'sender': {
+ 'login': 'ghuser'
+ }
+ }
+ return (name, data)
+
+ def getCommitStatusEvent(self, context, state='success', user='zuul'):
+ name = 'status'
+ data = {
+ 'state': state,
+ 'sha': self.head_sha,
+ 'description': 'Test results for %s: %s' % (self.head_sha, state),
+ 'target_url': 'http://zuul/%s' % self.head_sha,
+ 'branches': [],
+ 'context': context,
+ 'sender': {
+ 'login': user
+ }
+ }
+ return (name, data)
+
+
+class FakeGithubConnection(githubconnection.GithubConnection):
+ log = logging.getLogger("zuul.test.FakeGithubConnection")
+
+ def __init__(self, driver, connection_name, connection_config,
+ upstream_root=None):
+ super(FakeGithubConnection, self).__init__(driver, connection_name,
+ connection_config)
+ self.connection_name = connection_name
+ self.pr_number = 0
+ self.pull_requests = []
+ self.upstream_root = upstream_root
+ self.merge_failure = False
+ self.merge_not_allowed_count = 0
+
+ def openFakePullRequest(self, project, branch, subject, files=[]):
+ self.pr_number += 1
+ pull_request = FakeGithubPullRequest(
+ self, self.pr_number, project, branch, subject, self.upstream_root,
+ files=files)
+ self.pull_requests.append(pull_request)
+ return pull_request
+
+ def getPushEvent(self, project, ref, old_rev=None, new_rev=None):
+ if not old_rev:
+ old_rev = '00000000000000000000000000000000'
+ if not new_rev:
+ new_rev = random_sha1()
+ name = 'push'
+ data = {
+ 'ref': ref,
+ 'before': old_rev,
+ 'after': new_rev,
+ 'repository': {
+ 'full_name': project
+ }
+ }
+ return (name, data)
+
+ def emitEvent(self, event):
+ """Emulates sending the GitHub webhook event to the connection."""
+ port = self.webapp.server.socket.getsockname()[1]
+ name, data = event
+ payload = json.dumps(data).encode('utf8')
+ headers = {'X-Github-Event': name}
+ req = urllib.request.Request(
+ 'http://localhost:%s/connection/%s/payload'
+ % (port, self.connection_name),
+ data=payload, headers=headers)
+ urllib.request.urlopen(req)
+
+ def getPull(self, project, number):
+ pr = self.pull_requests[number - 1]
+ data = {
+ 'number': number,
+ 'title': pr.subject,
+ 'updated_at': pr.updated_at,
+ 'base': {
+ 'repo': {
+ 'full_name': pr.project
+ },
+ 'ref': pr.branch,
+ },
+ 'mergeable': True,
+ 'state': pr.state,
+ 'head': {
+ 'sha': pr.head_sha,
+ 'repo': {
+ 'full_name': pr.project
+ }
+ }
+ }
+ return data
+
+ def getPullBySha(self, sha):
+ prs = list(set([p for p in self.pull_requests if sha == p.head_sha]))
+ if len(prs) > 1:
+ raise Exception('Multiple pulls found with head sha: %s' % sha)
+ pr = prs[0]
+ return self.getPull(pr.project, pr.number)
+
+ def getPullFileNames(self, project, number):
+ pr = self.pull_requests[number - 1]
+ return pr.files
+
+ def _getPullReviews(self, owner, project, number):
+ pr = self.pull_requests[number - 1]
+ return pr.reviews
+
+ def getUser(self, login):
+ data = {
+ 'username': login,
+ 'name': 'Github User',
+ 'email': 'github.user@example.com'
+ }
+ return data
+
+ def getRepoPermission(self, project, login):
+ owner, proj = project.split('/')
+ for pr in self.pull_requests:
+ pr_owner, pr_project = pr.project.split('/')
+ if (pr_owner == owner and proj == pr_project):
+ if login in pr.writers:
+ return 'write'
+ else:
+ return 'read'
+
+ def getGitUrl(self, project):
+ return os.path.join(self.upstream_root, str(project))
+
+ def real_getGitUrl(self, project):
+ return super(FakeGithubConnection, self).getGitUrl(project)
+
+ def getProjectBranches(self, project):
+ """Masks getProjectBranches since we don't have a real github"""
+
+ # just returns master for now
+ return ['master']
+
+ def commentPull(self, project, pr_number, message):
+ pull_request = self.pull_requests[pr_number - 1]
+ pull_request.addComment(message)
+
+ def mergePull(self, project, pr_number, commit_message='', sha=None):
+ pull_request = self.pull_requests[pr_number - 1]
+ if self.merge_failure:
+ raise Exception('Pull request was not merged')
+ if self.merge_not_allowed_count > 0:
+ self.merge_not_allowed_count -= 1
+ raise MergeFailure('Merge was not successful due to mergeability'
+ ' conflict')
+ pull_request.is_merged = True
+ pull_request.merge_message = commit_message
+
+ def getCommitStatuses(self, project, sha):
+ owner, proj = project.split('/')
+ for pr in self.pull_requests:
+ pr_owner, pr_project = pr.project.split('/')
+ # This is somewhat risky, if the same commit exists in multiple
+ # PRs, we might grab the wrong one that doesn't have a status
+ # that is expected to be there. Maybe re-work this so that there
+ # is a global registry of commit statuses like with github.
+ if (pr_owner == owner and pr_project == proj and
+ sha in pr.statuses):
+ return pr.statuses[sha]
+
+ def setCommitStatus(self, project, sha, state,
+ url='', description='', context=''):
+ owner, proj = project.split('/')
+ for pr in self.pull_requests:
+ pr_owner, pr_project = pr.project.split('/')
+ if (pr_owner == owner and pr_project == proj and
+ pr.head_sha == sha):
+ pr.setStatus(sha, state, url, description, context)
+
+ def labelPull(self, project, pr_number, label):
+ pull_request = self.pull_requests[pr_number - 1]
+ pull_request.addLabel(label)
+
+ def unlabelPull(self, project, pr_number, label):
+ pull_request = self.pull_requests[pr_number - 1]
+ pull_request.removeLabel(label)
+
+
class BuildHistory(object):
def __init__(self, **kw):
self.__dict__.update(kw)
@@ -564,7 +1094,7 @@
return
def stop(self):
- os.write(self.wake_write, '1\n')
+ os.write(self.wake_write, b'1\n')
class FakeBuild(object):
@@ -672,7 +1202,8 @@
"""
for change in changes:
- path = os.path.join(self.jobdir.src_root, change.project)
+ hostname = change.source.canonical_hostname
+ path = os.path.join(self.jobdir.src_root, hostname, change.project)
try:
repo = git.Repo(path)
except NoSuchPathError as e:
@@ -690,6 +1221,25 @@
self.log.debug(" OK")
return True
+ def getWorkspaceRepos(self, projects):
+ """Return workspace git repo objects for the listed projects
+
+ :arg list projects: A list of strings, each the canonical name
+ of a project.
+
+ :returns: A dictionary of {name: repo} for every listed
+ project.
+ :rtype: dict
+
+ """
+
+ repos = {}
+ for project in projects:
+ path = os.path.join(self.jobdir.src_root, project)
+ repo = git.Repo(path)
+ repos[project] = repo
+ return repos
+
class RecordingExecutorServer(zuul.executor.server.ExecutorServer):
"""An Ansible executor to be used in tests.
@@ -769,11 +1319,17 @@
build.release()
super(RecordingExecutorServer, self).stopJob(job)
+ def stop(self):
+ for build in self.running_builds:
+ build.release()
+ super(RecordingExecutorServer, self).stop()
+
class RecordingAnsibleJob(zuul.executor.server.AnsibleJob):
- def doMergeChanges(self, items):
+ def doMergeChanges(self, merger, items, repo_state):
# Get a merger in order to update the repos involved in this job.
- commit = super(RecordingAnsibleJob, self).doMergeChanges(items)
+ commit = super(RecordingAnsibleJob, self).doMergeChanges(
+ merger, items, repo_state)
if not commit: # merge conflict
self.recordResult('MERGER_FAILURE')
return commit
@@ -842,7 +1398,7 @@
for queue in [self.high_queue, self.normal_queue, self.low_queue]:
for job in queue:
if not hasattr(job, 'waiting'):
- if job.name.startswith('executor:execute'):
+ if job.name.startswith(b'executor:execute'):
job.waiting = self.hold_jobs_in_queue
else:
job.waiting = False
@@ -869,9 +1425,9 @@
len(self.low_queue))
self.log.debug("releasing queued job %s (%s)" % (regex, qlen))
for job in self.getQueue():
- if job.name != 'executor:execute':
+ if job.name != b'executor:execute':
continue
- parameters = json.loads(job.arguments)
+ parameters = json.loads(job.arguments.decode('utf8'))
if not regex or re.match(regex, parameters.get('job')):
self.log.debug("releasing queued job %s" %
job.unique)
@@ -941,7 +1497,10 @@
def run(self):
while self._running:
- self._run()
+ try:
+ self._run()
+ except Exception:
+ self.log.exception("Error in fake nodepool:")
time.sleep(0.1)
def _run(self):
@@ -960,7 +1519,7 @@
path = self.REQUEST_ROOT + '/' + oid
try:
data, stat = self.client.get(path)
- data = json.loads(data)
+ data = json.loads(data.decode('utf8'))
data['_oid'] = oid
reqs.append(data)
except kazoo.exceptions.NoNodeError:
@@ -976,7 +1535,7 @@
for oid in sorted(nodeids):
path = self.NODE_ROOT + '/' + oid
data, stat = self.client.get(path)
- data = json.loads(data)
+ data = json.loads(data.decode('utf8'))
data['_oid'] = oid
try:
lockfiles = self.client.get_children(path + '/lock')
@@ -1008,7 +1567,7 @@
image_id=None,
host_keys=["fake-key1", "fake-key2"],
executor='fake-nodepool')
- data = json.dumps(data)
+ data = json.dumps(data).encode('utf8')
path = self.client.create(path, data,
makepath=True,
sequence=True)
@@ -1037,9 +1596,12 @@
request['state_time'] = time.time()
path = self.REQUEST_ROOT + '/' + oid
- data = json.dumps(request)
+ data = json.dumps(request).encode('utf8')
self.log.debug("Fulfilling node request: %s %s" % (oid, data))
- self.client.set(path, data)
+ try:
+ self.client.set(path, data)
+ except kazoo.exceptions.NoNodeError:
+ self.log.debug("Node request %s %s disappeared" % (oid, data))
class ChrootedKazooFixture(fixtures.Fixture):
@@ -1135,7 +1697,7 @@
class BaseTestCase(testtools.TestCase):
log = logging.getLogger("zuul.test")
- wait_timeout = 20
+ wait_timeout = 30
def attachLogs(self, *args):
def reader():
@@ -1177,6 +1739,18 @@
else:
self._log_stream = sys.stdout
+ # NOTE(jeblair): this is temporary extra debugging to try to
+ # track down a possible leak.
+ orig_git_repo_init = git.Repo.__init__
+
+ def git_repo_init(myself, *args, **kw):
+ orig_git_repo_init(myself, *args, **kw)
+ self.log.debug("Created git repo 0x%x %s" %
+ (id(myself), repr(myself)))
+
+ self.useFixture(fixtures.MonkeyPatch('git.Repo.__init__',
+ git_repo_init))
+
handler = logging.StreamHandler(self._log_stream)
formatter = logging.Formatter('%(asctime)s %(name)-32s '
'%(levelname)-8s %(message)s')
@@ -1186,13 +1760,19 @@
logger.setLevel(logging.DEBUG)
logger.addHandler(handler)
+ # Make sure we don't carry old handlers around in process state
+ # which slows down test runs
+ self.addCleanup(logger.removeHandler, handler)
+ self.addCleanup(handler.close)
+ self.addCleanup(handler.flush)
+
# NOTE(notmorgan): Extract logging overrides for specific
# libraries from the OS_LOG_DEFAULTS env and create loggers
# for each. This is used to limit the output during test runs
# from libraries that zuul depends on such as gear.
log_defaults_from_env = os.environ.get(
'OS_LOG_DEFAULTS',
- 'git.cmd=INFO,kazoo.client=WARNING,gear=INFO')
+ 'git.cmd=INFO,kazoo.client=WARNING,gear=INFO,paste=INFO')
if log_defaults_from_env:
for default in log_defaults_from_env.split(','):
@@ -1226,7 +1806,8 @@
be loaded). It defaults to the value specified in
`config_file` but can be overidden by subclasses to obtain a
different tenant/project layout while using the standard main
- configuration.
+ configuration. See also the :py:func:`simple_layout`
+ decorator.
:cvar bool create_project_keys: Indicates whether Zuul should
auto-generate keys for each project, or whether the test
@@ -1297,29 +1878,20 @@
# Make per test copy of Configuration.
self.setup_config()
+ self.private_key_file = os.path.join(self.test_root, 'test_id_rsa')
+ if not os.path.exists(self.private_key_file):
+ src_private_key_file = os.path.join(FIXTURE_DIR, 'test_id_rsa')
+ shutil.copy(src_private_key_file, self.private_key_file)
+ shutil.copy('{}.pub'.format(src_private_key_file),
+ '{}.pub'.format(self.private_key_file))
+ os.chmod(self.private_key_file, 0o0600)
self.config.set('zuul', 'tenant_config',
os.path.join(FIXTURE_DIR,
self.config.get('zuul', 'tenant_config')))
self.config.set('merger', 'git_dir', self.merger_src_root)
self.config.set('executor', 'git_dir', self.executor_src_root)
self.config.set('zuul', 'state_dir', self.state_root)
-
- # For each project in config:
- # TODOv3(jeblair): remove these and replace with new git
- # filesystem fixtures
- self.init_repo("org/project3")
- self.init_repo("org/project4")
- self.init_repo("org/project5")
- self.init_repo("org/project6")
- self.init_repo("org/one-job-project")
- self.init_repo("org/nonvoting-project")
- self.init_repo("org/templated-project")
- self.init_repo("org/layered-project")
- self.init_repo("org/node-project")
- self.init_repo("org/conflict-project")
- self.init_repo("org/noop-project")
- self.init_repo("org/experimental-project")
- self.init_repo("org/no-jobs-project")
+ self.config.set('executor', 'private_key_file', self.private_key_file)
self.statsd = FakeStatsd()
# note, use 127.0.0.1 rather than localhost to avoid getting ipv6
@@ -1343,6 +1915,9 @@
self.sched = zuul.scheduler.Scheduler(self.config)
+ self.webapp = zuul.webapp.WebApp(
+ self.sched, port=0, listen_address='127.0.0.1')
+
self.event_queues = [
self.sched.result_event_queue,
self.sched.trigger_event_queue,
@@ -1350,7 +1925,7 @@
]
self.configure_connections()
- self.sched.registerConnections(self.connections)
+ self.sched.registerConnections(self.connections, self.webapp)
def URLOpenerFactory(*args, **kw):
if isinstance(args[0], urllib.request.Request):
@@ -1360,8 +1935,6 @@
old_urlopen = urllib.request.urlopen
urllib.request.urlopen = URLOpenerFactory
- self._startMerger()
-
self.executor_server = RecordingExecutorServer(
self.config, self.connections,
jobdir_root=self.test_root,
@@ -1390,24 +1963,21 @@
self.sched.setNodepool(self.nodepool)
self.sched.setZooKeeper(self.zk)
- self.webapp = zuul.webapp.WebApp(
- self.sched, port=0, listen_address='127.0.0.1')
self.rpc = zuul.rpclistener.RPCListener(self.config, self.sched)
self.sched.start()
self.webapp.start()
self.rpc.start()
self.executor_client.gearman.waitForServer()
+ # Cleanups are run in reverse order
+ self.addCleanup(self.assertCleanShutdown)
self.addCleanup(self.shutdown)
+ self.addCleanup(self.assertFinalState)
self.sched.reconfigure(self.config)
self.sched.resume()
- def tearDown(self):
- super(ZuulTestCase, self).tearDown()
- self.assertFinalState()
-
- def configure_connections(self):
+ def configure_connections(self, source_only=False):
# Set up gerrit related fakes
# Set a changes database so multiple FakeGerrit's can report back to
# a virtual canonical database given by the configured hostname
@@ -1426,6 +1996,16 @@
'zuul.driver.gerrit.GerritDriver.getConnection',
getGerritConnection))
+ def getGithubConnection(driver, name, config):
+ con = FakeGithubConnection(driver, name, config,
+ upstream_root=self.upstream_root)
+ setattr(self, 'fake_' + name, con)
+ return con
+
+ self.useFixture(fixtures.MonkeyPatch(
+ 'zuul.driver.github.GithubDriver.getConnection',
+ getGithubConnection))
+
# Set up smtp related fakes
# TODO(jhesketh): This should come from lib.connections for better
# coverage
@@ -1440,7 +2020,7 @@
# Register connections from the config using fakes
self.connections = zuul.lib.connections.ConnectionRegistry()
- self.connections.configure(self.config)
+ self.connections.configure(self.config, source_only=source_only)
def setup_config(self):
# This creates the per-test configuration object. It can be
@@ -1448,19 +2028,74 @@
# obeys the config_file and tenant_config_file attributes.
self.config = ConfigParser.ConfigParser()
self.config.read(os.path.join(FIXTURE_DIR, self.config_file))
- if hasattr(self, 'tenant_config_file'):
- self.config.set('zuul', 'tenant_config', self.tenant_config_file)
- git_path = os.path.join(
- os.path.dirname(
- os.path.join(FIXTURE_DIR, self.tenant_config_file)),
- 'git')
- if os.path.exists(git_path):
- for reponame in os.listdir(git_path):
- project = reponame.replace('_', '/')
- self.copyDirToRepo(project,
- os.path.join(git_path, reponame))
+
+ if not self.setupSimpleLayout():
+ if hasattr(self, 'tenant_config_file'):
+ self.config.set('zuul', 'tenant_config',
+ self.tenant_config_file)
+ git_path = os.path.join(
+ os.path.dirname(
+ os.path.join(FIXTURE_DIR, self.tenant_config_file)),
+ 'git')
+ if os.path.exists(git_path):
+ for reponame in os.listdir(git_path):
+ project = reponame.replace('_', '/')
+ self.copyDirToRepo(project,
+ os.path.join(git_path, reponame))
self.setupAllProjectKeys()
+ def setupSimpleLayout(self):
+ # If the test method has been decorated with a simple_layout,
+ # use that instead of the class tenant_config_file. Set up a
+ # single config-project with the specified layout, and
+ # initialize repos for all of the 'project' entries which
+ # appear in the layout.
+ test_name = self.id().split('.')[-1]
+ test = getattr(self, test_name)
+ if hasattr(test, '__simple_layout__'):
+ path, driver = getattr(test, '__simple_layout__')
+ else:
+ return False
+
+ files = {}
+ path = os.path.join(FIXTURE_DIR, path)
+ with open(path) as f:
+ data = f.read()
+ layout = yaml.safe_load(data)
+ files['zuul.yaml'] = data
+ untrusted_projects = []
+ for item in layout:
+ if 'project' in item:
+ name = item['project']['name']
+ untrusted_projects.append(name)
+ self.init_repo(name)
+ self.addCommitToRepo(name, 'initial commit',
+ files={'README': ''},
+ branch='master', tag='init')
+ if 'job' in item:
+ jobname = item['job']['name']
+ files['playbooks/%s.yaml' % jobname] = ''
+
+ root = os.path.join(self.test_root, "config")
+ if not os.path.exists(root):
+ os.makedirs(root)
+ f = tempfile.NamedTemporaryFile(dir=root, delete=False)
+ config = [{'tenant':
+ {'name': 'tenant-one',
+ 'source': {driver:
+ {'config-projects': ['common-config'],
+ 'untrusted-projects': untrusted_projects}}}}]
+ f.write(yaml.dump(config).encode('utf8'))
+ f.close()
+ self.config.set('zuul', 'tenant_config',
+ os.path.join(FIXTURE_DIR, f.name))
+
+ self.init_repo('common-config')
+ self.addCommitToRepo('common-config', 'add content from fixture',
+ files, branch='master', tag='init')
+
+ return True
+
def setupAllProjectKeys(self):
if self.create_project_keys:
return
@@ -1471,9 +2106,9 @@
for tenant in tenant_config:
sources = tenant['tenant']['source']
for source, conf in sources.items():
- for project in conf.get('config-repos', []):
+ for project in conf.get('config-projects', []):
self.setupProjectKeys(source, project)
- for project in conf.get('project-repos', []):
+ for project in conf.get('untrusted-projects', []):
self.setupProjectKeys(source, project)
def setupProjectKeys(self, source, project):
@@ -1545,14 +2180,24 @@
self.assertEqual(test_key, f.read())
def assertFinalState(self):
+ self.log.debug("Assert final state")
+ # Make sure no jobs are running
+ self.assertEqual({}, self.executor_server.job_workers)
# Make sure that git.Repo objects have been garbage collected.
repos = []
+ gc.disable()
gc.collect()
for obj in gc.get_objects():
if isinstance(obj, git.Repo):
- self.log.debug("Leaked git repo object: %s" % repr(obj))
+ self.log.debug("Leaked git repo object: 0x%x %s" %
+ (id(obj), repr(obj)))
+ for ref in gc.get_referrers(obj):
+ self.log.debug(" Referrer %s" % (repr(ref)))
repos.append(obj)
- self.assertEqual(len(repos), 0)
+ if repos:
+ for obj in gc.garbage:
+ self.log.debug(" Garbage %s" % (repr(obj)))
+ gc.enable()
self.assertEmptyQueues()
self.assertNodepoolState()
self.assertNoGeneratedKeys()
@@ -1564,9 +2209,9 @@
def shutdown(self):
self.log.debug("Shutting down after tests")
+ self.executor_server.hold_jobs_in_build = False
+ self.executor_server.release()
self.executor_client.stop()
- self.merge_server.stop()
- self.merge_server.join()
self.merge_client.stop()
self.executor_server.stop()
self.sched.stop()
@@ -1580,12 +2225,30 @@
self.gearman_server.shutdown()
self.fake_nodepool.stop()
self.zk.disconnect()
- threads = threading.enumerate()
- if len(threads) > 1:
- self.log.error("More than one thread is running: %s" % threads)
self.printHistory()
+ # We whitelist watchdog threads as they have relatively long delays
+ # before noticing they should exit, but they should exit on their own.
+ # Further the pydevd threads also need to be whitelisted so debugging
+ # e.g. in PyCharm is possible without breaking shutdown.
+ whitelist = ['executor-watchdog',
+ 'pydevd.CommandThread',
+ 'pydevd.Reader',
+ 'pydevd.Writer',
+ ]
+ threads = [t for t in threading.enumerate()
+ if t.name not in whitelist]
+ if len(threads) > 1:
+ log_str = ""
+ for thread_id, stack_frame in sys._current_frames().items():
+ log_str += "Thread: %s\n" % thread_id
+ log_str += "".join(traceback.format_stack(stack_frame))
+ self.log.debug(log_str)
+ raise Exception("More than one thread is running: %s" % threads)
- def init_repo(self, project):
+ def assertCleanShutdown(self):
+ pass
+
+ def init_repo(self, project, tag=None):
parts = project.split('/')
path = os.path.join(self.upstream_root, *parts[:-1])
if not os.path.exists(path):
@@ -1599,6 +2262,8 @@
repo.index.commit('initial commit')
master = repo.create_head('master')
+ if tag:
+ repo.create_tag(tag)
repo.head.reference = master
zuul.merger.merger.reset_repo_to_head(repo)
@@ -1632,11 +2297,15 @@
commit = repo.index.commit('Creating a fake commit')
return commit.hexsha
- def orderedRelease(self):
+ def orderedRelease(self, count=None):
# Run one build at a time to ensure non-race order:
+ i = 0
while len(self.builds):
self.release(self.builds[0])
self.waitUntilSettled()
+ i += 1
+ if count is not None and i >= count:
+ break
def release(self, job):
if isinstance(job, FakeBuild):
@@ -1668,14 +2337,17 @@
# It hasn't been reported yet.
return False
# Make sure that none of the worker connections are in GRAB_WAIT
- for connection in self.executor_server.worker.active_connections:
+ worker = self.executor_server.executor_worker
+ for connection in worker.active_connections:
if connection.state == 'GRAB_WAIT':
return False
return True
def areAllBuildsWaiting(self):
builds = self.executor_client.builds.values()
+ seen_builds = set()
for build in builds:
+ seen_builds.add(build.uuid)
client_job = None
for conn in self.executor_client.gearman.active_connections:
for j in conn.related_jobs.values():
@@ -1702,8 +2374,9 @@
if build.url is None:
self.log.debug("%s has not reported start" % build)
return False
+ # using internal ServerJob which offers no Text interface
worker_build = self.executor_server.job_builds.get(
- server_job.unique)
+ server_job.unique.decode('utf8'))
if worker_build:
if worker_build.isWaiting():
continue
@@ -1713,6 +2386,11 @@
else:
self.log.debug("%s is unassigned" % server_job)
return False
+ for (build_uuid, job_worker) in \
+ self.executor_server.job_workers.items():
+ if build_uuid not in seen_builds:
+ self.log.debug("%s is not finalized" % build_uuid)
+ return False
return True
def areAllNodeRequestsComplete(self):
@@ -1777,7 +2455,13 @@
def countJobResults(self, jobs, result):
jobs = filter(lambda x: x.result == result, jobs)
- return len(jobs)
+ return len(list(jobs))
+
+ def getBuildByName(self, name):
+ for build in self.builds:
+ if build.name == name:
+ return build
+ raise Exception("Unable to find build %s" % name)
def getJobFromHistory(self, name, project=None):
for job in self.history:
@@ -1802,7 +2486,7 @@
start = time.time()
while time.time() < (start + 5):
for stat in self.statsd.stats:
- k, v = stat.split(':')
+ k, v = stat.decode('utf-8').split(':')
if key == k:
if value is None and kind is None:
return
@@ -1916,9 +2600,12 @@
name: openstack
source:
gerrit:
- config-repos:
+ config-projects:
- %s
- """ % path)
+ untrusted-projects:
+ - org/project
+ - org/project1
+ - org/project2\n""" % path)
f.close()
self.config.set('zuul', 'tenant_config',
os.path.join(FIXTURE_DIR, f.name))
@@ -1949,23 +2636,37 @@
repo.create_tag(tag)
return before
- def commitLayoutUpdate(self, orig_name, source_name):
- source_path = os.path.join(self.test_root, 'upstream',
- source_name)
- to_copy = ['zuul.yaml']
- for playbook in os.listdir(os.path.join(source_path, 'playbooks')):
- to_copy.append('playbooks/{}'.format(playbook))
- commit_data = {}
- for source_file in to_copy:
- source_file_path = os.path.join(source_path, source_file)
- with open(source_file_path, 'r') as nt:
- commit_data[source_file] = nt.read()
+ def commitConfigUpdate(self, project_name, source_name):
+ """Commit an update to zuul.yaml
+
+ This overwrites the zuul.yaml in the specified project with
+ the contents specified.
+
+ :arg str project_name: The name of the project containing
+ zuul.yaml (e.g., common-config)
+
+ :arg str source_name: The path to the file (underneath the
+ test fixture directory) whose contents should be used to
+ replace zuul.yaml.
+ """
+
+ source_path = os.path.join(FIXTURE_DIR, source_name)
+ files = {}
+ with open(source_path, 'r') as f:
+ data = f.read()
+ layout = yaml.safe_load(data)
+ files['zuul.yaml'] = data
+ for item in layout:
+ if 'job' in item:
+ jobname = item['job']['name']
+ files['playbooks/%s.yaml' % jobname] = ''
before = self.addCommitToRepo(
- orig_name, 'Pulling content from %s' % source_name,
- commit_data)
+ project_name, 'Pulling content from %s' % source_name,
+ files)
return before
def addEvent(self, connection, event):
+
"""Inject a Fake (Gerrit) event.
This method accepts a JSON-encoded event and simulates Zuul
@@ -1995,6 +2696,29 @@
specified_conn.server == conn.server):
conn.addEvent(event)
+ def getUpstreamRepos(self, projects):
+ """Return upstream git repo objects for the listed projects
+
+ :arg list projects: A list of strings, each the canonical name
+ of a project.
+
+ :returns: A dictionary of {name: repo} for every listed
+ project.
+ :rtype: dict
+
+ """
+
+ repos = {}
+ for project in projects:
+ # FIXME(jeblair): the upstream root does not yet have a
+ # hostname component; that needs to be added, and this
+ # line removed:
+ tmp_project_name = '/'.join(project.split('/')[1:])
+ path = os.path.join(self.upstream_root, tmp_project_name)
+ repo = git.Repo(path)
+ repos[project] = repo
+ return repos
+
class AnsibleZuulTestCase(ZuulTestCase):
"""ZuulTestCase but with an actual ansible executor running"""
diff --git a/tests/encrypt_secret.py b/tests/encrypt_secret.py
index b8524a0..0b0cf19 100644
--- a/tests/encrypt_secret.py
+++ b/tests/encrypt_secret.py
@@ -30,5 +30,6 @@
ciphertext = encryption.encrypt_pkcs1_oaep(sys.argv[1], public_key)
print(ciphertext.encode('base64'))
+
if __name__ == '__main__':
main()
diff --git a/tests/fixtures/config/ansible/git/common-config/playbooks/check-vars.yaml b/tests/fixtures/config/ansible/git/common-config/playbooks/check-vars.yaml
index 92c66d1..1f8fdf3 100644
--- a/tests/fixtures/config/ansible/git/common-config/playbooks/check-vars.yaml
+++ b/tests/fixtures/config/ansible/git/common-config/playbooks/check-vars.yaml
@@ -13,3 +13,10 @@
- zuul.executor.hostname is defined
- zuul.executor.src_root is defined
- zuul.executor.log_root is defined
+
+ - name: Assert zuul.project variables are valid.
+ assert:
+ that:
+ - zuul.project.name == 'org/project'
+ - zuul.project.canonical_hostname == 'review.example.com'
+ - zuul.project.canonical_name == 'review.example.com/org/project'
diff --git a/tests/fixtures/config/ansible/git/common-config/playbooks/hello-post.yaml b/tests/fixtures/config/ansible/git/common-config/playbooks/hello-post.yaml
new file mode 100644
index 0000000..d528be1
--- /dev/null
+++ b/tests/fixtures/config/ansible/git/common-config/playbooks/hello-post.yaml
@@ -0,0 +1,12 @@
+- hosts: all
+ tasks:
+ - name: Register hello-world.txt file.
+ stat:
+ path: "{{zuul.executor.log_root}}/hello-world.txt"
+ register: st
+
+ - name: Assert hello-world.txt file.
+ assert:
+ that:
+ - st.stat.exists
+ - st.stat.isreg
diff --git a/tests/fixtures/config/ansible/git/common-config/zuul.yaml b/tests/fixtures/config/ansible/git/common-config/zuul.yaml
index 0980bc1..02b87bd 100644
--- a/tests/fixtures/config/ansible/git/common-config/zuul.yaml
+++ b/tests/fixtures/config/ansible/git/common-config/zuul.yaml
@@ -1,7 +1,6 @@
- pipeline:
name: check
manager: independent
- source: gerrit
allow-secrets: true
trigger:
gerrit:
@@ -17,7 +16,6 @@
name: gate
manager: dependent
success-message: Build succeeded (gate).
- source: gerrit
trigger:
gerrit:
- event: comment-added
@@ -50,6 +48,7 @@
Z3QSO1NjbBxWnaHKZYT7nkrJm8AMCgZU0ZArFLpaufKCeiK5ECSsDxic4FIsY1OkWT42qEUfL0Wd
+150AKGNZpPJnnP3QYY4W/MWcKH/zdO400+zWN52WevbSqZy90tqKDJrBkMl1ydqbuw1E4ZHvIs=
+
- job:
name: python27
pre-run: pre
@@ -73,3 +72,7 @@
nodes:
- name: ubuntu-xenial
image: ubuntu-xenial
+
+- job:
+ name: hello
+ post-run: hello-post
diff --git a/tests/fixtures/config/ansible/git/org_project/.zuul.yaml b/tests/fixtures/config/ansible/git/org_project/.zuul.yaml
index a2d9c6f..ca734c5 100644
--- a/tests/fixtures/config/ansible/git/org_project/.zuul.yaml
+++ b/tests/fixtures/config/ansible/git/org_project/.zuul.yaml
@@ -2,6 +2,10 @@
parent: python27
name: faillocal
+- job:
+ parent: hello
+ name: hello-world
+
- project:
name: org/project
check:
@@ -10,3 +14,4 @@
- faillocal
- check-vars
- timeout
+ - hello-world
diff --git a/tests/fixtures/config/ansible/git/org_project/playbooks/hello-world.yaml b/tests/fixtures/config/ansible/git/org_project/playbooks/hello-world.yaml
new file mode 100644
index 0000000..373de02
--- /dev/null
+++ b/tests/fixtures/config/ansible/git/org_project/playbooks/hello-world.yaml
@@ -0,0 +1,5 @@
+- hosts: all
+ tasks:
+ - copy:
+ content: "hello world"
+ dest: "{{zuul.executor.log_root}}/hello-world.txt"
diff --git a/tests/fixtures/config/ansible/main.yaml b/tests/fixtures/config/ansible/main.yaml
index 8df99f4..9ccece9 100644
--- a/tests/fixtures/config/ansible/main.yaml
+++ b/tests/fixtures/config/ansible/main.yaml
@@ -2,8 +2,8 @@
name: tenant-one
source:
gerrit:
- config-repos:
+ config-projects:
- common-config
- project-repos:
+ untrusted-projects:
- org/project
- bare-role
diff --git a/tests/fixtures/config/broken/git/common-config/zuul.yaml b/tests/fixtures/config/broken/git/common-config/zuul.yaml
index 6abb87f..162a982 100644
--- a/tests/fixtures/config/broken/git/common-config/zuul.yaml
+++ b/tests/fixtures/config/broken/git/common-config/zuul.yaml
@@ -1,8 +1,6 @@
- pipeline:
name: check
manager: independent
- source:
- gerrit
trigger:
gerrit:
- event: patchset-created
diff --git a/tests/fixtures/config/broken/main.yaml b/tests/fixtures/config/broken/main.yaml
index a22ed5c..9d01f54 100644
--- a/tests/fixtures/config/broken/main.yaml
+++ b/tests/fixtures/config/broken/main.yaml
@@ -2,5 +2,5 @@
name: tenant-one
source:
gerrit:
- config-repos:
+ config-projects:
- common-config
diff --git a/tests/fixtures/config/dependency-graph/git/common-config/zuul.yaml b/tests/fixtures/config/dependency-graph/git/common-config/zuul.yaml
index 60f3651..cdf989e 100644
--- a/tests/fixtures/config/dependency-graph/git/common-config/zuul.yaml
+++ b/tests/fixtures/config/dependency-graph/git/common-config/zuul.yaml
@@ -2,7 +2,6 @@
name: gate
manager: dependent
success-message: Build succeeded (gate).
- source: gerrit
trigger:
gerrit:
- event: comment-added
diff --git a/tests/fixtures/config/dependency-graph/main.yaml b/tests/fixtures/config/dependency-graph/main.yaml
index d9868fa..208e274 100644
--- a/tests/fixtures/config/dependency-graph/main.yaml
+++ b/tests/fixtures/config/dependency-graph/main.yaml
@@ -2,7 +2,7 @@
name: tenant-one
source:
gerrit:
- config-repos:
+ config-projects:
- common-config
- project-repos:
+ untrusted-projects:
- org/project
diff --git a/tests/fixtures/config/duplicate-pipeline/git/common-config/zuul.yaml b/tests/fixtures/config/duplicate-pipeline/git/common-config/zuul.yaml
index 5005108..60d7363 100755
--- a/tests/fixtures/config/duplicate-pipeline/git/common-config/zuul.yaml
+++ b/tests/fixtures/config/duplicate-pipeline/git/common-config/zuul.yaml
@@ -2,7 +2,6 @@
name: dup1
manager: independent
success-message: Build succeeded (dup1).
- source: gerrit
trigger:
gerrit:
- event: change-restored
@@ -17,7 +16,6 @@
name: dup2
manager: independent
success-message: Build succeeded (dup2).
- source: gerrit
trigger:
gerrit:
- event: change-restored
diff --git a/tests/fixtures/config/duplicate-pipeline/main.yaml b/tests/fixtures/config/duplicate-pipeline/main.yaml
index ba2d8f5..d28df0d 100755
--- a/tests/fixtures/config/duplicate-pipeline/main.yaml
+++ b/tests/fixtures/config/duplicate-pipeline/main.yaml
@@ -2,5 +2,7 @@
name: tenant-duplicate
source:
gerrit:
- config-repos:
+ config-projects:
- common-config
+ untrusted-projects:
+ - org/project
diff --git a/tests/fixtures/config/git-driver/git/common-config/zuul.yaml b/tests/fixtures/config/git-driver/git/common-config/zuul.yaml
index 0e332e4..8fe8749 100644
--- a/tests/fixtures/config/git-driver/git/common-config/zuul.yaml
+++ b/tests/fixtures/config/git-driver/git/common-config/zuul.yaml
@@ -1,7 +1,6 @@
- pipeline:
name: check
manager: independent
- source: gerrit
trigger:
gerrit:
- event: patchset-created
diff --git a/tests/fixtures/config/git-driver/main.yaml b/tests/fixtures/config/git-driver/main.yaml
index 5b9b3d9..2a2b204 100644
--- a/tests/fixtures/config/git-driver/main.yaml
+++ b/tests/fixtures/config/git-driver/main.yaml
@@ -2,8 +2,8 @@
name: tenant-one
source:
git:
- config-repos:
+ config-projects:
- common-config
gerrit:
- project-repos:
+ untrusted-projects:
- org/project
diff --git a/tests/fixtures/config/in-repo/git/common-config/zuul.yaml b/tests/fixtures/config/in-repo/git/common-config/zuul.yaml
index 55169ce..1fdaf2e 100644
--- a/tests/fixtures/config/in-repo/git/common-config/zuul.yaml
+++ b/tests/fixtures/config/in-repo/git/common-config/zuul.yaml
@@ -1,7 +1,6 @@
- pipeline:
name: check
manager: independent
- source: gerrit
trigger:
gerrit:
- event: patchset-created
@@ -16,7 +15,6 @@
name: tenant-one-gate
manager: dependent
success-message: Build succeeded (tenant-one-gate).
- source: gerrit
trigger:
gerrit:
- event: comment-added
diff --git a/tests/fixtures/config/single-tenant/git/org_unknown/README b/tests/fixtures/config/in-repo/git/org_project1/README
similarity index 100%
copy from tests/fixtures/config/single-tenant/git/org_unknown/README
copy to tests/fixtures/config/in-repo/git/org_project1/README
diff --git a/tests/fixtures/config/in-repo/main.yaml b/tests/fixtures/config/in-repo/main.yaml
index d9868fa..5f57245 100644
--- a/tests/fixtures/config/in-repo/main.yaml
+++ b/tests/fixtures/config/in-repo/main.yaml
@@ -2,7 +2,8 @@
name: tenant-one
source:
gerrit:
- config-repos:
+ config-projects:
- common-config
- project-repos:
+ untrusted-projects:
- org/project
+ - org/project1
diff --git a/tests/fixtures/config/one-job-project/git/common-config/playbooks/one-job-project-post.yaml b/tests/fixtures/config/inventory/git/common-config/playbooks/group-inventory.yaml
similarity index 100%
copy from tests/fixtures/config/one-job-project/git/common-config/playbooks/one-job-project-post.yaml
copy to tests/fixtures/config/inventory/git/common-config/playbooks/group-inventory.yaml
diff --git a/tests/fixtures/config/one-job-project/git/common-config/playbooks/one-job-project-post.yaml b/tests/fixtures/config/inventory/git/common-config/playbooks/single-inventory.yaml
similarity index 100%
copy from tests/fixtures/config/one-job-project/git/common-config/playbooks/one-job-project-post.yaml
copy to tests/fixtures/config/inventory/git/common-config/playbooks/single-inventory.yaml
diff --git a/tests/fixtures/config/inventory/git/common-config/zuul.yaml b/tests/fixtures/config/inventory/git/common-config/zuul.yaml
new file mode 100644
index 0000000..184bd80
--- /dev/null
+++ b/tests/fixtures/config/inventory/git/common-config/zuul.yaml
@@ -0,0 +1,42 @@
+- pipeline:
+ name: check
+ manager: independent
+ allow-secrets: true
+ trigger:
+ gerrit:
+ - event: patchset-created
+ success:
+ gerrit:
+ verified: 1
+ failure:
+ gerrit:
+ verified: -1
+
+- nodeset:
+ name: nodeset1
+ nodes:
+ - name: controller
+ image: controller-image
+ - name: compute1
+ image: compute-image
+ - name: compute2
+ image: compute-image
+ groups:
+ - name: ceph-osd
+ nodes:
+ - controller
+ - name: ceph-monitor
+ nodes:
+ - controller
+ - compute1
+ - compute2
+
+- job:
+ name: single-inventory
+ nodes:
+ - name: ubuntu-xenial
+ image: ubuntu-xenial
+
+- job:
+ name: group-inventory
+ nodes: nodeset1
diff --git a/tests/fixtures/config/inventory/git/org_project/.zuul.yaml b/tests/fixtures/config/inventory/git/org_project/.zuul.yaml
new file mode 100644
index 0000000..26310a0
--- /dev/null
+++ b/tests/fixtures/config/inventory/git/org_project/.zuul.yaml
@@ -0,0 +1,6 @@
+- project:
+ name: org/project
+ check:
+ jobs:
+ - single-inventory
+ - group-inventory
diff --git a/tests/fixtures/config/single-tenant/git/org_noop-project/README b/tests/fixtures/config/inventory/git/org_project/README
similarity index 100%
rename from tests/fixtures/config/single-tenant/git/org_noop-project/README
rename to tests/fixtures/config/inventory/git/org_project/README
diff --git a/tests/fixtures/config/inventory/main.yaml b/tests/fixtures/config/inventory/main.yaml
new file mode 100644
index 0000000..208e274
--- /dev/null
+++ b/tests/fixtures/config/inventory/main.yaml
@@ -0,0 +1,8 @@
+- tenant:
+ name: tenant-one
+ source:
+ gerrit:
+ config-projects:
+ - common-config
+ untrusted-projects:
+ - org/project
diff --git a/tests/fixtures/config/merges/git/common-config/zuul.yaml b/tests/fixtures/config/merges/git/common-config/zuul.yaml
index ab4e24c..1309b3f 100644
--- a/tests/fixtures/config/merges/git/common-config/zuul.yaml
+++ b/tests/fixtures/config/merges/git/common-config/zuul.yaml
@@ -1,7 +1,6 @@
- pipeline:
name: check
manager: independent
- source: gerrit
trigger:
gerrit:
- event: patchset-created
@@ -16,7 +15,6 @@
name: gate
manager: dependent
success-message: Build succeeded (gate).
- source: gerrit
trigger:
gerrit:
- event: comment-added
diff --git a/tests/fixtures/config/merges/main.yaml b/tests/fixtures/config/merges/main.yaml
index a22ed5c..3ec47ea 100644
--- a/tests/fixtures/config/merges/main.yaml
+++ b/tests/fixtures/config/merges/main.yaml
@@ -2,5 +2,11 @@
name: tenant-one
source:
gerrit:
- config-repos:
+ config-projects:
- common-config
+ untrusted-projects:
+ - org/project-cherry-pick
+ - org/project-merge
+ - org/project-merge-branches
+ - org/project-merge-resolve
+
diff --git a/tests/fixtures/config/one-job-project/git/common-config/playbooks/one-job-project-post.yaml b/tests/fixtures/config/multi-driver/git/common-config/playbooks/project-gerrit.yaml
similarity index 100%
rename from tests/fixtures/config/one-job-project/git/common-config/playbooks/one-job-project-post.yaml
rename to tests/fixtures/config/multi-driver/git/common-config/playbooks/project-gerrit.yaml
diff --git a/tests/fixtures/config/one-job-project/git/common-config/playbooks/one-job-project-post.yaml b/tests/fixtures/config/multi-driver/git/common-config/playbooks/project1-github.yaml
similarity index 100%
copy from tests/fixtures/config/one-job-project/git/common-config/playbooks/one-job-project-post.yaml
copy to tests/fixtures/config/multi-driver/git/common-config/playbooks/project1-github.yaml
diff --git a/tests/fixtures/config/multi-driver/git/common-config/zuul.yaml b/tests/fixtures/config/multi-driver/git/common-config/zuul.yaml
new file mode 100644
index 0000000..2dab845
--- /dev/null
+++ b/tests/fixtures/config/multi-driver/git/common-config/zuul.yaml
@@ -0,0 +1,46 @@
+- pipeline:
+ name: check_github
+ manager: independent
+ trigger:
+ github:
+ - event: pull_request
+ action:
+ - opened
+ - changed
+ - reopened
+ success:
+ github:
+ status: 'success'
+ failure:
+ github:
+ status: 'failure'
+
+- pipeline:
+ name: check_gerrit
+ manager: independent
+ trigger:
+ gerrit:
+ - event: patchset-created
+ success:
+ gerrit:
+ verify: 1
+ failure:
+ gerrit:
+ verify: 1
+
+- job:
+ name: project-gerrit
+- job:
+ name: project1-github
+
+- project:
+ name: org/project
+ check_gerrit:
+ jobs:
+ - project-gerrit
+
+- project:
+ name: org/project1
+ check_github:
+ jobs:
+ - project1-github
diff --git a/tests/fixtures/config/single-tenant/git/org_noop-project/README b/tests/fixtures/config/multi-driver/git/org_project/README
similarity index 100%
copy from tests/fixtures/config/single-tenant/git/org_noop-project/README
copy to tests/fixtures/config/multi-driver/git/org_project/README
diff --git a/tests/fixtures/config/single-tenant/git/org_unknown/README b/tests/fixtures/config/multi-driver/git/org_project1/README
similarity index 100%
copy from tests/fixtures/config/single-tenant/git/org_unknown/README
copy to tests/fixtures/config/multi-driver/git/org_project1/README
diff --git a/tests/fixtures/config/multi-driver/main.yaml b/tests/fixtures/config/multi-driver/main.yaml
new file mode 100644
index 0000000..301df38
--- /dev/null
+++ b/tests/fixtures/config/multi-driver/main.yaml
@@ -0,0 +1,11 @@
+- tenant:
+ name: tenant-one
+ source:
+ github:
+ config-projects:
+ - common-config
+ untrusted-projects:
+ - org/project1
+ gerrit:
+ untrusted-projects:
+ - org/project
diff --git a/tests/fixtures/config/multi-tenant-semaphore/git/common-config/zuul.yaml b/tests/fixtures/config/multi-tenant-semaphore/git/common-config/zuul.yaml
index d18ed46..ba91fb5 100644
--- a/tests/fixtures/config/multi-tenant-semaphore/git/common-config/zuul.yaml
+++ b/tests/fixtures/config/multi-tenant-semaphore/git/common-config/zuul.yaml
@@ -1,7 +1,6 @@
- pipeline:
name: check
manager: independent
- source: gerrit
trigger:
gerrit:
- event: patchset-created
diff --git a/tests/fixtures/config/multi-tenant-semaphore/main.yaml b/tests/fixtures/config/multi-tenant-semaphore/main.yaml
index b1c47b1..59422a0 100644
--- a/tests/fixtures/config/multi-tenant-semaphore/main.yaml
+++ b/tests/fixtures/config/multi-tenant-semaphore/main.yaml
@@ -2,14 +2,20 @@
name: tenant-one
source:
gerrit:
- config-repos:
+ config-projects:
- common-config
- tenant-one-config
+ untrusted-projects:
+ - org/project1
+ - org/project2
- tenant:
name: tenant-two
source:
gerrit:
- config-repos:
+ config-projects:
- common-config
- tenant-two-config
+ untrusted-projects:
+ - org/project1
+ - org/project2
diff --git a/tests/fixtures/config/multi-tenant/git/common-config/zuul.yaml b/tests/fixtures/config/multi-tenant/git/common-config/zuul.yaml
index 004f2df..ec9c6dd 100644
--- a/tests/fixtures/config/multi-tenant/git/common-config/zuul.yaml
+++ b/tests/fixtures/config/multi-tenant/git/common-config/zuul.yaml
@@ -1,7 +1,6 @@
- pipeline:
name: check
manager: independent
- source: gerrit
trigger:
gerrit:
- event: patchset-created
diff --git a/tests/fixtures/config/multi-tenant/git/tenant-one-config/zuul.yaml b/tests/fixtures/config/multi-tenant/git/tenant-one-config/zuul.yaml
index 5769cf5..63a19e2 100644
--- a/tests/fixtures/config/multi-tenant/git/tenant-one-config/zuul.yaml
+++ b/tests/fixtures/config/multi-tenant/git/tenant-one-config/zuul.yaml
@@ -2,7 +2,6 @@
name: tenant-one-gate
manager: dependent
success-message: Build succeeded (tenant-one-gate).
- source: gerrit
trigger:
gerrit:
- event: comment-added
diff --git a/tests/fixtures/config/multi-tenant/git/tenant-two-config/zuul.yaml b/tests/fixtures/config/multi-tenant/git/tenant-two-config/zuul.yaml
index 19782ce..4feb9f5 100644
--- a/tests/fixtures/config/multi-tenant/git/tenant-two-config/zuul.yaml
+++ b/tests/fixtures/config/multi-tenant/git/tenant-two-config/zuul.yaml
@@ -2,7 +2,6 @@
name: tenant-two-gate
manager: dependent
success-message: Build succeeded (tenant-two-gate).
- source: gerrit
trigger:
gerrit:
- event: comment-added
diff --git a/tests/fixtures/config/multi-tenant/main.yaml b/tests/fixtures/config/multi-tenant/main.yaml
index b1c47b1..3ae7756 100644
--- a/tests/fixtures/config/multi-tenant/main.yaml
+++ b/tests/fixtures/config/multi-tenant/main.yaml
@@ -2,14 +2,18 @@
name: tenant-one
source:
gerrit:
- config-repos:
+ config-projects:
- common-config
- tenant-one-config
+ untrusted-projects:
+ - org/project1
- tenant:
name: tenant-two
source:
gerrit:
- config-repos:
+ config-projects:
- common-config
- tenant-two-config
+ untrusted-projects:
+ - org/project2
diff --git a/tests/fixtures/config/one-job-project/git/common-config/playbooks/one-job-project-merge.yaml b/tests/fixtures/config/one-job-project/git/common-config/playbooks/one-job-project-merge.yaml
deleted file mode 100644
index f679dce..0000000
--- a/tests/fixtures/config/one-job-project/git/common-config/playbooks/one-job-project-merge.yaml
+++ /dev/null
@@ -1,2 +0,0 @@
-- hosts: all
- tasks: []
diff --git a/tests/fixtures/config/one-job-project/main.yaml b/tests/fixtures/config/one-job-project/main.yaml
deleted file mode 100644
index a22ed5c..0000000
--- a/tests/fixtures/config/one-job-project/main.yaml
+++ /dev/null
@@ -1,6 +0,0 @@
-- tenant:
- name: tenant-one
- source:
- gerrit:
- config-repos:
- - common-config
diff --git a/tests/fixtures/config/openstack/git/project-config/zuul.yaml b/tests/fixtures/config/openstack/git/project-config/zuul.yaml
index 760adb8..aff2046 100644
--- a/tests/fixtures/config/openstack/git/project-config/zuul.yaml
+++ b/tests/fixtures/config/openstack/git/project-config/zuul.yaml
@@ -2,7 +2,6 @@
name: check
manager: independent
success-message: Build succeeded (check).
- source: gerrit
trigger:
gerrit:
- event: patchset-created
@@ -17,7 +16,6 @@
name: gate
manager: dependent
success-message: Build succeeded (gate).
- source: gerrit
trigger:
gerrit:
- event: comment-added
@@ -68,7 +66,7 @@
- job:
name: dsvm
parent: base
- repos:
+ required-projects:
- openstack/keystone
- openstack/nova
diff --git a/tests/fixtures/config/openstack/main.yaml b/tests/fixtures/config/openstack/main.yaml
index 95a0952..f794093 100644
--- a/tests/fixtures/config/openstack/main.yaml
+++ b/tests/fixtures/config/openstack/main.yaml
@@ -2,5 +2,8 @@
name: openstack
source:
gerrit:
- config-repos:
+ config-projects:
- project-config
+ untrusted-projects:
+ - openstack/nova
+ - openstack/keystone
\ No newline at end of file
diff --git a/tests/fixtures/config/single-tenant/git/layout-tags/playbooks/test1.yaml b/tests/fixtures/config/push-reqs/git/common-config/playbooks/job1.yaml
similarity index 100%
rename from tests/fixtures/config/single-tenant/git/layout-tags/playbooks/test1.yaml
rename to tests/fixtures/config/push-reqs/git/common-config/playbooks/job1.yaml
diff --git a/tests/fixtures/config/push-reqs/git/common-config/zuul.yaml b/tests/fixtures/config/push-reqs/git/common-config/zuul.yaml
new file mode 100644
index 0000000..6569966
--- /dev/null
+++ b/tests/fixtures/config/push-reqs/git/common-config/zuul.yaml
@@ -0,0 +1,119 @@
+- pipeline:
+ name: current
+ manager: independent
+ require:
+ github:
+ current-patchset: true
+ gerrit:
+ current-patchset: true
+ trigger:
+ github:
+ - event: push
+ gerrit:
+ - event: ref-updated
+
+- pipeline:
+ name: open
+ manager: independent
+ require:
+ github:
+ open: true
+ gerrit:
+ open: true
+ trigger:
+ github:
+ - event: push
+ gerrit:
+ - event: ref-updated
+
+- pipeline:
+ name: review
+ manager: independent
+ require:
+ github:
+ review:
+ - type: approval
+ gerrit:
+ approval:
+ - email: herp@derp.invalid
+ trigger:
+ github:
+ - event: push
+ gerrit:
+ - event: ref-updated
+
+- pipeline:
+ name: status
+ manager: independent
+ require:
+ github:
+ status: 'zuul:check:success'
+ trigger:
+ github:
+ - event: push
+
+- pipeline:
+ name: pushhub
+ manager: independent
+ require:
+ gerrit:
+ open: true
+ trigger:
+ github:
+ - event: push
+ gerrit:
+ - event: ref-updated
+
+- pipeline:
+ name: pushgerrit
+ manager: independent
+ require:
+ github:
+ open: true
+ trigger:
+ github:
+ - event: push
+ gerrit:
+ - event: ref-updated
+
+- job:
+ name: job1
+
+- project:
+ name: org/project1
+ current:
+ jobs:
+ - job1
+ open:
+ jobs:
+ - job1
+ review:
+ jobs:
+ - job1
+ status:
+ jobs:
+ - job1
+ pushhub:
+ jobs:
+ - job1
+ pushgerrit:
+ jobs:
+ - job1
+
+- project:
+ name: org/project2
+ current:
+ jobs:
+ - job1
+ open:
+ jobs:
+ - job1
+ review:
+ jobs:
+ - job1
+ pushhub:
+ jobs:
+ - job1
+ pushgerrit:
+ jobs:
+ - job1
diff --git a/tests/fixtures/config/single-tenant/git/org_unknown/README b/tests/fixtures/config/push-reqs/git/org_project1/README
similarity index 100%
copy from tests/fixtures/config/single-tenant/git/org_unknown/README
copy to tests/fixtures/config/push-reqs/git/org_project1/README
diff --git a/tests/fixtures/config/single-tenant/git/org_unknown/README b/tests/fixtures/config/push-reqs/git/org_project2/README
similarity index 100%
copy from tests/fixtures/config/single-tenant/git/org_unknown/README
copy to tests/fixtures/config/push-reqs/git/org_project2/README
diff --git a/tests/fixtures/config/push-reqs/main.yaml b/tests/fixtures/config/push-reqs/main.yaml
new file mode 100644
index 0000000..d9f1a42
--- /dev/null
+++ b/tests/fixtures/config/push-reqs/main.yaml
@@ -0,0 +1,11 @@
+- tenant:
+ name: tenant-one
+ source:
+ github:
+ config-projects:
+ - common-config
+ untrusted-projects:
+ - org/project1
+ gerrit:
+ untrusted-projects:
+ - org/project2
diff --git a/tests/fixtures/config/requirements/email/git/common-config/zuul.yaml b/tests/fixtures/config/requirements/email/git/common-config/zuul.yaml
index 78d2a18..efc3b32 100644
--- a/tests/fixtures/config/requirements/email/git/common-config/zuul.yaml
+++ b/tests/fixtures/config/requirements/email/git/common-config/zuul.yaml
@@ -1,7 +1,6 @@
- pipeline:
name: pipeline
manager: independent
- source: gerrit
trigger:
gerrit:
- event: comment-added
@@ -12,13 +11,13 @@
gerrit:
verified: -1
require:
- approval:
- - email: jenkins@example.com
+ gerrit:
+ approval:
+ - email: jenkins@example.com
- pipeline:
name: trigger
manager: independent
- source: gerrit
trigger:
gerrit:
- event: comment-added
diff --git a/tests/fixtures/config/requirements/email/main.yaml b/tests/fixtures/config/requirements/email/main.yaml
index a22ed5c..950b117 100644
--- a/tests/fixtures/config/requirements/email/main.yaml
+++ b/tests/fixtures/config/requirements/email/main.yaml
@@ -2,5 +2,8 @@
name: tenant-one
source:
gerrit:
- config-repos:
+ config-projects:
- common-config
+ untrusted-projects:
+ - org/project1
+ - org/project2
diff --git a/tests/fixtures/config/requirements/newer-than/git/common-config/zuul.yaml b/tests/fixtures/config/requirements/newer-than/git/common-config/zuul.yaml
index 1e84e18..6f0601d 100644
--- a/tests/fixtures/config/requirements/newer-than/git/common-config/zuul.yaml
+++ b/tests/fixtures/config/requirements/newer-than/git/common-config/zuul.yaml
@@ -1,7 +1,6 @@
- pipeline:
name: pipeline
manager: independent
- source: gerrit
trigger:
gerrit:
- event: comment-added
@@ -12,14 +11,14 @@
gerrit:
verified: -1
require:
- approval:
- - username: jenkins
- newer-than: 48h
+ gerrit:
+ approval:
+ - username: jenkins
+ newer-than: 48h
- pipeline:
name: trigger
manager: independent
- source: gerrit
trigger:
gerrit:
- event: comment-added
diff --git a/tests/fixtures/config/requirements/newer-than/main.yaml b/tests/fixtures/config/requirements/newer-than/main.yaml
index a22ed5c..950b117 100644
--- a/tests/fixtures/config/requirements/newer-than/main.yaml
+++ b/tests/fixtures/config/requirements/newer-than/main.yaml
@@ -2,5 +2,8 @@
name: tenant-one
source:
gerrit:
- config-repos:
+ config-projects:
- common-config
+ untrusted-projects:
+ - org/project1
+ - org/project2
diff --git a/tests/fixtures/config/requirements/older-than/git/common-config/zuul.yaml b/tests/fixtures/config/requirements/older-than/git/common-config/zuul.yaml
index efbd79a..77ee388 100644
--- a/tests/fixtures/config/requirements/older-than/git/common-config/zuul.yaml
+++ b/tests/fixtures/config/requirements/older-than/git/common-config/zuul.yaml
@@ -1,7 +1,6 @@
- pipeline:
name: pipeline
manager: independent
- source: gerrit
trigger:
gerrit:
- event: comment-added
@@ -12,14 +11,14 @@
gerrit:
verified: -1
require:
- approval:
- - username: jenkins
- older-than: 48h
+ gerrit:
+ approval:
+ - username: jenkins
+ older-than: 48h
- pipeline:
name: trigger
manager: independent
- source: gerrit
trigger:
gerrit:
- event: comment-added
diff --git a/tests/fixtures/config/requirements/older-than/main.yaml b/tests/fixtures/config/requirements/older-than/main.yaml
index a22ed5c..950b117 100644
--- a/tests/fixtures/config/requirements/older-than/main.yaml
+++ b/tests/fixtures/config/requirements/older-than/main.yaml
@@ -2,5 +2,8 @@
name: tenant-one
source:
gerrit:
- config-repos:
+ config-projects:
- common-config
+ untrusted-projects:
+ - org/project1
+ - org/project2
diff --git a/tests/fixtures/config/requirements/reject-username/git/common-config/zuul.yaml b/tests/fixtures/config/requirements/reject-username/git/common-config/zuul.yaml
index 7212944..9e9d000 100644
--- a/tests/fixtures/config/requirements/reject-username/git/common-config/zuul.yaml
+++ b/tests/fixtures/config/requirements/reject-username/git/common-config/zuul.yaml
@@ -1,10 +1,10 @@
- pipeline:
name: pipeline
manager: independent
- source: gerrit
reject:
- approval:
- - username: jenkins
+ gerrit:
+ approval:
+ - username: jenkins
trigger:
gerrit:
- event: comment-added
@@ -18,7 +18,6 @@
- pipeline:
name: trigger
manager: independent
- source: gerrit
trigger:
gerrit:
- event: comment-added
diff --git a/tests/fixtures/config/requirements/reject-username/main.yaml b/tests/fixtures/config/requirements/reject-username/main.yaml
index a22ed5c..950b117 100644
--- a/tests/fixtures/config/requirements/reject-username/main.yaml
+++ b/tests/fixtures/config/requirements/reject-username/main.yaml
@@ -2,5 +2,8 @@
name: tenant-one
source:
gerrit:
- config-repos:
+ config-projects:
- common-config
+ untrusted-projects:
+ - org/project1
+ - org/project2
diff --git a/tests/fixtures/config/requirements/reject/git/common-config/zuul.yaml b/tests/fixtures/config/requirements/reject/git/common-config/zuul.yaml
index 9f5b125..b08a105 100644
--- a/tests/fixtures/config/requirements/reject/git/common-config/zuul.yaml
+++ b/tests/fixtures/config/requirements/reject/git/common-config/zuul.yaml
@@ -1,18 +1,19 @@
- pipeline:
name: pipeline
manager: independent
- source: gerrit
require:
- approval:
- - username: jenkins
- verified:
- - 1
- - 2
+ gerrit:
+ approval:
+ - username: jenkins
+ verified:
+ - 1
+ - 2
reject:
- approval:
- - verified:
- - -1
- - -2
+ gerrit:
+ approval:
+ - verified:
+ - -1
+ - -2
trigger:
gerrit:
- event: comment-added
@@ -26,7 +27,6 @@
- pipeline:
name: trigger
manager: independent
- source: gerrit
trigger:
gerrit:
- event: comment-added
diff --git a/tests/fixtures/config/requirements/reject/main.yaml b/tests/fixtures/config/requirements/reject/main.yaml
index a22ed5c..950b117 100644
--- a/tests/fixtures/config/requirements/reject/main.yaml
+++ b/tests/fixtures/config/requirements/reject/main.yaml
@@ -2,5 +2,8 @@
name: tenant-one
source:
gerrit:
- config-repos:
+ config-projects:
- common-config
+ untrusted-projects:
+ - org/project1
+ - org/project2
diff --git a/tests/fixtures/config/requirements/state/git/common-config/zuul.yaml b/tests/fixtures/config/requirements/state/git/common-config/zuul.yaml
index 01ceb46..bd9dc8f 100644
--- a/tests/fixtures/config/requirements/state/git/common-config/zuul.yaml
+++ b/tests/fixtures/config/requirements/state/git/common-config/zuul.yaml
@@ -1,9 +1,9 @@
- pipeline:
name: current-check
manager: independent
- source: gerrit
require:
- current-patchset: true
+ gerrit:
+ current-patchset: true
trigger:
gerrit:
- event: patchset-created
@@ -18,9 +18,9 @@
- pipeline:
name: open-check
manager: independent
- source: gerrit
require:
- open: true
+ gerrit:
+ open: true
trigger:
gerrit:
- event: patchset-created
@@ -35,9 +35,9 @@
- pipeline:
name: status-check
manager: independent
- source: gerrit
require:
- status: NEW
+ gerrit:
+ status: NEW
trigger:
gerrit:
- event: patchset-created
diff --git a/tests/fixtures/config/requirements/state/main.yaml b/tests/fixtures/config/requirements/state/main.yaml
index a22ed5c..99756fb 100644
--- a/tests/fixtures/config/requirements/state/main.yaml
+++ b/tests/fixtures/config/requirements/state/main.yaml
@@ -2,5 +2,9 @@
name: tenant-one
source:
gerrit:
- config-repos:
+ config-projects:
- common-config
+ untrusted-projects:
+ - current-project
+ - open-project
+ - status-project
diff --git a/tests/fixtures/config/requirements/username/git/common-config/zuul.yaml b/tests/fixtures/config/requirements/username/git/common-config/zuul.yaml
index 9789e71..455d9de 100644
--- a/tests/fixtures/config/requirements/username/git/common-config/zuul.yaml
+++ b/tests/fixtures/config/requirements/username/git/common-config/zuul.yaml
@@ -1,7 +1,6 @@
- pipeline:
name: pipeline
manager: independent
- source: gerrit
trigger:
gerrit:
- event: comment-added
@@ -12,13 +11,13 @@
gerrit:
verified: -1
require:
- approval:
- - username: ^(jenkins|zuul)$
+ gerrit:
+ approval:
+ - username: ^(jenkins|zuul)$
- pipeline:
name: trigger
manager: independent
- source: gerrit
trigger:
gerrit:
- event: comment-added
diff --git a/tests/fixtures/config/requirements/username/main.yaml b/tests/fixtures/config/requirements/username/main.yaml
index a22ed5c..950b117 100644
--- a/tests/fixtures/config/requirements/username/main.yaml
+++ b/tests/fixtures/config/requirements/username/main.yaml
@@ -2,5 +2,8 @@
name: tenant-one
source:
gerrit:
- config-repos:
+ config-projects:
- common-config
+ untrusted-projects:
+ - org/project1
+ - org/project2
diff --git a/tests/fixtures/config/requirements/vote1/git/common-config/zuul.yaml b/tests/fixtures/config/requirements/vote1/git/common-config/zuul.yaml
index 7989363..799282d 100644
--- a/tests/fixtures/config/requirements/vote1/git/common-config/zuul.yaml
+++ b/tests/fixtures/config/requirements/vote1/git/common-config/zuul.yaml
@@ -2,10 +2,10 @@
name: pipeline
manager: independent
require:
- approval:
- - username: jenkins
- verified: 1
- source: gerrit
+ gerrit:
+ approval:
+ - username: jenkins
+ verified: 1
trigger:
gerrit:
- event: comment-added
@@ -19,7 +19,6 @@
- pipeline:
name: trigger
manager: independent
- source: gerrit
trigger:
gerrit:
- event: comment-added
diff --git a/tests/fixtures/config/requirements/vote1/main.yaml b/tests/fixtures/config/requirements/vote1/main.yaml
index a22ed5c..950b117 100644
--- a/tests/fixtures/config/requirements/vote1/main.yaml
+++ b/tests/fixtures/config/requirements/vote1/main.yaml
@@ -2,5 +2,8 @@
name: tenant-one
source:
gerrit:
- config-repos:
+ config-projects:
- common-config
+ untrusted-projects:
+ - org/project1
+ - org/project2
diff --git a/tests/fixtures/config/requirements/vote2/git/common-config/zuul.yaml b/tests/fixtures/config/requirements/vote2/git/common-config/zuul.yaml
index 9348afb..f337371 100644
--- a/tests/fixtures/config/requirements/vote2/git/common-config/zuul.yaml
+++ b/tests/fixtures/config/requirements/vote2/git/common-config/zuul.yaml
@@ -2,12 +2,12 @@
name: pipeline
manager: independent
require:
- approval:
- - username: jenkins
- verified:
- - 1
- - 2
- source: gerrit
+ gerrit:
+ approval:
+ - username: jenkins
+ verified:
+ - 1
+ - 2
trigger:
gerrit:
- event: comment-added
@@ -21,7 +21,6 @@
- pipeline:
name: trigger
manager: independent
- source: gerrit
trigger:
gerrit:
- event: comment-added
diff --git a/tests/fixtures/config/requirements/vote2/main.yaml b/tests/fixtures/config/requirements/vote2/main.yaml
index a22ed5c..950b117 100644
--- a/tests/fixtures/config/requirements/vote2/main.yaml
+++ b/tests/fixtures/config/requirements/vote2/main.yaml
@@ -2,5 +2,8 @@
name: tenant-one
source:
gerrit:
- config-repos:
+ config-projects:
- common-config
+ untrusted-projects:
+ - org/project1
+ - org/project2
diff --git a/tests/fixtures/config/single-tenant/git/layout-idle/playbooks/project-test1.yaml b/tests/fixtures/config/semaphore/git/common-config/playbooks/project-test1.yaml
similarity index 100%
rename from tests/fixtures/config/single-tenant/git/layout-idle/playbooks/project-test1.yaml
rename to tests/fixtures/config/semaphore/git/common-config/playbooks/project-test1.yaml
diff --git a/tests/fixtures/config/single-tenant/git/layout-semaphore/playbooks/semaphore-one-test1.yaml b/tests/fixtures/config/semaphore/git/common-config/playbooks/semaphore-one-test1.yaml
similarity index 100%
rename from tests/fixtures/config/single-tenant/git/layout-semaphore/playbooks/semaphore-one-test1.yaml
rename to tests/fixtures/config/semaphore/git/common-config/playbooks/semaphore-one-test1.yaml
diff --git a/tests/fixtures/config/single-tenant/git/layout-semaphore/playbooks/semaphore-one-test2.yaml b/tests/fixtures/config/semaphore/git/common-config/playbooks/semaphore-one-test2.yaml
similarity index 100%
rename from tests/fixtures/config/single-tenant/git/layout-semaphore/playbooks/semaphore-one-test2.yaml
rename to tests/fixtures/config/semaphore/git/common-config/playbooks/semaphore-one-test2.yaml
diff --git a/tests/fixtures/config/single-tenant/git/layout-semaphore/playbooks/semaphore-two-test1.yaml b/tests/fixtures/config/semaphore/git/common-config/playbooks/semaphore-two-test1.yaml
similarity index 100%
rename from tests/fixtures/config/single-tenant/git/layout-semaphore/playbooks/semaphore-two-test1.yaml
rename to tests/fixtures/config/semaphore/git/common-config/playbooks/semaphore-two-test1.yaml
diff --git a/tests/fixtures/config/single-tenant/git/layout-semaphore/playbooks/semaphore-two-test2.yaml b/tests/fixtures/config/semaphore/git/common-config/playbooks/semaphore-two-test2.yaml
similarity index 100%
rename from tests/fixtures/config/single-tenant/git/layout-semaphore/playbooks/semaphore-two-test2.yaml
rename to tests/fixtures/config/semaphore/git/common-config/playbooks/semaphore-two-test2.yaml
diff --git a/tests/fixtures/config/single-tenant/git/layout-semaphore/zuul.yaml b/tests/fixtures/config/semaphore/git/common-config/zuul.yaml
similarity index 85%
rename from tests/fixtures/config/single-tenant/git/layout-semaphore/zuul.yaml
rename to tests/fixtures/config/semaphore/git/common-config/zuul.yaml
index f935112..9d1cacf 100644
--- a/tests/fixtures/config/single-tenant/git/layout-semaphore/zuul.yaml
+++ b/tests/fixtures/config/semaphore/git/common-config/zuul.yaml
@@ -1,7 +1,6 @@
- pipeline:
name: check
manager: independent
- source: gerrit
trigger:
gerrit:
- event: patchset-created
@@ -12,6 +11,14 @@
gerrit:
verified: -1
+# TODOv3(jeblair, tobiash): make semaphore definitions required, which
+# will cause these tests to fail until we define test-semaphore
+# here.
+
+- semaphore:
+ name: test-semaphore-two
+ max: 2
+
- job:
name: project-test1
@@ -46,7 +53,3 @@
- project-test1
- semaphore-two-test1
- semaphore-two-test2
-
-- semaphore:
- name: test-semaphore-two
- max: 2
diff --git a/tests/fixtures/config/single-tenant/git/org_noop-project/README b/tests/fixtures/config/semaphore/git/org_project/README
similarity index 100%
copy from tests/fixtures/config/single-tenant/git/org_noop-project/README
copy to tests/fixtures/config/semaphore/git/org_project/README
diff --git a/tests/fixtures/config/single-tenant/git/org_unknown/README b/tests/fixtures/config/semaphore/git/org_project1/README
similarity index 100%
rename from tests/fixtures/config/single-tenant/git/org_unknown/README
rename to tests/fixtures/config/semaphore/git/org_project1/README
diff --git a/tests/fixtures/config/semaphore/main.yaml b/tests/fixtures/config/semaphore/main.yaml
new file mode 100644
index 0000000..5f57245
--- /dev/null
+++ b/tests/fixtures/config/semaphore/main.yaml
@@ -0,0 +1,9 @@
+- tenant:
+ name: tenant-one
+ source:
+ gerrit:
+ config-projects:
+ - common-config
+ untrusted-projects:
+ - org/project
+ - org/project1
diff --git a/tests/fixtures/config/single-tenant/git/layout-semaphore-reconfiguration/zuul.yaml b/tests/fixtures/config/semaphore/zuul-reconfiguration.yaml
similarity index 93%
rename from tests/fixtures/config/single-tenant/git/layout-semaphore-reconfiguration/zuul.yaml
rename to tests/fixtures/config/semaphore/zuul-reconfiguration.yaml
index 0e332e4..8fe8749 100644
--- a/tests/fixtures/config/single-tenant/git/layout-semaphore-reconfiguration/zuul.yaml
+++ b/tests/fixtures/config/semaphore/zuul-reconfiguration.yaml
@@ -1,7 +1,6 @@
- pipeline:
name: check
manager: independent
- source: gerrit
trigger:
gerrit:
- event: patchset-created
diff --git a/tests/fixtures/config/single-tenant/git/common-config/playbooks/experimental-project-test.yaml b/tests/fixtures/config/single-tenant/git/common-config/playbooks/experimental-project-test.yaml
deleted file mode 100644
index f679dce..0000000
--- a/tests/fixtures/config/single-tenant/git/common-config/playbooks/experimental-project-test.yaml
+++ /dev/null
@@ -1,2 +0,0 @@
-- hosts: all
- tasks: []
diff --git a/tests/fixtures/config/single-tenant/git/common-config/zuul.yaml b/tests/fixtures/config/single-tenant/git/common-config/zuul.yaml
index dff18de..2bb61ee 100644
--- a/tests/fixtures/config/single-tenant/git/common-config/zuul.yaml
+++ b/tests/fixtures/config/single-tenant/git/common-config/zuul.yaml
@@ -1,7 +1,6 @@
- pipeline:
name: check
manager: independent
- source: gerrit
trigger:
gerrit:
- event: patchset-created
@@ -16,7 +15,6 @@
name: gate
manager: dependent
success-message: Build succeeded (gate).
- source: gerrit
trigger:
gerrit:
- event: comment-added
@@ -37,24 +35,11 @@
- pipeline:
name: post
manager: independent
- source: gerrit
trigger:
gerrit:
- event: ref-updated
ref: ^(?!refs/).*$
-- pipeline:
- name: experimental
- manager: independent
- source: gerrit
- trigger:
- gerrit:
- - event: patchset-created
- success:
- gerrit: {}
- failure:
- gerrit: {}
-
- job:
name: project-merge
hold-following-changes: true
@@ -84,21 +69,6 @@
- job:
name: project1-project2-integration
- queue-name: integration
-
-- job:
- name: experimental-project-test
-
-- job:
- name: nonvoting-project-merge
- hold-following-changes: true
-
-- job:
- name: nonvoting-project-test1
-
-- job:
- name: nonvoting-project-test2
- voting: false
- job:
name: project-testfile
@@ -170,68 +140,3 @@
dependencies: project-merge
- project1-project2-integration:
dependencies: project-merge
-
-- project:
- name: org/project3
- check:
- jobs:
- - project-merge
- - project-test1:
- dependencies: project-merge
- - project-test2:
- dependencies: project-merge
- - project1-project2-integration:
- dependencies: project-merge
- gate:
- queue: integrated
- jobs:
- - project-merge
- - project-test1:
- dependencies: project-merge
- - project-test2:
- dependencies: project-merge
- - project1-project2-integration:
- dependencies: project-merge
- post:
- jobs:
- - project-post
-
-- project:
- name: org/experimental-project
- experimental:
- jobs:
- - project-merge
- - experimental-project-test:
- dependencies: project-merge
-
-- project:
- name: org/noop-project
- check:
- jobs:
- - noop
- gate:
- jobs:
- - noop
-
-- project:
- name: org/nonvoting-project
- check:
- jobs:
- - nonvoting-project-merge
- - nonvoting-project-test1:
- dependencies: nonvoting-project-merge
- - nonvoting-project-test2:
- dependencies: nonvoting-project-merge
- gate:
- jobs:
- - nonvoting-project-merge
- - nonvoting-project-test1:
- dependencies: nonvoting-project-merge
- - nonvoting-project-test2:
- dependencies: nonvoting-project-merge
-
-- project:
- name: org/no-jobs-project
- check:
- jobs:
- - project-testfile
diff --git a/tests/fixtures/config/single-tenant/git/layout-disabled-at/playbooks/project-test1.yaml b/tests/fixtures/config/single-tenant/git/layout-disabled-at/playbooks/project-test1.yaml
deleted file mode 100644
index f679dce..0000000
--- a/tests/fixtures/config/single-tenant/git/layout-disabled-at/playbooks/project-test1.yaml
+++ /dev/null
@@ -1,2 +0,0 @@
-- hosts: all
- tasks: []
diff --git a/tests/fixtures/config/single-tenant/git/layout-dont-ignore-ref-deletes/playbooks/project-post.yaml b/tests/fixtures/config/single-tenant/git/layout-dont-ignore-ref-deletes/playbooks/project-post.yaml
deleted file mode 100644
index f679dce..0000000
--- a/tests/fixtures/config/single-tenant/git/layout-dont-ignore-ref-deletes/playbooks/project-post.yaml
+++ /dev/null
@@ -1,2 +0,0 @@
-- hosts: all
- tasks: []
diff --git a/tests/fixtures/config/single-tenant/git/layout-footer-message/playbooks/project-test1.yaml b/tests/fixtures/config/single-tenant/git/layout-footer-message/playbooks/project-test1.yaml
deleted file mode 100644
index f679dce..0000000
--- a/tests/fixtures/config/single-tenant/git/layout-footer-message/playbooks/project-test1.yaml
+++ /dev/null
@@ -1,2 +0,0 @@
-- hosts: all
- tasks: []
diff --git a/tests/fixtures/config/single-tenant/git/layout-idle/playbooks/project-bitrot-stable-old.yaml b/tests/fixtures/config/single-tenant/git/layout-idle/playbooks/project-bitrot-stable-old.yaml
deleted file mode 100644
index f679dce..0000000
--- a/tests/fixtures/config/single-tenant/git/layout-idle/playbooks/project-bitrot-stable-old.yaml
+++ /dev/null
@@ -1,2 +0,0 @@
-- hosts: all
- tasks: []
diff --git a/tests/fixtures/config/single-tenant/git/layout-idle/playbooks/project-bitrot-stable-older.yaml b/tests/fixtures/config/single-tenant/git/layout-idle/playbooks/project-bitrot-stable-older.yaml
deleted file mode 100644
index f679dce..0000000
--- a/tests/fixtures/config/single-tenant/git/layout-idle/playbooks/project-bitrot-stable-older.yaml
+++ /dev/null
@@ -1,2 +0,0 @@
-- hosts: all
- tasks: []
diff --git a/tests/fixtures/config/single-tenant/git/layout-inheritance/playbooks/project-test-irrelevant-starts-empty.yaml b/tests/fixtures/config/single-tenant/git/layout-inheritance/playbooks/project-test-irrelevant-starts-empty.yaml
deleted file mode 100644
index f679dce..0000000
--- a/tests/fixtures/config/single-tenant/git/layout-inheritance/playbooks/project-test-irrelevant-starts-empty.yaml
+++ /dev/null
@@ -1,2 +0,0 @@
-- hosts: all
- tasks: []
diff --git a/tests/fixtures/config/single-tenant/git/layout-inheritance/playbooks/project-test-irrelevant-starts-full.yaml b/tests/fixtures/config/single-tenant/git/layout-inheritance/playbooks/project-test-irrelevant-starts-full.yaml
deleted file mode 100644
index f679dce..0000000
--- a/tests/fixtures/config/single-tenant/git/layout-inheritance/playbooks/project-test-irrelevant-starts-full.yaml
+++ /dev/null
@@ -1,2 +0,0 @@
-- hosts: all
- tasks: []
diff --git a/tests/fixtures/config/single-tenant/git/layout-inheritance/playbooks/project-test-nomatch-starts-empty.yaml b/tests/fixtures/config/single-tenant/git/layout-inheritance/playbooks/project-test-nomatch-starts-empty.yaml
deleted file mode 100644
index f679dce..0000000
--- a/tests/fixtures/config/single-tenant/git/layout-inheritance/playbooks/project-test-nomatch-starts-empty.yaml
+++ /dev/null
@@ -1,2 +0,0 @@
-- hosts: all
- tasks: []
diff --git a/tests/fixtures/config/single-tenant/git/layout-inheritance/playbooks/project-test-nomatch-starts-full.yaml b/tests/fixtures/config/single-tenant/git/layout-inheritance/playbooks/project-test-nomatch-starts-full.yaml
deleted file mode 100644
index f679dce..0000000
--- a/tests/fixtures/config/single-tenant/git/layout-inheritance/playbooks/project-test-nomatch-starts-full.yaml
+++ /dev/null
@@ -1,2 +0,0 @@
-- hosts: all
- tasks: []
diff --git a/tests/fixtures/config/single-tenant/git/layout-irrelevant-files/playbooks/project-test-irrelevant-files.yaml b/tests/fixtures/config/single-tenant/git/layout-irrelevant-files/playbooks/project-test-irrelevant-files.yaml
deleted file mode 100644
index f679dce..0000000
--- a/tests/fixtures/config/single-tenant/git/layout-irrelevant-files/playbooks/project-test-irrelevant-files.yaml
+++ /dev/null
@@ -1,2 +0,0 @@
-- hosts: all
- tasks: []
diff --git a/tests/fixtures/config/single-tenant/git/layout-live-reconfiguration-del-project/playbooks/project-merge.yaml b/tests/fixtures/config/single-tenant/git/layout-live-reconfiguration-del-project/playbooks/project-merge.yaml
deleted file mode 100644
index f679dce..0000000
--- a/tests/fixtures/config/single-tenant/git/layout-live-reconfiguration-del-project/playbooks/project-merge.yaml
+++ /dev/null
@@ -1,2 +0,0 @@
-- hosts: all
- tasks: []
diff --git a/tests/fixtures/config/single-tenant/git/layout-live-reconfiguration-del-project/playbooks/project-test1.yaml b/tests/fixtures/config/single-tenant/git/layout-live-reconfiguration-del-project/playbooks/project-test1.yaml
deleted file mode 100644
index f679dce..0000000
--- a/tests/fixtures/config/single-tenant/git/layout-live-reconfiguration-del-project/playbooks/project-test1.yaml
+++ /dev/null
@@ -1,2 +0,0 @@
-- hosts: all
- tasks: []
diff --git a/tests/fixtures/config/single-tenant/git/layout-live-reconfiguration-del-project/playbooks/project-test2.yaml b/tests/fixtures/config/single-tenant/git/layout-live-reconfiguration-del-project/playbooks/project-test2.yaml
deleted file mode 100644
index f679dce..0000000
--- a/tests/fixtures/config/single-tenant/git/layout-live-reconfiguration-del-project/playbooks/project-test2.yaml
+++ /dev/null
@@ -1,2 +0,0 @@
-- hosts: all
- tasks: []
diff --git a/tests/fixtures/config/single-tenant/git/layout-live-reconfiguration-del-project/playbooks/project-testfile.yaml b/tests/fixtures/config/single-tenant/git/layout-live-reconfiguration-del-project/playbooks/project-testfile.yaml
deleted file mode 100644
index f679dce..0000000
--- a/tests/fixtures/config/single-tenant/git/layout-live-reconfiguration-del-project/playbooks/project-testfile.yaml
+++ /dev/null
@@ -1,2 +0,0 @@
-- hosts: all
- tasks: []
diff --git a/tests/fixtures/config/single-tenant/git/layout-no-jobs/playbooks/gate-noop.yaml b/tests/fixtures/config/single-tenant/git/layout-no-jobs/playbooks/gate-noop.yaml
deleted file mode 100644
index f679dce..0000000
--- a/tests/fixtures/config/single-tenant/git/layout-no-jobs/playbooks/gate-noop.yaml
+++ /dev/null
@@ -1,2 +0,0 @@
-- hosts: all
- tasks: []
diff --git a/tests/fixtures/config/single-tenant/git/layout-no-timer/playbooks/project-bitrot-stable-old.yaml b/tests/fixtures/config/single-tenant/git/layout-no-timer/playbooks/project-bitrot-stable-old.yaml
deleted file mode 100644
index f679dce..0000000
--- a/tests/fixtures/config/single-tenant/git/layout-no-timer/playbooks/project-bitrot-stable-old.yaml
+++ /dev/null
@@ -1,2 +0,0 @@
-- hosts: all
- tasks: []
diff --git a/tests/fixtures/config/single-tenant/git/layout-no-timer/playbooks/project-bitrot-stable-older.yaml b/tests/fixtures/config/single-tenant/git/layout-no-timer/playbooks/project-bitrot-stable-older.yaml
deleted file mode 100644
index f679dce..0000000
--- a/tests/fixtures/config/single-tenant/git/layout-no-timer/playbooks/project-bitrot-stable-older.yaml
+++ /dev/null
@@ -1,2 +0,0 @@
-- hosts: all
- tasks: []
diff --git a/tests/fixtures/config/single-tenant/git/layout-no-timer/playbooks/project-test1.yaml b/tests/fixtures/config/single-tenant/git/layout-no-timer/playbooks/project-test1.yaml
deleted file mode 100644
index f679dce..0000000
--- a/tests/fixtures/config/single-tenant/git/layout-no-timer/playbooks/project-test1.yaml
+++ /dev/null
@@ -1,2 +0,0 @@
-- hosts: all
- tasks: []
diff --git a/tests/fixtures/config/single-tenant/git/layout-rate-limit/playbooks/project-merge.yaml b/tests/fixtures/config/single-tenant/git/layout-rate-limit/playbooks/project-merge.yaml
deleted file mode 100644
index f679dce..0000000
--- a/tests/fixtures/config/single-tenant/git/layout-rate-limit/playbooks/project-merge.yaml
+++ /dev/null
@@ -1,2 +0,0 @@
-- hosts: all
- tasks: []
diff --git a/tests/fixtures/config/single-tenant/git/layout-rate-limit/playbooks/project-test1.yaml b/tests/fixtures/config/single-tenant/git/layout-rate-limit/playbooks/project-test1.yaml
deleted file mode 100644
index f679dce..0000000
--- a/tests/fixtures/config/single-tenant/git/layout-rate-limit/playbooks/project-test1.yaml
+++ /dev/null
@@ -1,2 +0,0 @@
-- hosts: all
- tasks: []
diff --git a/tests/fixtures/config/single-tenant/git/layout-rate-limit/playbooks/project-test2.yaml b/tests/fixtures/config/single-tenant/git/layout-rate-limit/playbooks/project-test2.yaml
deleted file mode 100644
index f679dce..0000000
--- a/tests/fixtures/config/single-tenant/git/layout-rate-limit/playbooks/project-test2.yaml
+++ /dev/null
@@ -1,2 +0,0 @@
-- hosts: all
- tasks: []
diff --git a/tests/fixtures/config/single-tenant/git/layout-repo-deleted/playbooks/project-merge.yaml b/tests/fixtures/config/single-tenant/git/layout-repo-deleted/playbooks/project-merge.yaml
deleted file mode 100644
index f679dce..0000000
--- a/tests/fixtures/config/single-tenant/git/layout-repo-deleted/playbooks/project-merge.yaml
+++ /dev/null
@@ -1,2 +0,0 @@
-- hosts: all
- tasks: []
diff --git a/tests/fixtures/config/single-tenant/git/layout-repo-deleted/playbooks/project-test1.yaml b/tests/fixtures/config/single-tenant/git/layout-repo-deleted/playbooks/project-test1.yaml
deleted file mode 100644
index f679dce..0000000
--- a/tests/fixtures/config/single-tenant/git/layout-repo-deleted/playbooks/project-test1.yaml
+++ /dev/null
@@ -1,2 +0,0 @@
-- hosts: all
- tasks: []
diff --git a/tests/fixtures/config/single-tenant/git/layout-repo-deleted/playbooks/project-test2.yaml b/tests/fixtures/config/single-tenant/git/layout-repo-deleted/playbooks/project-test2.yaml
deleted file mode 100644
index f679dce..0000000
--- a/tests/fixtures/config/single-tenant/git/layout-repo-deleted/playbooks/project-test2.yaml
+++ /dev/null
@@ -1,2 +0,0 @@
-- hosts: all
- tasks: []
diff --git a/tests/fixtures/config/single-tenant/git/layout-semaphore-reconfiguration/playbooks/project-test1.yaml b/tests/fixtures/config/single-tenant/git/layout-semaphore-reconfiguration/playbooks/project-test1.yaml
deleted file mode 100644
index f679dce..0000000
--- a/tests/fixtures/config/single-tenant/git/layout-semaphore-reconfiguration/playbooks/project-test1.yaml
+++ /dev/null
@@ -1,2 +0,0 @@
-- hosts: all
- tasks: []
diff --git a/tests/fixtures/config/single-tenant/git/layout-semaphore/playbooks/project-test1.yaml b/tests/fixtures/config/single-tenant/git/layout-semaphore/playbooks/project-test1.yaml
deleted file mode 100644
index f679dce..0000000
--- a/tests/fixtures/config/single-tenant/git/layout-semaphore/playbooks/project-test1.yaml
+++ /dev/null
@@ -1,2 +0,0 @@
-- hosts: all
- tasks: []
diff --git a/tests/fixtures/config/single-tenant/git/layout-smtp/playbooks/experimental-project-test.yaml b/tests/fixtures/config/single-tenant/git/layout-smtp/playbooks/experimental-project-test.yaml
deleted file mode 100644
index f679dce..0000000
--- a/tests/fixtures/config/single-tenant/git/layout-smtp/playbooks/experimental-project-test.yaml
+++ /dev/null
@@ -1,2 +0,0 @@
-- hosts: all
- tasks: []
diff --git a/tests/fixtures/config/single-tenant/git/layout-smtp/playbooks/project-merge.yaml b/tests/fixtures/config/single-tenant/git/layout-smtp/playbooks/project-merge.yaml
deleted file mode 100644
index f679dce..0000000
--- a/tests/fixtures/config/single-tenant/git/layout-smtp/playbooks/project-merge.yaml
+++ /dev/null
@@ -1,2 +0,0 @@
-- hosts: all
- tasks: []
diff --git a/tests/fixtures/config/single-tenant/git/layout-smtp/playbooks/project-test1.yaml b/tests/fixtures/config/single-tenant/git/layout-smtp/playbooks/project-test1.yaml
deleted file mode 100644
index f679dce..0000000
--- a/tests/fixtures/config/single-tenant/git/layout-smtp/playbooks/project-test1.yaml
+++ /dev/null
@@ -1,2 +0,0 @@
-- hosts: all
- tasks: []
diff --git a/tests/fixtures/config/single-tenant/git/layout-smtp/playbooks/project-test2.yaml b/tests/fixtures/config/single-tenant/git/layout-smtp/playbooks/project-test2.yaml
deleted file mode 100644
index f679dce..0000000
--- a/tests/fixtures/config/single-tenant/git/layout-smtp/playbooks/project-test2.yaml
+++ /dev/null
@@ -1,2 +0,0 @@
-- hosts: all
- tasks: []
diff --git a/tests/fixtures/config/single-tenant/git/layout-tags/playbooks/integration.yaml b/tests/fixtures/config/single-tenant/git/layout-tags/playbooks/integration.yaml
deleted file mode 100644
index f679dce..0000000
--- a/tests/fixtures/config/single-tenant/git/layout-tags/playbooks/integration.yaml
+++ /dev/null
@@ -1,2 +0,0 @@
-- hosts: all
- tasks: []
diff --git a/tests/fixtures/config/single-tenant/git/layout-tags/playbooks/merge.yaml b/tests/fixtures/config/single-tenant/git/layout-tags/playbooks/merge.yaml
deleted file mode 100644
index f679dce..0000000
--- a/tests/fixtures/config/single-tenant/git/layout-tags/playbooks/merge.yaml
+++ /dev/null
@@ -1,2 +0,0 @@
-- hosts: all
- tasks: []
diff --git a/tests/fixtures/config/single-tenant/git/layout-tags/playbooks/test2.yaml b/tests/fixtures/config/single-tenant/git/layout-tags/playbooks/test2.yaml
deleted file mode 100644
index f679dce..0000000
--- a/tests/fixtures/config/single-tenant/git/layout-tags/playbooks/test2.yaml
+++ /dev/null
@@ -1,2 +0,0 @@
-- hosts: all
- tasks: []
diff --git a/tests/fixtures/config/single-tenant/git/layout-tags/zuul.yaml b/tests/fixtures/config/single-tenant/git/layout-tags/zuul.yaml
deleted file mode 100644
index 07f0657..0000000
--- a/tests/fixtures/config/single-tenant/git/layout-tags/zuul.yaml
+++ /dev/null
@@ -1,55 +0,0 @@
-- pipeline:
- name: check
- manager: independent
- source: gerrit
- trigger:
- gerrit:
- - event: patchset-created
- success:
- gerrit:
- verified: 1
- failure:
- gerrit:
- verified: -1
-
-- job:
- name: merge
- failure-message: Unable to merge change
- hold-following-changes: true
- tags:
- - merge
-
-- job:
- name: test1
-
-- job:
- name: test2
-
-- job:
- name: integration
-
-- project:
- name: org/project1
- check:
- jobs:
- - merge:
- tags:
- - extratag
- - test1:
- dependencies: merge
- - test2:
- dependencies: merge
- - integration:
- dependencies: merge
-
-- project:
- name: org/project2
- check:
- jobs:
- - merge
- - test1:
- dependencies: merge
- - test2:
- dependencies: merge
- - integration:
- dependencies: merge
diff --git a/tests/fixtures/config/single-tenant/git/layout-timer-smtp/playbooks/project-bitrot-stable-old.yaml b/tests/fixtures/config/single-tenant/git/layout-timer-smtp/playbooks/project-bitrot-stable-old.yaml
deleted file mode 100644
index f679dce..0000000
--- a/tests/fixtures/config/single-tenant/git/layout-timer-smtp/playbooks/project-bitrot-stable-old.yaml
+++ /dev/null
@@ -1,2 +0,0 @@
-- hosts: all
- tasks: []
diff --git a/tests/fixtures/config/single-tenant/git/layout-timer-smtp/playbooks/project-bitrot-stable-older.yaml b/tests/fixtures/config/single-tenant/git/layout-timer-smtp/playbooks/project-bitrot-stable-older.yaml
deleted file mode 100644
index f679dce..0000000
--- a/tests/fixtures/config/single-tenant/git/layout-timer-smtp/playbooks/project-bitrot-stable-older.yaml
+++ /dev/null
@@ -1,2 +0,0 @@
-- hosts: all
- tasks: []
diff --git a/tests/fixtures/config/single-tenant/git/layout-timer/playbooks/project-bitrot-stable-old.yaml b/tests/fixtures/config/single-tenant/git/layout-timer/playbooks/project-bitrot-stable-old.yaml
deleted file mode 100644
index f679dce..0000000
--- a/tests/fixtures/config/single-tenant/git/layout-timer/playbooks/project-bitrot-stable-old.yaml
+++ /dev/null
@@ -1,2 +0,0 @@
-- hosts: all
- tasks: []
diff --git a/tests/fixtures/config/single-tenant/git/layout-timer/playbooks/project-bitrot-stable-older.yaml b/tests/fixtures/config/single-tenant/git/layout-timer/playbooks/project-bitrot-stable-older.yaml
deleted file mode 100644
index f679dce..0000000
--- a/tests/fixtures/config/single-tenant/git/layout-timer/playbooks/project-bitrot-stable-older.yaml
+++ /dev/null
@@ -1,2 +0,0 @@
-- hosts: all
- tasks: []
diff --git a/tests/fixtures/config/single-tenant/git/layout-timer/playbooks/project-test1.yaml b/tests/fixtures/config/single-tenant/git/layout-timer/playbooks/project-test1.yaml
deleted file mode 100644
index f679dce..0000000
--- a/tests/fixtures/config/single-tenant/git/layout-timer/playbooks/project-test1.yaml
+++ /dev/null
@@ -1,2 +0,0 @@
-- hosts: all
- tasks: []
diff --git a/tests/fixtures/config/single-tenant/git/layout-timer/playbooks/project-test2.yaml b/tests/fixtures/config/single-tenant/git/layout-timer/playbooks/project-test2.yaml
deleted file mode 100644
index f679dce..0000000
--- a/tests/fixtures/config/single-tenant/git/layout-timer/playbooks/project-test2.yaml
+++ /dev/null
@@ -1,2 +0,0 @@
-- hosts: all
- tasks: []
diff --git a/tests/fixtures/config/single-tenant/git/org_delete-project/README b/tests/fixtures/config/single-tenant/git/org_delete-project/README
deleted file mode 100644
index 9daeafb..0000000
--- a/tests/fixtures/config/single-tenant/git/org_delete-project/README
+++ /dev/null
@@ -1 +0,0 @@
-test
diff --git a/tests/fixtures/config/single-tenant/git/org_experimental-project/README b/tests/fixtures/config/single-tenant/git/org_experimental-project/README
deleted file mode 100644
index 9daeafb..0000000
--- a/tests/fixtures/config/single-tenant/git/org_experimental-project/README
+++ /dev/null
@@ -1 +0,0 @@
-test
diff --git a/tests/fixtures/config/single-tenant/git/org_no-jobs-project/README b/tests/fixtures/config/single-tenant/git/org_no-jobs-project/README
deleted file mode 100644
index 44f3bac..0000000
--- a/tests/fixtures/config/single-tenant/git/org_no-jobs-project/README
+++ /dev/null
@@ -1 +0,0 @@
-staypuft
diff --git a/tests/fixtures/config/single-tenant/git/org_nonvoting-project/README b/tests/fixtures/config/single-tenant/git/org_nonvoting-project/README
deleted file mode 100644
index 2cc3865..0000000
--- a/tests/fixtures/config/single-tenant/git/org_nonvoting-project/README
+++ /dev/null
@@ -1 +0,0 @@
-dont tread on me
diff --git a/tests/fixtures/config/single-tenant/git/org_project3/README b/tests/fixtures/config/single-tenant/git/org_project3/README
deleted file mode 100644
index 234496b..0000000
--- a/tests/fixtures/config/single-tenant/git/org_project3/README
+++ /dev/null
@@ -1 +0,0 @@
-third
diff --git a/tests/fixtures/config/single-tenant/main.yaml b/tests/fixtures/config/single-tenant/main.yaml
index d9868fa..83ed092 100644
--- a/tests/fixtures/config/single-tenant/main.yaml
+++ b/tests/fixtures/config/single-tenant/main.yaml
@@ -2,7 +2,9 @@
name: tenant-one
source:
gerrit:
- config-repos:
+ config-projects:
- common-config
- project-repos:
+ untrusted-projects:
- org/project
+ - org/project1
+ - org/project2
diff --git a/tests/fixtures/config/sql-driver/git/common-config/zuul.yaml b/tests/fixtures/config/sql-driver/git/common-config/zuul.yaml
index 36c7602..dd80d08 100644
--- a/tests/fixtures/config/sql-driver/git/common-config/zuul.yaml
+++ b/tests/fixtures/config/sql-driver/git/common-config/zuul.yaml
@@ -1,7 +1,6 @@
- pipeline:
name: check
manager: independent
- source: gerrit
trigger:
gerrit:
- event: patchset-created
diff --git a/tests/fixtures/config/sql-driver/main.yaml b/tests/fixtures/config/sql-driver/main.yaml
index d9868fa..208e274 100644
--- a/tests/fixtures/config/sql-driver/main.yaml
+++ b/tests/fixtures/config/sql-driver/main.yaml
@@ -2,7 +2,7 @@
name: tenant-one
source:
gerrit:
- config-repos:
+ config-projects:
- common-config
- project-repos:
+ untrusted-projects:
- org/project
diff --git a/tests/fixtures/config/success-url/git/common-config/zuul.yaml b/tests/fixtures/config/success-url/git/common-config/zuul.yaml
index b3ecf6d..7082b8c 100644
--- a/tests/fixtures/config/success-url/git/common-config/zuul.yaml
+++ b/tests/fixtures/config/success-url/git/common-config/zuul.yaml
@@ -1,7 +1,6 @@
- pipeline:
name: check
manager: independent
- source: gerrit
trigger:
gerrit:
- event: patchset-created
diff --git a/tests/fixtures/config/success-url/main.yaml b/tests/fixtures/config/success-url/main.yaml
index a22ed5c..0027ae1 100644
--- a/tests/fixtures/config/success-url/main.yaml
+++ b/tests/fixtures/config/success-url/main.yaml
@@ -2,5 +2,7 @@
name: tenant-one
source:
gerrit:
- config-repos:
+ config-projects:
- common-config
+ untrusted-projects:
+ - org/docs
diff --git a/tests/fixtures/config/templated-project/git/common-config/zuul.yaml b/tests/fixtures/config/templated-project/git/common-config/zuul.yaml
index 8d2c8a0..251a3cd 100644
--- a/tests/fixtures/config/templated-project/git/common-config/zuul.yaml
+++ b/tests/fixtures/config/templated-project/git/common-config/zuul.yaml
@@ -1,7 +1,6 @@
- pipeline:
name: check
manager: independent
- source: gerrit
trigger:
gerrit:
- event: patchset-created
@@ -16,7 +15,6 @@
name: gate
manager: dependent
success-message: Build succeeded (gate).
- source: gerrit
trigger:
gerrit:
- event: comment-added
@@ -37,7 +35,6 @@
- pipeline:
name: post
manager: independent
- source: gerrit
trigger:
gerrit:
- event: ref-updated
diff --git a/tests/fixtures/config/templated-project/main.yaml b/tests/fixtures/config/templated-project/main.yaml
index a22ed5c..e59b396 100644
--- a/tests/fixtures/config/templated-project/main.yaml
+++ b/tests/fixtures/config/templated-project/main.yaml
@@ -2,5 +2,8 @@
name: tenant-one
source:
gerrit:
- config-repos:
+ config-projects:
- common-config
+ untrusted-projects:
+ - org/templated-project
+ - org/layered-project
diff --git a/tests/fixtures/config/zuul-connections-multiple-gerrits/git/common-config/zuul.yaml b/tests/fixtures/config/zuul-connections-multiple-gerrits/git/common-config/zuul.yaml
index 302dfcf..8f858cd 100644
--- a/tests/fixtures/config/zuul-connections-multiple-gerrits/git/common-config/zuul.yaml
+++ b/tests/fixtures/config/zuul-connections-multiple-gerrits/git/common-config/zuul.yaml
@@ -1,7 +1,6 @@
- pipeline:
name: review_check
manager: independent
- source: review_gerrit
trigger:
review_gerrit:
- event: patchset-created
@@ -15,7 +14,6 @@
- pipeline:
name: another_check
manager: independent
- source: another_gerrit
trigger:
another_gerrit:
- event: patchset-created
@@ -26,6 +24,25 @@
another_gerrit:
verified: -1
+- pipeline:
+ name: common_check
+ manager: independent
+ trigger:
+ another_gerrit:
+ - event: patchset-created
+ review_gerrit:
+ - event: patchset-created
+ success:
+ review_gerrit:
+ verified: 1
+ another_gerrit:
+ verified: 1
+ failure:
+ review_gerrit:
+ verified: -1
+ another_gerrit:
+ verified: -1
+
- job:
name: project-test1
@@ -33,10 +50,26 @@
name: project-test2
- project:
- name: org/project1
+ name: review.example.com/org/project1
review_check:
jobs:
- project-test1
+
+- project:
+ name: another.example.com/org/project1
another_check:
jobs:
- project-test2
+
+
+- project:
+ name: review.example.com/org/project2
+ common_check:
+ jobs:
+ - project-test1
+
+- project:
+ name: another.example.com/org/project2
+ common_check:
+ jobs:
+ - project-test2
diff --git a/tests/fixtures/config/one-job-project/git/org_one-job-project/README b/tests/fixtures/config/zuul-connections-multiple-gerrits/git/org_project2/README
similarity index 100%
rename from tests/fixtures/config/one-job-project/git/org_one-job-project/README
rename to tests/fixtures/config/zuul-connections-multiple-gerrits/git/org_project2/README
diff --git a/tests/fixtures/config/zuul-connections-multiple-gerrits/main.yaml b/tests/fixtures/config/zuul-connections-multiple-gerrits/main.yaml
index 730cc7e..38810fd 100644
--- a/tests/fixtures/config/zuul-connections-multiple-gerrits/main.yaml
+++ b/tests/fixtures/config/zuul-connections-multiple-gerrits/main.yaml
@@ -2,5 +2,12 @@
name: tenant-one
source:
review_gerrit:
- config-repos:
+ config-projects:
- common-config
+ untrusted-projects:
+ - org/project1
+ - org/project2
+ another_gerrit:
+ untrusted-projects:
+ - org/project1
+ - org/project2
diff --git a/tests/fixtures/config/zuul-connections-same-gerrit/git/common-config/zuul.yaml b/tests/fixtures/config/zuul-connections-same-gerrit/git/common-config/zuul.yaml
index 114a4a3..adc61a3 100644
--- a/tests/fixtures/config/zuul-connections-same-gerrit/git/common-config/zuul.yaml
+++ b/tests/fixtures/config/zuul-connections-same-gerrit/git/common-config/zuul.yaml
@@ -1,7 +1,6 @@
- pipeline:
name: check
manager: independent
- source: review_gerrit
trigger:
review_gerrit:
- event: patchset-created
diff --git a/tests/fixtures/config/zuul-connections-same-gerrit/main.yaml b/tests/fixtures/config/zuul-connections-same-gerrit/main.yaml
index 90297fb..9b2fc83 100644
--- a/tests/fixtures/config/zuul-connections-same-gerrit/main.yaml
+++ b/tests/fixtures/config/zuul-connections-same-gerrit/main.yaml
@@ -2,7 +2,7 @@
name: tenant-one
source:
review_gerrit:
- config-repos:
+ config-projects:
- common-config
- project-repos:
+ untrusted-projects:
- org/project
diff --git a/tests/fixtures/config/zuultrigger/parent-change-enqueued/git/common-config/zuul.yaml b/tests/fixtures/config/zuultrigger/parent-change-enqueued/git/common-config/zuul.yaml
index 8d63576..351092c 100644
--- a/tests/fixtures/config/zuultrigger/parent-change-enqueued/git/common-config/zuul.yaml
+++ b/tests/fixtures/config/zuultrigger/parent-change-enqueued/git/common-config/zuul.yaml
@@ -1,10 +1,10 @@
- pipeline:
name: check
manager: independent
- source: gerrit
require:
- approval:
- - verified: -1
+ gerrit:
+ approval:
+ - verified: -1
trigger:
gerrit:
- event: patchset-created
@@ -21,10 +21,10 @@
- pipeline:
name: gate
manager: dependent
- source: gerrit
require:
- approval:
- - verified: 1
+ gerrit:
+ approval:
+ - verified: 1
trigger:
gerrit:
- event: comment-added
diff --git a/tests/fixtures/config/zuultrigger/parent-change-enqueued/main.yaml b/tests/fixtures/config/zuultrigger/parent-change-enqueued/main.yaml
index a22ed5c..208e274 100644
--- a/tests/fixtures/config/zuultrigger/parent-change-enqueued/main.yaml
+++ b/tests/fixtures/config/zuultrigger/parent-change-enqueued/main.yaml
@@ -2,5 +2,7 @@
name: tenant-one
source:
gerrit:
- config-repos:
+ config-projects:
- common-config
+ untrusted-projects:
+ - org/project
diff --git a/tests/fixtures/config/zuultrigger/project-change-merged/git/common-config/zuul.yaml b/tests/fixtures/config/zuultrigger/project-change-merged/git/common-config/zuul.yaml
index eb6bf1c..48fdffe 100644
--- a/tests/fixtures/config/zuultrigger/project-change-merged/git/common-config/zuul.yaml
+++ b/tests/fixtures/config/zuultrigger/project-change-merged/git/common-config/zuul.yaml
@@ -1,7 +1,6 @@
- pipeline:
name: check
manager: independent
- source: gerrit
trigger:
gerrit:
- event: patchset-created
@@ -16,7 +15,6 @@
name: gate
manager: dependent
failure-message: Build failed. For information on how to proceed, see http://wiki.example.org/Test_Failures
- source: gerrit
trigger:
gerrit:
- event: comment-added
@@ -37,7 +35,6 @@
- pipeline:
name: merge-check
manager: independent
- source: gerrit
ignore-dependencies: true
trigger:
zuul:
diff --git a/tests/fixtures/config/zuultrigger/project-change-merged/main.yaml b/tests/fixtures/config/zuultrigger/project-change-merged/main.yaml
index a22ed5c..9d01f54 100644
--- a/tests/fixtures/config/zuultrigger/project-change-merged/main.yaml
+++ b/tests/fixtures/config/zuultrigger/project-change-merged/main.yaml
@@ -2,5 +2,5 @@
name: tenant-one
source:
gerrit:
- config-repos:
+ config-projects:
- common-config
diff --git a/tests/fixtures/layout-live-reconfiguration-add-job.yaml b/tests/fixtures/layout-live-reconfiguration-add-job.yaml
deleted file mode 100644
index e4aea6f..0000000
--- a/tests/fixtures/layout-live-reconfiguration-add-job.yaml
+++ /dev/null
@@ -1,38 +0,0 @@
-pipelines:
- - name: gate
- manager: DependentPipelineManager
- failure-message: Build failed. For information on how to proceed, see http://wiki.example.org/Test_Failures
- trigger:
- gerrit:
- - event: comment-added
- approval:
- - approved: 1
- success:
- gerrit:
- verified: 2
- submit: true
- failure:
- gerrit:
- verified: -2
- start:
- gerrit:
- verified: 0
- precedence: high
-
-jobs:
- - name: ^.*-merge$
- failure-message: Unable to merge change
- hold-following-changes: true
- - name: project-testfile
- files:
- - '.*-requires'
-
-projects:
- - name: org/project
- merge-mode: cherry-pick
- gate:
- - project-merge:
- - project-test1
- - project-test2
- - project-test3
- - project-testfile
diff --git a/tests/fixtures/layout-live-reconfiguration-failed-job.yaml b/tests/fixtures/layout-live-reconfiguration-failed-job.yaml
deleted file mode 100644
index e811af1..0000000
--- a/tests/fixtures/layout-live-reconfiguration-failed-job.yaml
+++ /dev/null
@@ -1,25 +0,0 @@
-pipelines:
- - name: check
- manager: IndependentPipelineManager
- trigger:
- gerrit:
- - event: patchset-created
- success:
- gerrit:
- verified: 1
- failure:
- gerrit:
- verified: -1
-
-jobs:
- - name: ^.*-merge$
- failure-message: Unable to merge change
- hold-following-changes: true
-
-projects:
- - name: org/project
- merge-mode: cherry-pick
- check:
- - project-merge:
- - project-test2
- - project-testfile
diff --git a/tests/fixtures/layout-live-reconfiguration-shared-queue.yaml b/tests/fixtures/layout-live-reconfiguration-shared-queue.yaml
deleted file mode 100644
index ad3f666..0000000
--- a/tests/fixtures/layout-live-reconfiguration-shared-queue.yaml
+++ /dev/null
@@ -1,62 +0,0 @@
-pipelines:
- - name: check
- manager: IndependentPipelineManager
- trigger:
- gerrit:
- - event: patchset-created
- success:
- gerrit:
- verified: 1
- failure:
- gerrit:
- verified: -1
-
- - name: gate
- manager: DependentPipelineManager
- failure-message: Build failed. For information on how to proceed, see http://wiki.example.org/Test_Failures
- trigger:
- gerrit:
- - event: comment-added
- approval:
- - approved: 1
- success:
- gerrit:
- verified: 2
- submit: true
- failure:
- gerrit:
- verified: -2
- start:
- gerrit:
- verified: 0
- precedence: high
-
-jobs:
- - name: ^.*-merge$
- failure-message: Unable to merge change
- hold-following-changes: true
- - name: project1-project2-integration
- queue-name: integration
-
-projects:
- - name: org/project1
- check:
- - project1-merge:
- - project1-test1
- - project1-test2
- gate:
- - project1-merge:
- - project1-test1
- - project1-test2
-
- - name: org/project2
- check:
- - project2-merge:
- - project2-test1
- - project2-test2
- - project1-project2-integration
- gate:
- - project2-merge:
- - project2-test1
- - project2-test2
- - project1-project2-integration
diff --git a/tests/fixtures/layouts/basic-github.yaml b/tests/fixtures/layouts/basic-github.yaml
new file mode 100644
index 0000000..709fd02
--- /dev/null
+++ b/tests/fixtures/layouts/basic-github.yaml
@@ -0,0 +1,30 @@
+- pipeline:
+ name: check
+ manager: independent
+ trigger:
+ github:
+ - event: pull_request
+ action:
+ - opened
+ - changed
+ - reopened
+ branch: '^master$'
+ - event: pull_request
+ action: comment
+ comment: 'test me'
+ success:
+ github: {}
+ failure:
+ github: {}
+
+- job:
+ name: project-test1
+- job:
+ name: project-test2
+
+- project:
+ name: org/project
+ check:
+ jobs:
+ - project-test1
+ - project-test2
diff --git a/tests/fixtures/layouts/dependent-github.yaml b/tests/fixtures/layouts/dependent-github.yaml
new file mode 100644
index 0000000..46cc7b3
--- /dev/null
+++ b/tests/fixtures/layouts/dependent-github.yaml
@@ -0,0 +1,35 @@
+- pipeline:
+ name: gate
+ description: Gatekeeping
+ manager: dependent
+ trigger:
+ github:
+ - event: pull_request
+ action: labeled
+ label: 'merge'
+ success:
+ github:
+ merge: true
+ unlabel: 'merge'
+ failure:
+ github:
+ unlabel: 'merge'
+
+- job:
+ name: project-test1
+- job:
+ name: project-test2
+- job:
+ name: project-merge
+ failure-message: Unable to merge change
+ hold-following-changes: true
+
+- project:
+ name: org/project
+ gate:
+ jobs:
+ - project-merge
+ - project-test1:
+ dependencies: project-merge
+ - project-test2:
+ dependencies: project-merge
diff --git a/tests/fixtures/layouts/dequeue-github.yaml b/tests/fixtures/layouts/dequeue-github.yaml
new file mode 100644
index 0000000..25e92c9
--- /dev/null
+++ b/tests/fixtures/layouts/dequeue-github.yaml
@@ -0,0 +1,18 @@
+- pipeline:
+ name: check
+ manager: independent
+ trigger:
+ github:
+ - event: pull_request
+ action:
+ - opened
+ - changed
+
+- job:
+ name: one-job-project-merge
+
+- project:
+ name: org/one-job-project
+ check:
+ jobs:
+ - one-job-project-merge
diff --git a/tests/fixtures/config/single-tenant/git/layout-disabled-at/zuul.yaml b/tests/fixtures/layouts/disable_at.yaml
similarity index 95%
rename from tests/fixtures/config/single-tenant/git/layout-disabled-at/zuul.yaml
rename to tests/fixtures/layouts/disable_at.yaml
index bdc19ac..2956ebf 100644
--- a/tests/fixtures/config/single-tenant/git/layout-disabled-at/zuul.yaml
+++ b/tests/fixtures/layouts/disable_at.yaml
@@ -1,7 +1,6 @@
- pipeline:
name: check
manager: independent
- source: gerrit
trigger:
gerrit:
- event: patchset-created
diff --git a/tests/fixtures/config/single-tenant/git/layout-dont-ignore-ref-deletes/zuul.yaml b/tests/fixtures/layouts/dont-ignore-ref-deletes.yaml
similarity index 93%
rename from tests/fixtures/config/single-tenant/git/layout-dont-ignore-ref-deletes/zuul.yaml
rename to tests/fixtures/layouts/dont-ignore-ref-deletes.yaml
index 334d9ac..aee5ac6 100644
--- a/tests/fixtures/config/single-tenant/git/layout-dont-ignore-ref-deletes/zuul.yaml
+++ b/tests/fixtures/layouts/dont-ignore-ref-deletes.yaml
@@ -1,7 +1,6 @@
- pipeline:
name: post
manager: independent
- source: gerrit
trigger:
gerrit:
- event: ref-updated
diff --git a/tests/fixtures/layouts/files-github.yaml b/tests/fixtures/layouts/files-github.yaml
new file mode 100644
index 0000000..734b945
--- /dev/null
+++ b/tests/fixtures/layouts/files-github.yaml
@@ -0,0 +1,18 @@
+- pipeline:
+ name: check
+ manager: independent
+ trigger:
+ github:
+ - event: pull_request
+ action: opened
+
+- job:
+ name: project-test1
+ files:
+ - '.*-requires'
+
+- project:
+ name: org/project
+ check:
+ jobs:
+ - project-test1
diff --git a/tests/fixtures/config/single-tenant/git/layout-footer-message/zuul.yaml b/tests/fixtures/layouts/footer-message.yaml
similarity index 97%
rename from tests/fixtures/config/single-tenant/git/layout-footer-message/zuul.yaml
rename to tests/fixtures/layouts/footer-message.yaml
index c698378..1261902 100644
--- a/tests/fixtures/config/single-tenant/git/layout-footer-message/zuul.yaml
+++ b/tests/fixtures/layouts/footer-message.yaml
@@ -2,7 +2,6 @@
name: gate
manager: dependent
success-message: Build succeeded (gate).
- source: gerrit
failure-message: Build failed. For information on how to proceed, see http://wiki.example.org/Test_Failures
footer-message: For CI problems and help debugging, contact ci@example.org
trigger:
@@ -29,6 +28,7 @@
- job:
name: project-test1
# success-url: http://logs.exxxample.com/{change.number}/{change.patchset}/{pipeline.name}/{job.name}
+
- project:
name: org/project
gate:
diff --git a/tests/fixtures/config/single-tenant/git/layout-idle/zuul.yaml b/tests/fixtures/layouts/idle.yaml
similarity index 95%
rename from tests/fixtures/config/single-tenant/git/layout-idle/zuul.yaml
rename to tests/fixtures/layouts/idle.yaml
index d1fa04b..ff33842 100644
--- a/tests/fixtures/config/single-tenant/git/layout-idle/zuul.yaml
+++ b/tests/fixtures/layouts/idle.yaml
@@ -1,7 +1,6 @@
- pipeline:
name: periodic
manager: independent
- source: gerrit
trigger:
timer:
- time: '* * * * * */1'
diff --git a/tests/fixtures/config/single-tenant/git/layout-ignore-dependencies/zuul.yaml b/tests/fixtures/layouts/ignore-dependencies.yaml
similarity index 95%
rename from tests/fixtures/config/single-tenant/git/layout-ignore-dependencies/zuul.yaml
rename to tests/fixtures/layouts/ignore-dependencies.yaml
index 4010372..86fe674 100644
--- a/tests/fixtures/config/single-tenant/git/layout-ignore-dependencies/zuul.yaml
+++ b/tests/fixtures/layouts/ignore-dependencies.yaml
@@ -2,7 +2,6 @@
name: check
manager: independent
ignore-dependencies: true
- source: gerrit
trigger:
gerrit:
- event: patchset-created
@@ -33,7 +32,6 @@
- job:
name: project1-project2-integration
- queue-name: integration
- project:
name: org/project1
diff --git a/tests/fixtures/config/single-tenant/git/layout-inheritance/zuul.yaml b/tests/fixtures/layouts/inheritance.yaml
similarity index 97%
rename from tests/fixtures/config/single-tenant/git/layout-inheritance/zuul.yaml
rename to tests/fixtures/layouts/inheritance.yaml
index ab8c9a5..65dddab 100644
--- a/tests/fixtures/config/single-tenant/git/layout-inheritance/zuul.yaml
+++ b/tests/fixtures/layouts/inheritance.yaml
@@ -1,7 +1,6 @@
- pipeline:
name: check
manager: independent
- source: gerrit
trigger:
gerrit:
- event: patchset-created
diff --git a/tests/fixtures/config/single-tenant/git/layout-irrelevant-files/zuul.yaml b/tests/fixtures/layouts/irrelevant-files.yaml
similarity index 95%
rename from tests/fixtures/config/single-tenant/git/layout-irrelevant-files/zuul.yaml
rename to tests/fixtures/layouts/irrelevant-files.yaml
index 5d72fc0..3d086dc 100644
--- a/tests/fixtures/config/single-tenant/git/layout-irrelevant-files/zuul.yaml
+++ b/tests/fixtures/layouts/irrelevant-files.yaml
@@ -1,7 +1,6 @@
- pipeline:
name: check
manager: independent
- source: gerrit
trigger:
gerrit:
- event: patchset-created
diff --git a/tests/fixtures/layouts/labeling-github.yaml b/tests/fixtures/layouts/labeling-github.yaml
new file mode 100644
index 0000000..33ce993
--- /dev/null
+++ b/tests/fixtures/layouts/labeling-github.yaml
@@ -0,0 +1,29 @@
+- pipeline:
+ name: labels
+ description: Trigger on labels
+ manager: independent
+ trigger:
+ github:
+ - event: pull_request
+ action: labeled
+ label:
+ - 'test'
+ - event: pull_request
+ action: unlabeled
+ label:
+ - 'do not test'
+ success:
+ github:
+ label:
+ - 'tests passed'
+ unlabel:
+ - 'test'
+
+- job:
+ name: project-labels
+
+- project:
+ name: org/project
+ labels:
+ jobs:
+ - project-labels
diff --git a/tests/fixtures/layouts/live-reconfiguration-add-job.yaml b/tests/fixtures/layouts/live-reconfiguration-add-job.yaml
new file mode 100644
index 0000000..5916282
--- /dev/null
+++ b/tests/fixtures/layouts/live-reconfiguration-add-job.yaml
@@ -0,0 +1,57 @@
+- pipeline:
+ name: gate
+ manager: dependent
+ failure-message: Build failed. For information on how to proceed, see http://wiki.example.org/Test_Failures
+ trigger:
+ gerrit:
+ - event: comment-added
+ approval:
+ - approved: 1
+ success:
+ gerrit:
+ verified: 2
+ submit: true
+ failure:
+ gerrit:
+ verified: -2
+ start:
+ gerrit:
+ verified: 0
+ precedence: high
+
+- job:
+ name: project-merge
+ hold-following-changes: true
+
+- job:
+ name: project-test1
+
+- job:
+ name: project-test2
+
+- job:
+ name: project-test3
+
+- job:
+ name: project-testfile
+ files:
+ - '.*-requires'
+
+- project:
+ name: org/project
+ merge-mode: cherry-pick
+ gate:
+ jobs:
+ - project-merge
+ - project-test1:
+ dependencies:
+ - project-merge
+ - project-test2:
+ dependencies:
+ - project-merge
+ - project-test3:
+ dependencies:
+ - project-merge
+ - project-testfile:
+ dependencies:
+ - project-merge
diff --git a/tests/fixtures/config/single-tenant/git/layout-live-reconfiguration-del-project/zuul.yaml b/tests/fixtures/layouts/live-reconfiguration-del-project.yaml
similarity index 96%
rename from tests/fixtures/config/single-tenant/git/layout-live-reconfiguration-del-project/zuul.yaml
rename to tests/fixtures/layouts/live-reconfiguration-del-project.yaml
index a6d6599..299c612 100644
--- a/tests/fixtures/config/single-tenant/git/layout-live-reconfiguration-del-project/zuul.yaml
+++ b/tests/fixtures/layouts/live-reconfiguration-del-project.yaml
@@ -1,7 +1,6 @@
- pipeline:
name: check
manager: independent
- source: gerrit
trigger:
gerrit:
- event: patchset-created
diff --git a/tests/fixtures/layouts/live-reconfiguration-failed-job.yaml b/tests/fixtures/layouts/live-reconfiguration-failed-job.yaml
new file mode 100644
index 0000000..0907880
--- /dev/null
+++ b/tests/fixtures/layouts/live-reconfiguration-failed-job.yaml
@@ -0,0 +1,35 @@
+- pipeline:
+ name: check
+ manager: independent
+ trigger:
+ gerrit:
+ - event: patchset-created
+ success:
+ gerrit:
+ verified: 1
+ failure:
+ gerrit:
+ verified: -1
+
+- job:
+ name: project-merge
+ hold-following-changes: true
+
+- job:
+ name: project-test2
+
+- job:
+ name: project-testfile
+
+- project:
+ name: org/project
+ merge-mode: cherry-pick
+ check:
+ jobs:
+ - project-merge
+ - project-test2:
+ dependencies:
+ - project-merge
+ - project-testfile:
+ dependencies:
+ - project-merge
diff --git a/tests/fixtures/layouts/live-reconfiguration-shared-queue.yaml b/tests/fixtures/layouts/live-reconfiguration-shared-queue.yaml
new file mode 100644
index 0000000..bf4416a
--- /dev/null
+++ b/tests/fixtures/layouts/live-reconfiguration-shared-queue.yaml
@@ -0,0 +1,86 @@
+- pipeline:
+ name: check
+ manager: independent
+ trigger:
+ gerrit:
+ - event: patchset-created
+ success:
+ gerrit:
+ verified: 1
+ failure:
+ gerrit:
+ verified: -1
+
+- pipeline:
+ name: gate
+ manager: dependent
+ failure-message: Build failed. For information on how to proceed, see http://wiki.example.org/Test_Failures
+ trigger:
+ gerrit:
+ - event: comment-added
+ approval:
+ - approved: 1
+ success:
+ gerrit:
+ verified: 2
+ submit: true
+ failure:
+ gerrit:
+ verified: -2
+ start:
+ gerrit:
+ verified: 0
+ precedence: high
+
+- job:
+ name: project-merge
+ hold-following-changes: true
+
+- job:
+ name: project-test1
+
+- job:
+ name: project-test2
+
+- job:
+ name: project1-project2-integration
+
+- project:
+ name: org/project1
+ check:
+ jobs:
+ - project-merge
+ - project-test1:
+ dependencies: project-merge
+ - project-test2:
+ dependencies: project-merge
+ gate:
+ queue: integrated
+ jobs:
+ - project-merge
+ - project-test1:
+ dependencies: project-merge
+ - project-test2:
+ dependencies: project-merge
+
+- project:
+ name: org/project2
+ check:
+ jobs:
+ - project-merge
+ - project-test1:
+ dependencies: project-merge
+ - project-test2:
+ dependencies: project-merge
+ - project1-project2-integration:
+ dependencies: project-merge
+ gate:
+ queue: integrated
+ jobs:
+ - project-merge
+ - project-test1:
+ dependencies: project-merge
+ - project-test2:
+ dependencies: project-merge
+ - project1-project2-integration:
+ dependencies: project-merge
diff --git a/tests/fixtures/layouts/merging-github.yaml b/tests/fixtures/layouts/merging-github.yaml
new file mode 100644
index 0000000..9f43f75
--- /dev/null
+++ b/tests/fixtures/layouts/merging-github.yaml
@@ -0,0 +1,20 @@
+- pipeline:
+ name: merge
+ description: Pipeline for merging the pull request
+ manager: independent
+ merge-failure-message: 'Merge failed'
+ trigger:
+ github:
+ - event: pull_request
+ action: comment
+ comment: 'merge me'
+ success:
+ github:
+ merge: true
+ comment: false
+
+- project:
+ name: org/project
+ merge:
+ jobs:
+ - noop
diff --git a/tests/fixtures/config/single-tenant/git/layout-semaphore-reconfiguration/zuul.yaml b/tests/fixtures/layouts/no-jobs-project.yaml
similarity index 68%
copy from tests/fixtures/config/single-tenant/git/layout-semaphore-reconfiguration/zuul.yaml
copy to tests/fixtures/layouts/no-jobs-project.yaml
index 0e332e4..803e5a0 100644
--- a/tests/fixtures/config/single-tenant/git/layout-semaphore-reconfiguration/zuul.yaml
+++ b/tests/fixtures/layouts/no-jobs-project.yaml
@@ -1,7 +1,6 @@
- pipeline:
name: check
manager: independent
- source: gerrit
trigger:
gerrit:
- event: patchset-created
@@ -13,10 +12,12 @@
verified: -1
- job:
- name: project-test1
+ name: project-testfile
+ files:
+ - .*-requires
- project:
- name: org/project
+ name: org/no-jobs-project
check:
jobs:
- - project-test1
+ - project-testfile
diff --git a/tests/fixtures/config/single-tenant/git/layout-no-jobs/zuul.yaml b/tests/fixtures/layouts/no-jobs.yaml
similarity index 93%
rename from tests/fixtures/config/single-tenant/git/layout-no-jobs/zuul.yaml
rename to tests/fixtures/layouts/no-jobs.yaml
index 5894440..66193b0 100644
--- a/tests/fixtures/config/single-tenant/git/layout-no-jobs/zuul.yaml
+++ b/tests/fixtures/layouts/no-jobs.yaml
@@ -1,7 +1,6 @@
- pipeline:
name: check
manager: independent
- source: gerrit
trigger:
gerrit:
- event: patchset-created
@@ -16,8 +15,6 @@
name: gate
manager: dependent
success-message: Build succeeded (gate).
- source:
- gerrit
trigger:
gerrit:
- event: comment-added
diff --git a/tests/fixtures/config/single-tenant/git/layout-no-timer/zuul.yaml b/tests/fixtures/layouts/no-timer.yaml
similarity index 95%
rename from tests/fixtures/config/single-tenant/git/layout-no-timer/zuul.yaml
rename to tests/fixtures/layouts/no-timer.yaml
index ab919a4..c8ced62 100644
--- a/tests/fixtures/config/single-tenant/git/layout-no-timer/zuul.yaml
+++ b/tests/fixtures/layouts/no-timer.yaml
@@ -1,7 +1,6 @@
- pipeline:
name: check
manager: independent
- source: gerrit
trigger:
gerrit:
- event: patchset-created
@@ -17,7 +16,6 @@
manager: independent
# Trigger is required, set it to one that is a noop
# during tests that check the timer trigger.
- source: gerrit
trigger:
gerrit:
- event: ref-updated
diff --git a/tests/fixtures/layouts/nonvoting-job.yaml b/tests/fixtures/layouts/nonvoting-job.yaml
new file mode 100644
index 0000000..fee5043
--- /dev/null
+++ b/tests/fixtures/layouts/nonvoting-job.yaml
@@ -0,0 +1,41 @@
+- pipeline:
+ name: gate
+ manager: dependent
+ success-message: Build succeeded (gate).
+ trigger:
+ gerrit:
+ - event: comment-added
+ approval:
+ - approved: 1
+ success:
+ gerrit:
+ verified: 2
+ submit: true
+ failure:
+ gerrit:
+ verified: -2
+ start:
+ gerrit:
+ verified: 0
+ precedence: high
+
+- job:
+ name: nonvoting-project-merge
+ hold-following-changes: true
+
+- job:
+ name: nonvoting-project-test1
+
+- job:
+ name: nonvoting-project-test2
+ voting: false
+
+- project:
+ name: org/nonvoting-project
+ gate:
+ jobs:
+ - nonvoting-project-merge
+ - nonvoting-project-test1:
+ dependencies: nonvoting-project-merge
+ - nonvoting-project-test2:
+ dependencies: nonvoting-project-merge
diff --git a/tests/fixtures/layouts/nonvoting-pipeline.yaml b/tests/fixtures/layouts/nonvoting-pipeline.yaml
new file mode 100644
index 0000000..be5d5af
--- /dev/null
+++ b/tests/fixtures/layouts/nonvoting-pipeline.yaml
@@ -0,0 +1,25 @@
+- pipeline:
+ name: experimental
+ manager: independent
+ trigger:
+ gerrit:
+ - event: patchset-created
+ success:
+ gerrit: {}
+ failure:
+ gerrit: {}
+
+- job:
+ name: project-merge
+ hold-following-changes: true
+
+- job:
+ name: experimental-project-test
+
+- project:
+ name: org/experimental-project
+ experimental:
+ jobs:
+ - project-merge
+ - experimental-project-test:
+ dependencies: project-merge
diff --git a/tests/fixtures/layouts/noop-job.yaml b/tests/fixtures/layouts/noop-job.yaml
new file mode 100644
index 0000000..8081216
--- /dev/null
+++ b/tests/fixtures/layouts/noop-job.yaml
@@ -0,0 +1,26 @@
+- pipeline:
+ name: gate
+ manager: dependent
+ success-message: Build succeeded (gate).
+ trigger:
+ gerrit:
+ - event: comment-added
+ approval:
+ - approved: 1
+ success:
+ gerrit:
+ verified: 2
+ submit: true
+ failure:
+ gerrit:
+ verified: -2
+ start:
+ gerrit:
+ verified: 0
+ precedence: high
+
+- project:
+ name: org/noop-project
+ gate:
+ jobs:
+ - noop
diff --git a/tests/fixtures/config/one-job-project/git/common-config/zuul.yaml b/tests/fixtures/layouts/one-job-project.yaml
similarity index 94%
rename from tests/fixtures/config/one-job-project/git/common-config/zuul.yaml
rename to tests/fixtures/layouts/one-job-project.yaml
index 4579062..b293269 100644
--- a/tests/fixtures/config/one-job-project/git/common-config/zuul.yaml
+++ b/tests/fixtures/layouts/one-job-project.yaml
@@ -1,7 +1,6 @@
- pipeline:
name: check
manager: independent
- source: gerrit
trigger:
gerrit:
- event: patchset-created
@@ -16,7 +15,6 @@
name: gate
manager: dependent
success-message: Build succeeded (gate).
- source: gerrit
trigger:
gerrit:
- event: comment-added
@@ -37,7 +35,6 @@
- pipeline:
name: post
manager: independent
- source: gerrit
trigger:
gerrit:
- event: ref-updated
diff --git a/tests/fixtures/layouts/push-tag-github.yaml b/tests/fixtures/layouts/push-tag-github.yaml
new file mode 100644
index 0000000..54683e9
--- /dev/null
+++ b/tests/fixtures/layouts/push-tag-github.yaml
@@ -0,0 +1,29 @@
+- pipeline:
+ name: post
+ manager: independent
+ trigger:
+ github:
+ - event: push
+ ref: '^refs/heads/master$'
+
+- pipeline:
+ name: tag
+ manager: independent
+ trigger:
+ github:
+ - event: push
+ ref: ^refs/tags/.*$
+
+- job:
+ name: project-post
+- job:
+ name: project-tag
+
+- project:
+ name: org/project
+ post:
+ jobs:
+ - project-post
+ tag:
+ jobs:
+ - project-tag
diff --git a/tests/fixtures/config/single-tenant/git/layout-rate-limit/zuul.yaml b/tests/fixtures/layouts/rate-limit.yaml
similarity index 97%
rename from tests/fixtures/config/single-tenant/git/layout-rate-limit/zuul.yaml
rename to tests/fixtures/layouts/rate-limit.yaml
index c4e00f6..283354e 100644
--- a/tests/fixtures/config/single-tenant/git/layout-rate-limit/zuul.yaml
+++ b/tests/fixtures/layouts/rate-limit.yaml
@@ -2,7 +2,6 @@
name: gate
manager: dependent
failure-message: Build failed. For information on how to proceed, see http://wiki.example.org/Test_Failures
- source: gerrit
trigger:
gerrit:
- event: comment-added
diff --git a/tests/fixtures/layouts/repo-checkout-four-project.yaml b/tests/fixtures/layouts/repo-checkout-four-project.yaml
new file mode 100644
index 0000000..392931a
--- /dev/null
+++ b/tests/fixtures/layouts/repo-checkout-four-project.yaml
@@ -0,0 +1,81 @@
+- pipeline:
+ name: check
+ manager: independent
+ trigger:
+ gerrit:
+ - event: patchset-created
+ success:
+ gerrit:
+ verified: 1
+ failure:
+ gerrit:
+ verified: -1
+
+- pipeline:
+ name: gate
+ manager: dependent
+ success-message: Build succeeded (gate).
+ trigger:
+ gerrit:
+ - event: comment-added
+ approval:
+ - approved: 1
+ success:
+ gerrit:
+ verified: 2
+ submit: true
+ failure:
+ gerrit:
+ verified: -2
+ start:
+ gerrit:
+ verified: 0
+ precedence: high
+
+- job:
+ name: integration
+ required-projects:
+ - org/project1
+ - org/project2
+ - org/project3
+ - org/project4
+
+- project:
+ name: org/project1
+ check:
+ jobs:
+ - integration
+ gate:
+ queue: integrated
+ jobs:
+ - integration
+
+- project:
+ name: org/project2
+ check:
+ jobs:
+ - integration
+ gate:
+ queue: integrated
+ jobs:
+ - integration
+
+- project:
+ name: org/project3
+ check:
+ jobs:
+ - integration
+ gate:
+ queue: integrated
+ jobs:
+ - integration
+
+- project:
+ name: org/project4
+ check:
+ jobs:
+ - integration
+ gate:
+ queue: integrated
+ jobs:
+ - integration
diff --git a/tests/fixtures/layouts/repo-checkout-no-timer.yaml b/tests/fixtures/layouts/repo-checkout-no-timer.yaml
new file mode 100644
index 0000000..2b65850
--- /dev/null
+++ b/tests/fixtures/layouts/repo-checkout-no-timer.yaml
@@ -0,0 +1,20 @@
+- pipeline:
+ name: periodic
+ manager: independent
+ # Trigger is required, set it to one that is a noop
+ # during tests that check the timer trigger.
+ trigger:
+ gerrit:
+ - event: ref-updated
+
+- job:
+ name: integration
+ override-branch: stable/havana
+ required-projects:
+ - org/project1
+
+- project:
+ name: org/project1
+ periodic:
+ jobs:
+ - integration
diff --git a/tests/fixtures/layouts/repo-checkout-post.yaml b/tests/fixtures/layouts/repo-checkout-post.yaml
new file mode 100644
index 0000000..9698289
--- /dev/null
+++ b/tests/fixtures/layouts/repo-checkout-post.yaml
@@ -0,0 +1,25 @@
+- pipeline:
+ name: post
+ manager: independent
+ trigger:
+ gerrit:
+ - event: ref-updated
+ ref: ^(?!refs/).*$
+
+- job:
+ name: integration
+ required-projects:
+ - org/project1
+ - org/project2
+
+- project:
+ name: org/project1
+ post:
+ jobs:
+ - integration
+
+- project:
+ name: org/project2
+ post:
+ jobs:
+ - integration
diff --git a/tests/fixtures/layouts/repo-checkout-six-project.yaml b/tests/fixtures/layouts/repo-checkout-six-project.yaml
new file mode 100644
index 0000000..93a64ea
--- /dev/null
+++ b/tests/fixtures/layouts/repo-checkout-six-project.yaml
@@ -0,0 +1,104 @@
+- pipeline:
+ name: check
+ manager: independent
+ trigger:
+ gerrit:
+ - event: patchset-created
+ success:
+ gerrit:
+ verified: 1
+ failure:
+ gerrit:
+ verified: -1
+
+- pipeline:
+ name: gate
+ manager: dependent
+ success-message: Build succeeded (gate).
+ trigger:
+ gerrit:
+ - event: comment-added
+ approval:
+ - approved: 1
+ success:
+ gerrit:
+ verified: 2
+ submit: true
+ failure:
+ gerrit:
+ verified: -2
+ start:
+ gerrit:
+ verified: 0
+ precedence: high
+
+- job:
+ name: integration
+ required-projects:
+ - org/project1
+ - org/project2
+ - org/project3
+ - name: org/project4
+ override-branch: master
+ - org/project5
+ - org/project6
+
+- project:
+ name: org/project1
+ check:
+ jobs:
+ - integration
+ gate:
+ queue: integrated
+ jobs:
+ - integration
+
+- project:
+ name: org/project2
+ check:
+ jobs:
+ - integration
+ gate:
+ queue: integrated
+ jobs:
+ - integration
+
+- project:
+ name: org/project3
+ check:
+ jobs:
+ - integration
+ gate:
+ queue: integrated
+ jobs:
+ - integration
+
+- project:
+ name: org/project4
+ check:
+ jobs:
+ - integration
+ gate:
+ queue: integrated
+ jobs:
+ - integration
+
+- project:
+ name: org/project5
+ check:
+ jobs:
+ - integration
+ gate:
+ queue: integrated
+ jobs:
+ - integration
+
+- project:
+ name: org/project6
+ check:
+ jobs:
+ - integration
+ gate:
+ queue: integrated
+ jobs:
+ - integration
diff --git a/tests/fixtures/layouts/repo-checkout-timer.yaml b/tests/fixtures/layouts/repo-checkout-timer.yaml
new file mode 100644
index 0000000..d5917d1
--- /dev/null
+++ b/tests/fixtures/layouts/repo-checkout-timer.yaml
@@ -0,0 +1,18 @@
+- pipeline:
+ name: periodic
+ manager: independent
+ trigger:
+ timer:
+ - time: '* * * * * */1'
+
+- job:
+ name: integration
+ override-branch: stable/havana
+ required-projects:
+ - org/project1
+
+- project:
+ name: org/project1
+ periodic:
+ jobs:
+ - integration
diff --git a/tests/fixtures/config/single-tenant/git/layout-no-jobs/zuul.yaml b/tests/fixtures/layouts/repo-checkout-two-project.yaml
similarity index 66%
copy from tests/fixtures/config/single-tenant/git/layout-no-jobs/zuul.yaml
copy to tests/fixtures/layouts/repo-checkout-two-project.yaml
index 5894440..239d80c 100644
--- a/tests/fixtures/config/single-tenant/git/layout-no-jobs/zuul.yaml
+++ b/tests/fixtures/layouts/repo-checkout-two-project.yaml
@@ -1,7 +1,6 @@
- pipeline:
name: check
manager: independent
- source: gerrit
trigger:
gerrit:
- event: patchset-created
@@ -16,8 +15,6 @@
name: gate
manager: dependent
success-message: Build succeeded (gate).
- source:
- gerrit
trigger:
gerrit:
- event: comment-added
@@ -36,14 +33,27 @@
precedence: high
- job:
- name: gate-noop
+ name: integration
+ required-projects:
+ - org/project1
+ - org/project2
- project:
- name: org/project
- merge-mode: cherry-pick
+ name: org/project1
check:
jobs:
- - gate-noop
+ - integration
gate:
+ queue: integrated
jobs:
- - gate-noop
+ - integration
+
+- project:
+ name: org/project2
+ check:
+ jobs:
+ - integration
+ gate:
+ queue: integrated
+ jobs:
+ - integration
diff --git a/tests/fixtures/config/single-tenant/git/layout-repo-deleted/zuul.yaml b/tests/fixtures/layouts/repo-deleted.yaml
similarity index 96%
rename from tests/fixtures/config/single-tenant/git/layout-repo-deleted/zuul.yaml
rename to tests/fixtures/layouts/repo-deleted.yaml
index 5851d75..a33da77 100644
--- a/tests/fixtures/config/single-tenant/git/layout-repo-deleted/zuul.yaml
+++ b/tests/fixtures/layouts/repo-deleted.yaml
@@ -1,7 +1,6 @@
- pipeline:
name: check
manager: independent
- source: gerrit
trigger:
gerrit:
- event: patchset-created
@@ -16,7 +15,6 @@
name: gate
manager: dependent
success-message: Build succeeded (gate).
- source: gerrit
trigger:
gerrit:
- event: comment-added
diff --git a/tests/fixtures/layouts/reporting-github.yaml b/tests/fixtures/layouts/reporting-github.yaml
new file mode 100644
index 0000000..8dd35b0
--- /dev/null
+++ b/tests/fixtures/layouts/reporting-github.yaml
@@ -0,0 +1,47 @@
+- pipeline:
+ name: check
+ description: Standard check
+ manager: independent
+ trigger:
+ github:
+ - event: pull_request
+ action: opened
+ start:
+ github:
+ status: 'pending'
+ comment: false
+ success:
+ github:
+ status: 'success'
+
+- pipeline:
+ name: reporting
+ description: Uncommon reporting
+ manager: independent
+ trigger:
+ github:
+ - event: pull_request
+ action: comment
+ comment: 'reporting check'
+ start:
+ github: {}
+ success:
+ github:
+ comment: false
+ status: 'success'
+ status-url: http://logs.example.com/{pipeline.name}/{change.project}/{change.number}/{change.patchset}/
+ failure:
+ github:
+ comment: false
+
+- job:
+ name: project-test1
+
+- project:
+ name: org/project
+ check:
+ jobs:
+ - project-test1
+ reporting:
+ jobs:
+ - project-test1
diff --git a/tests/fixtures/layouts/requirements-github.yaml b/tests/fixtures/layouts/requirements-github.yaml
new file mode 100644
index 0000000..9933f27
--- /dev/null
+++ b/tests/fixtures/layouts/requirements-github.yaml
@@ -0,0 +1,245 @@
+- pipeline:
+ name: pipeline
+ manager: independent
+ require:
+ github:
+ status: "zuul:check:success"
+ trigger:
+ github:
+ - event: pull_request
+ action: comment
+ comment: 'test me'
+ success:
+ github:
+ comment: true
+
+- pipeline:
+ name: trigger_status
+ manager: independent
+ trigger:
+ github:
+ - event: pull_request
+ action: comment
+ comment: 'trigger me'
+ require-status: "zuul:check:success"
+ success:
+ github:
+ comment: true
+
+- pipeline:
+ name: trigger
+ manager: independent
+ trigger:
+ github:
+ - event: pull_request
+ action: status
+ status: 'zuul:check:success'
+ success:
+ github:
+ status: 'success'
+ failure:
+ github:
+ status: 'failure'
+
+- pipeline:
+ name: reviewusername
+ manager: independent
+ require:
+ github:
+ review:
+ - username: '^(herp|derp)$'
+ type: approved
+ trigger:
+ github:
+ - event: pull_request
+ action: comment
+ comment: 'test me'
+ success:
+ github:
+ comment: true
+
+- pipeline:
+ name: reviewreq
+ manager: independent
+ require:
+ github:
+ review:
+ - type: approved
+ permission: write
+ reject:
+ github:
+ review:
+ - type: changes_requested
+ permission: write
+ trigger:
+ github:
+ - event: pull_request
+ action: comment
+ comment: 'test me'
+ success:
+ github:
+ comment: true
+
+- pipeline:
+ name: reviewuserstate
+ manager: independent
+ require:
+ github:
+ review:
+ - username: 'derp'
+ type: approved
+ permission: write
+ reject:
+ github:
+ review:
+ - type: changes_requested
+ permission: write
+ trigger:
+ github:
+ - event: pull_request
+ action: comment
+ comment: 'test me'
+ success:
+ github:
+ comment: true
+
+- pipeline:
+ name: newer_than
+ manager: independent
+ require:
+ github:
+ review:
+ - type: approved
+ permission: write
+ newer-than: 1d
+ trigger:
+ github:
+ - event: pull_request
+ action: comment
+ comment: 'test me'
+ success:
+ github:
+ comment: true
+
+- pipeline:
+ name: older_than
+ manager: independent
+ require:
+ github:
+ review:
+ - type: approved
+ permission: write
+ older-than: 1d
+ trigger:
+ github:
+ - event: pull_request
+ action: comment
+ comment: 'test me'
+ success:
+ github:
+ comment: true
+
+- pipeline:
+ name: require_open
+ manager: independent
+ require:
+ github:
+ open: true
+ trigger:
+ github:
+ - event: pull_request
+ action: comment
+ comment: 'test me'
+ success:
+ github:
+ comment: true
+
+- pipeline:
+ name: require_current
+ manager: independent
+ require:
+ github:
+ current-patchset: true
+ trigger:
+ github:
+ - event: pull_request
+ action: changed
+ success:
+ github:
+ comment: true
+
+- job:
+ name: project1-pipeline
+- job:
+ name: project2-trigger
+- job:
+ name: project3-reviewusername
+- job:
+ name: project4-reviewreq
+- job:
+ name: project5-reviewuserstate
+- job:
+ name: project6-newerthan
+- job:
+ name: project7-olderthan
+- job:
+ name: project8-requireopen
+- job:
+ name: project9-requirecurrent
+
+- project:
+ name: org/project1
+ pipeline:
+ jobs:
+ - project1-pipeline
+ trigger_status:
+ jobs:
+ - project1-pipeline
+
+- project:
+ name: org/project2
+ trigger:
+ jobs:
+ - project2-trigger
+
+- project:
+ name: org/project3
+ reviewusername:
+ jobs:
+ - project3-reviewusername
+
+- project:
+ name: org/project4
+ reviewreq:
+ jobs:
+ - project4-reviewreq
+
+- project:
+ name: org/project5
+ reviewuserstate:
+ jobs:
+ - project5-reviewuserstate
+
+- project:
+ name: org/project6
+ newer_than:
+ jobs:
+ - project6-newerthan
+
+- project:
+ name: org/project7
+ older_than:
+ jobs:
+ - project7-olderthan
+
+- project:
+ name: org/project8
+ require_open:
+ jobs:
+ - project8-requireopen
+
+- project:
+ name: org/project9
+ require_current:
+ jobs:
+ - project9-requirecurrent
diff --git a/tests/fixtures/layouts/reviews-github.yaml b/tests/fixtures/layouts/reviews-github.yaml
new file mode 100644
index 0000000..1cc887a
--- /dev/null
+++ b/tests/fixtures/layouts/reviews-github.yaml
@@ -0,0 +1,21 @@
+- pipeline:
+ name: reviews
+ manager: independent
+ trigger:
+ github:
+ - event: pull_request_review
+ action: submitted
+ state: 'approve'
+ success:
+ github:
+ label:
+ - 'tests passed'
+
+- job:
+ name: project-reviews
+
+- project:
+ name: org/project
+ reviews:
+ jobs:
+ - project-reviews
diff --git a/tests/fixtures/config/single-tenant/git/layout-smtp/zuul.yaml b/tests/fixtures/layouts/smtp.yaml
similarity index 93%
rename from tests/fixtures/config/single-tenant/git/layout-smtp/zuul.yaml
rename to tests/fixtures/layouts/smtp.yaml
index be90d48..8f53d02 100644
--- a/tests/fixtures/config/single-tenant/git/layout-smtp/zuul.yaml
+++ b/tests/fixtures/layouts/smtp.yaml
@@ -1,7 +1,6 @@
- pipeline:
name: check
manager: independent
- source: gerrit
trigger:
gerrit:
- event: patchset-created
@@ -22,7 +21,6 @@
name: gate
manager: dependent
success-message: Build succeeded (gate).
- source: gerrit
trigger:
gerrit:
- event: comment-added
@@ -60,9 +58,6 @@
- job:
name: project-test2
-- job:
- name: experimental-project-test
-
- project:
name: org/project
check:
diff --git a/tests/fixtures/layouts/tags.yaml b/tests/fixtures/layouts/tags.yaml
new file mode 100644
index 0000000..422eca2
--- /dev/null
+++ b/tests/fixtures/layouts/tags.yaml
@@ -0,0 +1,31 @@
+- pipeline:
+ name: check
+ manager: independent
+ trigger:
+ gerrit:
+ - event: patchset-created
+ success:
+ gerrit:
+ verified: 1
+ failure:
+ gerrit:
+ verified: -1
+
+- job:
+ name: merge
+ tags:
+ - merge
+
+- project:
+ name: org/project1
+ check:
+ jobs:
+ - merge:
+ tags:
+ - extratag
+
+- project:
+ name: org/project2
+ check:
+ jobs:
+ - merge
diff --git a/tests/fixtures/layouts/three-projects.yaml b/tests/fixtures/layouts/three-projects.yaml
new file mode 100644
index 0000000..5d10276
--- /dev/null
+++ b/tests/fixtures/layouts/three-projects.yaml
@@ -0,0 +1,112 @@
+- pipeline:
+ name: check
+ manager: independent
+ trigger:
+ gerrit:
+ - event: patchset-created
+ success:
+ gerrit:
+ verified: 1
+ failure:
+ gerrit:
+ verified: -1
+
+- pipeline:
+ name: gate
+ manager: dependent
+ success-message: Build succeeded (gate).
+ trigger:
+ gerrit:
+ - event: comment-added
+ approval:
+ - approved: 1
+ success:
+ gerrit:
+ verified: 2
+ submit: true
+ failure:
+ gerrit:
+ verified: -2
+ start:
+ gerrit:
+ verified: 0
+ precedence: high
+
+- job:
+ name: project-merge
+ hold-following-changes: true
+
+- job:
+ name: project-test1
+
+- job:
+ name: project-test2
+
+- job:
+ name: project1-project2-integration
+
+- project:
+ name: org/project1
+ check:
+ jobs:
+ - project-merge
+ - project-test1:
+ dependencies: project-merge
+ - project-test2:
+ dependencies: project-merge
+ - project1-project2-integration:
+ dependencies: project-merge
+ gate:
+ queue: integrated
+ jobs:
+ - project-merge
+ - project-test1:
+ dependencies: project-merge
+ - project-test2:
+ dependencies: project-merge
+ - project1-project2-integration:
+ dependencies: project-merge
+
+- project:
+ name: org/project2
+ check:
+ jobs:
+ - project-merge
+ - project-test1:
+ dependencies: project-merge
+ - project-test2:
+ dependencies: project-merge
+ - project1-project2-integration:
+ dependencies: project-merge
+ gate:
+ queue: integrated
+ jobs:
+ - project-merge
+ - project-test1:
+ dependencies: project-merge
+ - project-test2:
+ dependencies: project-merge
+ - project1-project2-integration:
+ dependencies: project-merge
+
+- project:
+ name: org/project3
+ check:
+ jobs:
+ - project-merge
+ - project-test1:
+ dependencies: project-merge
+ - project-test2:
+ dependencies: project-merge
+ - project1-project2-integration:
+ dependencies: project-merge
+ gate:
+ queue: integrated
+ jobs:
+ - project-merge
+ - project-test1:
+ dependencies: project-merge
+ - project-test2:
+ dependencies: project-merge
+ - project1-project2-integration:
+ dependencies: project-merge
diff --git a/tests/fixtures/config/single-tenant/git/layout-timer-smtp/zuul.yaml b/tests/fixtures/layouts/timer-smtp.yaml
similarity index 96%
rename from tests/fixtures/config/single-tenant/git/layout-timer-smtp/zuul.yaml
rename to tests/fixtures/layouts/timer-smtp.yaml
index 2a2eca5..66e9aaf 100644
--- a/tests/fixtures/config/single-tenant/git/layout-timer-smtp/zuul.yaml
+++ b/tests/fixtures/layouts/timer-smtp.yaml
@@ -1,7 +1,6 @@
- pipeline:
name: periodic
manager: independent
- source: gerrit
trigger:
timer:
- time: '* * * * * */1'
diff --git a/tests/fixtures/config/single-tenant/git/layout-timer/zuul.yaml b/tests/fixtures/layouts/timer.yaml
similarity index 94%
rename from tests/fixtures/config/single-tenant/git/layout-timer/zuul.yaml
rename to tests/fixtures/layouts/timer.yaml
index 8072644..95199e7 100644
--- a/tests/fixtures/config/single-tenant/git/layout-timer/zuul.yaml
+++ b/tests/fixtures/layouts/timer.yaml
@@ -1,7 +1,6 @@
- pipeline:
name: check
manager: independent
- source: gerrit
trigger:
gerrit:
- event: patchset-created
@@ -15,7 +14,6 @@
- pipeline:
name: periodic
manager: independent
- source: gerrit
trigger:
timer:
- time: '* * * * * */1'
diff --git a/tests/fixtures/layouts/unmanaged-project.yaml b/tests/fixtures/layouts/unmanaged-project.yaml
new file mode 100644
index 0000000..d72c26e
--- /dev/null
+++ b/tests/fixtures/layouts/unmanaged-project.yaml
@@ -0,0 +1,25 @@
+- pipeline:
+ name: check
+ manager: independent
+ require:
+ gerrit:
+ open: True
+ current-patchset: True
+ trigger:
+ gerrit:
+ - event: patchset-created
+ success:
+ gerrit:
+ verified: 1
+ failure:
+ gerrit:
+ verified: -1
+ start:
+ gerrit:
+ verified: 0
+
+- project:
+ name: org/project1
+ check:
+ jobs:
+ - noop
diff --git a/tests/fixtures/test_id_rsa b/tests/fixtures/test_id_rsa
new file mode 100644
index 0000000..a793bd0
--- /dev/null
+++ b/tests/fixtures/test_id_rsa
@@ -0,0 +1,15 @@
+-----BEGIN RSA PRIVATE KEY-----
+MIICWwIBAAKBgQCX10EQhi7hEMk1h7/fQaEj9H2DxWR0s3RXD5UI7j1Bn21tBUus
+Y0tPC5wXES4VfilXg+EuOKsE6z8x8txP1wd1+d6Hq3SWXnOcqxxv2ueAy6Gc31E7
+a2IVDYvqVsAOtxsWddvMGTj98/lexQBX6Bh+wmuba/43lq5UPepwvfgNOQIDAQAB
+AoGADMCHNlwOk9hVDanY82cPoXVnFSn+xc5MdwNYAOgBPQGmrwFC2bd9G6Zd9ZH7
+zNJLpo3s23Tm6ALZy9gZqJrmhWDZBOqeYtmkd0yUf5bCbUzNre8+gHJY8k9PAxVM
+dPr2bq8G4PyN3yC2euTht35KLjb7hD8WiF3exgI/d8oBvgECQQDFKuWmkLtkSkGo
+1KRbeBfRePbfzhGJ1yHRyO72Z1+hVXuRmtcjTfPhMikgx9dxWbpqr/RPgs7D7N8D
+JpFlsiR/AkEAxSX4LOwovklPzCZ8FyfHhkydNgDyBw8y2Xe1OO0LBN51batf9rcl
+rJBYFvulrD+seYNRCWBFpEi4KKZh4YESRwJAKmz+mYbPK9dmpYOMEjqXNXXH+YSH
+9ZcbKd8IvHCl/Ts9qakd3fTqI2z9uJYH39Yk7MwL0Agfob0Yh78GzlE01QJACheu
+g8Y3M76XCjFyKtFLgpGLfsc/nKLnjIB3U4m3BbHJuyqJyByKHjJpgAuz6IR99N6H
+GH7IMefTHame2yd7YwJAUIGRD+iOO0RJvtEHUbsz6IxrQdubNOvzm/78eyBTcbsa
+8996D18fJF6Q0/Gg0Cm65PNOpIthP3qxFkuuduUEUg==
+-----END RSA PRIVATE KEY-----
diff --git a/tests/fixtures/test_id_rsa.pub b/tests/fixtures/test_id_rsa.pub
new file mode 100644
index 0000000..bffc726
--- /dev/null
+++ b/tests/fixtures/test_id_rsa.pub
@@ -0,0 +1 @@
+ssh-rsa AAAAB3NzaC1yc2EAAAADAQABAAAAgQCX10EQhi7hEMk1h7/fQaEj9H2DxWR0s3RXD5UI7j1Bn21tBUusY0tPC5wXES4VfilXg+EuOKsE6z8x8txP1wd1+d6Hq3SWXnOcqxxv2ueAy6Gc31E7a2IVDYvqVsAOtxsWddvMGTj98/lexQBX6Bh+wmuba/43lq5UPepwvfgNOQ== Private Key For Zuul Tests DO NOT USE
diff --git a/tests/fixtures/zuul-connections-gerrit-and-github.conf b/tests/fixtures/zuul-connections-gerrit-and-github.conf
new file mode 100644
index 0000000..bd05c75
--- /dev/null
+++ b/tests/fixtures/zuul-connections-gerrit-and-github.conf
@@ -0,0 +1,31 @@
+[gearman]
+server=127.0.0.1
+
+[zuul]
+tenant_config=config/multi-driver/main.yaml
+job_name_in_report=true
+
+[merger]
+git_dir=/tmp/zuul-test/git
+git_user_email=zuul@example.com
+git_user_name=zuul
+zuul_url=http://zuul.example.com/p
+
+[executor]
+git_dir=/tmp/zuul-test/executor-git
+
+[connection gerrit]
+driver=gerrit
+server=review.example.com
+user=jenkins
+sshkey=none
+
+[connection github]
+driver=github
+
+[connection outgoing_smtp]
+driver=smtp
+server=localhost
+port=25
+default_from=zuul@example.com
+default_to=you@example.com
diff --git a/tests/fixtures/zuul-connections-merger.conf b/tests/fixtures/zuul-connections-merger.conf
new file mode 100644
index 0000000..7a1bc42
--- /dev/null
+++ b/tests/fixtures/zuul-connections-merger.conf
@@ -0,0 +1,35 @@
+[gearman]
+server=127.0.0.1
+
+[zuul]
+job_name_in_report=true
+status_url=http://zuul.example.com/status
+
+[merger]
+git_dir=/tmp/zuul-test/git
+git_user_email=zuul@example.com
+git_user_name=zuul
+zuul_url=http://zuul.example.com/p
+
+[executor]
+git_dir=/tmp/zuul-test/executor-git
+
+[connection github]
+driver=github
+
+[connection gerrit]
+driver=gerrit
+server=review.example.com
+user=jenkins
+sshkey=fake_id_rsa1
+
+[connection resultsdb]
+driver=sql
+dburi=$MYSQL_FIXTURE_DBURI$
+
+[connection smtp]
+driver=smtp
+server=localhost
+port=25
+default_from=zuul@example.com
+default_to=you@example.com
diff --git a/tests/fixtures/zuul-github-driver.conf b/tests/fixtures/zuul-github-driver.conf
new file mode 100644
index 0000000..dfa813d
--- /dev/null
+++ b/tests/fixtures/zuul-github-driver.conf
@@ -0,0 +1,27 @@
+[gearman]
+server=127.0.0.1
+
+[zuul]
+job_name_in_report=true
+status_url=http://zuul.example.com/status/#{change.number},{change.patchset}
+
+[merger]
+git_dir=/tmp/zuul-test/git
+git_user_email=zuul@example.com
+git_user_name=zuul
+zuul_url=http://zuul.example.com/p
+
+[executor]
+git_dir=/tmp/zuul-test/executor-git
+
+[connection github]
+driver=github
+
+[connection github_ssh]
+driver=github
+sshkey=/home/zuul/.ssh/id_rsa
+
+[connection github_ent]
+driver=github
+sshkey=/home/zuul/.ssh/id_rsa
+git_host=github.enterprise.io
diff --git a/tests/fixtures/zuul-push-reqs.conf b/tests/fixtures/zuul-push-reqs.conf
new file mode 100644
index 0000000..661ac79
--- /dev/null
+++ b/tests/fixtures/zuul-push-reqs.conf
@@ -0,0 +1,23 @@
+[gearman]
+server=127.0.0.1
+
+[zuul]
+job_name_in_report=true
+status_url=http://zuul.example.com/status
+
+[merger]
+git_dir=/tmp/zuul-test/git
+git_user_email=zuul@example.com
+git_user_name=zuul
+zuul_url=http://zuul.example.com/p
+
+[executor]
+git_dir=/tmp/zuul-test/executor-git
+
+[connection github]
+driver=github
+
+[connection gerrit]
+driver=gerrit
+server=review.example.com
+user=jenkins
diff --git a/tests/nodepool/test_nodepool_integration.py b/tests/nodepool/test_nodepool_integration.py
index 2c9a9b3..9c87a10 100644
--- a/tests/nodepool/test_nodepool_integration.py
+++ b/tests/nodepool/test_nodepool_integration.py
@@ -28,9 +28,10 @@
# fake scheduler.
def setUp(self):
- super(BaseTestCase, self).setUp()
+ super(TestNodepoolIntegration, self).setUp()
self.zk = zuul.zk.ZooKeeper()
+ self.addCleanup(self.zk.disconnect)
self.zk.connect('localhost:2181')
self.hostname = socket.gethostname()
diff --git a/tests/unit/test_bubblewrap.py b/tests/unit/test_bubblewrap.py
new file mode 100644
index 0000000..b274944
--- /dev/null
+++ b/tests/unit/test_bubblewrap.py
@@ -0,0 +1,54 @@
+# Licensed under the Apache License, Version 2.0 (the "License"); you may
+# not use this file except in compliance with the License. You may obtain
+# a copy of the License at
+#
+# http://www.apache.org/licenses/LICENSE-2.0
+#
+# Unless required by applicable law or agreed to in writing, software
+# distributed under the License is distributed on an "AS IS" BASIS, WITHOUT
+# WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. See the
+# License for the specific language governing permissions and limitations
+# under the License.
+
+import fixtures
+import logging
+import subprocess
+import tempfile
+import testtools
+
+from zuul.driver import bubblewrap
+from zuul.executor.server import SshAgent
+
+
+class TestBubblewrap(testtools.TestCase):
+ def setUp(self):
+ super(TestBubblewrap, self).setUp()
+ self.log_fixture = self.useFixture(
+ fixtures.FakeLogger(level=logging.DEBUG))
+ self.useFixture(fixtures.NestedTempfile())
+
+ def test_bubblewrap_wraps(self):
+ bwrap = bubblewrap.BubblewrapDriver()
+ work_dir = tempfile.mkdtemp()
+ ansible_dir = tempfile.mkdtemp()
+ ssh_agent = SshAgent()
+ self.addCleanup(ssh_agent.stop)
+ ssh_agent.start()
+ po = bwrap.getPopen(work_dir=work_dir,
+ ansible_dir=ansible_dir,
+ ssh_auth_sock=ssh_agent.env['SSH_AUTH_SOCK'])
+ self.assertTrue(po.passwd_r > 2)
+ self.assertTrue(po.group_r > 2)
+ self.assertTrue(work_dir in po.command)
+ self.assertTrue(ansible_dir in po.command)
+ # Now run /usr/bin/id to verify passwd/group entries made it in
+ true_proc = po(['/usr/bin/id'], stdout=subprocess.PIPE,
+ stderr=subprocess.PIPE)
+ (output, errs) = true_proc.communicate()
+ # Make sure it printed things on stdout
+ self.assertTrue(len(output.strip()))
+ # And that it did not print things on stderr
+ self.assertEqual(0, len(errs.strip()))
+ # Make sure the _r's are closed
+ self.assertIsNone(po.passwd_r)
+ self.assertIsNone(po.group_r)
diff --git a/tests/unit/test_change_matcher.py b/tests/unit/test_change_matcher.py
index 0585322..6b161a1 100644
--- a/tests/unit/test_change_matcher.py
+++ b/tests/unit/test_change_matcher.py
@@ -125,12 +125,18 @@
def test_matches_returns_false_when_not_all_files_match(self):
self._test_matches(False, files=['/COMMIT_MSG', 'docs/foo', 'foo/bar'])
+ def test_matches_returns_true_when_single_file_does_not_match(self):
+ self._test_matches(True, files=['docs/foo'])
+
def test_matches_returns_false_when_commit_message_matches(self):
self._test_matches(False, files=['/COMMIT_MSG'])
def test_matches_returns_true_when_all_files_match(self):
self._test_matches(True, files=['/COMMIT_MSG', 'docs/foo'])
+ def test_matches_returns_true_when_single_file_matches(self):
+ self._test_matches(True, files=['docs/foo'])
+
class TestMatchAll(BaseTestMatcher):
diff --git a/tests/unit/test_cloner.py b/tests/unit/test_cloner.py
deleted file mode 100644
index e65904b..0000000
--- a/tests/unit/test_cloner.py
+++ /dev/null
@@ -1,752 +0,0 @@
-#!/usr/bin/env python
-
-# Copyright 2012 Hewlett-Packard Development Company, L.P.
-# Copyright 2014 Wikimedia Foundation Inc.
-#
-# Licensed under the Apache License, Version 2.0 (the "License"); you may
-# not use this file except in compliance with the License. You may obtain
-# a copy of the License at
-#
-# http://www.apache.org/licenses/LICENSE-2.0
-#
-# Unless required by applicable law or agreed to in writing, software
-# distributed under the License is distributed on an "AS IS" BASIS, WITHOUT
-# WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. See the
-# License for the specific language governing permissions and limitations
-# under the License.
-
-import logging
-import os
-import shutil
-import time
-
-import git
-
-import zuul.lib.cloner
-
-from tests.base import ZuulTestCase
-
-
-class TestCloner(ZuulTestCase):
-
- log = logging.getLogger("zuul.test.cloner")
- workspace_root = None
-
- def setUp(self):
- self.skip("Disabled for early v3 development")
-
- super(TestCloner, self).setUp()
- self.workspace_root = os.path.join(self.test_root, 'workspace')
-
- self.updateConfigLayout(
- 'tests/fixtures/layout-cloner.yaml')
- self.sched.reconfigure(self.config)
- self.registerJobs()
-
- def getWorkspaceRepos(self, projects):
- repos = {}
- for project in projects:
- repos[project] = git.Repo(
- os.path.join(self.workspace_root, project))
- return repos
-
- def getUpstreamRepos(self, projects):
- repos = {}
- for project in projects:
- repos[project] = git.Repo(
- os.path.join(self.upstream_root, project))
- return repos
-
- def test_cache_dir(self):
- projects = ['org/project1', 'org/project2']
- cache_root = os.path.join(self.test_root, "cache")
- for project in projects:
- upstream_repo_path = os.path.join(self.upstream_root, project)
- cache_repo_path = os.path.join(cache_root, project)
- git.Repo.clone_from(upstream_repo_path, cache_repo_path)
-
- self.worker.hold_jobs_in_build = True
- A = self.fake_gerrit.addFakeChange('org/project1', 'master', 'A')
- A.addApproval('CRVW', 2)
- self.fake_gerrit.addEvent(A.addApproval('APRV', 1))
-
- self.waitUntilSettled()
-
- self.assertEquals(1, len(self.builds), "One build is running")
-
- B = self.fake_gerrit.addFakeChange('org/project2', 'master', 'B')
- B.setMerged()
-
- upstream = self.getUpstreamRepos(projects)
- states = [{
- 'org/project1': self.builds[0].parameters['ZUUL_COMMIT'],
- 'org/project2': str(upstream['org/project2'].commit('master')),
- }]
-
- for number, build in enumerate(self.builds):
- self.log.debug("Build parameters: %s", build.parameters)
- cloner = zuul.lib.cloner.Cloner(
- git_base_url=self.upstream_root,
- projects=projects,
- workspace=self.workspace_root,
- zuul_project=build.parameters.get('ZUUL_PROJECT', None),
- zuul_branch=build.parameters['ZUUL_BRANCH'],
- zuul_ref=build.parameters['ZUUL_REF'],
- zuul_url=self.src_root,
- cache_dir=cache_root,
- )
- cloner.execute()
- work = self.getWorkspaceRepos(projects)
- state = states[number]
-
- for project in projects:
- self.assertEquals(state[project],
- str(work[project].commit('HEAD')),
- 'Project %s commit for build %s should '
- 'be correct' % (project, number))
-
- work = self.getWorkspaceRepos(projects)
- # project1 is the zuul_project so the origin should be set to the
- # zuul_url since that is the most up to date.
- cache_repo_path = os.path.join(cache_root, 'org/project1')
- self.assertNotEqual(
- work['org/project1'].remotes.origin.url,
- cache_repo_path,
- 'workspace repo origin should not be the cache'
- )
- zuul_url_repo_path = os.path.join(self.git_root, 'org/project1')
- self.assertEqual(
- work['org/project1'].remotes.origin.url,
- zuul_url_repo_path,
- 'workspace repo origin should be the zuul url'
- )
-
- # project2 is not the zuul_project so the origin should be set
- # to upstream since that is the best we can do
- cache_repo_path = os.path.join(cache_root, 'org/project2')
- self.assertNotEqual(
- work['org/project2'].remotes.origin.url,
- cache_repo_path,
- 'workspace repo origin should not be the cache'
- )
- upstream_repo_path = os.path.join(self.upstream_root, 'org/project2')
- self.assertEqual(
- work['org/project2'].remotes.origin.url,
- upstream_repo_path,
- 'workspace repo origin should be the upstream url'
- )
-
- self.worker.hold_jobs_in_build = False
- self.worker.release()
- self.waitUntilSettled()
-
- def test_one_branch(self):
- self.worker.hold_jobs_in_build = True
-
- projects = ['org/project1', 'org/project2']
- A = self.fake_gerrit.addFakeChange('org/project1', 'master', 'A')
- B = self.fake_gerrit.addFakeChange('org/project2', 'master', 'B')
- A.addApproval('CRVW', 2)
- B.addApproval('CRVW', 2)
- self.fake_gerrit.addEvent(A.addApproval('APRV', 1))
- self.fake_gerrit.addEvent(B.addApproval('APRV', 1))
-
- self.waitUntilSettled()
-
- self.assertEquals(2, len(self.builds), "Two builds are running")
-
- upstream = self.getUpstreamRepos(projects)
- states = [
- {'org/project1': self.builds[0].parameters['ZUUL_COMMIT'],
- 'org/project2': str(upstream['org/project2'].commit('master')),
- },
- {'org/project1': self.builds[0].parameters['ZUUL_COMMIT'],
- 'org/project2': self.builds[1].parameters['ZUUL_COMMIT'],
- },
- ]
-
- for number, build in enumerate(self.builds):
- self.log.debug("Build parameters: %s", build.parameters)
- cloner = zuul.lib.cloner.Cloner(
- git_base_url=self.upstream_root,
- projects=projects,
- workspace=self.workspace_root,
- zuul_project=build.parameters.get('ZUUL_PROJECT', None),
- zuul_branch=build.parameters['ZUUL_BRANCH'],
- zuul_ref=build.parameters['ZUUL_REF'],
- zuul_url=self.src_root,
- )
- cloner.execute()
- work = self.getWorkspaceRepos(projects)
- state = states[number]
-
- for project in projects:
- self.assertEquals(state[project],
- str(work[project].commit('HEAD')),
- 'Project %s commit for build %s should '
- 'be correct' % (project, number))
-
- shutil.rmtree(self.workspace_root)
-
- self.worker.hold_jobs_in_build = False
- self.worker.release()
- self.waitUntilSettled()
-
- def test_multi_branch(self):
- self.worker.hold_jobs_in_build = True
- projects = ['org/project1', 'org/project2',
- 'org/project3', 'org/project4']
-
- self.create_branch('org/project2', 'stable/havana')
- self.create_branch('org/project4', 'stable/havana')
- A = self.fake_gerrit.addFakeChange('org/project1', 'master', 'A')
- B = self.fake_gerrit.addFakeChange('org/project2', 'stable/havana',
- 'B')
- C = self.fake_gerrit.addFakeChange('org/project3', 'master', 'C')
- A.addApproval('CRVW', 2)
- B.addApproval('CRVW', 2)
- C.addApproval('CRVW', 2)
- self.fake_gerrit.addEvent(A.addApproval('APRV', 1))
- self.fake_gerrit.addEvent(B.addApproval('APRV', 1))
- self.fake_gerrit.addEvent(C.addApproval('APRV', 1))
-
- self.waitUntilSettled()
-
- self.assertEquals(3, len(self.builds), "Three builds are running")
-
- upstream = self.getUpstreamRepos(projects)
- states = [
- {'org/project1': self.builds[0].parameters['ZUUL_COMMIT'],
- 'org/project2': str(upstream['org/project2'].commit('master')),
- 'org/project3': str(upstream['org/project3'].commit('master')),
- 'org/project4': str(upstream['org/project4'].
- commit('master')),
- },
- {'org/project1': self.builds[0].parameters['ZUUL_COMMIT'],
- 'org/project2': self.builds[1].parameters['ZUUL_COMMIT'],
- 'org/project3': str(upstream['org/project3'].commit('master')),
- 'org/project4': str(upstream['org/project4'].
- commit('stable/havana')),
- },
- {'org/project1': self.builds[0].parameters['ZUUL_COMMIT'],
- 'org/project2': str(upstream['org/project2'].commit('master')),
- 'org/project3': self.builds[2].parameters['ZUUL_COMMIT'],
- 'org/project4': str(upstream['org/project4'].
- commit('master')),
- },
- ]
-
- for number, build in enumerate(self.builds):
- self.log.debug("Build parameters: %s", build.parameters)
- cloner = zuul.lib.cloner.Cloner(
- git_base_url=self.upstream_root,
- projects=projects,
- workspace=self.workspace_root,
- zuul_project=build.parameters.get('ZUUL_PROJECT', None),
- zuul_branch=build.parameters['ZUUL_BRANCH'],
- zuul_ref=build.parameters['ZUUL_REF'],
- zuul_url=self.src_root,
- )
- cloner.execute()
- work = self.getWorkspaceRepos(projects)
- state = states[number]
-
- for project in projects:
- self.assertEquals(state[project],
- str(work[project].commit('HEAD')),
- 'Project %s commit for build %s should '
- 'be correct' % (project, number))
- shutil.rmtree(self.workspace_root)
-
- self.worker.hold_jobs_in_build = False
- self.worker.release()
- self.waitUntilSettled()
-
- def test_upgrade(self):
- # Simulates an upgrade test
- self.worker.hold_jobs_in_build = True
- projects = ['org/project1', 'org/project2', 'org/project3',
- 'org/project4', 'org/project5', 'org/project6']
-
- self.create_branch('org/project2', 'stable/havana')
- self.create_branch('org/project3', 'stable/havana')
- self.create_branch('org/project4', 'stable/havana')
- self.create_branch('org/project5', 'stable/havana')
- A = self.fake_gerrit.addFakeChange('org/project1', 'master', 'A')
- B = self.fake_gerrit.addFakeChange('org/project2', 'master', 'B')
- C = self.fake_gerrit.addFakeChange('org/project3', 'stable/havana',
- 'C')
- D = self.fake_gerrit.addFakeChange('org/project3', 'master', 'D')
- E = self.fake_gerrit.addFakeChange('org/project4', 'stable/havana',
- 'E')
- A.addApproval('CRVW', 2)
- B.addApproval('CRVW', 2)
- C.addApproval('CRVW', 2)
- D.addApproval('CRVW', 2)
- E.addApproval('CRVW', 2)
- self.fake_gerrit.addEvent(A.addApproval('APRV', 1))
- self.fake_gerrit.addEvent(B.addApproval('APRV', 1))
- self.fake_gerrit.addEvent(C.addApproval('APRV', 1))
- self.fake_gerrit.addEvent(D.addApproval('APRV', 1))
- self.fake_gerrit.addEvent(E.addApproval('APRV', 1))
-
- self.waitUntilSettled()
-
- self.assertEquals(5, len(self.builds), "Five builds are running")
-
- # Check the old side of the upgrade first
- upstream = self.getUpstreamRepos(projects)
- states = [
- {'org/project1': self.builds[0].parameters['ZUUL_COMMIT'],
- 'org/project2': str(upstream['org/project2'].commit(
- 'stable/havana')),
- 'org/project3': str(upstream['org/project3'].commit(
- 'stable/havana')),
- 'org/project4': str(upstream['org/project4'].commit(
- 'stable/havana')),
- 'org/project5': str(upstream['org/project5'].commit(
- 'stable/havana')),
- 'org/project6': str(upstream['org/project6'].commit('master')),
- },
- {'org/project1': self.builds[0].parameters['ZUUL_COMMIT'],
- 'org/project2': str(upstream['org/project2'].commit(
- 'stable/havana')),
- 'org/project3': str(upstream['org/project3'].commit(
- 'stable/havana')),
- 'org/project4': str(upstream['org/project4'].commit(
- 'stable/havana')),
- 'org/project5': str(upstream['org/project5'].commit(
- 'stable/havana')),
- 'org/project6': str(upstream['org/project6'].commit('master')),
- },
- {'org/project1': self.builds[0].parameters['ZUUL_COMMIT'],
- 'org/project2': str(upstream['org/project2'].commit(
- 'stable/havana')),
- 'org/project3': self.builds[2].parameters['ZUUL_COMMIT'],
- 'org/project4': str(upstream['org/project4'].commit(
- 'stable/havana')),
-
- 'org/project5': str(upstream['org/project5'].commit(
- 'stable/havana')),
- 'org/project6': str(upstream['org/project6'].commit('master')),
- },
- {'org/project1': self.builds[0].parameters['ZUUL_COMMIT'],
- 'org/project2': str(upstream['org/project2'].commit(
- 'stable/havana')),
- 'org/project3': self.builds[2].parameters['ZUUL_COMMIT'],
- 'org/project4': str(upstream['org/project4'].commit(
- 'stable/havana')),
- 'org/project5': str(upstream['org/project5'].commit(
- 'stable/havana')),
- 'org/project6': str(upstream['org/project6'].commit('master')),
- },
- {'org/project1': self.builds[0].parameters['ZUUL_COMMIT'],
- 'org/project2': str(upstream['org/project2'].commit(
- 'stable/havana')),
- 'org/project3': self.builds[2].parameters['ZUUL_COMMIT'],
- 'org/project4': self.builds[4].parameters['ZUUL_COMMIT'],
- 'org/project5': str(upstream['org/project5'].commit(
- 'stable/havana')),
- 'org/project6': str(upstream['org/project6'].commit('master')),
- },
- ]
-
- for number, build in enumerate(self.builds):
- self.log.debug("Build parameters: %s", build.parameters)
- cloner = zuul.lib.cloner.Cloner(
- git_base_url=self.upstream_root,
- projects=projects,
- workspace=self.workspace_root,
- zuul_project=build.parameters.get('ZUUL_PROJECT', None),
- zuul_branch=build.parameters['ZUUL_BRANCH'],
- zuul_ref=build.parameters['ZUUL_REF'],
- zuul_url=self.src_root,
- branch='stable/havana', # Old branch for upgrade
- )
- cloner.execute()
- work = self.getWorkspaceRepos(projects)
- state = states[number]
-
- for project in projects:
- self.assertEquals(state[project],
- str(work[project].commit('HEAD')),
- 'Project %s commit for build %s should '
- 'be correct on old side of upgrade' %
- (project, number))
- shutil.rmtree(self.workspace_root)
-
- # Check the new side of the upgrade
- states = [
- {'org/project1': self.builds[0].parameters['ZUUL_COMMIT'],
- 'org/project2': str(upstream['org/project2'].commit('master')),
- 'org/project3': str(upstream['org/project3'].commit('master')),
- 'org/project4': str(upstream['org/project4'].commit('master')),
- 'org/project5': str(upstream['org/project5'].commit('master')),
- 'org/project6': str(upstream['org/project6'].commit('master')),
- },
- {'org/project1': self.builds[0].parameters['ZUUL_COMMIT'],
- 'org/project2': self.builds[1].parameters['ZUUL_COMMIT'],
- 'org/project3': str(upstream['org/project3'].commit('master')),
- 'org/project4': str(upstream['org/project4'].commit('master')),
- 'org/project5': str(upstream['org/project5'].commit('master')),
- 'org/project6': str(upstream['org/project6'].commit('master')),
- },
- {'org/project1': self.builds[0].parameters['ZUUL_COMMIT'],
- 'org/project2': self.builds[1].parameters['ZUUL_COMMIT'],
- 'org/project3': str(upstream['org/project3'].commit('master')),
- 'org/project4': str(upstream['org/project4'].commit('master')),
- 'org/project5': str(upstream['org/project5'].commit('master')),
- 'org/project6': str(upstream['org/project6'].commit('master')),
- },
- {'org/project1': self.builds[0].parameters['ZUUL_COMMIT'],
- 'org/project2': self.builds[1].parameters['ZUUL_COMMIT'],
- 'org/project3': self.builds[3].parameters['ZUUL_COMMIT'],
- 'org/project4': str(upstream['org/project4'].commit('master')),
- 'org/project5': str(upstream['org/project5'].commit('master')),
- 'org/project6': str(upstream['org/project6'].commit('master')),
- },
- {'org/project1': self.builds[0].parameters['ZUUL_COMMIT'],
- 'org/project2': self.builds[1].parameters['ZUUL_COMMIT'],
- 'org/project3': self.builds[3].parameters['ZUUL_COMMIT'],
- 'org/project4': str(upstream['org/project4'].commit('master')),
- 'org/project5': str(upstream['org/project5'].commit('master')),
- 'org/project6': str(upstream['org/project6'].commit('master')),
- },
- ]
-
- for number, build in enumerate(self.builds):
- self.log.debug("Build parameters: %s", build.parameters)
- cloner = zuul.lib.cloner.Cloner(
- git_base_url=self.upstream_root,
- projects=projects,
- workspace=self.workspace_root,
- zuul_project=build.parameters.get('ZUUL_PROJECT', None),
- zuul_branch=build.parameters['ZUUL_BRANCH'],
- zuul_ref=build.parameters['ZUUL_REF'],
- zuul_url=self.src_root,
- branch='master', # New branch for upgrade
- )
- cloner.execute()
- work = self.getWorkspaceRepos(projects)
- state = states[number]
-
- for project in projects:
- self.assertEquals(state[project],
- str(work[project].commit('HEAD')),
- 'Project %s commit for build %s should '
- 'be correct on old side of upgrade' %
- (project, number))
- shutil.rmtree(self.workspace_root)
-
- self.worker.hold_jobs_in_build = False
- self.worker.release()
- self.waitUntilSettled()
-
- def test_project_override(self):
- self.worker.hold_jobs_in_build = True
- projects = ['org/project1', 'org/project2', 'org/project3',
- 'org/project4', 'org/project5', 'org/project6']
-
- self.create_branch('org/project3', 'stable/havana')
- self.create_branch('org/project4', 'stable/havana')
- self.create_branch('org/project6', 'stable/havana')
- A = self.fake_gerrit.addFakeChange('org/project1', 'master', 'A')
- B = self.fake_gerrit.addFakeChange('org/project1', 'master', 'B')
- C = self.fake_gerrit.addFakeChange('org/project2', 'master', 'C')
- D = self.fake_gerrit.addFakeChange('org/project3', 'stable/havana',
- 'D')
- A.addApproval('CRVW', 2)
- B.addApproval('CRVW', 2)
- C.addApproval('CRVW', 2)
- D.addApproval('CRVW', 2)
- self.fake_gerrit.addEvent(A.addApproval('APRV', 1))
- self.fake_gerrit.addEvent(B.addApproval('APRV', 1))
- self.fake_gerrit.addEvent(C.addApproval('APRV', 1))
- self.fake_gerrit.addEvent(D.addApproval('APRV', 1))
-
- self.waitUntilSettled()
-
- self.assertEquals(4, len(self.builds), "Four builds are running")
-
- upstream = self.getUpstreamRepos(projects)
- states = [
- {'org/project1': self.builds[0].parameters['ZUUL_COMMIT'],
- 'org/project2': str(upstream['org/project2'].commit('master')),
- 'org/project3': str(upstream['org/project3'].commit('master')),
- 'org/project4': str(upstream['org/project4'].commit('master')),
- 'org/project5': str(upstream['org/project5'].commit('master')),
- 'org/project6': str(upstream['org/project6'].commit('master')),
- },
- {'org/project1': self.builds[1].parameters['ZUUL_COMMIT'],
- 'org/project2': str(upstream['org/project2'].commit('master')),
- 'org/project3': str(upstream['org/project3'].commit('master')),
- 'org/project4': str(upstream['org/project4'].commit('master')),
- 'org/project5': str(upstream['org/project5'].commit('master')),
- 'org/project6': str(upstream['org/project6'].commit('master')),
- },
- {'org/project1': self.builds[1].parameters['ZUUL_COMMIT'],
- 'org/project2': self.builds[2].parameters['ZUUL_COMMIT'],
- 'org/project3': str(upstream['org/project3'].commit('master')),
- 'org/project4': str(upstream['org/project4'].commit('master')),
- 'org/project5': str(upstream['org/project5'].commit('master')),
- 'org/project6': str(upstream['org/project6'].commit('master')),
- },
- {'org/project1': self.builds[1].parameters['ZUUL_COMMIT'],
- 'org/project2': self.builds[2].parameters['ZUUL_COMMIT'],
- 'org/project3': self.builds[3].parameters['ZUUL_COMMIT'],
- 'org/project4': str(upstream['org/project4'].commit('master')),
- 'org/project5': str(upstream['org/project5'].commit('master')),
- 'org/project6': str(upstream['org/project6'].commit(
- 'stable/havana')),
- },
- ]
-
- for number, build in enumerate(self.builds):
- self.log.debug("Build parameters: %s", build.parameters)
- cloner = zuul.lib.cloner.Cloner(
- git_base_url=self.upstream_root,
- projects=projects,
- workspace=self.workspace_root,
- zuul_project=build.parameters.get('ZUUL_PROJECT', None),
- zuul_branch=build.parameters['ZUUL_BRANCH'],
- zuul_ref=build.parameters['ZUUL_REF'],
- zuul_url=self.src_root,
- project_branches={'org/project4': 'master'},
- )
- cloner.execute()
- work = self.getWorkspaceRepos(projects)
- state = states[number]
-
- for project in projects:
- self.assertEquals(state[project],
- str(work[project].commit('HEAD')),
- 'Project %s commit for build %s should '
- 'be correct' % (project, number))
- shutil.rmtree(self.workspace_root)
-
- self.worker.hold_jobs_in_build = False
- self.worker.release()
- self.waitUntilSettled()
-
- def test_periodic(self):
- self.worker.hold_jobs_in_build = True
- self.create_branch('org/project', 'stable/havana')
- self.updateConfigLayout(
- 'tests/fixtures/layout-timer.yaml')
- self.sched.reconfigure(self.config)
- self.registerJobs()
-
- # The pipeline triggers every second, so we should have seen
- # several by now.
- time.sleep(5)
- self.waitUntilSettled()
-
- builds = self.builds[:]
-
- self.worker.hold_jobs_in_build = False
- # Stop queuing timer triggered jobs so that the assertions
- # below don't race against more jobs being queued.
- self.updateConfigLayout(
- 'tests/fixtures/layout-no-timer.yaml')
- self.sched.reconfigure(self.config)
- self.registerJobs()
- self.worker.release()
- self.waitUntilSettled()
-
- projects = ['org/project']
-
- self.assertEquals(2, len(builds), "Two builds are running")
-
- upstream = self.getUpstreamRepos(projects)
- states = [
- {'org/project':
- str(upstream['org/project'].commit('stable/havana')),
- },
- {'org/project':
- str(upstream['org/project'].commit('stable/havana')),
- },
- ]
-
- for number, build in enumerate(builds):
- self.log.debug("Build parameters: %s", build.parameters)
- cloner = zuul.lib.cloner.Cloner(
- git_base_url=self.upstream_root,
- projects=projects,
- workspace=self.workspace_root,
- zuul_project=build.parameters.get('ZUUL_PROJECT', None),
- zuul_branch=build.parameters.get('ZUUL_BRANCH', None),
- zuul_ref=build.parameters.get('ZUUL_REF', None),
- zuul_url=self.src_root,
- branch='stable/havana',
- )
- cloner.execute()
- work = self.getWorkspaceRepos(projects)
- state = states[number]
-
- for project in projects:
- self.assertEquals(state[project],
- str(work[project].commit('HEAD')),
- 'Project %s commit for build %s should '
- 'be correct' % (project, number))
-
- shutil.rmtree(self.workspace_root)
-
- self.worker.hold_jobs_in_build = False
- self.worker.release()
- self.waitUntilSettled()
-
- def test_periodic_update(self):
- # Test that the merger correctly updates its local repository
- # before running a periodic job.
-
- # Prime the merger with the current state
- A = self.fake_gerrit.addFakeChange('org/project', 'master', 'A')
- self.fake_gerrit.addEvent(A.getPatchsetCreatedEvent(1))
- self.waitUntilSettled()
-
- # Merge a different change
- B = self.fake_gerrit.addFakeChange('org/project', 'master', 'B')
- B.setMerged()
-
- # Start a periodic job
- self.worker.hold_jobs_in_build = True
- self.executor.negative_function_cache_ttl = 0
- self.config.set('zuul', 'layout_config',
- 'tests/fixtures/layout-timer.yaml')
- self.sched.reconfigure(self.config)
- self.registerJobs()
-
- # The pipeline triggers every second, so we should have seen
- # several by now.
- time.sleep(5)
- self.waitUntilSettled()
-
- builds = self.builds[:]
-
- self.worker.hold_jobs_in_build = False
- # Stop queuing timer triggered jobs so that the assertions
- # below don't race against more jobs being queued.
- self.config.set('zuul', 'layout_config',
- 'tests/fixtures/layout-no-timer.yaml')
- self.sched.reconfigure(self.config)
- self.registerJobs()
- self.worker.release()
- self.waitUntilSettled()
-
- projects = ['org/project']
-
- self.assertEquals(2, len(builds), "Two builds are running")
-
- upstream = self.getUpstreamRepos(projects)
- self.assertEqual(upstream['org/project'].commit('master').hexsha,
- B.patchsets[0]['revision'])
- states = [
- {'org/project':
- str(upstream['org/project'].commit('master')),
- },
- {'org/project':
- str(upstream['org/project'].commit('master')),
- },
- ]
-
- for number, build in enumerate(builds):
- self.log.debug("Build parameters: %s", build.parameters)
- cloner = zuul.lib.cloner.Cloner(
- git_base_url=self.upstream_root,
- projects=projects,
- workspace=self.workspace_root,
- zuul_project=build.parameters.get('ZUUL_PROJECT', None),
- zuul_branch=build.parameters.get('ZUUL_BRANCH', None),
- zuul_ref=build.parameters.get('ZUUL_REF', None),
- zuul_url=self.git_root,
- )
- cloner.execute()
- work = self.getWorkspaceRepos(projects)
- state = states[number]
-
- for project in projects:
- self.assertEquals(state[project],
- str(work[project].commit('HEAD')),
- 'Project %s commit for build %s should '
- 'be correct' % (project, number))
-
- shutil.rmtree(self.workspace_root)
-
- self.worker.hold_jobs_in_build = False
- self.worker.release()
- self.waitUntilSettled()
-
- def test_post_checkout(self):
- self.worker.hold_jobs_in_build = True
- project = "org/project1"
-
- A = self.fake_gerrit.addFakeChange(project, 'master', 'A')
- event = A.getRefUpdatedEvent()
- A.setMerged()
- self.fake_gerrit.addEvent(event)
- self.waitUntilSettled()
-
- build = self.builds[0]
- state = {'org/project1': build.parameters['ZUUL_COMMIT']}
-
- build.release()
- self.waitUntilSettled()
-
- cloner = zuul.lib.cloner.Cloner(
- git_base_url=self.upstream_root,
- projects=[project],
- workspace=self.workspace_root,
- zuul_project=build.parameters.get('ZUUL_PROJECT', None),
- zuul_branch=build.parameters.get('ZUUL_BRANCH', None),
- zuul_ref=build.parameters.get('ZUUL_REF', None),
- zuul_newrev=build.parameters.get('ZUUL_NEWREV', None),
- zuul_url=self.git_root,
- )
- cloner.execute()
- work = self.getWorkspaceRepos([project])
- self.assertEquals(state[project],
- str(work[project].commit('HEAD')),
- 'Project %s commit for build %s should '
- 'be correct' % (project, 0))
- shutil.rmtree(self.workspace_root)
-
- def test_post_and_master_checkout(self):
- self.worker.hold_jobs_in_build = True
- projects = ["org/project1", "org/project2"]
-
- A = self.fake_gerrit.addFakeChange(projects[0], 'master', 'A')
- event = A.getRefUpdatedEvent()
- A.setMerged()
- self.fake_gerrit.addEvent(event)
- self.waitUntilSettled()
-
- build = self.builds[0]
- upstream = self.getUpstreamRepos(projects)
- state = {'org/project1':
- build.parameters['ZUUL_COMMIT'],
- 'org/project2':
- str(upstream['org/project2'].commit('master')),
- }
-
- build.release()
- self.waitUntilSettled()
-
- cloner = zuul.lib.cloner.Cloner(
- git_base_url=self.upstream_root,
- projects=projects,
- workspace=self.workspace_root,
- zuul_project=build.parameters.get('ZUUL_PROJECT', None),
- zuul_branch=build.parameters.get('ZUUL_BRANCH', None),
- zuul_ref=build.parameters.get('ZUUL_REF', None),
- zuul_newrev=build.parameters.get('ZUUL_NEWREV', None),
- zuul_url=self.git_root,
- )
- cloner.execute()
- work = self.getWorkspaceRepos(projects)
-
- for project in projects:
- self.assertEquals(state[project],
- str(work[project].commit('HEAD')),
- 'Project %s commit for build %s should '
- 'be correct' % (project, 0))
- shutil.rmtree(self.workspace_root)
diff --git a/tests/unit/test_cloner_cmd.py b/tests/unit/test_cloner_cmd.py
index 2d8747f..84bd243 100644
--- a/tests/unit/test_cloner_cmd.py
+++ b/tests/unit/test_cloner_cmd.py
@@ -26,7 +26,7 @@
def test_default_cache_dir_empty(self):
self.app.parse_arguments(['base', 'repo'])
- self.assertEqual(None, self.app.args.cache_dir)
+ self.assertIsNone(self.app.args.cache_dir)
def test_default_cache_dir_environ(self):
try:
diff --git a/tests/unit/test_connection.py b/tests/unit/test_connection.py
index ee9a0b0..142a248 100644
--- a/tests/unit/test_connection.py
+++ b/tests/unit/test_connection.py
@@ -147,9 +147,6 @@
def test_multiple_sql_connections(self):
"Test putting results in different databases"
- self.updateConfigLayout(
- 'tests/fixtures/layout-sql-reporter.yaml')
-
# Add a successful result
A = self.fake_gerrit.addFakeChange('org/project', 'master', 'A')
self.fake_gerrit.addEvent(A.getPatchsetCreatedEvent(1))
@@ -268,3 +265,63 @@
self.executor_server.hold_jobs_in_build = False
self.executor_server.release()
self.waitUntilSettled()
+
+ def test_multiple_project_separate_gerrits_common_pipeline(self):
+ self.executor_server.hold_jobs_in_build = True
+
+ A = self.fake_another_gerrit.addFakeChange(
+ 'org/project2', 'master', 'A')
+ self.fake_another_gerrit.addEvent(A.getPatchsetCreatedEvent(1))
+
+ self.waitUntilSettled()
+
+ self.assertBuilds([dict(name='project-test2',
+ changes='1,1',
+ project='org/project2',
+ pipeline='common_check')])
+
+ # NOTE(jamielennox): the tests back the git repo for both connections
+ # onto the same git repo on the file system. If we just create another
+ # fake change the fake_review_gerrit will try to create another 1,1
+ # change and git will fail to create the ref. Arbitrarily set it to get
+ # around the problem.
+ self.fake_review_gerrit.change_number = 50
+
+ B = self.fake_review_gerrit.addFakeChange(
+ 'org/project2', 'master', 'B')
+ self.fake_review_gerrit.addEvent(B.getPatchsetCreatedEvent(1))
+
+ self.waitUntilSettled()
+
+ self.assertBuilds([
+ dict(name='project-test2',
+ changes='1,1',
+ project='org/project2',
+ pipeline='common_check'),
+ dict(name='project-test1',
+ changes='51,1',
+ project='org/project2',
+ pipeline='common_check'),
+ ])
+
+ self.executor_server.hold_jobs_in_build = False
+ self.executor_server.release()
+ self.waitUntilSettled()
+
+
+class TestConnectionsMerger(ZuulTestCase):
+ config_file = 'zuul-connections-merger.conf'
+ tenant_config_file = 'config/single-tenant/main.yaml'
+
+ def configure_connections(self):
+ super(TestConnectionsMerger, self).configure_connections(True)
+
+ def test_connections_merger(self):
+ "Test merger only configures source connections"
+
+ self.assertIn("gerrit", self.connections.connections)
+ self.assertIn("github", self.connections.connections)
+ self.assertNotIn("smtp", self.connections.connections)
+ self.assertNotIn("sql", self.connections.connections)
+ self.assertNotIn("timer", self.connections.connections)
+ self.assertNotIn("zuul", self.connections.connections)
diff --git a/tests/unit/test_encryption.py b/tests/unit/test_encryption.py
index 4dda78b..b424769 100644
--- a/tests/unit/test_encryption.py
+++ b/tests/unit/test_encryption.py
@@ -41,14 +41,14 @@
def test_pkcs1_oaep(self):
"Verify encryption and decryption"
- orig_plaintext = "some text to encrypt"
+ orig_plaintext = b"some text to encrypt"
ciphertext = encryption.encrypt_pkcs1_oaep(orig_plaintext, self.public)
plaintext = encryption.decrypt_pkcs1_oaep(ciphertext, self.private)
self.assertEqual(orig_plaintext, plaintext)
def test_openssl_pkcs1_oaep(self):
"Verify that we can decrypt something encrypted with OpenSSL"
- orig_plaintext = "some text to encrypt"
+ orig_plaintext = b"some text to encrypt"
pem_public = encryption.serialize_rsa_public_key(self.public)
public_file = tempfile.NamedTemporaryFile(delete=False)
try:
diff --git a/tests/unit/test_executor.py b/tests/unit/test_executor.py
new file mode 100644
index 0000000..100e4ec
--- /dev/null
+++ b/tests/unit/test_executor.py
@@ -0,0 +1,349 @@
+#!/usr/bin/env python
+
+# Copyright 2012 Hewlett-Packard Development Company, L.P.
+# Copyright 2014 Wikimedia Foundation Inc.
+#
+# Licensed under the Apache License, Version 2.0 (the "License"); you may
+# not use this file except in compliance with the License. You may obtain
+# a copy of the License at
+#
+# http://www.apache.org/licenses/LICENSE-2.0
+#
+# Unless required by applicable law or agreed to in writing, software
+# distributed under the License is distributed on an "AS IS" BASIS, WITHOUT
+# WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. See the
+# License for the specific language governing permissions and limitations
+# under the License.
+
+import logging
+import time
+
+from tests.base import ZuulTestCase, simple_layout
+
+
+class TestExecutorRepos(ZuulTestCase):
+ tenant_config_file = 'config/single-tenant/main.yaml'
+
+ log = logging.getLogger("zuul.test.executor")
+
+ def assertRepoState(self, repo, state, project, build, number):
+ if 'branch' in state:
+ self.assertFalse(repo.head.is_detached,
+ 'Project %s commit for build %s #%s should '
+ 'not have a detached HEAD' % (
+ project, build, number))
+ self.assertEquals(repo.active_branch.name,
+ state['branch'],
+ 'Project %s commit for build %s #%s should '
+ 'be on the correct branch' % (
+ project, build, number))
+ if 'commit' in state:
+ self.assertEquals(state['commit'],
+ str(repo.commit('HEAD')),
+ 'Project %s commit for build %s #%s should '
+ 'be correct' % (
+ project, build, number))
+ ref = repo.commit('HEAD')
+ repo_messages = set(
+ [c.message.strip() for c in repo.iter_commits(ref)])
+ if 'present' in state:
+ for change in state['present']:
+ msg = '%s-1' % change.subject
+ self.assertTrue(msg in repo_messages,
+ 'Project %s for build %s #%s should '
+ 'have change %s' % (
+ project, build, number, change.subject))
+ if 'absent' in state:
+ for change in state['absent']:
+ msg = '%s-1' % change.subject
+ self.assertTrue(msg not in repo_messages,
+ 'Project %s for build %s #%s should '
+ 'not have change %s' % (
+ project, build, number, change.subject))
+
+ @simple_layout('layouts/repo-checkout-two-project.yaml')
+ def test_one_branch(self):
+ self.executor_server.hold_jobs_in_build = True
+
+ p1 = 'review.example.com/org/project1'
+ p2 = 'review.example.com/org/project2'
+ projects = [p1, p2]
+ A = self.fake_gerrit.addFakeChange('org/project1', 'master', 'A')
+ B = self.fake_gerrit.addFakeChange('org/project2', 'master', 'B')
+ A.addApproval('code-review', 2)
+ B.addApproval('code-review', 2)
+ self.fake_gerrit.addEvent(A.addApproval('approved', 1))
+ self.fake_gerrit.addEvent(B.addApproval('approved', 1))
+
+ self.waitUntilSettled()
+
+ self.assertEquals(2, len(self.builds), "Two builds are running")
+
+ upstream = self.getUpstreamRepos(projects)
+ states = [
+ {p1: dict(present=[A], absent=[B], branch='master'),
+ p2: dict(commit=str(upstream[p2].commit('master')),
+ branch='master'),
+ },
+ {p1: dict(present=[A], absent=[B], branch='master'),
+ p2: dict(present=[B], absent=[A], branch='master'),
+ },
+ ]
+
+ for number, build in enumerate(self.builds):
+ self.log.debug("Build parameters: %s", build.parameters)
+ work = build.getWorkspaceRepos(projects)
+ state = states[number]
+
+ for project in projects:
+ self.assertRepoState(work[project], state[project],
+ project, build, number)
+
+ self.executor_server.hold_jobs_in_build = False
+ self.executor_server.release()
+ self.waitUntilSettled()
+
+ @simple_layout('layouts/repo-checkout-four-project.yaml')
+ def test_multi_branch(self):
+ self.executor_server.hold_jobs_in_build = True
+
+ p1 = 'review.example.com/org/project1'
+ p2 = 'review.example.com/org/project2'
+ p3 = 'review.example.com/org/project3'
+ p4 = 'review.example.com/org/project4'
+ projects = [p1, p2, p3, p4]
+
+ self.create_branch('org/project2', 'stable/havana')
+ self.create_branch('org/project4', 'stable/havana')
+ A = self.fake_gerrit.addFakeChange('org/project1', 'master', 'A')
+ B = self.fake_gerrit.addFakeChange('org/project2', 'stable/havana',
+ 'B')
+ C = self.fake_gerrit.addFakeChange('org/project3', 'master', 'C')
+ A.addApproval('code-review', 2)
+ B.addApproval('code-review', 2)
+ C.addApproval('code-review', 2)
+ self.fake_gerrit.addEvent(A.addApproval('approved', 1))
+ self.fake_gerrit.addEvent(B.addApproval('approved', 1))
+ self.fake_gerrit.addEvent(C.addApproval('approved', 1))
+
+ self.waitUntilSettled()
+
+ self.assertEquals(3, len(self.builds), "Three builds are running")
+
+ upstream = self.getUpstreamRepos(projects)
+ states = [
+ {p1: dict(present=[A], absent=[B, C], branch='master'),
+ p2: dict(commit=str(upstream[p2].commit('master')),
+ branch='master'),
+ p3: dict(commit=str(upstream[p3].commit('master')),
+ branch='master'),
+ p4: dict(commit=str(upstream[p4].commit('master')),
+ branch='master'),
+ },
+ {p1: dict(present=[A], absent=[B, C], branch='master'),
+ p2: dict(present=[B], absent=[A, C], branch='stable/havana'),
+ p3: dict(commit=str(upstream[p3].commit('master')),
+ branch='master'),
+ p4: dict(commit=str(upstream[p4].commit('stable/havana')),
+ branch='stable/havana'),
+ },
+ {p1: dict(present=[A], absent=[B, C], branch='master'),
+ p2: dict(commit=str(upstream[p2].commit('master')),
+ branch='master'),
+ p3: dict(present=[C], absent=[A, B], branch='master'),
+ p4: dict(commit=str(upstream[p4].commit('master')),
+ branch='master'),
+ },
+ ]
+
+ for number, build in enumerate(self.builds):
+ self.log.debug("Build parameters: %s", build.parameters)
+ work = build.getWorkspaceRepos(projects)
+ state = states[number]
+
+ for project in projects:
+ self.assertRepoState(work[project], state[project],
+ project, build, number)
+
+ self.executor_server.hold_jobs_in_build = False
+ self.executor_server.release()
+ self.waitUntilSettled()
+
+ @simple_layout('layouts/repo-checkout-six-project.yaml')
+ def test_project_override(self):
+ self.executor_server.hold_jobs_in_build = True
+
+ p1 = 'review.example.com/org/project1'
+ p2 = 'review.example.com/org/project2'
+ p3 = 'review.example.com/org/project3'
+ p4 = 'review.example.com/org/project4'
+ p5 = 'review.example.com/org/project5'
+ p6 = 'review.example.com/org/project6'
+ projects = [p1, p2, p3, p4, p5, p6]
+
+ self.create_branch('org/project3', 'stable/havana')
+ self.create_branch('org/project4', 'stable/havana')
+ self.create_branch('org/project6', 'stable/havana')
+ A = self.fake_gerrit.addFakeChange('org/project1', 'master', 'A')
+ B = self.fake_gerrit.addFakeChange('org/project1', 'master', 'B')
+ C = self.fake_gerrit.addFakeChange('org/project2', 'master', 'C')
+ D = self.fake_gerrit.addFakeChange('org/project3', 'stable/havana',
+ 'D')
+ A.addApproval('code-review', 2)
+ B.addApproval('code-review', 2)
+ C.addApproval('code-review', 2)
+ D.addApproval('code-review', 2)
+ self.fake_gerrit.addEvent(A.addApproval('approved', 1))
+ self.fake_gerrit.addEvent(B.addApproval('approved', 1))
+ self.fake_gerrit.addEvent(C.addApproval('approved', 1))
+ self.fake_gerrit.addEvent(D.addApproval('approved', 1))
+
+ self.waitUntilSettled()
+
+ self.assertEquals(4, len(self.builds), "Four builds are running")
+
+ upstream = self.getUpstreamRepos(projects)
+ states = [
+ {p1: dict(present=[A], absent=[B, C, D], branch='master'),
+ p2: dict(commit=str(upstream[p2].commit('master')),
+ branch='master'),
+ p3: dict(commit=str(upstream[p3].commit('master')),
+ branch='master'),
+ p4: dict(commit=str(upstream[p4].commit('master')),
+ branch='master'),
+ p5: dict(commit=str(upstream[p5].commit('master')),
+ branch='master'),
+ p6: dict(commit=str(upstream[p6].commit('master')),
+ branch='master'),
+ },
+ {p1: dict(present=[A, B], absent=[C, D], branch='master'),
+ p2: dict(commit=str(upstream[p2].commit('master')),
+ branch='master'),
+ p3: dict(commit=str(upstream[p3].commit('master')),
+ branch='master'),
+ p4: dict(commit=str(upstream[p4].commit('master')),
+ branch='master'),
+ p5: dict(commit=str(upstream[p5].commit('master')),
+ branch='master'),
+ p6: dict(commit=str(upstream[p6].commit('master')),
+ branch='master'),
+ },
+ {p1: dict(present=[A, B], absent=[C, D], branch='master'),
+ p2: dict(present=[C], absent=[A, B, D], branch='master'),
+ p3: dict(commit=str(upstream[p3].commit('master')),
+ branch='master'),
+ p4: dict(commit=str(upstream[p4].commit('master')),
+ branch='master'),
+ p5: dict(commit=str(upstream[p5].commit('master')),
+ branch='master'),
+ p6: dict(commit=str(upstream[p6].commit('master')),
+ branch='master'),
+ },
+ {p1: dict(present=[A, B], absent=[C, D], branch='master'),
+ p2: dict(present=[C], absent=[A, B, D], branch='master'),
+ p3: dict(present=[D], absent=[A, B, C],
+ branch='stable/havana'),
+ p4: dict(commit=str(upstream[p4].commit('master')),
+ branch='master'),
+ p5: dict(commit=str(upstream[p5].commit('master')),
+ branch='master'),
+ p6: dict(commit=str(upstream[p6].commit('stable/havana')),
+ branch='stable/havana'),
+ },
+ ]
+
+ for number, build in enumerate(self.builds):
+ self.log.debug("Build parameters: %s", build.parameters)
+ work = build.getWorkspaceRepos(projects)
+ state = states[number]
+
+ for project in projects:
+ self.assertRepoState(work[project], state[project],
+ project, build, number)
+
+ self.executor_server.hold_jobs_in_build = False
+ self.executor_server.release()
+ self.waitUntilSettled()
+
+ def test_periodic(self):
+ # This test can not use simple_layout because it must start
+ # with a configuration which does not include a
+ # timer-triggered job so that we have an opportunity to set
+ # the hold flag before the first job.
+ self.executor_server.hold_jobs_in_build = True
+ # Start timer trigger - also org/project
+ self.commitConfigUpdate('common-config',
+ 'layouts/repo-checkout-timer.yaml')
+ self.sched.reconfigure(self.config)
+
+ p1 = 'review.example.com/org/project1'
+ projects = [p1]
+ self.create_branch('org/project1', 'stable/havana')
+
+ # The pipeline triggers every second, so we should have seen
+ # several by now.
+ time.sleep(5)
+ self.waitUntilSettled()
+
+ # Stop queuing timer triggered jobs so that the assertions
+ # below don't race against more jobs being queued.
+ self.commitConfigUpdate('common-config',
+ 'layouts/repo-checkout-no-timer.yaml')
+ self.sched.reconfigure(self.config)
+
+ self.assertEquals(1, len(self.builds), "One build is running")
+
+ upstream = self.getUpstreamRepos(projects)
+ states = [
+ {p1: dict(commit=str(upstream[p1].commit('stable/havana')),
+ branch='stable/havana'),
+ },
+ ]
+
+ for number, build in enumerate(self.builds):
+ self.log.debug("Build parameters: %s", build.parameters)
+ work = build.getWorkspaceRepos(projects)
+ state = states[number]
+
+ for project in projects:
+ self.assertRepoState(work[project], state[project],
+ project, build, number)
+
+ self.executor_server.hold_jobs_in_build = False
+ self.executor_server.release()
+ self.waitUntilSettled()
+
+ @simple_layout('layouts/repo-checkout-post.yaml')
+ def test_post_and_master_checkout(self):
+ self.executor_server.hold_jobs_in_build = True
+ p1 = "review.example.com/org/project1"
+ p2 = "review.example.com/org/project2"
+ projects = [p1, p2]
+
+ A = self.fake_gerrit.addFakeChange('org/project1', 'master', 'A')
+ event = A.getRefUpdatedEvent()
+ A.setMerged()
+ self.fake_gerrit.addEvent(event)
+ self.waitUntilSettled()
+
+ upstream = self.getUpstreamRepos(projects)
+ states = [
+ {p1: dict(commit=str(upstream[p1].commit('master')),
+ present=[A], branch='master'),
+ p2: dict(commit=str(upstream[p2].commit('master')),
+ absent=[A], branch='master'),
+ },
+ ]
+
+ for number, build in enumerate(self.builds):
+ self.log.debug("Build parameters: %s", build.parameters)
+ work = build.getWorkspaceRepos(projects)
+ state = states[number]
+
+ for project in projects:
+ self.assertRepoState(work[project], state[project],
+ project, build, number)
+
+ self.executor_server.hold_jobs_in_build = False
+ self.executor_server.release()
+ self.waitUntilSettled()
diff --git a/tests/unit/test_gerrit.py b/tests/unit/test_gerrit.py
index 999e55d..a369aff 100644
--- a/tests/unit/test_gerrit.py
+++ b/tests/unit/test_gerrit.py
@@ -22,6 +22,7 @@
import tests.base
from tests.base import BaseTestCase
+from zuul.driver.gerrit import GerritDriver
from zuul.driver.gerrit.gerritconnection import GerritConnection
FIXTURE_DIR = os.path.join(tests.base.FIXTURE_DIR, 'gerrit')
@@ -53,7 +54,8 @@
'user': 'gerrit',
'server': 'localhost',
}
- gerrit = GerritConnection(None, 'review_gerrit', gerrit_config)
+ driver = GerritDriver()
+ gerrit = GerritConnection(driver, 'review_gerrit', gerrit_config)
calls, values = read_fixtures(files)
_ssh_mock.side_effect = values
diff --git a/tests/unit/test_git_driver.py b/tests/unit/test_git_driver.py
index 4d75944..1cfadf4 100644
--- a/tests/unit/test_git_driver.py
+++ b/tests/unit/test_git_driver.py
@@ -27,10 +27,10 @@
tenant = self.sched.abide.tenants.get('tenant-one')
# Check that we have the git source for common-config and the
# gerrit source for the project.
- self.assertEqual('git', tenant.config_repos[0][0].name)
- self.assertEqual('common-config', tenant.config_repos[0][1].name)
- self.assertEqual('gerrit', tenant.project_repos[0][0].name)
- self.assertEqual('org/project', tenant.project_repos[0][1].name)
+ self.assertEqual('git', tenant.config_projects[0].source.name)
+ self.assertEqual('common-config', tenant.config_projects[0].name)
+ self.assertEqual('gerrit', tenant.untrusted_projects[0].source.name)
+ self.assertEqual('org/project', tenant.untrusted_projects[0].name)
# The configuration for this test is accessed via the git
# driver (in common-config), rather than the gerrit driver, so
diff --git a/tests/unit/test_github_driver.py b/tests/unit/test_github_driver.py
new file mode 100644
index 0000000..6cc010e
--- /dev/null
+++ b/tests/unit/test_github_driver.py
@@ -0,0 +1,515 @@
+# Copyright 2015 GoodData
+#
+# Licensed under the Apache License, Version 2.0 (the "License"); you may
+# not use this file except in compliance with the License. You may obtain
+# a copy of the License at
+#
+# http://www.apache.org/licenses/LICENSE-2.0
+#
+# Unless required by applicable law or agreed to in writing, software
+# distributed under the License is distributed on an "AS IS" BASIS, WITHOUT
+# WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. See the
+# License for the specific language governing permissions and limitations
+# under the License.
+
+import re
+from testtools.matchers import MatchesRegex
+import time
+
+from tests.base import ZuulTestCase, simple_layout, random_sha1
+
+
+class TestGithubDriver(ZuulTestCase):
+ config_file = 'zuul-github-driver.conf'
+
+ @simple_layout('layouts/basic-github.yaml', driver='github')
+ def test_pull_event(self):
+ self.executor_server.hold_jobs_in_build = True
+
+ A = self.fake_github.openFakePullRequest('org/project', 'master', 'A')
+ self.fake_github.emitEvent(A.getPullRequestOpenedEvent())
+ self.waitUntilSettled()
+
+ build_params = self.builds[0].parameters
+ self.assertEqual('master', build_params['ZUUL_BRANCH'])
+ self.assertEqual(str(A.number), build_params['ZUUL_CHANGE'])
+ self.assertEqual(A.head_sha, build_params['ZUUL_PATCHSET'])
+
+ self.executor_server.hold_jobs_in_build = False
+ self.executor_server.release()
+ self.waitUntilSettled()
+
+ self.assertEqual('SUCCESS',
+ self.getJobFromHistory('project-test1').result)
+ self.assertEqual('SUCCESS',
+ self.getJobFromHistory('project-test2').result)
+
+ job = self.getJobFromHistory('project-test2')
+ zuulvars = job.parameters['vars']['zuul']
+ self.assertEqual(A.number, zuulvars['change'])
+ self.assertEqual(A.head_sha, zuulvars['patchset'])
+ self.assertEqual(1, len(A.comments))
+ self.assertEqual(2, len(self.history))
+
+ # test_pull_unmatched_branch_event
+ self.create_branch('org/project', 'unmatched_branch')
+ B = self.fake_github.openFakePullRequest(
+ 'org/project', 'unmatched_branch', 'B')
+ self.fake_github.emitEvent(B.getPullRequestOpenedEvent())
+ self.waitUntilSettled()
+
+ self.assertEqual(2, len(self.history))
+
+ @simple_layout('layouts/files-github.yaml', driver='github')
+ def test_pull_matched_file_event(self):
+ A = self.fake_github.openFakePullRequest(
+ 'org/project', 'master', 'A',
+ files=['random.txt', 'build-requires'])
+ self.fake_github.emitEvent(A.getPullRequestOpenedEvent())
+ self.waitUntilSettled()
+ self.assertEqual(1, len(self.history))
+
+ # test_pull_unmatched_file_event
+ B = self.fake_github.openFakePullRequest('org/project', 'master', 'B',
+ files=['random.txt'])
+ self.fake_github.emitEvent(B.getPullRequestOpenedEvent())
+ self.waitUntilSettled()
+ self.assertEqual(1, len(self.history))
+
+ @simple_layout('layouts/basic-github.yaml', driver='github')
+ def test_comment_event(self):
+ A = self.fake_github.openFakePullRequest('org/project', 'master', 'A')
+ self.fake_github.emitEvent(A.getCommentAddedEvent('test me'))
+ self.waitUntilSettled()
+ self.assertEqual(2, len(self.history))
+
+ # Test an unmatched comment, history should remain the same
+ B = self.fake_github.openFakePullRequest('org/project', 'master', 'B')
+ self.fake_github.emitEvent(B.getCommentAddedEvent('casual comment'))
+ self.waitUntilSettled()
+ self.assertEqual(2, len(self.history))
+
+ @simple_layout('layouts/push-tag-github.yaml', driver='github')
+ def test_tag_event(self):
+ self.executor_server.hold_jobs_in_build = True
+
+ sha = random_sha1()
+ self.fake_github.emitEvent(
+ self.fake_github.getPushEvent('org/project', 'refs/tags/newtag',
+ new_rev=sha))
+ self.waitUntilSettled()
+
+ build_params = self.builds[0].parameters
+ self.assertEqual('refs/tags/newtag', build_params['ZUUL_REF'])
+ self.assertEqual('00000000000000000000000000000000',
+ build_params['ZUUL_OLDREV'])
+ self.assertEqual(sha, build_params['ZUUL_NEWREV'])
+
+ self.executor_server.hold_jobs_in_build = False
+ self.executor_server.release()
+ self.waitUntilSettled()
+
+ self.assertEqual('SUCCESS',
+ self.getJobFromHistory('project-tag').result)
+
+ @simple_layout('layouts/push-tag-github.yaml', driver='github')
+ def test_push_event(self):
+ self.executor_server.hold_jobs_in_build = True
+
+ old_sha = random_sha1()
+ new_sha = random_sha1()
+ self.fake_github.emitEvent(
+ self.fake_github.getPushEvent('org/project', 'refs/heads/master',
+ old_sha, new_sha))
+ self.waitUntilSettled()
+
+ build_params = self.builds[0].parameters
+ self.assertEqual('refs/heads/master', build_params['ZUUL_REF'])
+ self.assertEqual(old_sha, build_params['ZUUL_OLDREV'])
+ self.assertEqual(new_sha, build_params['ZUUL_NEWREV'])
+
+ self.executor_server.hold_jobs_in_build = False
+ self.executor_server.release()
+ self.waitUntilSettled()
+
+ self.assertEqual('SUCCESS',
+ self.getJobFromHistory('project-post').result)
+ self.assertEqual(1, len(self.history))
+
+ # test unmatched push event
+ old_sha = random_sha1()
+ new_sha = random_sha1()
+ self.fake_github.emitEvent(
+ self.fake_github.getPushEvent('org/project',
+ 'refs/heads/unmatched_branch',
+ old_sha, new_sha))
+ self.waitUntilSettled()
+
+ self.assertEqual(1, len(self.history))
+
+ @simple_layout('layouts/labeling-github.yaml', driver='github')
+ def test_labels(self):
+ A = self.fake_github.openFakePullRequest('org/project', 'master', 'A')
+ self.fake_github.emitEvent(A.addLabel('test'))
+ self.waitUntilSettled()
+ self.assertEqual(1, len(self.history))
+ self.assertEqual('project-labels', self.history[0].name)
+ self.assertEqual(['tests passed'], A.labels)
+
+ # test label removed
+ B = self.fake_github.openFakePullRequest('org/project', 'master', 'B')
+ B.addLabel('do not test')
+ self.fake_github.emitEvent(B.removeLabel('do not test'))
+ self.waitUntilSettled()
+ self.assertEqual(2, len(self.history))
+ self.assertEqual('project-labels', self.history[1].name)
+ self.assertEqual(['tests passed'], B.labels)
+
+ # test unmatched label
+ C = self.fake_github.openFakePullRequest('org/project', 'master', 'C')
+ self.fake_github.emitEvent(C.addLabel('other label'))
+ self.waitUntilSettled()
+ self.assertEqual(2, len(self.history))
+ self.assertEqual(['other label'], C.labels)
+
+ @simple_layout('layouts/reviews-github.yaml', driver='github')
+ def test_review_event(self):
+ A = self.fake_github.openFakePullRequest('org/project', 'master', 'A')
+ self.fake_github.emitEvent(A.getReviewAddedEvent('approve'))
+ self.waitUntilSettled()
+ self.assertEqual(1, len(self.history))
+ self.assertEqual('project-reviews', self.history[0].name)
+ self.assertEqual(['tests passed'], A.labels)
+
+ # test_review_unmatched_event
+ B = self.fake_github.openFakePullRequest('org/project', 'master', 'B')
+ self.fake_github.emitEvent(B.getReviewAddedEvent('comment'))
+ self.waitUntilSettled()
+ self.assertEqual(1, len(self.history))
+
+ @simple_layout('layouts/dequeue-github.yaml', driver='github')
+ def test_dequeue_pull_synchronized(self):
+ self.executor_server.hold_jobs_in_build = True
+
+ A = self.fake_github.openFakePullRequest(
+ 'org/one-job-project', 'master', 'A')
+ self.fake_github.emitEvent(A.getPullRequestOpenedEvent())
+ self.waitUntilSettled()
+
+ # event update stamp has resolution one second, wait so the latter
+ # one has newer timestamp
+ time.sleep(1)
+ A.addCommit()
+ self.fake_github.emitEvent(A.getPullRequestSynchronizeEvent())
+ self.waitUntilSettled()
+
+ self.executor_server.hold_jobs_in_build = False
+ self.executor_server.release()
+ self.waitUntilSettled()
+
+ self.assertEqual(2, len(self.history))
+ self.assertEqual(1, self.countJobResults(self.history, 'ABORTED'))
+
+ @simple_layout('layouts/dequeue-github.yaml', driver='github')
+ def test_dequeue_pull_abandoned(self):
+ self.executor_server.hold_jobs_in_build = True
+
+ A = self.fake_github.openFakePullRequest(
+ 'org/one-job-project', 'master', 'A')
+ self.fake_github.emitEvent(A.getPullRequestOpenedEvent())
+ self.waitUntilSettled()
+ self.fake_github.emitEvent(A.getPullRequestClosedEvent())
+ self.waitUntilSettled()
+
+ self.executor_server.hold_jobs_in_build = False
+ self.executor_server.release()
+ self.waitUntilSettled()
+
+ self.assertEqual(1, len(self.history))
+ self.assertEqual(1, self.countJobResults(self.history, 'ABORTED'))
+
+ @simple_layout('layouts/basic-github.yaml', driver='github')
+ def test_git_https_url(self):
+ """Test that git_ssh option gives git url with ssh"""
+ url = self.fake_github.real_getGitUrl('org/project')
+ self.assertEqual('https://github.com/org/project', url)
+
+ @simple_layout('layouts/basic-github.yaml', driver='github')
+ def test_git_ssh_url(self):
+ """Test that git_ssh option gives git url with ssh"""
+ url = self.fake_github_ssh.real_getGitUrl('org/project')
+ self.assertEqual('ssh://git@github.com/org/project.git', url)
+
+ @simple_layout('layouts/basic-github.yaml', driver='github')
+ def test_git_enterprise_url(self):
+ """Test that git_url option gives git url with proper host"""
+ url = self.fake_github_ent.real_getGitUrl('org/project')
+ self.assertEqual('ssh://git@github.enterprise.io/org/project.git', url)
+
+ @simple_layout('layouts/reporting-github.yaml', driver='github')
+ def test_reporting(self):
+ # pipeline reports pull status both on start and success
+ self.executor_server.hold_jobs_in_build = True
+ A = self.fake_github.openFakePullRequest('org/project', 'master', 'A')
+ self.fake_github.emitEvent(A.getPullRequestOpenedEvent())
+ self.waitUntilSettled()
+ # We should have a status container for the head sha
+ self.assertIn(A.head_sha, A.statuses.keys())
+ # We should only have one status for the head sha
+ self.assertEqual(1, len(A.statuses[A.head_sha]))
+ check_status = A.statuses[A.head_sha][0]
+ check_url = ('http://zuul.example.com/status/#%s,%s' %
+ (A.number, A.head_sha))
+ self.assertEqual('tenant-one/check', check_status['context'])
+ self.assertEqual('Standard check', check_status['description'])
+ self.assertEqual('pending', check_status['state'])
+ self.assertEqual(check_url, check_status['url'])
+ self.assertEqual(0, len(A.comments))
+
+ self.executor_server.hold_jobs_in_build = False
+ self.executor_server.release()
+ self.waitUntilSettled()
+ # We should only have two statuses for the head sha
+ self.assertEqual(2, len(A.statuses[A.head_sha]))
+ check_status = A.statuses[A.head_sha][0]
+ check_url = ('http://zuul.example.com/status/#%s,%s' %
+ (A.number, A.head_sha))
+ self.assertEqual('tenant-one/check', check_status['context'])
+ self.assertEqual('success', check_status['state'])
+ self.assertEqual(check_url, check_status['url'])
+ self.assertEqual(1, len(A.comments))
+ self.assertThat(A.comments[0],
+ MatchesRegex('.*Build succeeded.*', re.DOTALL))
+
+ # pipeline does not report any status but does comment
+ self.executor_server.hold_jobs_in_build = True
+ self.fake_github.emitEvent(
+ A.getCommentAddedEvent('reporting check'))
+ self.waitUntilSettled()
+ self.assertEqual(2, len(A.statuses[A.head_sha]))
+ # comments increased by one for the start message
+ self.assertEqual(2, len(A.comments))
+ self.assertThat(A.comments[1],
+ MatchesRegex('.*Starting reporting jobs.*', re.DOTALL))
+ self.executor_server.hold_jobs_in_build = False
+ self.executor_server.release()
+ self.waitUntilSettled()
+ # pipeline reports success status
+ self.assertEqual(3, len(A.statuses[A.head_sha]))
+ report_status = A.statuses[A.head_sha][0]
+ self.assertEqual('tenant-one/reporting', report_status['context'])
+ self.assertEqual('success', report_status['state'])
+ self.assertEqual(2, len(A.comments))
+ report_url = ('http://logs.example.com/reporting/%s/%s/%s/' %
+ (A.project, A.number, A.head_sha))
+ self.assertEqual(report_url, report_status['url'])
+
+ @simple_layout('layouts/merging-github.yaml', driver='github')
+ def test_report_pull_merge(self):
+ # pipeline merges the pull request on success
+ A = self.fake_github.openFakePullRequest('org/project', 'master',
+ 'PR title')
+ self.fake_github.emitEvent(A.getCommentAddedEvent('merge me'))
+ self.waitUntilSettled()
+ self.assertTrue(A.is_merged)
+ self.assertThat(A.merge_message,
+ MatchesRegex('.*PR title.*Reviewed-by.*', re.DOTALL))
+
+ # pipeline merges the pull request on success after failure
+ self.fake_github.merge_failure = True
+ B = self.fake_github.openFakePullRequest('org/project', 'master', 'B')
+ self.fake_github.emitEvent(B.getCommentAddedEvent('merge me'))
+ self.waitUntilSettled()
+ self.assertFalse(B.is_merged)
+ self.fake_github.merge_failure = False
+
+ # pipeline merges the pull request on second run of merge
+ # first merge failed on 405 Method Not Allowed error
+ self.fake_github.merge_not_allowed_count = 1
+ C = self.fake_github.openFakePullRequest('org/project', 'master', 'C')
+ self.fake_github.emitEvent(C.getCommentAddedEvent('merge me'))
+ self.waitUntilSettled()
+ self.assertTrue(C.is_merged)
+
+ # pipeline does not merge the pull request
+ # merge failed on 405 Method Not Allowed error - twice
+ self.fake_github.merge_not_allowed_count = 2
+ D = self.fake_github.openFakePullRequest('org/project', 'master', 'D')
+ self.fake_github.emitEvent(D.getCommentAddedEvent('merge me'))
+ self.waitUntilSettled()
+ self.assertFalse(D.is_merged)
+ self.assertEqual(len(D.comments), 1)
+ self.assertEqual(D.comments[0], 'Merge failed')
+
+ @simple_layout('layouts/dependent-github.yaml', driver='github')
+ def test_parallel_changes(self):
+ "Test that changes are tested in parallel and merged in series"
+
+ self.executor_server.hold_jobs_in_build = True
+ A = self.fake_github.openFakePullRequest('org/project', 'master', 'A')
+ B = self.fake_github.openFakePullRequest('org/project', 'master', 'B')
+ C = self.fake_github.openFakePullRequest('org/project', 'master', 'C')
+
+ self.fake_github.emitEvent(A.addLabel('merge'))
+ self.fake_github.emitEvent(B.addLabel('merge'))
+ self.fake_github.emitEvent(C.addLabel('merge'))
+
+ self.waitUntilSettled()
+ self.assertEqual(len(self.builds), 1)
+ self.assertEqual(self.builds[0].name, 'project-merge')
+ self.assertTrue(self.builds[0].hasChanges(A))
+
+ self.executor_server.release('.*-merge')
+ self.waitUntilSettled()
+ self.assertEqual(len(self.builds), 3)
+ self.assertEqual(self.builds[0].name, 'project-test1')
+ self.assertTrue(self.builds[0].hasChanges(A))
+ self.assertEqual(self.builds[1].name, 'project-test2')
+ self.assertTrue(self.builds[1].hasChanges(A))
+ self.assertEqual(self.builds[2].name, 'project-merge')
+ self.assertTrue(self.builds[2].hasChanges(A, B))
+
+ self.executor_server.release('.*-merge')
+ self.waitUntilSettled()
+ self.assertEqual(len(self.builds), 5)
+ self.assertEqual(self.builds[0].name, 'project-test1')
+ self.assertTrue(self.builds[0].hasChanges(A))
+ self.assertEqual(self.builds[1].name, 'project-test2')
+ self.assertTrue(self.builds[1].hasChanges(A))
+
+ self.assertEqual(self.builds[2].name, 'project-test1')
+ self.assertTrue(self.builds[2].hasChanges(A))
+ self.assertEqual(self.builds[3].name, 'project-test2')
+ self.assertTrue(self.builds[3].hasChanges(A, B))
+
+ self.assertEqual(self.builds[4].name, 'project-merge')
+ self.assertTrue(self.builds[4].hasChanges(A, B, C))
+
+ self.executor_server.release('.*-merge')
+ self.waitUntilSettled()
+ self.assertEqual(len(self.builds), 6)
+ self.assertEqual(self.builds[0].name, 'project-test1')
+ self.assertTrue(self.builds[0].hasChanges(A))
+ self.assertEqual(self.builds[1].name, 'project-test2')
+ self.assertTrue(self.builds[1].hasChanges(A))
+
+ self.assertEqual(self.builds[2].name, 'project-test1')
+ self.assertTrue(self.builds[2].hasChanges(A, B))
+ self.assertEqual(self.builds[3].name, 'project-test2')
+ self.assertTrue(self.builds[3].hasChanges(A, B))
+
+ self.assertEqual(self.builds[4].name, 'project-test1')
+ self.assertTrue(self.builds[4].hasChanges(A, B, C))
+ self.assertEqual(self.builds[5].name, 'project-test2')
+ self.assertTrue(self.builds[5].hasChanges(A, B, C))
+
+ all_builds = self.builds[:]
+ self.release(all_builds[2])
+ self.release(all_builds[3])
+ self.waitUntilSettled()
+ self.assertFalse(A.is_merged)
+ self.assertFalse(B.is_merged)
+ self.assertFalse(C.is_merged)
+
+ self.release(all_builds[0])
+ self.release(all_builds[1])
+ self.waitUntilSettled()
+ self.assertTrue(A.is_merged)
+ self.assertTrue(B.is_merged)
+ self.assertFalse(C.is_merged)
+
+ self.executor_server.hold_jobs_in_build = False
+ self.executor_server.release()
+ self.waitUntilSettled()
+ self.assertEqual(len(self.builds), 0)
+ self.assertEqual(len(self.history), 9)
+ self.assertTrue(C.is_merged)
+
+ self.assertNotIn('merge', A.labels)
+ self.assertNotIn('merge', B.labels)
+ self.assertNotIn('merge', C.labels)
+
+ @simple_layout('layouts/dependent-github.yaml', driver='github')
+ def test_failed_changes(self):
+ "Test that a change behind a failed change is retested"
+ self.executor_server.hold_jobs_in_build = True
+
+ A = self.fake_github.openFakePullRequest('org/project', 'master', 'A')
+ B = self.fake_github.openFakePullRequest('org/project', 'master', 'B')
+
+ self.executor_server.failJob('project-test1', A)
+
+ self.fake_github.emitEvent(A.addLabel('merge'))
+ self.fake_github.emitEvent(B.addLabel('merge'))
+ self.waitUntilSettled()
+
+ self.executor_server.release('.*-merge')
+ self.waitUntilSettled()
+
+ self.executor_server.hold_jobs_in_build = False
+ self.executor_server.release()
+
+ self.waitUntilSettled()
+ # It's certain that the merge job for change 2 will run, but
+ # the test1 and test2 jobs may or may not run.
+ self.assertTrue(len(self.history) > 6)
+ self.assertFalse(A.is_merged)
+ self.assertTrue(B.is_merged)
+ self.assertNotIn('merge', A.labels)
+ self.assertNotIn('merge', B.labels)
+
+ @simple_layout('layouts/dependent-github.yaml', driver='github')
+ def test_failed_change_at_head(self):
+ "Test that if a change at the head fails, jobs behind it are canceled"
+
+ self.executor_server.hold_jobs_in_build = True
+ A = self.fake_github.openFakePullRequest('org/project', 'master', 'A')
+ B = self.fake_github.openFakePullRequest('org/project', 'master', 'B')
+ C = self.fake_github.openFakePullRequest('org/project', 'master', 'C')
+
+ self.executor_server.failJob('project-test1', A)
+
+ self.fake_github.emitEvent(A.addLabel('merge'))
+ self.fake_github.emitEvent(B.addLabel('merge'))
+ self.fake_github.emitEvent(C.addLabel('merge'))
+
+ self.waitUntilSettled()
+
+ self.assertEqual(len(self.builds), 1)
+ self.assertEqual(self.builds[0].name, 'project-merge')
+ self.assertTrue(self.builds[0].hasChanges(A))
+
+ self.executor_server.release('.*-merge')
+ self.waitUntilSettled()
+ self.executor_server.release('.*-merge')
+ self.waitUntilSettled()
+ self.executor_server.release('.*-merge')
+ self.waitUntilSettled()
+
+ self.assertEqual(len(self.builds), 6)
+ self.assertEqual(self.builds[0].name, 'project-test1')
+ self.assertEqual(self.builds[1].name, 'project-test2')
+ self.assertEqual(self.builds[2].name, 'project-test1')
+ self.assertEqual(self.builds[3].name, 'project-test2')
+ self.assertEqual(self.builds[4].name, 'project-test1')
+ self.assertEqual(self.builds[5].name, 'project-test2')
+
+ self.release(self.builds[0])
+ self.waitUntilSettled()
+
+ # project-test2, project-merge for B
+ self.assertEqual(len(self.builds), 2)
+ self.assertEqual(self.countJobResults(self.history, 'ABORTED'), 4)
+
+ self.executor_server.hold_jobs_in_build = False
+ self.executor_server.release()
+ self.waitUntilSettled()
+
+ self.assertEqual(len(self.builds), 0)
+ self.assertEqual(len(self.history), 15)
+ self.assertFalse(A.is_merged)
+ self.assertTrue(B.is_merged)
+ self.assertTrue(C.is_merged)
+ self.assertNotIn('merge', A.labels)
+ self.assertNotIn('merge', B.labels)
+ self.assertNotIn('merge', C.labels)
diff --git a/tests/unit/test_github_requirements.py b/tests/unit/test_github_requirements.py
new file mode 100644
index 0000000..5dd6e80
--- /dev/null
+++ b/tests/unit/test_github_requirements.py
@@ -0,0 +1,328 @@
+#!/usr/bin/env python
+# Copyright (c) 2017 IBM Corp.
+#
+# Licensed under the Apache License, Version 2.0 (the "License"); you may
+# not use this file except in compliance with the License. You may obtain
+# a copy of the License at
+#
+# http://www.apache.org/licenses/LICENSE-2.0
+#
+# Unless required by applicable law or agreed to in writing, software
+# distributed under the License is distributed on an "AS IS" BASIS, WITHOUT
+# WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. See the
+# License for the specific language governing permissions and limitations
+# under the License.
+
+import time
+
+from tests.base import ZuulTestCase, simple_layout
+
+
+class TestGithubRequirements(ZuulTestCase):
+ """Test pipeline and trigger requirements"""
+ config_file = 'zuul-github-driver.conf'
+
+ @simple_layout('layouts/requirements-github.yaml', driver='github')
+ def test_pipeline_require_status(self):
+ "Test pipeline requirement: status"
+ A = self.fake_github.openFakePullRequest('org/project1', 'master', 'A')
+ # A comment event that we will keep submitting to trigger
+ comment = A.getCommentAddedEvent('test me')
+ self.fake_github.emitEvent(comment)
+ self.waitUntilSettled()
+ # No status from zuul so should not be enqueued
+ self.assertEqual(len(self.history), 0)
+
+ # An error status should not cause it to be enqueued
+ A.setStatus(A.head_sha, 'error', 'null', 'null', 'check')
+ self.fake_github.emitEvent(comment)
+ self.waitUntilSettled()
+ self.assertEqual(len(self.history), 0)
+
+ # A success status goes in
+ A.setStatus(A.head_sha, 'success', 'null', 'null', 'check')
+ self.fake_github.emitEvent(comment)
+ self.waitUntilSettled()
+ self.assertEqual(len(self.history), 1)
+ self.assertEqual(self.history[0].name, 'project1-pipeline')
+
+ @simple_layout('layouts/requirements-github.yaml', driver='github')
+ def test_trigger_require_status(self):
+ "Test trigger requirement: status"
+ A = self.fake_github.openFakePullRequest('org/project1', 'master', 'A')
+ # A comment event that we will keep submitting to trigger
+ comment = A.getCommentAddedEvent('trigger me')
+ self.fake_github.emitEvent(comment)
+ self.waitUntilSettled()
+ # No status from zuul so should not be enqueued
+ self.assertEqual(len(self.history), 0)
+
+ # An error status should not cause it to be enqueued
+ A.setStatus(A.head_sha, 'error', 'null', 'null', 'check')
+ self.fake_github.emitEvent(comment)
+ self.waitUntilSettled()
+ self.assertEqual(len(self.history), 0)
+
+ # A success status goes in
+ A.setStatus(A.head_sha, 'success', 'null', 'null', 'check')
+ self.fake_github.emitEvent(comment)
+ self.waitUntilSettled()
+ self.assertEqual(len(self.history), 1)
+ self.assertEqual(self.history[0].name, 'project1-pipeline')
+
+ @simple_layout('layouts/requirements-github.yaml', driver='github')
+ def test_trigger_on_status(self):
+ "Test trigger on: status"
+ A = self.fake_github.openFakePullRequest('org/project2', 'master', 'A')
+
+ # An error status should not cause it to be enqueued
+ A.setStatus(A.head_sha, 'error', 'null', 'null', 'check')
+ self.fake_github.emitEvent(A.getCommitStatusEvent('check',
+ state='error'))
+ self.waitUntilSettled()
+ self.assertEqual(len(self.history), 0)
+
+ # A success status from unknown user should not cause it to be
+ # enqueued
+ A.setStatus(A.head_sha, 'success', 'null', 'null', 'check', user='foo')
+ self.fake_github.emitEvent(A.getCommitStatusEvent('check',
+ state='success',
+ user='foo'))
+ self.waitUntilSettled()
+ self.assertEqual(len(self.history), 0)
+
+ # A success status from zuul goes in
+ A.setStatus(A.head_sha, 'success', 'null', 'null', 'check')
+ self.fake_github.emitEvent(A.getCommitStatusEvent('check'))
+ self.waitUntilSettled()
+ self.assertEqual(len(self.history), 1)
+ self.assertEqual(self.history[0].name, 'project2-trigger')
+
+ # An error status for a different context should not cause it to be
+ # enqueued
+ A.setStatus(A.head_sha, 'error', 'null', 'null', 'gate')
+ self.fake_github.emitEvent(A.getCommitStatusEvent('gate',
+ state='error'))
+ self.waitUntilSettled()
+ self.assertEqual(len(self.history), 1)
+
+ @simple_layout('layouts/requirements-github.yaml', driver='github')
+ def test_pipeline_require_review_username(self):
+ "Test pipeline requirement: review username"
+
+ A = self.fake_github.openFakePullRequest('org/project3', 'master', 'A')
+ # A comment event that we will keep submitting to trigger
+ comment = A.getCommentAddedEvent('test me')
+ self.fake_github.emitEvent(comment)
+ self.waitUntilSettled()
+ # No approval from derp so should not be enqueued
+ self.assertEqual(len(self.history), 0)
+
+ # Add an approved review from derp
+ A.addReview('derp', 'APPROVED')
+ self.fake_github.emitEvent(comment)
+ self.waitUntilSettled()
+ self.assertEqual(len(self.history), 1)
+ self.assertEqual(self.history[0].name, 'project3-reviewusername')
+
+ @simple_layout('layouts/requirements-github.yaml', driver='github')
+ def test_pipeline_require_review_state(self):
+ "Test pipeline requirement: review state"
+
+ A = self.fake_github.openFakePullRequest('org/project4', 'master', 'A')
+ # Add derp to writers
+ A.writers.append('derp')
+ # A comment event that we will keep submitting to trigger
+ comment = A.getCommentAddedEvent('test me')
+ self.fake_github.emitEvent(comment)
+ self.waitUntilSettled()
+ # No positive review from derp so should not be enqueued
+ self.assertEqual(len(self.history), 0)
+
+ # A negative review from derp should not cause it to be enqueued
+ A.addReview('derp', 'CHANGES_REQUESTED')
+ self.fake_github.emitEvent(comment)
+ self.waitUntilSettled()
+ self.assertEqual(len(self.history), 0)
+
+ # A positive from nobody should not cause it to be enqueued
+ A.addReview('nobody', 'APPROVED')
+ self.fake_github.emitEvent(comment)
+ self.waitUntilSettled()
+ self.assertEqual(len(self.history), 0)
+
+ # A positive review from derp should cause it to be enqueued
+ A.addReview('derp', 'APPROVED')
+ self.fake_github.emitEvent(comment)
+ self.waitUntilSettled()
+ self.assertEqual(len(self.history), 1)
+ self.assertEqual(self.history[0].name, 'project4-reviewreq')
+
+ @simple_layout('layouts/requirements-github.yaml', driver='github')
+ def test_pipeline_require_review_user_state(self):
+ "Test pipeline requirement: review state from user"
+
+ A = self.fake_github.openFakePullRequest('org/project5', 'master', 'A')
+ # Add derp and herp to writers
+ A.writers.extend(('derp', 'herp'))
+ # A comment event that we will keep submitting to trigger
+ comment = A.getCommentAddedEvent('test me')
+ self.fake_github.emitEvent(comment)
+ self.waitUntilSettled()
+ # No positive review from derp so should not be enqueued
+ self.assertEqual(len(self.history), 0)
+
+ # A negative review from derp should not cause it to be enqueued
+ A.addReview('derp', 'CHANGES_REQUESTED')
+ self.fake_github.emitEvent(comment)
+ self.waitUntilSettled()
+ self.assertEqual(len(self.history), 0)
+
+ # A positive from nobody should not cause it to be enqueued
+ A.addReview('nobody', 'APPROVED')
+ self.fake_github.emitEvent(comment)
+ self.waitUntilSettled()
+ self.assertEqual(len(self.history), 0)
+
+ # A positive review from herp (a writer) should not cause it to be
+ # enqueued
+ A.addReview('herp', 'APPROVED')
+ self.fake_github.emitEvent(comment)
+ self.waitUntilSettled()
+ self.assertEqual(len(self.history), 0)
+
+ # A positive review from derp should cause it to be enqueued
+ A.addReview('derp', 'APPROVED')
+ self.fake_github.emitEvent(comment)
+ self.waitUntilSettled()
+ self.assertEqual(len(self.history), 1)
+ self.assertEqual(self.history[0].name, 'project5-reviewuserstate')
+
+# TODO: Implement reject on approval username/state
+
+ @simple_layout('layouts/requirements-github.yaml', driver='github')
+ def test_pipeline_require_review_latest_user_state(self):
+ "Test pipeline requirement: review state from user"
+
+ A = self.fake_github.openFakePullRequest('org/project5', 'master', 'A')
+ # Add derp and herp to writers
+ A.writers.extend(('derp', 'herp'))
+ # A comment event that we will keep submitting to trigger
+ comment = A.getCommentAddedEvent('test me')
+ self.fake_github.emitEvent(comment)
+ self.waitUntilSettled()
+ # No positive review from derp so should not be enqueued
+ self.assertEqual(len(self.history), 0)
+
+ # The first negative review from derp should not cause it to be
+ # enqueued
+ for i in range(1, 4):
+ submitted_at = time.time() - 72 * 60 * 60
+ A.addReview('derp', 'CHANGES_REQUESTED',
+ submitted_at)
+ self.fake_github.emitEvent(comment)
+ self.waitUntilSettled()
+ self.assertEqual(len(self.history), 0)
+
+ # A positive review from derp should cause it to be enqueued
+ A.addReview('derp', 'APPROVED')
+ self.fake_github.emitEvent(comment)
+ self.waitUntilSettled()
+ self.assertEqual(len(self.history), 1)
+ self.assertEqual(self.history[0].name, 'project5-reviewuserstate')
+
+ @simple_layout('layouts/requirements-github.yaml', driver='github')
+ def test_require_review_newer_than(self):
+
+ A = self.fake_github.openFakePullRequest('org/project6', 'master', 'A')
+ # Add derp and herp to writers
+ A.writers.extend(('derp', 'herp'))
+ # A comment event that we will keep submitting to trigger
+ comment = A.getCommentAddedEvent('test me')
+ self.fake_github.emitEvent(comment)
+ self.waitUntilSettled()
+ # No positive review from derp so should not be enqueued
+ self.assertEqual(len(self.history), 0)
+
+ # Add a too-old positive review, should not be enqueued
+ submitted_at = time.time() - 72 * 60 * 60
+ A.addReview('derp', 'APPROVED',
+ submitted_at)
+ self.fake_github.emitEvent(comment)
+ self.waitUntilSettled()
+ self.assertEqual(len(self.history), 0)
+
+ # Add a recent positive review
+ submitted_at = time.time() - 12 * 60 * 60
+ A.addReview('derp', 'APPROVED', submitted_at)
+ self.fake_github.emitEvent(comment)
+ self.waitUntilSettled()
+ self.assertEqual(len(self.history), 1)
+ self.assertEqual(self.history[0].name, 'project6-newerthan')
+
+ @simple_layout('layouts/requirements-github.yaml', driver='github')
+ def test_require_review_older_than(self):
+
+ A = self.fake_github.openFakePullRequest('org/project7', 'master', 'A')
+ # Add derp and herp to writers
+ A.writers.extend(('derp', 'herp'))
+ # A comment event that we will keep submitting to trigger
+ comment = A.getCommentAddedEvent('test me')
+ self.fake_github.emitEvent(comment)
+ self.waitUntilSettled()
+ # No positive review from derp so should not be enqueued
+ self.assertEqual(len(self.history), 0)
+
+ # Add a too-new positive, should not be enqueued
+ submitted_at = time.time() - 12 * 60 * 60
+ A.addReview('derp', 'APPROVED',
+ submitted_at)
+ self.fake_github.emitEvent(comment)
+ self.waitUntilSettled()
+ self.assertEqual(len(self.history), 0)
+
+ # Add an old enough positive, should enqueue
+ submitted_at = time.time() - 72 * 60 * 60
+ A.addReview('herp', 'APPROVED', submitted_at)
+ self.fake_github.emitEvent(comment)
+ self.waitUntilSettled()
+ self.assertEqual(len(self.history), 1)
+ self.assertEqual(self.history[0].name, 'project7-olderthan')
+
+ @simple_layout('layouts/requirements-github.yaml', driver='github')
+ def test_require_open(self):
+
+ A = self.fake_github.openFakePullRequest('org/project8', 'master', 'A')
+ # A comment event that we will keep submitting to trigger
+ comment = A.getCommentAddedEvent('test me')
+ self.fake_github.emitEvent(comment)
+ self.waitUntilSettled()
+
+ # PR is open, we should have enqueued
+ self.assertEqual(len(self.history), 1)
+
+ # close the PR and try again
+ A.state = 'closed'
+ self.fake_github.emitEvent(comment)
+ self.waitUntilSettled()
+ # PR is closed, should not trigger
+ self.assertEqual(len(self.history), 1)
+
+ @simple_layout('layouts/requirements-github.yaml', driver='github')
+ def test_require_current(self):
+
+ A = self.fake_github.openFakePullRequest('org/project9', 'master', 'A')
+ # A sync event that we will keep submitting to trigger
+ sync = A.getPullRequestSynchronizeEvent()
+ self.fake_github.emitEvent(sync)
+ self.waitUntilSettled()
+
+ # PR head is current should enqueue
+ self.assertEqual(len(self.history), 1)
+
+ # Add a commit to the PR, re-issue the original synchronize event
+ A.addCommit()
+ self.fake_github.emitEvent(sync)
+ self.waitUntilSettled()
+ # Event hash is not current, should not trigger
+ self.assertEqual(len(self.history), 1)
diff --git a/tests/unit/test_inventory.py b/tests/unit/test_inventory.py
new file mode 100644
index 0000000..2835d30
--- /dev/null
+++ b/tests/unit/test_inventory.py
@@ -0,0 +1,82 @@
+#!/usr/bin/env python
+
+# Copyright 2017 Red Hat, Inc.
+#
+# Licensed under the Apache License, Version 2.0 (the "License"); you may
+# not use this file except in compliance with the License. You may obtain
+# a copy of the License at
+#
+# http://www.apache.org/licenses/LICENSE-2.0
+#
+# Unless required by applicable law or agreed to in writing, software
+# distributed under the License is distributed on an "AS IS" BASIS, WITHOUT
+# WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. See the
+# License for the specific language governing permissions and limitations
+# under the License.
+
+import os
+
+import yaml
+
+from tests.base import ZuulTestCase
+
+
+class TestInventory(ZuulTestCase):
+
+ tenant_config_file = 'config/inventory/main.yaml'
+
+ def setUp(self):
+ super(TestInventory, self).setUp()
+ self.executor_server.hold_jobs_in_build = True
+ A = self.fake_gerrit.addFakeChange('org/project', 'master', 'A')
+ self.fake_gerrit.addEvent(A.getPatchsetCreatedEvent(1))
+ self.waitUntilSettled()
+
+ def _get_build_inventory(self, name):
+ build = self.getBuildByName(name)
+ inv_path = os.path.join(build.jobdir.root, 'ansible', 'inventory.yaml')
+ return yaml.safe_load(open(inv_path, 'r'))
+
+ def test_single_inventory(self):
+
+ inventory = self._get_build_inventory('single-inventory')
+
+ all_nodes = ('ubuntu-xenial',)
+ self.assertIn('all', inventory)
+ self.assertIn('hosts', inventory['all'])
+ self.assertIn('vars', inventory['all'])
+ for node_name in all_nodes:
+ self.assertIn(node_name, inventory['all']['hosts'])
+ self.assertIn('zuul', inventory['all']['vars'])
+ z_vars = inventory['all']['vars']['zuul']
+ self.assertIn('executor', z_vars)
+ self.assertIn('src_root', z_vars['executor'])
+ self.assertIn('job', z_vars)
+ self.assertEqual(z_vars['job'], 'single-inventory')
+
+ self.executor_server.release()
+ self.waitUntilSettled()
+
+ def test_group_inventory(self):
+
+ inventory = self._get_build_inventory('group-inventory')
+
+ all_nodes = ('controller', 'compute1', 'compute2')
+ self.assertIn('all', inventory)
+ self.assertIn('hosts', inventory['all'])
+ self.assertIn('vars', inventory['all'])
+ for group_name in ('ceph-osd', 'ceph-monitor'):
+ self.assertIn(group_name, inventory)
+ for node_name in all_nodes:
+ self.assertIn(node_name, inventory['all']['hosts'])
+ self.assertIn(node_name,
+ inventory['ceph-monitor']['hosts'])
+ self.assertIn('zuul', inventory['all']['vars'])
+ z_vars = inventory['all']['vars']['zuul']
+ self.assertIn('executor', z_vars)
+ self.assertIn('src_root', z_vars['executor'])
+ self.assertIn('job', z_vars)
+ self.assertEqual(z_vars['job'], 'group-inventory')
+
+ self.executor_server.release()
+ self.waitUntilSettled()
diff --git a/tests/unit/test_log_streamer.py b/tests/unit/test_log_streamer.py
new file mode 100644
index 0000000..3ea5a8e
--- /dev/null
+++ b/tests/unit/test_log_streamer.py
@@ -0,0 +1,53 @@
+#!/usr/bin/env python
+
+# Copyright 2017 Red Hat, Inc.
+#
+# Licensed under the Apache License, Version 2.0 (the "License"); you may
+# not use this file except in compliance with the License. You may obtain
+# a copy of the License at
+#
+# http://www.apache.org/licenses/LICENSE-2.0
+#
+# Unless required by applicable law or agreed to in writing, software
+# distributed under the License is distributed on an "AS IS" BASIS, WITHOUT
+# WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. See the
+# License for the specific language governing permissions and limitations
+# under the License.
+
+import logging
+import socket
+import tempfile
+
+import zuul.lib.log_streamer
+import tests.base
+
+
+class TestLogStreamer(tests.base.BaseTestCase):
+
+ log = logging.getLogger("zuul.test.cloner")
+
+ def setUp(self):
+ super(TestLogStreamer, self).setUp()
+ self.host = '0.0.0.0'
+
+ def startStreamer(self, port, root=None):
+ if not root:
+ root = tempfile.gettempdir()
+ return zuul.lib.log_streamer.LogStreamer(None, self.host, port, root)
+
+ def test_start_stop(self):
+ port = 7900
+ streamer = self.startStreamer(port)
+ self.addCleanup(streamer.stop)
+
+ s = socket.socket(socket.AF_INET, socket.SOCK_STREAM)
+ self.addCleanup(s.close)
+ self.assertEqual(0, s.connect_ex((self.host, port)))
+ s.close()
+
+ streamer.stop()
+
+ s = socket.socket(socket.AF_INET, socket.SOCK_STREAM)
+ self.addCleanup(s.close)
+ self.assertNotEqual(0, s.connect_ex((self.host, port)))
+ s.close()
diff --git a/tests/unit/test_model.py b/tests/unit/test_model.py
index 2167a3b..e7e53c4 100644
--- a/tests/unit/test_model.py
+++ b/tests/unit/test_model.py
@@ -27,20 +27,22 @@
from tests.base import BaseTestCase, FIXTURE_DIR
-class FakeSource(object):
- def __init__(self, name):
- self.name = name
+class Dummy(object):
+ def __init__(self, **kw):
+ for k, v in kw.items():
+ setattr(self, k, v)
class TestJob(BaseTestCase):
-
def setUp(self):
super(TestJob, self).setUp()
+ self.connection = Dummy(connection_name='dummy_connection')
+ self.source = Dummy(canonical_hostname='git.example.com',
+ connection=self.connection)
self.tenant = model.Tenant('tenant')
self.layout = model.Layout()
- self.project = model.Project('project', 'connection')
- self.source = FakeSource('connection')
- self.tenant.addProjectRepo(self.source, self.project)
+ self.project = model.Project('project', self.source)
+ self.tenant.addUntrustedProject(self.project)
self.pipeline = model.Pipeline('gate', self.layout)
self.layout.addPipeline(self.pipeline)
self.queue = model.ChangeQueue(self.pipeline)
@@ -71,11 +73,21 @@
change.files = ['/COMMIT_MSG', 'docs/foo']
self.assertFalse(self.job.changeMatches(change))
+ def test_change_matches_returns_false_for_single_matched_skip_if(self):
+ change = model.Change('project')
+ change.files = ['docs/foo']
+ self.assertFalse(self.job.changeMatches(change))
+
def test_change_matches_returns_true_for_unmatched_skip_if(self):
change = model.Change('project')
change.files = ['/COMMIT_MSG', 'foo']
self.assertTrue(self.job.changeMatches(change))
+ def test_change_matches_returns_true_for_single_unmatched_skip_if(self):
+ change = model.Change('project')
+ change.files = ['foo']
+ self.assertTrue(self.job.changeMatches(change))
+
def test_job_sets_defaults_for_boolean_attributes(self):
self.assertIsNotNone(self.job.voting)
@@ -94,7 +106,7 @@
base.auth = model.AuthContext()
py27 = model.Job('py27')
- self.assertEqual(None, py27.timeout)
+ self.assertIsNone(py27.timeout)
py27.inheritFrom(base)
self.assertEqual(30, py27.timeout)
self.assertEqual(['base-pre'],
@@ -103,7 +115,7 @@
[x.path for x in py27.run])
self.assertEqual(['base-post'],
[x.path for x in py27.post_run])
- self.assertEqual(None, py27.auth)
+ self.assertIsNone(py27.auth)
def test_job_variants(self):
# This simulates freezing a job.
@@ -162,7 +174,8 @@
pipeline = model.Pipeline('gate', layout)
layout.addPipeline(pipeline)
queue = model.ChangeQueue(pipeline)
- project = model.Project('project', None)
+ project = model.Project('project', self.source)
+ tenant.addUntrustedProject(project)
base = configloader.JobParser.fromYaml(tenant, layout, {
'_source_context': self.context,
@@ -420,15 +433,16 @@
})
layout.addJob(in_repo_job_with_inherit_false)
- self.assertEqual(None, in_repo_job_without_inherit.auth)
+ self.assertIsNone(in_repo_job_without_inherit.auth)
self.assertEqual(1, len(in_repo_job_with_inherit.auth.secrets))
self.assertEqual(in_repo_job_with_inherit.auth.secrets[0].name,
'pypi-credentials')
- self.assertEqual(None, in_repo_job_with_inherit_false.auth)
+ self.assertIsNone(in_repo_job_with_inherit_false.auth)
def test_job_inheritance_job_tree(self):
tenant = model.Tenant('tenant')
layout = model.Layout()
+ tenant.addUntrustedProject(self.project)
pipeline = model.Pipeline('gate', layout)
layout.addPipeline(pipeline)
@@ -508,7 +522,8 @@
pipeline = model.Pipeline('gate', layout)
layout.addPipeline(pipeline)
queue = model.ChangeQueue(pipeline)
- project = model.Project('project', None)
+ project = model.Project('project', self.source)
+ tenant.addUntrustedProject(project)
base = configloader.JobParser.fromYaml(tenant, layout, {
'_source_context': self.context,
@@ -554,7 +569,7 @@
def test_job_source_project(self):
tenant = model.Tenant('tenant')
layout = model.Layout()
- base_project = model.Project('base_project', None)
+ base_project = model.Project('base_project', self.source)
base_context = model.SourceContext(base_project, 'master',
'test', True)
@@ -565,7 +580,7 @@
})
layout.addJob(base)
- other_project = model.Project('other_project', None)
+ other_project = model.Project('other_project', self.source)
other_context = model.SourceContext(other_project, 'master',
'test', True)
base2 = configloader.JobParser.fromYaml(tenant, layout, {
@@ -588,7 +603,8 @@
})
self.layout.addJob(job)
- project2 = model.Project('project2', None)
+ project2 = model.Project('project2', self.source)
+ self.tenant.addUntrustedProject(project2)
context2 = model.SourceContext(project2, 'master',
'test', True)
@@ -778,3 +794,137 @@
graph.addJob(jobs[3])
jobs[6].dependencies = frozenset([jobs[2].name])
graph.addJob(jobs[6])
+
+
+class TestTenant(BaseTestCase):
+ def test_add_project(self):
+ tenant = model.Tenant('tenant')
+ connection1 = Dummy(connection_name='dummy_connection1')
+ source1 = Dummy(canonical_hostname='git1.example.com',
+ name='dummy', # TODOv3(jeblair): remove
+ connection=connection1)
+
+ source1_project1 = model.Project('project1', source1)
+ tenant.addConfigProject(source1_project1)
+ d = {'project1':
+ {'git1.example.com': source1_project1}}
+ self.assertEqual(d, tenant.projects)
+ self.assertEqual((True, source1_project1),
+ tenant.getProject('project1'))
+ self.assertEqual((True, source1_project1),
+ tenant.getProject('git1.example.com/project1'))
+
+ source1_project2 = model.Project('project2', source1)
+ tenant.addUntrustedProject(source1_project2)
+ d = {'project1':
+ {'git1.example.com': source1_project1},
+ 'project2':
+ {'git1.example.com': source1_project2}}
+ self.assertEqual(d, tenant.projects)
+ self.assertEqual((False, source1_project2),
+ tenant.getProject('project2'))
+ self.assertEqual((False, source1_project2),
+ tenant.getProject('git1.example.com/project2'))
+
+ connection2 = Dummy(connection_name='dummy_connection2')
+ source2 = Dummy(canonical_hostname='git2.example.com',
+ name='dummy', # TODOv3(jeblair): remove
+ connection=connection2)
+
+ source2_project1 = model.Project('project1', source2)
+ tenant.addUntrustedProject(source2_project1)
+ d = {'project1':
+ {'git1.example.com': source1_project1,
+ 'git2.example.com': source2_project1},
+ 'project2':
+ {'git1.example.com': source1_project2}}
+ self.assertEqual(d, tenant.projects)
+ with testtools.ExpectedException(
+ Exception,
+ "Project name 'project1' is ambiguous"):
+ tenant.getProject('project1')
+ self.assertEqual((False, source1_project2),
+ tenant.getProject('project2'))
+ self.assertEqual((True, source1_project1),
+ tenant.getProject('git1.example.com/project1'))
+ self.assertEqual((False, source2_project1),
+ tenant.getProject('git2.example.com/project1'))
+
+ source2_project2 = model.Project('project2', source2)
+ tenant.addConfigProject(source2_project2)
+ d = {'project1':
+ {'git1.example.com': source1_project1,
+ 'git2.example.com': source2_project1},
+ 'project2':
+ {'git1.example.com': source1_project2,
+ 'git2.example.com': source2_project2}}
+ self.assertEqual(d, tenant.projects)
+ with testtools.ExpectedException(
+ Exception,
+ "Project name 'project1' is ambiguous"):
+ tenant.getProject('project1')
+ with testtools.ExpectedException(
+ Exception,
+ "Project name 'project2' is ambiguous"):
+ tenant.getProject('project2')
+ self.assertEqual((True, source1_project1),
+ tenant.getProject('git1.example.com/project1'))
+ self.assertEqual((False, source2_project1),
+ tenant.getProject('git2.example.com/project1'))
+ self.assertEqual((False, source1_project2),
+ tenant.getProject('git1.example.com/project2'))
+ self.assertEqual((True, source2_project2),
+ tenant.getProject('git2.example.com/project2'))
+
+ source1_project2b = model.Project('subpath/project2', source1)
+ tenant.addConfigProject(source1_project2b)
+ d = {'project1':
+ {'git1.example.com': source1_project1,
+ 'git2.example.com': source2_project1},
+ 'project2':
+ {'git1.example.com': source1_project2,
+ 'git2.example.com': source2_project2},
+ 'subpath/project2':
+ {'git1.example.com': source1_project2b}}
+ self.assertEqual(d, tenant.projects)
+ self.assertEqual((False, source1_project2),
+ tenant.getProject('git1.example.com/project2'))
+ self.assertEqual((True, source2_project2),
+ tenant.getProject('git2.example.com/project2'))
+ self.assertEqual((True, source1_project2b),
+ tenant.getProject('subpath/project2'))
+ self.assertEqual(
+ (True, source1_project2b),
+ tenant.getProject('git1.example.com/subpath/project2'))
+
+ source2_project2b = model.Project('subpath/project2', source2)
+ tenant.addConfigProject(source2_project2b)
+ d = {'project1':
+ {'git1.example.com': source1_project1,
+ 'git2.example.com': source2_project1},
+ 'project2':
+ {'git1.example.com': source1_project2,
+ 'git2.example.com': source2_project2},
+ 'subpath/project2':
+ {'git1.example.com': source1_project2b,
+ 'git2.example.com': source2_project2b}}
+ self.assertEqual(d, tenant.projects)
+ self.assertEqual((False, source1_project2),
+ tenant.getProject('git1.example.com/project2'))
+ self.assertEqual((True, source2_project2),
+ tenant.getProject('git2.example.com/project2'))
+ with testtools.ExpectedException(
+ Exception,
+ "Project name 'subpath/project2' is ambiguous"):
+ tenant.getProject('subpath/project2')
+ self.assertEqual(
+ (True, source1_project2b),
+ tenant.getProject('git1.example.com/subpath/project2'))
+ self.assertEqual(
+ (True, source2_project2b),
+ tenant.getProject('git2.example.com/subpath/project2'))
+
+ with testtools.ExpectedException(
+ Exception,
+ "Project project1 is already in project index"):
+ tenant._addProject(source1_project1)
diff --git a/tests/unit/test_multi_driver.py b/tests/unit/test_multi_driver.py
new file mode 100644
index 0000000..864bd31
--- /dev/null
+++ b/tests/unit/test_multi_driver.py
@@ -0,0 +1,45 @@
+# Copyright 2015 GoodData
+# Copyright (c) 2017 IBM Corp.
+#
+# Licensed under the Apache License, Version 2.0 (the "License"); you may
+# not use this file except in compliance with the License. You may obtain
+# a copy of the License at
+#
+# http://www.apache.org/licenses/LICENSE-2.0
+#
+# Unless required by applicable law or agreed to in writing, software
+# distributed under the License is distributed on an "AS IS" BASIS, WITHOUT
+# WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. See the
+# License for the specific language governing permissions and limitations
+# under the License.
+
+from tests.base import ZuulTestCase
+
+
+class TestGerritAndGithub(ZuulTestCase):
+ config_file = 'zuul-connections-gerrit-and-github.conf'
+ tenant_config_file = 'config/multi-driver/main.yaml'
+
+ def setup_config(self):
+ super(TestGerritAndGithub, self).setup_config()
+
+ def test_multiple_project_gerrit_and_github(self):
+ self.executor_server.hold_jobs_in_build = True
+
+ A = self.fake_gerrit.addFakeChange('org/project', 'master', 'A')
+ self.fake_gerrit.addEvent(A.getPatchsetCreatedEvent(1))
+ self.waitUntilSettled()
+
+ B = self.fake_github.openFakePullRequest('org/project1', 'master', 'B')
+ self.fake_github.emitEvent(B.getPullRequestOpenedEvent())
+ self.waitUntilSettled()
+
+ self.assertEqual(2, len(self.builds))
+ self.assertEqual('project-gerrit', self.builds[0].name)
+ self.assertEqual('project1-github', self.builds[1].name)
+ self.assertTrue(self.builds[0].hasChanges(A))
+ self.assertTrue(self.builds[1].hasChanges(B))
+
+ self.executor_server.hold_jobs_in_build = False
+ self.executor_server.release()
+ self.waitUntilSettled()
diff --git a/tests/unit/test_nodepool.py b/tests/unit/test_nodepool.py
index 0f23c9e..ba7523c 100644
--- a/tests/unit/test_nodepool.py
+++ b/tests/unit/test_nodepool.py
@@ -27,7 +27,7 @@
# scheduler.
def setUp(self):
- super(BaseTestCase, self).setUp()
+ super(TestNodepool, self).setUp()
self.zk_chroot_fixture = self.useFixture(
ChrootedKazooFixture(self.id()))
@@ -37,6 +37,7 @@
self.zk_chroot_fixture.zookeeper_chroot)
self.zk = zuul.zk.ZooKeeper()
+ self.addCleanup(self.zk.disconnect)
self.zk.connect(self.zk_config)
self.hostname = 'nodepool-test-hostname'
@@ -49,6 +50,7 @@
self.zk_chroot_fixture.zookeeper_host,
self.zk_chroot_fixture.zookeeper_port,
self.zk_chroot_fixture.zookeeper_chroot)
+ self.addCleanup(self.fake_nodepool.stop)
def waitForRequests(self):
# Wait until all requests are complete.
diff --git a/tests/unit/test_openstack.py b/tests/unit/test_openstack.py
index c7a47ec..4fceba0 100644
--- a/tests/unit/test_openstack.py
+++ b/tests/unit/test_openstack.py
@@ -69,11 +69,13 @@
# Check that a change to nova triggered a keystone clone
executor_git_dir = os.path.join(self.executor_src_root,
+ 'review.example.com',
'openstack', 'keystone', '.git')
self.assertTrue(os.path.exists(executor_git_dir),
msg='openstack/keystone should be cloned.')
jobdir_git_dir = os.path.join(build.jobdir.src_root,
+ 'review.example.com',
'openstack', 'keystone', '.git')
self.assertTrue(os.path.exists(jobdir_git_dir),
msg='openstack/keystone should be cloned.')
@@ -90,11 +92,13 @@
# Check that a change to keystone triggered a nova clone
executor_git_dir = os.path.join(self.executor_src_root,
+ 'review.example.com',
'openstack', 'nova', '.git')
self.assertTrue(os.path.exists(executor_git_dir),
msg='openstack/nova should be cloned.')
jobdir_git_dir = os.path.join(build.jobdir.src_root,
+ 'review.example.com',
'openstack', 'nova', '.git')
self.assertTrue(os.path.exists(jobdir_git_dir),
msg='openstack/nova should be cloned.')
diff --git a/tests/unit/test_push_reqs.py b/tests/unit/test_push_reqs.py
new file mode 100644
index 0000000..657d9b8
--- /dev/null
+++ b/tests/unit/test_push_reqs.py
@@ -0,0 +1,53 @@
+# Copyright (c) 2017 IBM Corp.
+#
+# Licensed under the Apache License, Version 2.0 (the "License"); you may
+# not use this file except in compliance with the License. You may obtain
+# a copy of the License at
+#
+# http://www.apache.org/licenses/LICENSE-2.0
+#
+# Unless required by applicable law or agreed to in writing, software
+# distributed under the License is distributed on an "AS IS" BASIS, WITHOUT
+# WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. See the
+# License for the specific language governing permissions and limitations
+# under the License.
+
+from tests.base import ZuulTestCase
+
+
+class TestPushRequirements(ZuulTestCase):
+ config_file = 'zuul-push-reqs.conf'
+ tenant_config_file = 'config/push-reqs/main.yaml'
+
+ def setup_config(self):
+ super(TestPushRequirements, self).setup_config()
+
+ def test_push_requirements(self):
+ self.executor_server.hold_jobs_in_build = True
+
+ # Create a github change, add a change and emit a push event
+ A = self.fake_github.openFakePullRequest('org/project1', 'master', 'A')
+ old_sha = A.head_sha
+ self.fake_github.emitEvent(A.getPushEvent(old_sha))
+
+ self.waitUntilSettled()
+
+ # All but one pipeline should be skipped
+ self.assertEqual(1, len(self.builds))
+ self.assertEqual('pushhub', self.builds[0].pipeline)
+ self.assertEqual('org/project1', self.builds[0].project)
+
+ # Make a gerrit change, and emit a ref-updated event
+ B = self.fake_gerrit.addFakeChange('org/project2', 'master', 'B')
+ self.fake_gerrit.addEvent(B.getRefUpdatedEvent())
+
+ self.waitUntilSettled()
+
+ # All but one pipeline should be skipped, increasing builds by 1
+ self.assertEqual(2, len(self.builds))
+ self.assertEqual('pushgerrit', self.builds[1].pipeline)
+ self.assertEqual('org/project2', self.builds[1].project)
+
+ self.executor_server.hold_jobs_in_build = False
+ self.executor_server.release()
+ self.waitUntilSettled()
diff --git a/tests/unit/test_scheduler.py b/tests/unit/test_scheduler.py
index 43a8ddf..0ac42c1 100755
--- a/tests/unit/test_scheduler.py
+++ b/tests/unit/test_scheduler.py
@@ -14,6 +14,7 @@
# License for the specific language governing permissions and limitations
# under the License.
+import gc
import json
import textwrap
@@ -35,6 +36,7 @@
from tests.base import (
ZuulTestCase,
repack_repo,
+ simple_layout,
)
@@ -61,7 +63,11 @@
self.assertIsNone(self.getJobFromHistory('project-test2').node)
# TODOv3(jeblair): we may want to report stats by tenant (also?).
- self.assertReportedStat('gerrit.event.comment-added', value='1|c')
+ # Per-driver
+ self.assertReportedStat('zuul.event.gerrit.comment-added', value='1|c')
+ # Per-driver per-connection
+ self.assertReportedStat('zuul.event.gerrit.gerrit.comment-added',
+ value='1|c')
self.assertReportedStat('zuul.pipeline.gate.current_changes',
value='1|g')
self.assertReportedStat('zuul.pipeline.gate.job.project-merge.SUCCESS',
@@ -528,8 +534,8 @@
queue = self.gearman_server.getQueue()
self.assertEqual(len(self.builds), 0)
self.assertEqual(len(queue), 1)
- self.assertEqual(queue[0].name, 'executor:execute')
- job_args = json.loads(queue[0].arguments)
+ self.assertEqual(queue[0].name, b'executor:execute')
+ job_args = json.loads(queue[0].arguments.decode('utf8'))
self.assertEqual(job_args['job'], 'project-merge')
self.assertEqual(job_args['items'][0]['number'], '%d' % A.number)
@@ -545,17 +551,23 @@
self.assertEqual(len(queue), 6)
self.assertEqual(
- json.loads(queue[0].arguments)['job'], 'project-test1')
+ json.loads(queue[0].arguments.decode('utf8'))['job'],
+ 'project-test1')
self.assertEqual(
- json.loads(queue[1].arguments)['job'], 'project-test2')
+ json.loads(queue[1].arguments.decode('utf8'))['job'],
+ 'project-test2')
self.assertEqual(
- json.loads(queue[2].arguments)['job'], 'project-test1')
+ json.loads(queue[2].arguments.decode('utf8'))['job'],
+ 'project-test1')
self.assertEqual(
- json.loads(queue[3].arguments)['job'], 'project-test2')
+ json.loads(queue[3].arguments.decode('utf8'))['job'],
+ 'project-test2')
self.assertEqual(
- json.loads(queue[4].arguments)['job'], 'project-test1')
+ json.loads(queue[4].arguments.decode('utf8'))['job'],
+ 'project-test1')
self.assertEqual(
- json.loads(queue[5].arguments)['job'], 'project-test2')
+ json.loads(queue[5].arguments.decode('utf8'))['job'],
+ 'project-test2')
self.release(queue[0])
self.waitUntilSettled()
@@ -905,7 +917,8 @@
# TODO: move to test_gerrit (this is a unit test!)
A = self.fake_gerrit.addFakeChange('org/project', 'master', 'A')
tenant = self.sched.abide.tenants.get('tenant-one')
- source = tenant.layout.pipelines['gate'].source
+ (trusted, project) = tenant.getProject('org/project')
+ source = project.source
# TODO(pabelanger): As we add more source / trigger APIs we should make
# it easier for users to create events for testing.
@@ -947,7 +960,6 @@
self.waitUntilSettled()
self.assertEqual(A.reported, 1)
- self.assertEqual(B.reported, 1)
self.assertEqual(C.reported, 1)
self.gearman_server.release('project-merge')
@@ -965,7 +977,7 @@
self.assertEqual(B.data['status'], 'NEW')
self.assertEqual(C.data['status'], 'MERGED')
self.assertEqual(A.reported, 2)
- self.assertEqual(B.reported, 2)
+ self.assertIn('Merge Failed', B.messages[-1])
self.assertEqual(C.reported, 2)
self.assertHistory([
@@ -981,8 +993,9 @@
"Test that delayed check merge conflicts are handled properly"
# Hold jobs in the gearman queue so that we can test whether
- # the executor returns a merge failure after the scheduler has
- # successfully merged.
+ # the executor successfully merges a change based on an old
+ # repo state (frozen by the scheduler) which would otherwise
+ # conflict.
self.gearman_server.hold_jobs_in_queue = True
A = self.fake_gerrit.addFakeChange('org/project',
'master', 'A',
@@ -999,6 +1012,7 @@
self.fake_gerrit.addEvent(A.addApproval('approved', 1))
self.waitUntilSettled()
self.fake_gerrit.addEvent(B.getPatchsetCreatedEvent(1))
+ self.waitUntilSettled()
self.fake_gerrit.addEvent(C.getPatchsetCreatedEvent(1))
self.waitUntilSettled()
@@ -1054,9 +1068,12 @@
dict(name='project-merge', result='SUCCESS', changes='1,1'),
dict(name='project-test1', result='SUCCESS', changes='1,1'),
dict(name='project-test2', result='SUCCESS', changes='1,1'),
- dict(name='project-merge', result='MERGER_FAILURE', changes='2,1'),
- dict(name='project-merge', result='MERGER_FAILURE',
- changes='2,1 3,1'),
+ dict(name='project-merge', result='SUCCESS', changes='2,1'),
+ dict(name='project-test1', result='SUCCESS', changes='2,1'),
+ dict(name='project-test2', result='SUCCESS', changes='2,1'),
+ dict(name='project-merge', result='SUCCESS', changes='2,1 3,1'),
+ dict(name='project-test1', result='SUCCESS', changes='2,1 3,1'),
+ dict(name='project-test2', result='SUCCESS', changes='2,1 3,1'),
], ordered=False)
def test_post(self):
@@ -1103,12 +1120,9 @@
self.assertEqual(len(self.history), 0)
self.assertNotIn('project-post', job_names)
+ @simple_layout('layouts/dont-ignore-ref-deletes.yaml')
def test_post_ignore_deletes_negative(self):
"Test that deleting refs does trigger post jobs"
-
- self.updateConfigLayout('layout-dont-ignore-ref-deletes')
- self.sched.reconfigure(self.config)
-
e = {
"type": "ref-updated",
"submitter": {
@@ -1171,7 +1185,8 @@
self.assertEqual(B.data['status'], 'NEW')
self.assertEqual(B.reported, 2)
self.assertEqual(C.data['status'], 'NEW')
- self.assertEqual(C.reported, 2)
+ self.assertIn('This change depends on a change that failed to merge.',
+ C.messages[-1])
self.assertEqual(len(self.history), 1)
def test_failing_dependent_changes(self):
@@ -1245,16 +1260,15 @@
# aborted jobs.
self.executor_server.hold_jobs_in_build = True
- A = self.fake_gerrit.addFakeChange('org/project1', 'master', 'A')
- B = self.fake_gerrit.addFakeChange('org/project1', 'master', 'B')
- C = self.fake_gerrit.addFakeChange('org/project1', 'master', 'C')
+ A = self.fake_gerrit.addFakeChange('org/project', 'master', 'A')
+ B = self.fake_gerrit.addFakeChange('org/project', 'master', 'B')
+ C = self.fake_gerrit.addFakeChange('org/project', 'master', 'C')
A.addApproval('code-review', 2)
B.addApproval('code-review', 2)
C.addApproval('code-review', 2)
self.executor_server.failJob('project-test1', A)
self.executor_server.failJob('project-test2', A)
- self.executor_server.failJob('project1-project2-integration', A)
self.fake_gerrit.addEvent(A.addApproval('approved', 1))
self.fake_gerrit.addEvent(B.addApproval('approved', 1))
@@ -1273,29 +1287,26 @@
self.executor_server.release('.*-merge')
self.waitUntilSettled()
- self.assertEqual(len(self.builds), 9)
+ self.assertEqual(len(self.builds), 6)
self.assertEqual(self.builds[0].name, 'project-test1')
self.assertEqual(self.builds[1].name, 'project-test2')
- self.assertEqual(self.builds[2].name, 'project1-project2-integration')
- self.assertEqual(self.builds[3].name, 'project-test1')
- self.assertEqual(self.builds[4].name, 'project-test2')
- self.assertEqual(self.builds[5].name, 'project1-project2-integration')
- self.assertEqual(self.builds[6].name, 'project-test1')
- self.assertEqual(self.builds[7].name, 'project-test2')
- self.assertEqual(self.builds[8].name, 'project1-project2-integration')
+ self.assertEqual(self.builds[2].name, 'project-test1')
+ self.assertEqual(self.builds[3].name, 'project-test2')
+ self.assertEqual(self.builds[4].name, 'project-test1')
+ self.assertEqual(self.builds[5].name, 'project-test2')
self.release(self.builds[0])
self.waitUntilSettled()
- self.assertEqual(len(self.builds), 3) # test2,integration, merge for B
- self.assertEqual(self.countJobResults(self.history, 'ABORTED'), 6)
+ self.assertEqual(len(self.builds), 2) # test2, merge for B
+ self.assertEqual(self.countJobResults(self.history, 'ABORTED'), 4)
self.executor_server.hold_jobs_in_build = False
self.executor_server.release()
self.waitUntilSettled()
self.assertEqual(len(self.builds), 0)
- self.assertEqual(len(self.history), 20)
+ self.assertEqual(len(self.history), 15)
self.assertEqual(A.data['status'], 'NEW')
self.assertEqual(B.data['status'], 'MERGED')
@@ -1304,6 +1315,7 @@
self.assertEqual(B.reported, 2)
self.assertEqual(C.reported, 2)
+ @simple_layout('layouts/nonvoting-job.yaml')
def test_nonvoting_job(self):
"Test that non-voting jobs don't vote."
@@ -1365,10 +1377,11 @@
self.assertEqual(self.getJobFromHistory('project-test2').result,
'FAILURE')
+ @simple_layout('layouts/three-projects.yaml')
def test_dependent_behind_dequeue(self):
# This particular test does a large amount of merges and needs a little
# more time to complete
- self.wait_timeout = 90
+ self.wait_timeout = 120
"test that dependent changes behind dequeued changes work"
# This complicated test is a reproduction of a real life bug
self.sched.reconfigure(self.config)
@@ -1472,10 +1485,12 @@
self.assertEmptyQueues()
self.build_history = []
- path = os.path.join(self.merger_src_root, "org/project")
+ path = os.path.join(self.merger_src_root, "review.example.com",
+ "org/project")
if os.path.exists(path):
repack_repo(path)
- path = os.path.join(self.executor_src_root, "org/project")
+ path = os.path.join(self.executor_src_root, "review.example.com",
+ "org/project")
if os.path.exists(path):
repack_repo(path)
@@ -1497,17 +1512,21 @@
# https://bugs.executepad.net/zuul/+bug/1078946
# This test assumes the repo is already cloned; make sure it is
tenant = self.sched.abide.tenants.get('tenant-one')
- url = self.fake_gerrit.getGitUrl(
- tenant.layout.project_configs.get('org/project1'))
- self.merge_server.merger.addProject('org/project1', url)
- A = self.fake_gerrit.addFakeChange('org/project1', 'master', 'A')
+ trusted, project = tenant.getProject('org/project')
+ url = self.fake_gerrit.getGitUrl(project)
+ self.executor_server.merger._addProject('review.example.com',
+ 'org/project', url)
+ A = self.fake_gerrit.addFakeChange('org/project', 'master', 'A')
A.addPatchset(large=True)
- path = os.path.join(self.upstream_root, "org/project1")
+ # TODOv3(jeblair): add hostname to upstream root
+ path = os.path.join(self.upstream_root, 'org/project')
repack_repo(path)
- path = os.path.join(self.merger_src_root, "org/project1")
+ path = os.path.join(self.merger_src_root, 'review.example.com',
+ 'org/project')
if os.path.exists(path):
repack_repo(path)
- path = os.path.join(self.executor_src_root, "org/project1")
+ path = os.path.join(self.executor_src_root, 'review.example.com',
+ 'org/project')
if os.path.exists(path):
repack_repo(path)
@@ -1744,11 +1763,13 @@
def test_abandoned_not_timer(self):
"Test that an abandoned change does not cancel timer jobs"
-
+ # This test can not use simple_layout because it must start
+ # with a configuration which does not include a
+ # timer-triggered job so that we have an opportunity to set
+ # the hold flag before the first job.
self.executor_server.hold_jobs_in_build = True
-
# Start timer trigger - also org/project
- self.updateConfigLayout('layout-idle')
+ self.commitConfigUpdate('common-config', 'layouts/idle.yaml')
self.sched.reconfigure(self.config)
# The pipeline triggers every second, so we should have seen
# several by now.
@@ -1757,9 +1778,9 @@
# Stop queuing timer triggered jobs so that the assertions
# below don't race against more jobs being queued.
# Must be in same repo, so overwrite config with another one
- self.commitLayoutUpdate('layout-idle', 'layout-no-timer')
-
+ self.commitConfigUpdate('common-config', 'layouts/no-timer.yaml')
self.sched.reconfigure(self.config)
+
self.assertEqual(len(self.builds), 2, "Two timer jobs")
A = self.fake_gerrit.addFakeChange('org/project', 'master', 'A')
@@ -1895,6 +1916,7 @@
self.assertEqual(len(self.history), 10)
self.assertEqual(self.countJobResults(self.history, 'ABORTED'), 1)
+ @simple_layout('layouts/noop-job.yaml')
def test_noop_job(self):
"Test that the internal noop job works"
A = self.fake_gerrit.addFakeChange('org/noop-project', 'master', 'A')
@@ -1908,6 +1930,7 @@
self.assertEqual(A.data['status'], 'MERGED')
self.assertEqual(A.reported, 2)
+ @simple_layout('layouts/no-jobs-project.yaml')
def test_no_job_project(self):
"Test that reports with no jobs don't get sent"
A = self.fake_gerrit.addFakeChange('org/no-jobs-project',
@@ -2051,7 +2074,7 @@
# The assertion is that we have one job in the queue, project-merge
self.assertEqual(len(self.gearman_server.getQueue()), 1)
- self.commitLayoutUpdate('common-config', 'layout-no-jobs')
+ self.commitConfigUpdate('common-config', 'layouts/no-jobs.yaml')
self.sched.reconfigure(self.config)
self.waitUntilSettled()
@@ -2112,9 +2135,6 @@
def _test_irrelevant_files_jobs(self, should_skip):
"Test that jobs with irrelevant-files filter run only when appropriate"
- self.updateConfigLayout('layout-irrelevant-files')
- self.sched.reconfigure(self.config)
-
if should_skip:
files = {'ignoreme': 'ignored\n'}
else:
@@ -2135,16 +2155,16 @@
else:
self.assertIn(change.data['number'], tested_change_ids)
+ @simple_layout('layouts/irrelevant-files.yaml')
def test_irrelevant_files_match_skips_job(self):
self._test_irrelevant_files_jobs(should_skip=True)
+ @simple_layout('layouts/irrelevant-files.yaml')
def test_irrelevant_files_no_match_runs_job(self):
self._test_irrelevant_files_jobs(should_skip=False)
+ @simple_layout('layouts/inheritance.yaml')
def test_inherited_jobs_keep_matchers(self):
- self.updateConfigLayout('layout-inheritance')
- self.sched.reconfigure(self.config)
-
files = {'ignoreme': 'ignored\n'}
change = self.fake_gerrit.addFakeChange('org/project',
@@ -2159,18 +2179,11 @@
self.assertEqual(set(['project-test-nomatch-starts-empty',
'project-test-nomatch-starts-full']), run_jobs)
- @skip("Disabled for early v3 development")
- def test_test_config(self):
- "Test that we can test the config"
- self.sched.testConfig(self.config.get('zuul', 'tenant_config'),
- self.connections)
-
def test_queue_names(self):
"Test shared change queue names"
tenant = self.sched.abide.tenants.get('tenant-one')
- source = tenant.layout.pipelines['gate'].source
- project1 = source.getProject('org/project1')
- project2 = source.getProject('org/project2')
+ (trusted, project1) = tenant.getProject('org/project1')
+ (trusted, project2) = tenant.getProject('org/project2')
q1 = tenant.layout.pipelines['gate'].getQueue(project1)
q2 = tenant.layout.pipelines['gate'].getQueue(project2)
self.assertEqual(q1.name, 'integrated')
@@ -2229,7 +2242,7 @@
self.assertIn('Cache-Control', headers)
self.assertIn('Last-Modified', headers)
self.assertIn('Expires', headers)
- data = f.read()
+ data = f.read().decode('utf8')
self.executor_server.hold_jobs_in_build = False
self.executor_server.release()
@@ -2266,246 +2279,6 @@
self.assertEqual('https://server/job/project-test2/0/',
status_jobs[2]['report_url'])
- def test_semaphore_one(self):
- "Test semaphores with max=1 (mutex)"
- self.updateConfigLayout('layout-semaphore')
- self.sched.reconfigure(self.config)
-
- self.waitUntilSettled()
- tenant = self.sched.abide.tenants.get('openstack')
-
- self.executor_server.hold_jobs_in_build = True
-
- A = self.fake_gerrit.addFakeChange('org/project', 'master', 'A')
- B = self.fake_gerrit.addFakeChange('org/project', 'master', 'B')
- self.assertFalse('test-semaphore' in
- tenant.semaphore_handler.semaphores)
-
- self.fake_gerrit.addEvent(A.getPatchsetCreatedEvent(1))
- self.fake_gerrit.addEvent(B.getPatchsetCreatedEvent(1))
- self.waitUntilSettled()
-
- self.assertEqual(len(self.builds), 3)
- self.assertEqual(self.builds[0].name, 'project-test1')
- self.assertEqual(self.builds[1].name, 'semaphore-one-test1')
- self.assertEqual(self.builds[2].name, 'project-test1')
-
- self.executor_server.release('semaphore-one-test1')
- self.waitUntilSettled()
-
- self.assertEqual(len(self.builds), 3)
- self.assertEqual(self.builds[0].name, 'project-test1')
- self.assertEqual(self.builds[1].name, 'project-test1')
- self.assertEqual(self.builds[2].name, 'semaphore-one-test2')
- self.assertTrue('test-semaphore' in
- tenant.semaphore_handler.semaphores)
-
- self.executor_server.release('semaphore-one-test2')
- self.waitUntilSettled()
-
- self.assertEqual(len(self.builds), 3)
- self.assertEqual(self.builds[0].name, 'project-test1')
- self.assertEqual(self.builds[1].name, 'project-test1')
- self.assertEqual(self.builds[2].name, 'semaphore-one-test1')
- self.assertTrue('test-semaphore' in
- tenant.semaphore_handler.semaphores)
-
- self.executor_server.release('semaphore-one-test1')
- self.waitUntilSettled()
-
- self.assertEqual(len(self.builds), 3)
- self.assertEqual(self.builds[0].name, 'project-test1')
- self.assertEqual(self.builds[1].name, 'project-test1')
- self.assertEqual(self.builds[2].name, 'semaphore-one-test2')
- self.assertTrue('test-semaphore' in
- tenant.semaphore_handler.semaphores)
-
- self.executor_server.release('semaphore-one-test2')
- self.waitUntilSettled()
-
- self.assertEqual(len(self.builds), 2)
- self.assertEqual(self.builds[0].name, 'project-test1')
- self.assertEqual(self.builds[1].name, 'project-test1')
- self.assertFalse('test-semaphore' in
- tenant.semaphore_handler.semaphores)
-
- self.executor_server.hold_jobs_in_build = False
- self.executor_server.release()
-
- self.waitUntilSettled()
- self.assertEqual(len(self.builds), 0)
-
- self.assertEqual(A.reported, 1)
- self.assertEqual(B.reported, 1)
- self.assertFalse('test-semaphore' in
- tenant.semaphore_handler.semaphores)
-
- def test_semaphore_two(self):
- "Test semaphores with max>1"
- self.updateConfigLayout('layout-semaphore')
- self.sched.reconfigure(self.config)
-
- self.waitUntilSettled()
- tenant = self.sched.abide.tenants.get('openstack')
-
- self.executor_server.hold_jobs_in_build = True
- A = self.fake_gerrit.addFakeChange('org/project1', 'master', 'A')
- B = self.fake_gerrit.addFakeChange('org/project1', 'master', 'B')
- self.assertFalse('test-semaphore-two' in
- tenant.semaphore_handler.semaphores)
-
- self.fake_gerrit.addEvent(A.getPatchsetCreatedEvent(1))
- self.fake_gerrit.addEvent(B.getPatchsetCreatedEvent(1))
- self.waitUntilSettled()
-
- self.assertEqual(len(self.builds), 4)
- self.assertEqual(self.builds[0].name, 'project-test1')
- self.assertEqual(self.builds[1].name, 'semaphore-two-test1')
- self.assertEqual(self.builds[2].name, 'semaphore-two-test2')
- self.assertEqual(self.builds[3].name, 'project-test1')
- self.assertTrue('test-semaphore-two' in
- tenant.semaphore_handler.semaphores)
- self.assertEqual(len(tenant.semaphore_handler.semaphores.get(
- 'test-semaphore-two', [])), 2)
-
- self.executor_server.release('semaphore-two-test1')
- self.waitUntilSettled()
-
- self.assertEqual(len(self.builds), 4)
- self.assertEqual(self.builds[0].name, 'project-test1')
- self.assertEqual(self.builds[1].name, 'semaphore-two-test2')
- self.assertEqual(self.builds[2].name, 'project-test1')
- self.assertEqual(self.builds[3].name, 'semaphore-two-test1')
- self.assertTrue('test-semaphore-two' in
- tenant.semaphore_handler.semaphores)
- self.assertEqual(len(tenant.semaphore_handler.semaphores.get(
- 'test-semaphore-two', [])), 2)
-
- self.executor_server.release('semaphore-two-test2')
- self.waitUntilSettled()
-
- self.assertEqual(len(self.builds), 4)
- self.assertEqual(self.builds[0].name, 'project-test1')
- self.assertEqual(self.builds[1].name, 'project-test1')
- self.assertEqual(self.builds[2].name, 'semaphore-two-test1')
- self.assertEqual(self.builds[3].name, 'semaphore-two-test2')
- self.assertTrue('test-semaphore-two' in
- tenant.semaphore_handler.semaphores)
- self.assertEqual(len(tenant.semaphore_handler.semaphores.get(
- 'test-semaphore-two', [])), 2)
-
- self.executor_server.release('semaphore-two-test1')
- self.waitUntilSettled()
-
- self.assertEqual(len(self.builds), 3)
- self.assertEqual(self.builds[0].name, 'project-test1')
- self.assertEqual(self.builds[1].name, 'project-test1')
- self.assertEqual(self.builds[2].name, 'semaphore-two-test2')
- self.assertTrue('test-semaphore-two' in
- tenant.semaphore_handler.semaphores)
- self.assertEqual(len(tenant.semaphore_handler.semaphores.get(
- 'test-semaphore-two', [])), 1)
-
- self.executor_server.release('semaphore-two-test2')
- self.waitUntilSettled()
-
- self.assertEqual(len(self.builds), 2)
- self.assertEqual(self.builds[0].name, 'project-test1')
- self.assertEqual(self.builds[1].name, 'project-test1')
- self.assertFalse('test-semaphore-two' in
- tenant.semaphore_handler.semaphores)
-
- self.executor_server.hold_jobs_in_build = False
- self.executor_server.release()
-
- self.waitUntilSettled()
- self.assertEqual(len(self.builds), 0)
-
- self.assertEqual(A.reported, 1)
- self.assertEqual(B.reported, 1)
-
- def test_semaphore_abandon(self):
- "Test abandon with job semaphores"
- self.updateConfigLayout('layout-semaphore')
- self.sched.reconfigure(self.config)
-
- self.waitUntilSettled()
- tenant = self.sched.abide.tenants.get('openstack')
-
- self.executor_server.hold_jobs_in_build = True
-
- tenant = self.sched.abide.tenants.get('openstack')
- check_pipeline = tenant.layout.pipelines['check']
-
- A = self.fake_gerrit.addFakeChange('org/project', 'master', 'A')
- self.assertFalse('test-semaphore' in
- tenant.semaphore_handler.semaphores)
-
- self.fake_gerrit.addEvent(A.getPatchsetCreatedEvent(1))
- self.waitUntilSettled()
-
- self.assertTrue('test-semaphore' in
- tenant.semaphore_handler.semaphores)
-
- self.fake_gerrit.addEvent(A.getChangeAbandonedEvent())
- self.waitUntilSettled()
-
- # The check pipeline should be empty
- items = check_pipeline.getAllItems()
- self.assertEqual(len(items), 0)
-
- # The semaphore should be released
- self.assertFalse('test-semaphore' in
- tenant.semaphore_handler.semaphores)
-
- self.executor_server.hold_jobs_in_build = False
- self.executor_server.release()
- self.waitUntilSettled()
-
- def test_semaphore_reconfigure(self):
- "Test reconfigure with job semaphores"
- self.updateConfigLayout('layout-semaphore')
- self.sched.reconfigure(self.config)
-
- self.waitUntilSettled()
- tenant = self.sched.abide.tenants.get('openstack')
-
- self.executor_server.hold_jobs_in_build = True
-
- A = self.fake_gerrit.addFakeChange('org/project', 'master', 'A')
- self.assertFalse('test-semaphore' in
- tenant.semaphore_handler.semaphores)
-
- self.fake_gerrit.addEvent(A.getPatchsetCreatedEvent(1))
- self.waitUntilSettled()
-
- self.assertTrue('test-semaphore' in
- tenant.semaphore_handler.semaphores)
-
- # reconfigure without layout change
- self.sched.reconfigure(self.config)
- self.waitUntilSettled()
- tenant = self.sched.abide.tenants.get('openstack')
-
- # semaphore still must be held
- self.assertTrue('test-semaphore' in
- tenant.semaphore_handler.semaphores)
-
- self.updateConfigLayout('layout-semaphore-reconfiguration')
- self.sched.reconfigure(self.config)
- self.waitUntilSettled()
- tenant = self.sched.abide.tenants.get('openstack')
-
- self.executor_server.release('project-test1')
- self.waitUntilSettled()
-
- # There should be no builds anymore
- self.assertEqual(len(self.builds), 0)
-
- # The semaphore should be released
- self.assertFalse('test-semaphore' in
- tenant.semaphore_handler.semaphores)
-
def test_live_reconfiguration(self):
"Test that live reconfiguration works"
self.executor_server.hold_jobs_in_build = True
@@ -2529,13 +2302,11 @@
self.assertEqual(A.data['status'], 'MERGED')
self.assertEqual(A.reported, 2)
- @skip("Disabled for early v3 development")
def test_live_reconfiguration_merge_conflict(self):
# A real-world bug: a change in a gate queue has a merge
# conflict and a job is added to its project while it's
# sitting in the queue. The job gets added to the change and
# enqueued and the change gets stuck.
- self.worker.registerFunction('build:project-test3')
self.executor_server.hold_jobs_in_build = True
# This change is fine. It's here to stop the queue long
@@ -2543,14 +2314,14 @@
# reconfiguration, as well as to provide a conflict for the
# next change. This change will succeed and merge.
A = self.fake_gerrit.addFakeChange('org/project', 'master', 'A')
- A.addPatchset(['conflict'])
+ A.addPatchset({'conflict': 'A'})
A.addApproval('code-review', 2)
# This change will be in merge conflict. During the
# reconfiguration, we will add a job. We want to make sure
# that doesn't cause it to get stuck.
B = self.fake_gerrit.addFakeChange('org/project', 'master', 'B')
- B.addPatchset(['conflict'])
+ B.addPatchset({'conflict': 'B'})
B.addApproval('code-review', 2)
self.fake_gerrit.addEvent(A.addApproval('approved', 1))
@@ -2562,12 +2333,11 @@
self.assertEqual(A.data['status'], 'NEW')
self.assertEqual(A.reported, 1)
self.assertEqual(B.data['status'], 'NEW')
- self.assertEqual(B.reported, 1)
self.assertEqual(len(self.history), 0)
# Add the "project-test3" job.
- self.updateConfigLayout(
- 'tests/fixtures/layout-live-reconfiguration-add-job.yaml')
+ self.commitConfigUpdate('common-config',
+ 'layouts/live-reconfiguration-add-job.yaml')
self.sched.reconfigure(self.config)
self.waitUntilSettled()
@@ -2578,7 +2348,7 @@
self.assertEqual(A.data['status'], 'MERGED')
self.assertEqual(A.reported, 2)
self.assertEqual(B.data['status'], 'NEW')
- self.assertEqual(B.reported, 2)
+ self.assertIn('Merge Failed', B.messages[-1])
self.assertEqual(self.getJobFromHistory('project-merge').result,
'SUCCESS')
self.assertEqual(self.getJobFromHistory('project-test1').result,
@@ -2589,19 +2359,17 @@
'SUCCESS')
self.assertEqual(len(self.history), 4)
- @skip("Disabled for early v3 development")
def test_live_reconfiguration_failed_root(self):
# An extrapolation of test_live_reconfiguration_merge_conflict
# that tests a job added to a job tree with a failed root does
# not run.
- self.worker.registerFunction('build:project-test3')
self.executor_server.hold_jobs_in_build = True
# This change is fine. It's here to stop the queue long
# enough for the next change to be subject to the
# reconfiguration. This change will succeed and merge.
A = self.fake_gerrit.addFakeChange('org/project', 'master', 'A')
- A.addPatchset(['conflict'])
+ A.addPatchset({'conflict': 'A'})
A.addApproval('code-review', 2)
self.fake_gerrit.addEvent(A.addApproval('approved', 1))
self.waitUntilSettled()
@@ -2629,8 +2397,8 @@
self.assertEqual(len(self.history), 2)
# Add the "project-test3" job.
- self.updateConfigLayout(
- 'tests/fixtures/layout-live-reconfiguration-add-job.yaml')
+ self.commitConfigUpdate('common-config',
+ 'layouts/live-reconfiguration-add-job.yaml')
self.sched.reconfigure(self.config)
self.waitUntilSettled()
@@ -2651,7 +2419,6 @@
self.assertEqual(self.history[4].result, 'SUCCESS')
self.assertEqual(len(self.history), 5)
- @skip("Disabled for early v3 development")
def test_live_reconfiguration_failed_job(self):
# Test that a change with a removed failing job does not
# disrupt reconfiguration. If a change has a failed job and
@@ -2683,8 +2450,8 @@
self.assertEqual(len(self.history), 2)
# Remove the test1 job.
- self.updateConfigLayout(
- 'tests/fixtures/layout-live-reconfiguration-failed-job.yaml')
+ self.commitConfigUpdate('common-config',
+ 'layouts/live-reconfiguration-failed-job.yaml')
self.sched.reconfigure(self.config)
self.waitUntilSettled()
@@ -2704,7 +2471,6 @@
# Ensure the removed job was not included in the report.
self.assertNotIn('project-test1', A.messages[0])
- @skip("Disabled for early v3 development")
def test_live_reconfiguration_shared_queue(self):
# Test that a change with a failing job which was removed from
# this project but otherwise still exists in the system does
@@ -2726,15 +2492,16 @@
self.assertEqual(A.data['status'], 'NEW')
self.assertEqual(A.reported, 0)
- self.assertEqual(self.getJobFromHistory('project1-merge').result,
+ self.assertEqual(self.getJobFromHistory('project-merge').result,
'SUCCESS')
self.assertEqual(self.getJobFromHistory(
'project1-project2-integration').result, 'FAILURE')
self.assertEqual(len(self.history), 2)
# Remove the integration job.
- self.updateConfigLayout(
- 'tests/fixtures/layout-live-reconfiguration-shared-queue.yaml')
+ self.commitConfigUpdate(
+ 'common-config',
+ 'layouts/live-reconfiguration-shared-queue.yaml')
self.sched.reconfigure(self.config)
self.waitUntilSettled()
@@ -2742,11 +2509,11 @@
self.executor_server.release()
self.waitUntilSettled()
- self.assertEqual(self.getJobFromHistory('project1-merge').result,
+ self.assertEqual(self.getJobFromHistory('project-merge').result,
'SUCCESS')
- self.assertEqual(self.getJobFromHistory('project1-test1').result,
+ self.assertEqual(self.getJobFromHistory('project-test1').result,
'SUCCESS')
- self.assertEqual(self.getJobFromHistory('project1-test2').result,
+ self.assertEqual(self.getJobFromHistory('project-test2').result,
'SUCCESS')
self.assertEqual(self.getJobFromHistory(
'project1-project2-integration').result, 'FAILURE')
@@ -2758,7 +2525,6 @@
# Ensure the removed job was not included in the report.
self.assertNotIn('project1-project2-integration', A.messages[0])
- @skip("Disabled for early v3 development")
def test_double_live_reconfiguration_shared_queue(self):
# This was a real-world regression. A change is added to
# gate; a reconfigure happens, a second change which depends
@@ -2828,8 +2594,9 @@
self.assertEqual(len(self.builds), 5)
# This layout defines only org/project, not org/project1
- self.commitLayoutUpdate('common-config',
- 'layout-live-reconfiguration-del-project')
+ self.commitConfigUpdate(
+ 'common-config',
+ 'layouts/live-reconfiguration-del-project.yaml')
self.sched.reconfigure(self.config)
self.waitUntilSettled()
@@ -2879,10 +2646,8 @@
self.assertEqual(A.data['status'], 'MERGED')
self.assertEqual(A.reported, 2)
+ @simple_layout('layouts/repo-deleted.yaml')
def test_repo_deleted(self):
- self.updateConfigLayout('layout-repo-deleted')
- self.sched.reconfigure(self.config)
-
self.init_repo("org/delete-project")
A = self.fake_gerrit.addFakeChange('org/delete-project', 'master', 'A')
@@ -2919,18 +2684,16 @@
self.assertEqual(B.data['status'], 'MERGED')
self.assertEqual(B.reported, 2)
+ @simple_layout('layouts/tags.yaml')
def test_tags(self):
"Test job tags"
- self.updateConfigLayout('layout-tags')
- self.sched.reconfigure(self.config)
-
A = self.fake_gerrit.addFakeChange('org/project1', 'master', 'A')
B = self.fake_gerrit.addFakeChange('org/project2', 'master', 'B')
self.fake_gerrit.addEvent(A.getPatchsetCreatedEvent(1))
self.fake_gerrit.addEvent(B.getPatchsetCreatedEvent(1))
self.waitUntilSettled()
- self.assertEqual(len(self.history), 8)
+ self.assertEqual(len(self.history), 2)
results = {self.getJobFromHistory('merge',
project='org/project1').uuid: 'extratag merge',
@@ -2943,8 +2706,12 @@
def test_timer(self):
"Test that a periodic job is triggered"
+ # This test can not use simple_layout because it must start
+ # with a configuration which does not include a
+ # timer-triggered job so that we have an opportunity to set
+ # the hold flag before the first job.
self.executor_server.hold_jobs_in_build = True
- self.updateConfigLayout('layout-timer')
+ self.commitConfigUpdate('common-config', 'layouts/timer.yaml')
self.sched.reconfigure(self.config)
# The pipeline triggers every second, so we should have seen
@@ -2957,14 +2724,14 @@
port = self.webapp.server.socket.getsockname()[1]
req = urllib.request.Request(
- "http://localhost:%s/openstack/status" % port)
+ "http://localhost:%s/tenant-one/status" % port)
f = urllib.request.urlopen(req)
- data = f.read()
+ data = f.read().decode('utf8')
self.executor_server.hold_jobs_in_build = False
# Stop queuing timer triggered jobs so that the assertions
# below don't race against more jobs being queued.
- self.commitLayoutUpdate('layout-timer', 'layout-no-timer')
+ self.commitConfigUpdate('common-config', 'layouts/no-timer.yaml')
self.sched.reconfigure(self.config)
self.executor_server.release()
self.waitUntilSettled()
@@ -2987,13 +2754,18 @@
def test_idle(self):
"Test that frequent periodic jobs work"
+ # This test can not use simple_layout because it must start
+ # with a configuration which does not include a
+ # timer-triggered job so that we have an opportunity to set
+ # the hold flag before the first job.
self.executor_server.hold_jobs_in_build = True
- self.updateConfigLayout('layout-idle')
for x in range(1, 3):
# Test that timer triggers periodic jobs even across
# layout config reloads.
# Start timer trigger
+ self.commitConfigUpdate('common-config',
+ 'layouts/idle.yaml')
self.sched.reconfigure(self.config)
self.waitUntilSettled()
@@ -3003,7 +2775,8 @@
# Stop queuing timer triggered jobs so that the assertions
# below don't race against more jobs being queued.
- before = self.commitLayoutUpdate('layout-idle', 'layout-no-timer')
+ self.commitConfigUpdate('common-config',
+ 'layouts/no-timer.yaml')
self.sched.reconfigure(self.config)
self.waitUntilSettled()
self.assertEqual(len(self.builds), 2,
@@ -3012,16 +2785,9 @@
self.waitUntilSettled()
self.assertEqual(len(self.builds), 0)
self.assertEqual(len(self.history), x * 2)
- # Revert back to layout-idle
- repo = git.Repo(os.path.join(self.test_root,
- 'upstream',
- 'layout-idle'))
- repo.git.reset('--hard', before)
+ @simple_layout('layouts/smtp.yaml')
def test_check_smtp_pool(self):
- self.updateConfigLayout('layout-smtp')
- self.sched.reconfigure(self.config)
-
A = self.fake_gerrit.addFakeChange('org/project', 'master', 'A')
self.waitUntilSettled()
@@ -3050,8 +2816,12 @@
def test_timer_smtp(self):
"Test that a periodic job is triggered"
+ # This test can not use simple_layout because it must start
+ # with a configuration which does not include a
+ # timer-triggered job so that we have an opportunity to set
+ # the hold flag before the first job.
self.executor_server.hold_jobs_in_build = True
- self.updateConfigLayout('layout-timer-smtp')
+ self.commitConfigUpdate('common-config', 'layouts/timer-smtp.yaml')
self.sched.reconfigure(self.config)
# The pipeline triggers every second, so we should have seen
@@ -3084,7 +2854,7 @@
# Stop queuing timer triggered jobs and let any that may have
# queued through so that end of test assertions pass.
- self.commitLayoutUpdate('layout-timer-smtp', 'layout-no-timer')
+ self.commitConfigUpdate('common-config', 'layouts/no-timer.yaml')
self.sched.reconfigure(self.config)
self.waitUntilSettled()
self.executor_server.release('.*')
@@ -3142,6 +2912,7 @@
client = zuul.rpcclient.RPCClient('127.0.0.1',
self.gearman_server.port)
+ self.addCleanup(client.shutdown)
r = client.enqueue(tenant='tenant-one',
pipeline='gate',
project='org/project',
@@ -3163,6 +2934,7 @@
client = zuul.rpcclient.RPCClient('127.0.0.1',
self.gearman_server.port)
+ self.addCleanup(client.shutdown)
r = client.enqueue_ref(
tenant='tenant-one',
pipeline='post',
@@ -3181,6 +2953,7 @@
"Test that the RPC client returns errors"
client = zuul.rpcclient.RPCClient('127.0.0.1',
self.gearman_server.port)
+ self.addCleanup(client.shutdown)
with testtools.ExpectedException(zuul.rpcclient.RPCFailure,
"Invalid tenant"):
r = client.enqueue(tenant='tenant-foo',
@@ -3188,7 +2961,6 @@
project='org/project',
trigger='gerrit',
change='1,1')
- client.shutdown()
self.assertEqual(r, False)
with testtools.ExpectedException(zuul.rpcclient.RPCFailure,
@@ -3198,7 +2970,6 @@
project='project-does-not-exist',
trigger='gerrit',
change='1,1')
- client.shutdown()
self.assertEqual(r, False)
with testtools.ExpectedException(zuul.rpcclient.RPCFailure,
@@ -3208,7 +2979,6 @@
project='org/project',
trigger='gerrit',
change='1,1')
- client.shutdown()
self.assertEqual(r, False)
with testtools.ExpectedException(zuul.rpcclient.RPCFailure,
@@ -3218,7 +2988,6 @@
project='org/project',
trigger='trigger-does-not-exist',
change='1,1')
- client.shutdown()
self.assertEqual(r, False)
with testtools.ExpectedException(zuul.rpcclient.RPCFailure,
@@ -3228,7 +2997,6 @@
project='org/project',
trigger='gerrit',
change='1,1')
- client.shutdown()
self.assertEqual(r, False)
self.waitUntilSettled()
@@ -3259,6 +3027,7 @@
client = zuul.rpcclient.RPCClient('127.0.0.1',
self.gearman_server.port)
+ self.addCleanup(client.shutdown)
r = client.promote(tenant='tenant-one',
pipeline='gate',
change_ids=['2,1', '3,1'])
@@ -3307,7 +3076,6 @@
self.assertEqual(C.data['status'], 'MERGED')
self.assertEqual(C.reported, 2)
- client.shutdown()
self.assertEqual(r, True)
def test_client_promote_dependent(self):
@@ -3333,6 +3101,7 @@
client = zuul.rpcclient.RPCClient('127.0.0.1',
self.gearman_server.port)
+ self.addCleanup(client.shutdown)
r = client.promote(tenant='tenant-one',
pipeline='gate',
change_ids=['3,1'])
@@ -3375,7 +3144,6 @@
self.assertEqual(C.data['status'], 'MERGED')
self.assertEqual(C.reported, 2)
- client.shutdown()
self.assertEqual(r, True)
def test_client_promote_negative(self):
@@ -3388,29 +3156,27 @@
client = zuul.rpcclient.RPCClient('127.0.0.1',
self.gearman_server.port)
+ self.addCleanup(client.shutdown)
with testtools.ExpectedException(zuul.rpcclient.RPCFailure):
r = client.promote(tenant='tenant-one',
pipeline='nonexistent',
change_ids=['2,1', '3,1'])
- client.shutdown()
self.assertEqual(r, False)
with testtools.ExpectedException(zuul.rpcclient.RPCFailure):
r = client.promote(tenant='tenant-one',
pipeline='gate',
change_ids=['4,1'])
- client.shutdown()
self.assertEqual(r, False)
self.executor_server.hold_jobs_in_build = False
self.executor_server.release()
self.waitUntilSettled()
+ @simple_layout('layouts/rate-limit.yaml')
def test_queue_rate_limiting(self):
"Test that DependentPipelines are rate limited with dep across window"
- self.updateConfigLayout('layout-rate-limit')
- self.sched.reconfigure(self.config)
self.executor_server.hold_jobs_in_build = True
A = self.fake_gerrit.addFakeChange('org/project', 'master', 'A')
B = self.fake_gerrit.addFakeChange('org/project', 'master', 'B')
@@ -3434,9 +3200,10 @@
self.assertEqual(self.builds[0].name, 'project-merge')
self.assertEqual(self.builds[1].name, 'project-merge')
- self.executor_server.release('.*-merge')
+ # Release the merge jobs one at a time.
+ self.builds[0].release()
self.waitUntilSettled()
- self.executor_server.release('.*-merge')
+ self.builds[0].release()
self.waitUntilSettled()
# Only A and B will have their test jobs queued because
@@ -3450,7 +3217,7 @@
self.executor_server.release('project-.*')
self.waitUntilSettled()
- tenant = self.sched.abide.tenants.get('openstack')
+ tenant = self.sched.abide.tenants.get('tenant-one')
queue = tenant.layout.pipelines['gate'].queues[0]
# A failed so window is reduced by 1 to 1.
self.assertEqual(queue.window, 1)
@@ -3498,10 +3265,9 @@
self.assertEqual(queue.window_floor, 1)
self.assertEqual(C.data['status'], 'MERGED')
+ @simple_layout('layouts/rate-limit.yaml')
def test_queue_rate_limiting_dependent(self):
"Test that DependentPipelines are rate limited with dep in window"
- self.updateConfigLayout('layout-rate-limit')
- self.sched.reconfigure(self.config)
self.executor_server.hold_jobs_in_build = True
A = self.fake_gerrit.addFakeChange('org/project', 'master', 'A')
B = self.fake_gerrit.addFakeChange('org/project', 'master', 'B')
@@ -3526,10 +3292,7 @@
self.assertEqual(self.builds[0].name, 'project-merge')
self.assertEqual(self.builds[1].name, 'project-merge')
- self.executor_server.release('.*-merge')
- self.waitUntilSettled()
- self.executor_server.release('.*-merge')
- self.waitUntilSettled()
+ self.orderedRelease(2)
# Only A and B will have their test jobs queued because
# window is 2.
@@ -3542,7 +3305,7 @@
self.executor_server.release('project-.*')
self.waitUntilSettled()
- tenant = self.sched.abide.tenants.get('openstack')
+ tenant = self.sched.abide.tenants.get('tenant-one')
queue = tenant.layout.pipelines['gate'].queues[0]
# A failed so window is reduced by 1 to 1.
self.assertEqual(queue.window, 1)
@@ -3555,8 +3318,7 @@
self.assertEqual(len(self.builds), 1)
self.assertEqual(self.builds[0].name, 'project-merge')
- self.executor_server.release('.*-merge')
- self.waitUntilSettled()
+ self.orderedRelease(1)
# Only C's test jobs are queued because window is still 1.
self.assertEqual(len(self.builds), 2)
@@ -3591,7 +3353,7 @@
if time.time() - start > 10:
raise Exception("Timeout waiting for gearman server to report "
+ "back to the client")
- build = self.executor.builds.values()[0]
+ build = list(self.executor.builds.values())[0]
if build.worker.name == "My Worker":
break
else:
@@ -3610,11 +3372,9 @@
self.executor_server.release()
self.waitUntilSettled()
+ @simple_layout('layouts/footer-message.yaml')
def test_footer_message(self):
"Test a pipeline's footer message is correctly added to the report."
- self.updateConfigLayout('layout-footer-message')
- self.sched.reconfigure(self.config)
-
A = self.fake_gerrit.addFakeChange('org/project', 'master', 'A')
A.addApproval('code-review', 2)
self.executor_server.failJob('project-test1', A)
@@ -3640,6 +3400,16 @@
self.assertFalse(self.smtp_messages[1]['body'].startswith(failure_msg))
self.assertTrue(self.smtp_messages[1]['body'].endswith(footer_msg))
+ @simple_layout('layouts/unmanaged-project.yaml')
+ def test_unmanaged_project_start_message(self):
+ "Test start reporting is not done for unmanaged projects."
+ self.init_repo("org/project", tag='init')
+ A = self.fake_gerrit.addFakeChange('org/project', 'master', 'A')
+ self.fake_gerrit.addEvent(A.getPatchsetCreatedEvent(1))
+ self.waitUntilSettled()
+
+ self.assertEqual(0, len(A.messages))
+
@skip("Disabled for early v3 development")
def test_merge_failure_reporters(self):
"""Check that the config is set up correctly"""
@@ -3758,6 +3528,7 @@
client = zuul.rpcclient.RPCClient('127.0.0.1',
self.gearman_server.port)
+ self.addCleanup(client.shutdown)
# Wait for gearman server to send the initial workData back to zuul
start = time.time()
@@ -3765,7 +3536,7 @@
if time.time() - start > 10:
raise Exception("Timeout waiting for gearman server to report "
+ "back to the client")
- build = self.executor_client.builds.values()[0]
+ build = list(self.executor_client.builds.values())[0]
if build.worker.name == "My Worker":
break
else:
@@ -3778,9 +3549,9 @@
self.assertEqual([], running_item['failing_reasons'])
self.assertEqual([], running_item['items_behind'])
self.assertEqual('https://hostname/1', running_item['url'])
- self.assertEqual(None, running_item['item_ahead'])
+ self.assertIsNone(running_item['item_ahead'])
self.assertEqual('org/project', running_item['project'])
- self.assertEqual(None, running_item['remaining_time'])
+ self.assertIsNone(running_item['remaining_time'])
self.assertEqual(True, running_item['active'])
self.assertEqual('1,1', running_item['id'])
@@ -3795,7 +3566,7 @@
self.assertEqual(7, len(job['worker']))
self.assertEqual(False, job['canceled'])
self.assertEqual(True, job['voting'])
- self.assertEqual(None, job['result'])
+ self.assertIsNone(job['result'])
self.assertEqual('gate', job['pipeline'])
break
@@ -3806,6 +3577,7 @@
running_items = client.get_running_jobs()
self.assertEqual(0, len(running_items))
+ @simple_layout('layouts/nonvoting-pipeline.yaml')
def test_nonvoting_pipeline(self):
"Test that a nonvoting pipeline (experimental) can still report"
@@ -4072,7 +3844,7 @@
def test_crd_gate_unknown(self):
"Test unknown projects in dependent pipeline"
- self.init_repo("org/unknown")
+ self.init_repo("org/unknown", tag='init')
A = self.fake_gerrit.addFakeChange('org/project', 'master', 'A')
B = self.fake_gerrit.addFakeChange('org/unknown', 'master', 'B')
A.addApproval('code-review', 2)
@@ -4127,8 +3899,6 @@
self.fake_gerrit.addEvent(A.getPatchsetCreatedEvent(1))
self.waitUntilSettled()
- queue = self.gearman_server.getQueue()
- ref = self.getParameter(queue[-1], 'ZUUL_REF')
self.gearman_server.hold_jobs_in_queue = False
self.gearman_server.release()
self.waitUntilSettled()
@@ -4136,21 +3906,7 @@
self.executor_server.release('.*-merge')
self.waitUntilSettled()
- path = os.path.join(self.builds[0].jobdir.src_root, "org/project1")
- repo = git.Repo(path)
- repo_messages = [c.message.strip() for c in repo.iter_commits(ref)]
- repo_messages.reverse()
- correct_messages = [
- 'initial commit', 'add content from fixture', 'A-1']
- self.assertEqual(repo_messages, correct_messages)
-
- path = os.path.join(self.builds[0].jobdir.src_root, "org/project2")
- repo = git.Repo(path)
- repo_messages = [c.message.strip() for c in repo.iter_commits(ref)]
- repo_messages.reverse()
- correct_messages = [
- 'initial commit', 'add content from fixture', 'B-1']
- self.assertEqual(repo_messages, correct_messages)
+ self.assertTrue(self.builds[0].hasChanges(A, B))
self.executor_server.hold_jobs_in_build = False
self.executor_server.release()
@@ -4286,13 +4042,12 @@
independent pipelines"""
# It's a hack for fake gerrit,
# as it implies repo creation upon the creation of any change
- self.init_repo("org/unknown")
+ self.init_repo("org/unknown", tag='init')
self._test_crd_check_reconfiguration('org/project1', 'org/unknown')
+ @simple_layout('layouts/ignore-dependencies.yaml')
def test_crd_check_ignore_dependencies(self):
"Test cross-repo dependencies can be ignored"
- self.updateConfigLayout('layout-ignore-dependencies')
- self.sched.reconfigure(self.config)
self.gearman_server.hold_jobs_in_queue = True
A = self.fake_gerrit.addFakeChange('org/project1', 'master', 'A')
@@ -4311,7 +4066,7 @@
# Make sure none of the items share a change queue, and all
# are live.
- tenant = self.sched.abide.tenants.get('openstack')
+ tenant = self.sched.abide.tenants.get('tenant-one')
check_pipeline = tenant.layout.pipelines['check']
self.assertEqual(len(check_pipeline.queues), 3)
self.assertEqual(len(check_pipeline.getAllItems()), 3)
@@ -4333,6 +4088,7 @@
for job in self.history:
self.assertEqual(len(job.changes.split()), 1)
+ @simple_layout('layouts/three-projects.yaml')
def test_crd_check_transitive(self):
"Test transitive cross-repo dependencies"
# Specifically, if A -> B -> C, and C gets a new patchset and
@@ -4375,7 +4131,7 @@
def test_crd_check_unknown(self):
"Test unknown projects in independent pipeline"
- self.init_repo("org/unknown")
+ self.init_repo("org/unknown", tag='init')
A = self.fake_gerrit.addFakeChange('org/project1', 'master', 'A')
B = self.fake_gerrit.addFakeChange('org/unknown', 'master', 'D')
# A Depends-On: B
@@ -4418,7 +4174,8 @@
# processing.
tenant = self.sched.abide.tenants.get('tenant-one')
- source = tenant.layout.pipelines['gate'].source
+ (trusted, project) = tenant.getProject('org/project')
+ source = project.source
# TODO(pabelanger): As we add more source / trigger APIs we should make
# it easier for users to create events for testing.
@@ -4441,13 +4198,11 @@
event.change_number = '2'
source.getChange(event, True)
+ @simple_layout('layouts/disable_at.yaml')
def test_disable_at(self):
"Test a pipeline will only report to the disabled trigger when failing"
- self.updateConfigLayout('layout-disabled-at')
- self.sched.reconfigure(self.config)
-
- tenant = self.sched.abide.tenants.get('openstack')
+ tenant = self.sched.abide.tenants.get('tenant-one')
self.assertEqual(3, tenant.layout.pipelines['check'].disable_at)
self.assertEqual(
0, tenant.layout.pipelines['check']._consecutive_failures)
@@ -4545,7 +4300,7 @@
# comes out of disabled
self.sched.reconfigure(self.config)
- tenant = self.sched.abide.tenants.get('openstack')
+ tenant = self.sched.abide.tenants.get('tenant-one')
self.assertEqual(3, tenant.layout.pipelines['check'].disable_at)
self.assertEqual(
@@ -4568,6 +4323,24 @@
# No more messages reported via smtp
self.assertEqual(3, len(self.smtp_messages))
+ @simple_layout('layouts/one-job-project.yaml')
+ def test_one_job_project(self):
+ "Test that queueing works with one job"
+ A = self.fake_gerrit.addFakeChange('org/one-job-project',
+ 'master', 'A')
+ B = self.fake_gerrit.addFakeChange('org/one-job-project',
+ 'master', 'B')
+ A.addApproval('code-review', 2)
+ B.addApproval('code-review', 2)
+ self.fake_gerrit.addEvent(A.addApproval('approved', 1))
+ self.fake_gerrit.addEvent(B.addApproval('approved', 1))
+ self.waitUntilSettled()
+
+ self.assertEqual(A.data['status'], 'MERGED')
+ self.assertEqual(A.reported, 2)
+ self.assertEqual(B.data['status'], 'MERGED')
+ self.assertEqual(B.reported, 2)
+
def test_rerun_on_abort(self):
"Test that if a execute server fails to run a job, it is run again"
@@ -4638,6 +4411,39 @@
self.assertIn('project-test2 : SKIPPED', A.messages[1])
+class TestExecutor(ZuulTestCase):
+ tenant_config_file = 'config/single-tenant/main.yaml'
+
+ def assertFinalState(self):
+ # In this test, we expect to shut down in a non-final state,
+ # so skip these checks.
+ pass
+
+ def assertCleanShutdown(self):
+ self.log.debug("Assert clean shutdown")
+
+ # After shutdown, make sure no jobs are running
+ self.assertEqual({}, self.executor_server.job_workers)
+
+ # Make sure that git.Repo objects have been garbage collected.
+ repos = []
+ gc.collect()
+ for obj in gc.get_objects():
+ if isinstance(obj, git.Repo):
+ self.log.debug("Leaked git repo object: %s" % repr(obj))
+ repos.append(obj)
+ self.assertEqual(len(repos), 0)
+
+ def test_executor_shutdown(self):
+ "Test that the executor can shut down with jobs running"
+
+ self.executor_server.hold_jobs_in_build = True
+ A = self.fake_gerrit.addFakeChange('org/project', 'master', 'A')
+ A.addApproval('code-review', 2)
+ self.fake_gerrit.addEvent(A.addApproval('approved', 1))
+ self.waitUntilSettled()
+
+
class TestDependencyGraph(ZuulTestCase):
tenant_config_file = 'config/dependency-graph/main.yaml'
@@ -4784,27 +4590,6 @@
self.assertIn('project-test1', A.messages[0])
-class TestSchedulerOneJobProject(ZuulTestCase):
- tenant_config_file = 'config/one-job-project/main.yaml'
-
- def test_one_job_project(self):
- "Test that queueing works with one job"
- A = self.fake_gerrit.addFakeChange('org/one-job-project',
- 'master', 'A')
- B = self.fake_gerrit.addFakeChange('org/one-job-project',
- 'master', 'B')
- A.addApproval('code-review', 2)
- B.addApproval('code-review', 2)
- self.fake_gerrit.addEvent(A.addApproval('approved', 1))
- self.fake_gerrit.addEvent(B.addApproval('approved', 1))
- self.waitUntilSettled()
-
- self.assertEqual(A.data['status'], 'MERGED')
- self.assertEqual(A.reported, 2)
- self.assertEqual(B.data['status'], 'MERGED')
- self.assertEqual(B.reported, 2)
-
-
class TestSchedulerTemplatedProject(ZuulTestCase):
tenant_config_file = 'config/templated-project/main.yaml'
@@ -4901,7 +4686,8 @@
build = self.builds[-1]
ref = self.getParameter(build, 'ZUUL_REF')
- path = os.path.join(build.jobdir.src_root, project)
+ path = os.path.join(build.jobdir.src_root, 'review.example.com',
+ project)
repo = git.Repo(path)
repo_messages = [c.message.strip() for c in repo.iter_commits(ref)]
repo_messages.reverse()
@@ -4971,8 +4757,8 @@
build = self.builds[-1]
self.assertEqual(self.getParameter(build, 'ZUUL_BRANCH'), 'mp')
ref = self.getParameter(build, 'ZUUL_REF')
- path = os.path.join(
- build.jobdir.src_root, 'org/project-merge-branches')
+ path = os.path.join(build.jobdir.src_root, 'review.example.com',
+ 'org/project-merge-branches')
repo = git.Repo(path)
repo_messages = [c.message.strip() for c in repo.iter_commits(ref)]
@@ -5016,8 +4802,8 @@
self.log.debug("Got Zuul ref for change A: %s" % ref_A)
self.log.debug("Got Zuul commit for change A: %s" % commit_A)
- path = os.path.join(
- job_A.jobdir.src_root, "org/project-merge-branches")
+ path = os.path.join(job_A.jobdir.src_root, 'review.example.com',
+ 'org/project-merge-branches')
repo = git.Repo(path)
repo_messages = [c.message.strip()
for c in repo.iter_commits(ref_A)]
@@ -5038,8 +4824,8 @@
self.log.debug("Got Zuul ref for change B: %s" % ref_B)
self.log.debug("Got Zuul commit for change B: %s" % commit_B)
- path = os.path.join(
- job_B.jobdir.src_root, "org/project-merge-branches")
+ path = os.path.join(job_B.jobdir.src_root, 'review.example.com',
+ 'org/project-merge-branches')
repo = git.Repo(path)
repo_messages = [c.message.strip()
for c in repo.iter_commits(ref_B)]
@@ -5059,8 +4845,8 @@
commit_C = self.getParameter(job_C, 'ZUUL_COMMIT')
self.log.debug("Got Zuul ref for change C: %s" % ref_C)
self.log.debug("Got Zuul commit for change C: %s" % commit_C)
- path = os.path.join(
- job_C.jobdir.src_root, "org/project-merge-branches")
+ path = os.path.join(job_C.jobdir.src_root, 'review.example.com',
+ 'org/project-merge-branches')
repo = git.Repo(path)
repo_messages = [c.message.strip()
for c in repo.iter_commits(ref_C)]
@@ -5077,6 +4863,231 @@
self.waitUntilSettled()
+class TestSemaphore(ZuulTestCase):
+ tenant_config_file = 'config/semaphore/main.yaml'
+
+ def test_semaphore_one(self):
+ "Test semaphores with max=1 (mutex)"
+ tenant = self.sched.abide.tenants.get('tenant-one')
+
+ self.executor_server.hold_jobs_in_build = True
+
+ A = self.fake_gerrit.addFakeChange('org/project', 'master', 'A')
+ B = self.fake_gerrit.addFakeChange('org/project', 'master', 'B')
+ self.assertFalse('test-semaphore' in
+ tenant.semaphore_handler.semaphores)
+
+ self.fake_gerrit.addEvent(A.getPatchsetCreatedEvent(1))
+ self.fake_gerrit.addEvent(B.getPatchsetCreatedEvent(1))
+ self.waitUntilSettled()
+
+ self.assertEqual(len(self.builds), 3)
+ self.assertEqual(self.builds[0].name, 'project-test1')
+ self.assertEqual(self.builds[1].name, 'semaphore-one-test1')
+ self.assertEqual(self.builds[2].name, 'project-test1')
+
+ self.executor_server.release('semaphore-one-test1')
+ self.waitUntilSettled()
+
+ self.assertEqual(len(self.builds), 3)
+ self.assertEqual(self.builds[0].name, 'project-test1')
+ self.assertEqual(self.builds[1].name, 'project-test1')
+ self.assertEqual(self.builds[2].name, 'semaphore-one-test2')
+ self.assertTrue('test-semaphore' in
+ tenant.semaphore_handler.semaphores)
+
+ self.executor_server.release('semaphore-one-test2')
+ self.waitUntilSettled()
+
+ self.assertEqual(len(self.builds), 3)
+ self.assertEqual(self.builds[0].name, 'project-test1')
+ self.assertEqual(self.builds[1].name, 'project-test1')
+ self.assertEqual(self.builds[2].name, 'semaphore-one-test1')
+ self.assertTrue('test-semaphore' in
+ tenant.semaphore_handler.semaphores)
+
+ self.executor_server.release('semaphore-one-test1')
+ self.waitUntilSettled()
+
+ self.assertEqual(len(self.builds), 3)
+ self.assertEqual(self.builds[0].name, 'project-test1')
+ self.assertEqual(self.builds[1].name, 'project-test1')
+ self.assertEqual(self.builds[2].name, 'semaphore-one-test2')
+ self.assertTrue('test-semaphore' in
+ tenant.semaphore_handler.semaphores)
+
+ self.executor_server.release('semaphore-one-test2')
+ self.waitUntilSettled()
+
+ self.assertEqual(len(self.builds), 2)
+ self.assertEqual(self.builds[0].name, 'project-test1')
+ self.assertEqual(self.builds[1].name, 'project-test1')
+ self.assertFalse('test-semaphore' in
+ tenant.semaphore_handler.semaphores)
+
+ self.executor_server.hold_jobs_in_build = False
+ self.executor_server.release()
+
+ self.waitUntilSettled()
+ self.assertEqual(len(self.builds), 0)
+
+ self.assertEqual(A.reported, 1)
+ self.assertEqual(B.reported, 1)
+ self.assertFalse('test-semaphore' in
+ tenant.semaphore_handler.semaphores)
+
+ def test_semaphore_two(self):
+ "Test semaphores with max>1"
+ tenant = self.sched.abide.tenants.get('tenant-one')
+
+ self.executor_server.hold_jobs_in_build = True
+ A = self.fake_gerrit.addFakeChange('org/project1', 'master', 'A')
+ B = self.fake_gerrit.addFakeChange('org/project1', 'master', 'B')
+ self.assertFalse('test-semaphore-two' in
+ tenant.semaphore_handler.semaphores)
+
+ self.fake_gerrit.addEvent(A.getPatchsetCreatedEvent(1))
+ self.fake_gerrit.addEvent(B.getPatchsetCreatedEvent(1))
+ self.waitUntilSettled()
+
+ self.assertEqual(len(self.builds), 4)
+ self.assertEqual(self.builds[0].name, 'project-test1')
+ self.assertEqual(self.builds[1].name, 'semaphore-two-test1')
+ self.assertEqual(self.builds[2].name, 'semaphore-two-test2')
+ self.assertEqual(self.builds[3].name, 'project-test1')
+ self.assertTrue('test-semaphore-two' in
+ tenant.semaphore_handler.semaphores)
+ self.assertEqual(len(tenant.semaphore_handler.semaphores.get(
+ 'test-semaphore-two', [])), 2)
+
+ self.executor_server.release('semaphore-two-test1')
+ self.waitUntilSettled()
+
+ self.assertEqual(len(self.builds), 4)
+ self.assertEqual(self.builds[0].name, 'project-test1')
+ self.assertEqual(self.builds[1].name, 'semaphore-two-test2')
+ self.assertEqual(self.builds[2].name, 'project-test1')
+ self.assertEqual(self.builds[3].name, 'semaphore-two-test1')
+ self.assertTrue('test-semaphore-two' in
+ tenant.semaphore_handler.semaphores)
+ self.assertEqual(len(tenant.semaphore_handler.semaphores.get(
+ 'test-semaphore-two', [])), 2)
+
+ self.executor_server.release('semaphore-two-test2')
+ self.waitUntilSettled()
+
+ self.assertEqual(len(self.builds), 4)
+ self.assertEqual(self.builds[0].name, 'project-test1')
+ self.assertEqual(self.builds[1].name, 'project-test1')
+ self.assertEqual(self.builds[2].name, 'semaphore-two-test1')
+ self.assertEqual(self.builds[3].name, 'semaphore-two-test2')
+ self.assertTrue('test-semaphore-two' in
+ tenant.semaphore_handler.semaphores)
+ self.assertEqual(len(tenant.semaphore_handler.semaphores.get(
+ 'test-semaphore-two', [])), 2)
+
+ self.executor_server.release('semaphore-two-test1')
+ self.waitUntilSettled()
+
+ self.assertEqual(len(self.builds), 3)
+ self.assertEqual(self.builds[0].name, 'project-test1')
+ self.assertEqual(self.builds[1].name, 'project-test1')
+ self.assertEqual(self.builds[2].name, 'semaphore-two-test2')
+ self.assertTrue('test-semaphore-two' in
+ tenant.semaphore_handler.semaphores)
+ self.assertEqual(len(tenant.semaphore_handler.semaphores.get(
+ 'test-semaphore-two', [])), 1)
+
+ self.executor_server.release('semaphore-two-test2')
+ self.waitUntilSettled()
+
+ self.assertEqual(len(self.builds), 2)
+ self.assertEqual(self.builds[0].name, 'project-test1')
+ self.assertEqual(self.builds[1].name, 'project-test1')
+ self.assertFalse('test-semaphore-two' in
+ tenant.semaphore_handler.semaphores)
+
+ self.executor_server.hold_jobs_in_build = False
+ self.executor_server.release()
+
+ self.waitUntilSettled()
+ self.assertEqual(len(self.builds), 0)
+
+ self.assertEqual(A.reported, 1)
+ self.assertEqual(B.reported, 1)
+
+ def test_semaphore_abandon(self):
+ "Test abandon with job semaphores"
+ self.executor_server.hold_jobs_in_build = True
+ tenant = self.sched.abide.tenants.get('tenant-one')
+ check_pipeline = tenant.layout.pipelines['check']
+
+ A = self.fake_gerrit.addFakeChange('org/project', 'master', 'A')
+ self.assertFalse('test-semaphore' in
+ tenant.semaphore_handler.semaphores)
+
+ self.fake_gerrit.addEvent(A.getPatchsetCreatedEvent(1))
+ self.waitUntilSettled()
+
+ self.assertTrue('test-semaphore' in
+ tenant.semaphore_handler.semaphores)
+
+ self.fake_gerrit.addEvent(A.getChangeAbandonedEvent())
+ self.waitUntilSettled()
+
+ # The check pipeline should be empty
+ items = check_pipeline.getAllItems()
+ self.assertEqual(len(items), 0)
+
+ # The semaphore should be released
+ self.assertFalse('test-semaphore' in
+ tenant.semaphore_handler.semaphores)
+
+ self.executor_server.hold_jobs_in_build = False
+ self.executor_server.release()
+ self.waitUntilSettled()
+
+ def test_semaphore_reconfigure(self):
+ "Test reconfigure with job semaphores"
+ self.executor_server.hold_jobs_in_build = True
+ tenant = self.sched.abide.tenants.get('tenant-one')
+ A = self.fake_gerrit.addFakeChange('org/project', 'master', 'A')
+ self.assertFalse('test-semaphore' in
+ tenant.semaphore_handler.semaphores)
+
+ self.fake_gerrit.addEvent(A.getPatchsetCreatedEvent(1))
+ self.waitUntilSettled()
+
+ self.assertTrue('test-semaphore' in
+ tenant.semaphore_handler.semaphores)
+
+ # reconfigure without layout change
+ self.sched.reconfigure(self.config)
+ self.waitUntilSettled()
+ tenant = self.sched.abide.tenants.get('tenant-one')
+
+ # semaphore still must be held
+ self.assertTrue('test-semaphore' in
+ tenant.semaphore_handler.semaphores)
+
+ self.commitConfigUpdate(
+ 'common-config',
+ 'config/semaphore/zuul-reconfiguration.yaml')
+ self.sched.reconfigure(self.config)
+ self.waitUntilSettled()
+ tenant = self.sched.abide.tenants.get('tenant-one')
+
+ self.executor_server.release('project-test1')
+ self.waitUntilSettled()
+
+ # There should be no builds anymore
+ self.assertEqual(len(self.builds), 0)
+
+ # The semaphore should be released
+ self.assertFalse('test-semaphore' in
+ tenant.semaphore_handler.semaphores)
+
+
class TestSemaphoreMultiTenant(ZuulTestCase):
tenant_config_file = 'config/multi-tenant-semaphore/main.yaml'
diff --git a/tests/unit/test_scheduler_cmd.py b/tests/unit/test_scheduler_cmd.py
new file mode 100644
index 0000000..ee6200f
--- /dev/null
+++ b/tests/unit/test_scheduler_cmd.py
@@ -0,0 +1,34 @@
+#!/usr/bin/env python
+
+# Licensed under the Apache License, Version 2.0 (the "License"); you may
+# not use this file except in compliance with the License. You may obtain
+# a copy of the License at
+#
+# http://www.apache.org/licenses/LICENSE-2.0
+#
+# Unless required by applicable law or agreed to in writing, software
+# distributed under the License is distributed on an "AS IS" BASIS, WITHOUT
+# WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. See the
+# License for the specific language governing permissions and limitations
+# under the License.
+
+import os
+
+import testtools
+import zuul.cmd.scheduler
+
+from tests import base
+
+
+class TestSchedulerCmdArguments(testtools.TestCase):
+
+ def setUp(self):
+ super(TestSchedulerCmdArguments, self).setUp()
+ self.app = zuul.cmd.scheduler.Scheduler()
+
+ def test_test_config(self):
+ conf_path = os.path.join(base.FIXTURE_DIR, 'zuul.conf')
+ self.app.parse_arguments(['-t', '-c', conf_path])
+ self.assertTrue(self.app.args.validate)
+ self.app.read_config()
+ self.assertEqual(0, self.app.test_config())
diff --git a/tests/unit/test_ssh_agent.py b/tests/unit/test_ssh_agent.py
new file mode 100644
index 0000000..c9c1ebd
--- /dev/null
+++ b/tests/unit/test_ssh_agent.py
@@ -0,0 +1,56 @@
+# Licensed under the Apache License, Version 2.0 (the "License"); you may
+# not use this file except in compliance with the License. You may obtain
+# a copy of the License at
+#
+# http://www.apache.org/licenses/LICENSE-2.0
+#
+# Unless required by applicable law or agreed to in writing, software
+# distributed under the License is distributed on an "AS IS" BASIS, WITHOUT
+# WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. See the
+# License for the specific language governing permissions and limitations
+# under the License.
+
+import os
+import subprocess
+
+from tests.base import ZuulTestCase
+from zuul.executor.server import SshAgent
+
+
+class TestSshAgent(ZuulTestCase):
+ tenant_config_file = 'config/single-tenant/main.yaml'
+
+ def test_ssh_agent(self):
+ # Need a private key to add
+ env_copy = dict(os.environ)
+ # DISPLAY and SSH_ASKPASS will cause interactive test runners to get a
+ # surprise
+ if 'DISPLAY' in env_copy:
+ del env_copy['DISPLAY']
+ if 'SSH_ASKPASS' in env_copy:
+ del env_copy['SSH_ASKPASS']
+
+ agent = SshAgent()
+ agent.start()
+ env_copy.update(agent.env)
+
+ pub_key_file = '{}.pub'.format(self.private_key_file)
+ pub_key = None
+ with open(pub_key_file) as pub_key_f:
+ pub_key = pub_key_f.read().split('== ')[0]
+
+ agent.add(self.private_key_file)
+ keys = agent.list()
+ self.assertEqual(1, len(keys))
+ self.assertEqual(keys[0].split('== ')[0], pub_key)
+ agent.remove(self.private_key_file)
+ keys = agent.list()
+ self.assertEqual([], keys)
+ agent.stop()
+ # Agent is now dead and thus this should fail
+ with open('/dev/null') as devnull:
+ self.assertRaises(subprocess.CalledProcessError,
+ subprocess.check_call,
+ ['ssh-add', self.private_key_file],
+ env=env_copy,
+ stderr=devnull)
diff --git a/tests/unit/test_v3.py b/tests/unit/test_v3.py
index 678b957..18a49db 100644
--- a/tests/unit/test_v3.py
+++ b/tests/unit/test_v3.py
@@ -191,6 +191,61 @@
dict(name='project-test1', result='SUCCESS', changes='2,1'),
dict(name='project-test2', result='SUCCESS', changes='3,1')])
+ def test_crd_dynamic_config_branch(self):
+ # Test that we can create a job in one repo and be able to use
+ # it from a different branch on a different repo.
+
+ self.create_branch('org/project1', 'stable')
+
+ in_repo_conf = textwrap.dedent(
+ """
+ - job:
+ name: project-test2
+
+ - project:
+ name: org/project
+ check:
+ jobs:
+ - project-test2
+ """)
+
+ in_repo_playbook = textwrap.dedent(
+ """
+ - hosts: all
+ tasks: []
+ """)
+
+ file_dict = {'.zuul.yaml': in_repo_conf,
+ 'playbooks/project-test2.yaml': in_repo_playbook}
+ A = self.fake_gerrit.addFakeChange('org/project', 'master', 'A',
+ files=file_dict)
+
+ second_repo_conf = textwrap.dedent(
+ """
+ - project:
+ name: org/project1
+ check:
+ jobs:
+ - project-test2
+ """)
+
+ second_file_dict = {'.zuul.yaml': second_repo_conf}
+ B = self.fake_gerrit.addFakeChange('org/project1', 'stable', 'B',
+ files=second_file_dict)
+ B.data['commitMessage'] = '%s\n\nDepends-On: %s\n' % (
+ B.subject, A.data['id'])
+
+ self.fake_gerrit.addEvent(A.getPatchsetCreatedEvent(1))
+ self.waitUntilSettled()
+ self.fake_gerrit.addEvent(B.getPatchsetCreatedEvent(1))
+ self.waitUntilSettled()
+
+ self.assertEqual(A.reported, 1, "A should report")
+ self.assertHistory([
+ dict(name='project-test2', result='SUCCESS', changes='1,1'),
+ dict(name='project-test2', result='SUCCESS', changes='1,1 2,1'),
+ ])
+
def test_untrusted_syntax_error(self):
in_repo_conf = textwrap.dedent(
"""
@@ -207,9 +262,9 @@
self.waitUntilSettled()
self.assertEqual(A.data['status'], 'NEW')
- self.assertEqual(A.reported, 2,
- "A should report start and failure")
- self.assertIn('syntax error', A.messages[1],
+ self.assertEqual(A.reported, 1,
+ "A should report failure")
+ self.assertIn('syntax error', A.messages[0],
"A should have a syntax error reported")
def test_trusted_syntax_error(self):
@@ -228,9 +283,9 @@
self.waitUntilSettled()
self.assertEqual(A.data['status'], 'NEW')
- self.assertEqual(A.reported, 2,
- "A should report start and failure")
- self.assertIn('syntax error', A.messages[1],
+ self.assertEqual(A.reported, 1,
+ "A should report failure")
+ self.assertIn('syntax error', A.messages[0],
"A should have a syntax error reported")
def test_untrusted_yaml_error(self):
@@ -248,9 +303,29 @@
self.waitUntilSettled()
self.assertEqual(A.data['status'], 'NEW')
- self.assertEqual(A.reported, 2,
- "A should report start and failure")
- self.assertIn('syntax error', A.messages[1],
+ self.assertEqual(A.reported, 1,
+ "A should report failure")
+ self.assertIn('syntax error', A.messages[0],
+ "A should have a syntax error reported")
+
+ def test_untrusted_shadow_error(self):
+ in_repo_conf = textwrap.dedent(
+ """
+ - job:
+ name: common-config-test
+ """)
+
+ file_dict = {'.zuul.yaml': in_repo_conf}
+ A = self.fake_gerrit.addFakeChange('org/project', 'master', 'A',
+ files=file_dict)
+ A.addApproval('code-review', 2)
+ self.fake_gerrit.addEvent(A.addApproval('approved', 1))
+ self.waitUntilSettled()
+
+ self.assertEqual(A.data['status'], 'NEW')
+ self.assertEqual(A.reported, 1,
+ "A should report failure")
+ self.assertIn('not permitted to shadow', A.messages[0],
"A should have a syntax error reported")
@@ -264,11 +339,13 @@
self.fake_gerrit.addEvent(A.getPatchsetCreatedEvent(1))
self.waitUntilSettled()
build = self.getJobFromHistory('timeout')
- self.assertEqual(build.result, 'ABORTED')
+ self.assertEqual(build.result, 'TIMED_OUT')
build = self.getJobFromHistory('faillocal')
self.assertEqual(build.result, 'FAILURE')
build = self.getJobFromHistory('check-vars')
self.assertEqual(build.result, 'SUCCESS')
+ build = self.getJobFromHistory('hello-world')
+ self.assertEqual(build.result, 'SUCCESS')
build = self.getJobFromHistory('python27')
self.assertEqual(build.result, 'SUCCESS')
flag_path = os.path.join(self.test_root, build.uuid + '.flag')
diff --git a/tests/unit/test_webapp.py b/tests/unit/test_webapp.py
index 8791a25..b2836ae 100644
--- a/tests/unit/test_webapp.py
+++ b/tests/unit/test_webapp.py
@@ -19,6 +19,7 @@
import json
from six.moves import urllib
+import webob
from tests.base import ZuulTestCase, FIXTURE_DIR
@@ -50,7 +51,7 @@
req = urllib.request.Request(
"http://localhost:%s/tenant-one/status" % self.port)
f = urllib.request.urlopen(req)
- data = json.loads(f.read())
+ data = json.loads(f.read().decode('utf8'))
self.assertIn('pipelines', data)
@@ -59,7 +60,7 @@
req = urllib.request.Request(
"http://localhost:%s/tenant-one/status.json" % self.port)
f = urllib.request.urlopen(req)
- data = json.loads(f.read())
+ data = json.loads(f.read().decode('utf8'))
self.assertIn('pipelines', data)
@@ -74,7 +75,7 @@
req = urllib.request.Request(
"http://localhost:%s/tenant-one/status/change/1,1" % self.port)
f = urllib.request.urlopen(req)
- data = json.loads(f.read())
+ data = json.loads(f.read().decode('utf8'))
self.assertEqual(1, len(data), data)
self.assertEqual("org/project", data[0]['project'])
@@ -82,13 +83,13 @@
req = urllib.request.Request(
"http://localhost:%s/tenant-one/status/change/2,1" % self.port)
f = urllib.request.urlopen(req)
- data = json.loads(f.read())
+ data = json.loads(f.read().decode('utf8'))
self.assertEqual(1, len(data), data)
self.assertEqual("org/project1", data[0]['project'], data)
def test_webapp_keys(self):
- with open(os.path.join(FIXTURE_DIR, 'public.pem')) as f:
+ with open(os.path.join(FIXTURE_DIR, 'public.pem'), 'rb') as f:
public_pem = f.read()
req = urllib.request.Request(
@@ -96,3 +97,16 @@
self.port)
f = urllib.request.urlopen(req)
self.assertEqual(f.read(), public_pem)
+
+ def test_webapp_custom_handler(self):
+ def custom_handler(path, tenant_name, request):
+ return webob.Response(body='ok')
+
+ self.webapp.register_path('/custom', custom_handler)
+ req = urllib.request.Request(
+ "http://localhost:%s/custom" % self.port)
+ f = urllib.request.urlopen(req)
+ self.assertEqual(b'ok', f.read())
+
+ self.webapp.unregister_path('/custom')
+ self.assertRaises(urllib.error.HTTPError, urllib.request.urlopen, req)
diff --git a/tools/test-setup.sh b/tools/test-setup.sh
index 3bdedf5..d3697c9 100755
--- a/tools/test-setup.sh
+++ b/tools/test-setup.sh
@@ -35,3 +35,9 @@
SET default_storage_engine=MYISAM;
DROP DATABASE IF EXISTS openstack_citest;
CREATE DATABASE openstack_citest CHARACTER SET utf8;"
+
+# TODO(pabelanger): Move this into bindep after we figure out how to enable our
+# PPA.
+sudo add-apt-repository ppa:openstack-ci-core/bubblewrap
+sudo apt-get update
+sudo apt-get --assume-yes install bubblewrap
diff --git a/tools/trigger-job.py b/tools/trigger-job.py
index 7123afc..dd69f1b 100755
--- a/tools/trigger-job.py
+++ b/tools/trigger-job.py
@@ -73,5 +73,6 @@
while not job.complete:
time.sleep(1)
+
if __name__ == '__main__':
main()
diff --git a/tools/update-storyboard.py b/tools/update-storyboard.py
index 12e6916..51434c9 100644
--- a/tools/update-storyboard.py
+++ b/tools/update-storyboard.py
@@ -96,5 +96,6 @@
if ok_lanes and not task_found:
add_task(sync, task, lanes[ok_lanes[0]])
+
if __name__ == '__main__':
main()
diff --git a/tox.ini b/tox.ini
index 1c81ae9..9b97eca 100644
--- a/tox.ini
+++ b/tox.ini
@@ -8,7 +8,7 @@
setenv = STATSD_HOST=127.0.0.1
STATSD_PORT=8125
VIRTUAL_ENV={envdir}
- OS_TEST_TIMEOUT=90
+ OS_TEST_TIMEOUT=120
passenv = ZUUL_TEST_ROOT OS_STDOUT_CAPTURE OS_STDERR_CAPTURE OS_LOG_CAPTURE OS_LOG_DEFAULTS
usedevelop = True
install_command = pip install {opts} {packages}
@@ -51,6 +51,6 @@
[flake8]
# These are ignored intentionally in openstack-infra projects;
# please don't submit patches that solely correct them or enable them.
-ignore = E125,E129,H
+ignore = E125,E129,E402,H,W503
show-source = True
exclude = .venv,.tox,dist,doc,build,*.egg
diff --git a/zuul/ansible/action/copy.py b/zuul/ansible/action/copy.py
index bb54430..d870c24 100644
--- a/zuul/ansible/action/copy.py
+++ b/zuul/ansible/action/copy.py
@@ -25,6 +25,6 @@
source = self._task.args.get('src', None)
remote_src = self._task.args.get('remote_src', False)
- if not remote_src and not paths._is_safe_path(source):
+ if not remote_src and source and not paths._is_safe_path(source):
return paths._fail_dict(source)
return super(ActionModule, self).run(tmp, task_vars)
diff --git a/zuul/ansible/callback/zuul_stream.py b/zuul/ansible/callback/zuul_stream.py
index e6b3461..904316c 100644
--- a/zuul/ansible/callback/zuul_stream.py
+++ b/zuul/ansible/callback/zuul_stream.py
@@ -24,14 +24,14 @@
def linesplit(socket):
- buff = socket.recv(4096)
+ buff = socket.recv(4096).decode("utf-8")
buffering = True
while buffering:
if "\n" in buff:
(line, buff) = buff.split("\n", 1)
yield line + "\n"
else:
- more = socket.recv(4096)
+ more = socket.recv(4096).decode("utf-8")
if not more:
buffering = False
else:
@@ -40,6 +40,32 @@
yield buff
+def zuul_filter_result(result):
+ """Remove keys from shell/command output.
+
+ Zuul streams stdout into the log above, so including stdout and stderr
+ in the result dict that ansible displays in the logs is duplicate
+ noise. We keep stdout in the result dict so that other callback plugins
+ like ARA could also have access to it. But drop them here.
+
+ Remove changed so that we don't show a bunch of "changed" titles
+ on successful shell tasks, since that doesn't make sense from a Zuul
+ POV. The super class treats missing "changed" key as False.
+
+ Remove cmd because most of the script content where people want to
+ see the script run is run with -x. It's possible we may want to revist
+ this to be smarter about when we remove it - like, only remove it
+ if it has an embedded newline - so that for normal 'simple' uses
+ of cmd it'll echo what the command was for folks.
+ """
+
+ for key in ('changed', 'cmd',
+ 'stderr', 'stderr_lines',
+ 'stdout', 'stdout_lines'):
+ result.pop(key, None)
+ return result
+
+
class CallbackModule(default.CallbackModule):
'''
@@ -103,3 +129,37 @@
target=self._read_log, args=(host, ip))
p.daemon = True
p.start()
+
+ def v2_runner_on_failed(self, result, ignore_errors=False):
+ if result._task.action in ('command', 'shell'):
+ zuul_filter_result(result._result)
+ super(CallbackModule, self).v2_runner_on_failed(
+ result, ignore_errors=ignore_errors)
+
+ def v2_runner_on_ok(self, result):
+ if result._task.action in ('command', 'shell'):
+ zuul_filter_result(result._result)
+ else:
+ return super(CallbackModule, self).v2_runner_on_ok(result)
+
+ if self._play.strategy == 'free':
+ return super(CallbackModule, self).v2_runner_on_ok(result)
+
+ delegated_vars = result._result.get('_ansible_delegated_vars', None)
+
+ if delegated_vars:
+ msg = "ok: [{host} -> {delegated_host} %s]".format(
+ host=result._host.get_name(),
+ delegated_host=delegated_vars['ansible_host'])
+ else:
+ msg = "ok: [{host}]".format(host=result._host.get_name())
+
+ if result._task.loop and 'results' in result._result:
+ self._process_items(result)
+ else:
+ msg += " Runtime: {delta} Start: {start} End: {end}".format(
+ **result._result)
+
+ self._handle_warnings(result._result)
+
+ self._display.display(msg)
diff --git a/zuul/ansible/library/command.py b/zuul/ansible/library/command.py
index 328ae7b..52de5a4 100644
--- a/zuul/ansible/library/command.py
+++ b/zuul/ansible/library/command.py
@@ -123,6 +123,8 @@
LOG_STREAM_FILE = '/tmp/console.log'
PASSWD_ARG_RE = re.compile(r'^[-]{0,2}pass[-]?(word|wd)?')
+# List to save stdout log lines in as we collect them
+_log_lines = []
class Console(object):
@@ -150,6 +152,7 @@
line = fd.readline()
if not line:
break
+ _log_lines.append(line)
if not line.endswith('\n'):
line += '\n'
newline_warning = True
@@ -330,7 +333,8 @@
# cmd.stdout.close()
# ZUUL: stdout and stderr are in the console log file
- stdout = ''
+ # ZUUL: return the saved log lines so we can ship them back
+ stdout = ''.join(_log_lines)
stderr = ''
rc = cmd.returncode
diff --git a/zuul/ansible/library/zuul_afs.py b/zuul/ansible/library/zuul_afs.py
index 3ba426b..710c15d 100644
--- a/zuul/ansible/library/zuul_afs.py
+++ b/zuul/ansible/library/zuul_afs.py
@@ -116,6 +116,7 @@
module.exit_json(changed=True, build_roots=output)
from ansible.module_utils.basic import * # noqa
+from ansible.module_utils.basic import AnsibleModule
if __name__ == '__main__':
main()
diff --git a/zuul/ansible/library/zuul_console.py b/zuul/ansible/library/zuul_console.py
index 1932cf9..b1dc2d9 100644
--- a/zuul/ansible/library/zuul_console.py
+++ b/zuul/ansible/library/zuul_console.py
@@ -17,8 +17,10 @@
import os
import sys
+import select
import socket
import threading
+import time
LOG_STREAM_FILE = '/tmp/console.log'
LOG_STREAM_PORT = 19885
@@ -181,6 +183,7 @@
s.run()
from ansible.module_utils.basic import * # noqa
+from ansible.module_utils.basic import AnsibleModule
if __name__ == '__main__':
main()
diff --git a/zuul/change_matcher.py b/zuul/change_matcher.py
index 1da1d2c..baea217 100644
--- a/zuul/change_matcher.py
+++ b/zuul/change_matcher.py
@@ -108,7 +108,9 @@
yield self.commit_regex
def matches(self, change):
- if not (hasattr(change, 'files') and len(change.files) > 1):
+ if not (hasattr(change, 'files') and change.files):
+ return False
+ if len(change.files) == 1 and self.commit_regex.match(change.files[0]):
return False
for file_ in change.files:
matched_file = False
diff --git a/zuul/cmd/__init__.py b/zuul/cmd/__init__.py
old mode 100644
new mode 100755
index f2a2612..d31c5b8
--- a/zuul/cmd/__init__.py
+++ b/zuul/cmd/__init__.py
@@ -98,6 +98,6 @@
else:
logging.basicConfig(level=logging.DEBUG)
- def configure_connections(self):
+ def configure_connections(self, source_only=False):
self.connections = zuul.lib.connections.ConnectionRegistry()
- self.connections.configure(self.config)
+ self.connections.configure(self.config, source_only)
diff --git a/zuul/cmd/executor.py b/zuul/cmd/executor.py
old mode 100644
new mode 100755
index 96ba4b3..931639f
--- a/zuul/cmd/executor.py
+++ b/zuul/cmd/executor.py
@@ -24,9 +24,11 @@
import logging
import os
+import pwd
import socket
import sys
import signal
+import tempfile
import zuul.cmd
import zuul.executor.server
@@ -37,6 +39,9 @@
# Similar situation with gear and statsd.
+DEFAULT_FINGER_PORT = 79
+
+
class Executor(zuul.cmd.ZuulApp):
def parse_arguments(self):
@@ -72,15 +77,67 @@
self.executor.stop()
self.executor.join()
+ def start_log_streamer(self):
+ pipe_read, pipe_write = os.pipe()
+ child_pid = os.fork()
+ if child_pid == 0:
+ os.close(pipe_write)
+ import zuul.lib.log_streamer
+
+ self.log.info("Starting log streamer")
+ streamer = zuul.lib.log_streamer.LogStreamer(
+ self.user, '0.0.0.0', self.finger_port, self.jobroot_dir)
+
+ # Keep running until the parent dies:
+ pipe_read = os.fdopen(pipe_read)
+ pipe_read.read()
+ self.log.info("Stopping log streamer")
+ streamer.stop()
+ os._exit(0)
+ else:
+ os.close(pipe_read)
+ self.log_streamer_pid = child_pid
+
+ def change_privs(self):
+ '''
+ Drop our privileges to the zuul user.
+ '''
+ if os.getuid() != 0:
+ return
+ pw = pwd.getpwnam(self.user)
+ os.setgroups([])
+ os.setgid(pw.pw_gid)
+ os.setuid(pw.pw_uid)
+ os.umask(0o022)
+
def main(self, daemon=True):
# See comment at top of file about zuul imports
- self.setup_logging('executor', 'log_config')
+ if self.config.has_option('executor', 'user'):
+ self.user = self.config.get('executor', 'user')
+ else:
+ self.user = 'zuul'
+ if self.config.has_option('zuul', 'jobroot_dir'):
+ self.jobroot_dir = os.path.expanduser(
+ self.config.get('zuul', 'jobroot_dir'))
+ else:
+ self.jobroot_dir = tempfile.gettempdir()
+
+ self.setup_logging('executor', 'log_config')
self.log = logging.getLogger("zuul.Executor")
+ if self.config.has_option('executor', 'finger_port'):
+ self.finger_port = int(self.config.get('executor', 'finger_port'))
+ else:
+ self.finger_port = DEFAULT_FINGER_PORT
+
+ self.start_log_streamer()
+ self.change_privs()
+
ExecutorServer = zuul.executor.server.ExecutorServer
self.executor = ExecutorServer(self.config, self.connections,
+ jobdir_root=self.jobroot_dir,
keep_jobdir=self.args.keep_jobdir)
self.executor.start()
@@ -106,7 +163,7 @@
server.send_command(server.args.command)
sys.exit(0)
- server.configure_connections()
+ server.configure_connections(source_only=True)
if server.config.has_option('executor', 'pidfile'):
pid_fn = os.path.expanduser(server.config.get('executor', 'pidfile'))
diff --git a/zuul/cmd/merger.py b/zuul/cmd/merger.py
old mode 100644
new mode 100755
index 797a990..686f34a
--- a/zuul/cmd/merger.py
+++ b/zuul/cmd/merger.py
@@ -77,7 +77,7 @@
server.parse_arguments()
server.read_config()
- server.configure_connections()
+ server.configure_connections(source_only=True)
if server.config.has_option('zuul', 'state_dir'):
state_dir = os.path.expanduser(server.config.get('zuul', 'state_dir'))
diff --git a/zuul/cmd/scheduler.py b/zuul/cmd/scheduler.py
index ff4e1f4..5328bba 100755
--- a/zuul/cmd/scheduler.py
+++ b/zuul/cmd/scheduler.py
@@ -40,7 +40,7 @@
super(Scheduler, self).__init__()
self.gear_server_pid = None
- def parse_arguments(self):
+ def parse_arguments(self, args=None):
parser = argparse.ArgumentParser(description='Project gating system.')
parser.add_argument('-c', dest='config',
help='specify the config file')
@@ -52,7 +52,7 @@
parser.add_argument('--version', dest='version', action='version',
version=self._get_version(),
help='show zuul version')
- self.args = parser.parse_args()
+ self.args = parser.parse_args(args)
def reconfigure_handler(self, signum, frame):
signal.signal(signal.SIGHUP, signal.SIG_IGN)
@@ -182,7 +182,7 @@
self.log.info('Starting scheduler')
try:
self.sched.start()
- self.sched.registerConnections(self.connections)
+ self.sched.registerConnections(self.connections, webapp)
self.sched.reconfigure(self.config)
self.sched.resume()
except Exception:
diff --git a/zuul/configloader.py b/zuul/configloader.py
index a8d643b..3438815 100644
--- a/zuul/configloader.py
+++ b/zuul/configloader.py
@@ -10,6 +10,7 @@
# License for the specific language governing permissions and limitations
# under the License.
+import base64
from contextlib import contextmanager
import copy
import os
@@ -46,6 +47,27 @@
pass
+class NodeFromGroupNotFoundError(Exception):
+ def __init__(self, nodeset, node, group):
+ message = textwrap.dedent("""\
+ In nodeset {nodeset} the group {group} contains a
+ node named {node} which is not defined in the nodeset.""")
+ message = textwrap.fill(message.format(nodeset=nodeset,
+ node=node, group=group))
+ super(NodeFromGroupNotFoundError, self).__init__(message)
+
+
+class ProjectNotFoundError(Exception):
+ def __init__(self, project):
+ message = textwrap.dedent("""\
+ The project {project} was not found. All projects
+ referenced within a Zuul configuration must first be
+ added to the main configuration file by the Zuul
+ administrator.""")
+ message = textwrap.fill(message.format(project=project))
+ super(ProjectNotFoundError, self).__init__(message)
+
+
def indent(s):
return '\n'.join([' ' + x for x in s.split('\n')])
@@ -54,7 +76,9 @@
def configuration_exceptions(stanza, conf):
try:
yield
- except vs.Invalid as e:
+ except ConfigurationSyntaxError:
+ raise
+ except Exception as e:
conf = copy.deepcopy(conf)
context = conf.pop('_source_context')
start_mark = conf.pop('_start_mark')
@@ -98,7 +122,7 @@
r = super(ZuulSafeLoader, self).construct_mapping(node, deep)
keys = frozenset(r.keys())
if len(keys) == 1 and keys.intersection(self.zuul_node_types):
- d = r.values()[0]
+ d = list(r.values())[0]
if isinstance(d, dict):
d['_start_mark'] = node.start_mark
d['_source_context'] = self.zuul_context
@@ -129,7 +153,7 @@
yaml_loader = yaml.SafeLoader
def __init__(self, ciphertext):
- self.ciphertext = ciphertext.decode('base64')
+ self.ciphertext = base64.b64decode(ciphertext)
def __ne__(self, other):
return not self.__eq__(other)
@@ -144,7 +168,8 @@
return cls(node.value)
def decrypt(self, private_key):
- return encryption.decrypt_pkcs1_oaep(self.ciphertext, private_key)
+ return encryption.decrypt_pkcs1_oaep(self.ciphertext,
+ private_key).decode('utf8')
class NodeSetParser(object):
@@ -154,8 +179,13 @@
vs.Required('image'): str,
}
+ group = {vs.Required('name'): str,
+ vs.Required('nodes'): [str]
+ }
+
nodeset = {vs.Required('name'): str,
vs.Required('nodes'): [node],
+ 'groups': [group],
'_source_context': model.SourceContext,
'_start_mark': yaml.Mark,
}
@@ -167,9 +197,18 @@
with configuration_exceptions('nodeset', conf):
NodeSetParser.getSchema()(conf)
ns = model.NodeSet(conf['name'])
+ node_names = []
for conf_node in as_list(conf['nodes']):
node = model.Node(conf_node['name'], conf_node['image'])
ns.addNode(node)
+ node_names.append(conf_node['name'])
+ for conf_group in as_list(conf.get('groups', [])):
+ for node_name in conf_group['nodes']:
+ if node_name not in node_names:
+ raise NodeFromGroupNotFoundError(conf['name'], node_name,
+ conf_group['name'])
+ group = model.Group(conf_group['name'], conf_group['nodes'])
+ ns.addGroup(group)
return ns
@@ -214,9 +253,11 @@
role = vs.Any(zuul_role, galaxy_role)
+ job_project = {vs.Required('name'): str,
+ 'override-branch': str}
+
job = {vs.Required('name'): str,
'parent': str,
- 'queue-name': str,
'failure-message': str,
'success-message': str,
'failure-url': str,
@@ -227,7 +268,7 @@
'tags': to_list(str),
'branches': to_list(str),
'files': to_list(str),
- 'auth': to_list(auth),
+ 'auth': auth,
'irrelevant-files': to_list(str),
'nodes': vs.Any([node], str),
'timeout': int,
@@ -238,10 +279,11 @@
'_source_context': model.SourceContext,
'_start_mark': yaml.Mark,
'roles': to_list(role),
- 'repos': to_list(str),
+ 'required-projects': to_list(vs.Any(job_project, str)),
'vars': dict,
'dependencies': to_list(str),
'allowed-projects': to_list(str),
+ 'override-branch': str,
}
return vs.Schema(job)
@@ -257,10 +299,33 @@
'success-message',
'failure-url',
'success-url',
+ 'override-branch',
]
@staticmethod
- def fromYaml(tenant, layout, conf):
+ def _getImpliedBranches(reference, job, project_pipeline):
+ # If the current job definition is not in the same branch as
+ # the reference definition of this job, and this is a project
+ # repo, add an implicit branch matcher for this branch
+ # (assuming there are no explicit branch matchers). But only
+ # for top-level job definitions and variants.
+ # Project-pipeline job variants should more closely attach to
+ # their branch if they appear in a project-repo.
+ if (reference and
+ reference.source_context and
+ reference.source_context.branch != job.source_context.branch):
+ same_context = False
+ else:
+ same_context = True
+
+ if (job.source_context and
+ (not job.source_context.trusted) and
+ ((not same_context) or project_pipeline)):
+ return [job.source_context.branch]
+ return None
+
+ @staticmethod
+ def fromYaml(tenant, layout, conf, project_pipeline=False):
with configuration_exceptions('job', conf):
JobParser.getSchema()(conf)
@@ -269,6 +334,8 @@
# them (e.g., "job.run = ..." rather than
# "job.run.append(...)").
+ reference = layout.jobs.get(conf['name'], [None])[0]
+
job = model.Job(conf['name'])
job.source_context = conf.get('_source_context')
if 'auth' in conf:
@@ -305,9 +372,10 @@
run = model.PlaybookContext(job.source_context, run_name)
job.run = (run,)
else:
- run_name = os.path.join('playbooks', job.name)
- run = model.PlaybookContext(job.source_context, run_name)
- job.implied_run = (run,) + job.implied_run
+ if not project_pipeline:
+ run_name = os.path.join('playbooks', job.name)
+ run = model.PlaybookContext(job.source_context, run_name)
+ job.implied_run = (run,) + job.implied_run
for k in JobParser.simple_attributes:
a = k.replace('-', '_')
@@ -325,10 +393,23 @@
ns.addNode(node)
job.nodeset = ns
- if 'repos' in conf:
- # Accumulate repos in a set so that job inheritance
- # is additive.
- job.repos = job.repos.union(set(conf.get('repos', [])))
+ if 'required-projects' in conf:
+ new_projects = {}
+ projects = as_list(conf.get('required-projects', []))
+ for project in projects:
+ if isinstance(project, dict):
+ project_name = project['name']
+ project_override_branch = project.get('override-branch')
+ else:
+ project_name = project
+ project_override_branch = None
+ (trusted, project) = tenant.getProject(project_name)
+ if project is None:
+ raise Exception("Unknown project %s" % (project_name,))
+ job_project = model.JobProject(project_name,
+ project_override_branch)
+ new_projects[project_name] = job_project
+ job.updateProjects(new_projects)
tags = conf.get('tags')
if tags:
@@ -339,13 +420,14 @@
job.dependencies = frozenset(as_list(conf.get('dependencies')))
- roles = []
- for role in conf.get('roles', []):
- if 'zuul' in role:
- r = JobParser._makeZuulRole(tenant, job, role)
- if r:
- roles.append(r)
- job.roles = job.roles.union(set(roles))
+ if 'roles' in conf:
+ roles = []
+ for role in conf.get('roles', []):
+ if 'zuul' in role:
+ r = JobParser._makeZuulRole(tenant, job, role)
+ if r:
+ roles.append(r)
+ job.roles = job.roles.union(set(roles))
variables = conf.get('vars', None)
if variables:
@@ -355,23 +437,26 @@
if allowed_projects:
allowed = []
for p in as_list(allowed_projects):
- # TODOv3(jeblair): this limits allowed_projects to the same
- # source; we should remove that limitation.
- source = job.source_context.project.connection_name
- (trusted, project) = tenant.getRepo(source, p)
+ (trusted, project) = tenant.getProject(p)
if project is None:
raise Exception("Unknown project %s" % (p,))
allowed.append(project.name)
job.allowed_projects = frozenset(allowed)
- # If the definition for this job came from a project repo,
- # implicitly apply a branch matcher for the branch it was on.
- if (not job.source_context.trusted):
- branches = [job.source_context.branch]
- elif 'branches' in conf:
+ # If the current job definition is not in the same branch as
+ # the reference definition of this job, and this is a project
+ # repo, add an implicit branch matcher for this branch
+ # (assuming there are no explicit branch matchers). But only
+ # for top-level job definitions and variants.
+ # Project-pipeline job variants should more closely attach to
+ # their branch if they appear in a project-repo.
+
+ branches = None
+ if (project_pipeline or 'branches' not in conf):
+ branches = JobParser._getImpliedBranches(
+ reference, job, project_pipeline)
+ if (not branches) and ('branches' in conf):
branches = as_list(conf['branches'])
- else:
- branches = None
if branches:
matchers = []
for branch in branches:
@@ -394,15 +479,13 @@
def _makeZuulRole(tenant, job, role):
name = role['zuul'].split('/')[-1]
- # TODOv3(jeblair): this limits roles to the same
- # source; we should remove that limitation.
- source = job.source_context.project.connection_name
- (trusted, project) = tenant.getRepo(source, role['zuul'])
+ (trusted, project) = tenant.getProject(role['zuul'])
if project is None:
return None
- return model.ZuulRole(role.get('name', name), source,
- project.name, trusted)
+ return model.ZuulRole(role.get('name', name),
+ project.connection_name,
+ project.name)
class ProjectTemplateParser(object):
@@ -450,23 +533,22 @@
start_mark, job_list):
for conf_job in conf:
if isinstance(conf_job, six.string_types):
- job = model.Job(conf_job)
- job_list.addJob(job)
+ attrs = dict(name=conf_job)
elif isinstance(conf_job, dict):
# A dictionary in a job tree may override params
- jobname, attrs = conf_job.items()[0]
+ jobname, attrs = list(conf_job.items())[0]
if attrs:
# We are overriding params, so make a new job def
attrs['name'] = jobname
- attrs['_source_context'] = source_context
- attrs['_start_mark'] = start_mark
- job_list.addJob(JobParser.fromYaml(tenant, layout, attrs))
else:
# Not overriding, so add a blank job
- job = model.Job(jobname)
- job_list.addJob(job)
+ attrs = dict(name=jobname)
else:
raise Exception("Job must be a string or dictionary")
+ attrs['_source_context'] = source_context
+ attrs['_start_mark'] = start_mark
+ job_list.addJob(JobParser.fromYaml(tenant, layout, attrs,
+ project_pipeline=True))
class ProjectParser(object):
@@ -479,6 +561,7 @@
'templates': [str],
'merge-mode': vs.Any('merge', 'merge-resolve',
'cherry-pick'),
+ 'default-branch': str,
'_source_context': model.SourceContext,
'_start_mark': yaml.Mark,
}
@@ -493,7 +576,13 @@
for conf in conf_list:
with configuration_exceptions('project', conf):
ProjectParser.getSchema(layout)(conf)
- project = model.ProjectConfig(conf_list[0]['name'])
+
+ with configuration_exceptions('project', conf_list[0]):
+ project_name = conf_list[0]['name']
+ (trusted, project) = tenant.getProject(project_name)
+ if project is None:
+ raise ProjectNotFoundError(project_name)
+ project_config = model.ProjectConfig(project.canonical_name)
configs = []
for conf in conf_list:
@@ -508,15 +597,20 @@
configs.extend([layout.project_templates[name]
for name in conf_templates])
configs.append(project_template)
+ # Set the following values to the first one that we find and
+ # ignore subsequent settings.
mode = conf.get('merge-mode')
- if mode and project.merge_mode is None:
- # Set the merge mode to the first one that we find and
- # ignore subsequent settings.
- project.merge_mode = model.MERGER_MAP[mode]
- if project.merge_mode is None:
+ if mode and project_config.merge_mode is None:
+ project_config.merge_mode = model.MERGER_MAP[mode]
+ default_branch = conf.get('default-branch')
+ if default_branch and project_config.default_branch is None:
+ project_config.default_branch = default_branch
+ if project_config.merge_mode is None:
# If merge mode was not specified in any project stanza,
# set it to the default.
- project.merge_mode = model.MERGER_MAP['merge-resolve']
+ project_config.merge_mode = model.MERGER_MAP['merge-resolve']
+ if project_config.default_branch is None:
+ project_config.default_branch = 'master'
for pipeline in layout.pipelines.values():
project_pipeline = model.ProjectPipelineConfig()
queue_name = None
@@ -537,9 +631,8 @@
if queue_name:
project_pipeline.queue_name = queue_name
if pipeline_defined:
- project.pipelines[pipeline.name] = project_pipeline
-
- return project
+ project_config.pipelines[pipeline.name] = project_pipeline
+ return project_config
class PipelineParser(object):
@@ -559,6 +652,8 @@
methods = {
'trigger': 'getTriggerSchema',
'reporter': 'getReporterSchema',
+ 'require': 'getRequireSchema',
+ 'reject': 'getRejectSchema',
}
schema = {}
@@ -598,7 +693,6 @@
pipeline = {vs.Required('name'): str,
vs.Required('manager'): manager,
- 'source': str,
'precedence': precedence,
'description': str,
'require': require,
@@ -621,6 +715,10 @@
'_source_context': model.SourceContext,
'_start_mark': yaml.Mark,
}
+ pipeline['require'] = PipelineParser.getDriverSchema('require',
+ connections)
+ pipeline['reject'] = PipelineParser.getDriverSchema('reject',
+ connections)
pipeline['trigger'] = vs.Required(
PipelineParser.getDriverSchema('trigger', connections))
for action in ['start', 'success', 'failure', 'merge-failure',
@@ -636,8 +734,6 @@
pipeline = model.Pipeline(conf['name'], layout)
pipeline.description = conf.get('description')
- pipeline.source = connections.getSource(conf['source'])
-
precedence = model.PRECEDENCE_MAP[conf.get('precedence')]
pipeline.precedence = precedence
pipeline.failure_message = conf.get('failure-message',
@@ -699,24 +795,21 @@
pipeline.setManager(manager)
layout.pipelines[conf['name']] = pipeline
- if 'require' in conf or 'reject' in conf:
- require = conf.get('require', {})
- reject = conf.get('reject', {})
- f = model.ChangeishFilter(
- open=require.get('open'),
- current_patchset=require.get('current-patchset'),
- statuses=as_list(require.get('status')),
- required_approvals=as_list(require.get('approval')),
- reject_approvals=as_list(reject.get('approval'))
- )
- manager.changeish_filters.append(f)
+ for source_name, require_config in conf.get('require', {}).items():
+ source = connections.getSource(source_name)
+ manager.changeish_filters.extend(
+ source.getRequireFilters(require_config))
+
+ for source_name, reject_config in conf.get('reject', {}).items():
+ source = connections.getSource(source_name)
+ manager.changeish_filters.extend(
+ source.getRejectFilters(reject_config))
for trigger_name, trigger_config in conf.get('trigger').items():
trigger = connections.getTrigger(trigger_name, trigger_config)
pipeline.triggers.append(trigger)
-
- manager.event_filters += trigger.getEventFilters(
- conf['trigger'][trigger_name])
+ manager.event_filters.extend(
+ trigger.getEventFilters(conf['trigger'][trigger_name]))
return pipeline
@@ -743,8 +836,8 @@
class TenantParser(object):
log = logging.getLogger("zuul.TenantParser")
- tenant_source = vs.Schema({'config-repos': [str],
- 'project-repos': [str]})
+ tenant_source = vs.Schema({'config-projects': [str],
+ 'untrusted-projects': [str]})
@staticmethod
def validateTenantSources(connections):
@@ -774,25 +867,24 @@
tenant = model.Tenant(conf['name'])
tenant.unparsed_config = conf
unparsed_config = model.UnparsedTenantConfig()
- tenant.config_repos, tenant.project_repos = \
- TenantParser._loadTenantConfigRepos(
+ config_projects, untrusted_projects = \
+ TenantParser._loadTenantProjects(
project_key_dir, connections, conf)
- for source, repo in tenant.config_repos:
- tenant.addConfigRepo(source, repo)
- for source, repo in tenant.project_repos:
- tenant.addProjectRepo(source, repo)
- tenant.config_repos_config, tenant.project_repos_config = \
+ for project in config_projects:
+ tenant.addConfigProject(project)
+ for project in untrusted_projects:
+ tenant.addUntrustedProject(project)
+ tenant.config_projects_config, tenant.untrusted_projects_config = \
TenantParser._loadTenantInRepoLayouts(merger, connections,
- tenant.config_repos,
- tenant.project_repos,
+ tenant.config_projects,
+ tenant.untrusted_projects,
cached)
- unparsed_config.extend(tenant.config_repos_config)
- unparsed_config.extend(tenant.project_repos_config)
+ unparsed_config.extend(tenant.config_projects_config)
+ unparsed_config.extend(tenant.untrusted_projects_config)
tenant.layout = TenantParser._parseLayout(base, tenant,
unparsed_config,
scheduler,
connections)
- tenant.layout.tenant = tenant
return tenant
@staticmethod
@@ -842,77 +934,79 @@
encryption.deserialize_rsa_keypair(f.read())
@staticmethod
- def _loadTenantConfigRepos(project_key_dir, connections, conf_tenant):
- config_repos = []
- project_repos = []
+ def _loadTenantProjects(project_key_dir, connections, conf_tenant):
+ config_projects = []
+ untrusted_projects = []
for source_name, conf_source in conf_tenant.get('source', {}).items():
source = connections.getSource(source_name)
- for conf_repo in conf_source.get('config-repos', []):
+ for conf_repo in conf_source.get('config-projects', []):
project = source.getProject(conf_repo)
TenantParser._loadProjectKeys(
project_key_dir, source_name, project)
- config_repos.append((source, project))
+ config_projects.append(project)
- for conf_repo in conf_source.get('project-repos', []):
+ for conf_repo in conf_source.get('untrusted-projects', []):
project = source.getProject(conf_repo)
TenantParser._loadProjectKeys(
project_key_dir, source_name, project)
- project_repos.append((source, project))
+ untrusted_projects.append(project)
- return config_repos, project_repos
+ return config_projects, untrusted_projects
@staticmethod
- def _loadTenantInRepoLayouts(merger, connections, config_repos,
- project_repos, cached):
- config_repos_config = model.UnparsedTenantConfig()
- project_repos_config = model.UnparsedTenantConfig()
+ def _loadTenantInRepoLayouts(merger, connections, config_projects,
+ untrusted_projects, cached):
+ config_projects_config = model.UnparsedTenantConfig()
+ untrusted_projects_config = model.UnparsedTenantConfig()
jobs = []
- for (source, project) in config_repos:
+ for project in config_projects:
# If we have cached data (this is a reconfiguration) use it.
if cached and project.unparsed_config:
TenantParser.log.info(
"Loading previously parsed configuration from %s" %
(project,))
- config_repos_config.extend(project.unparsed_config)
+ config_projects_config.extend(project.unparsed_config)
continue
# Otherwise, prepare an empty unparsed config object to
# hold cached data later.
project.unparsed_config = model.UnparsedTenantConfig()
# Get main config files. These files are permitted the
# full range of configuration.
- url = source.getGitUrl(project)
- job = merger.getFiles(project.name, url, 'master',
- files=['zuul.yaml', '.zuul.yaml'])
+ job = merger.getFiles(
+ project.source.connection.connection_name,
+ project.name, 'master',
+ files=['zuul.yaml', '.zuul.yaml'])
job.source_context = model.SourceContext(project, 'master',
'', True)
jobs.append(job)
- for (source, project) in project_repos:
+ for project in untrusted_projects:
# If we have cached data (this is a reconfiguration) use it.
if cached and project.unparsed_config:
TenantParser.log.info(
"Loading previously parsed configuration from %s" %
(project,))
- project_repos_config.extend(project.unparsed_config)
+ untrusted_projects_config.extend(project.unparsed_config)
continue
# Otherwise, prepare an empty unparsed config object to
# hold cached data later.
project.unparsed_config = model.UnparsedTenantConfig()
# Get in-project-repo config files which have a restricted
# set of options.
- url = source.getGitUrl(project)
# For each branch in the repo, get the zuul.yaml for that
# branch. Remember the branch and then implicitly add a
# branch selector to each job there. This makes the
# in-repo configuration apply only to that branch.
- for branch in source.getProjectBranches(project):
+ for branch in project.source.getProjectBranches(project):
project.unparsed_branch_config[branch] = \
model.UnparsedTenantConfig()
- job = merger.getFiles(project.name, url, branch,
- files=['.zuul.yaml'])
+ job = merger.getFiles(
+ project.source.connection.connection_name,
+ project.name, branch,
+ files=['.zuul.yaml'])
job.source_context = model.SourceContext(
project, branch, '', False)
jobs.append(job)
@@ -941,27 +1035,27 @@
project = job.source_context.project
branch = job.source_context.branch
if job.source_context.trusted:
- incdata = TenantParser._parseConfigRepoLayout(
+ incdata = TenantParser._parseConfigProjectLayout(
job.files[fn], job.source_context)
- config_repos_config.extend(incdata)
+ config_projects_config.extend(incdata)
else:
- incdata = TenantParser._parseProjectRepoLayout(
+ incdata = TenantParser._parseUntrustedProjectLayout(
job.files[fn], job.source_context)
- project_repos_config.extend(incdata)
+ untrusted_projects_config.extend(incdata)
project.unparsed_config.extend(incdata)
if branch in project.unparsed_branch_config:
project.unparsed_branch_config[branch].extend(incdata)
- return config_repos_config, project_repos_config
+ return config_projects_config, untrusted_projects_config
@staticmethod
- def _parseConfigRepoLayout(data, source_context):
+ def _parseConfigProjectLayout(data, source_context):
# This is the top-level configuration for a tenant.
config = model.UnparsedTenantConfig()
config.extend(safe_load_yaml(data, source_context))
return config
@staticmethod
- def _parseProjectRepoLayout(data, source_context):
+ def _parseUntrustedProjectLayout(data, source_context):
# TODOv3(jeblair): this should implement some rules to protect
# aspects of the config that should not be changed in-repo
config = model.UnparsedTenantConfig()
@@ -984,7 +1078,8 @@
layout.addSecret(SecretParser.fromYaml(layout, config_secret))
for config_job in data.jobs:
- layout.addJob(JobParser.fromYaml(tenant, layout, config_job))
+ with configuration_exceptions('job', config_job):
+ layout.addJob(JobParser.fromYaml(tenant, layout, config_job))
for config_semaphore in data.semaphores:
layout.addSemaphore(SemaphoreParser.fromYaml(config_semaphore))
@@ -997,6 +1092,8 @@
layout.addProjectConfig(ProjectParser.fromYaml(
tenant, layout, config_project))
+ layout.tenant = tenant
+
for pipeline in layout.pipelines.values():
pipeline.manager._postConfig(layout)
@@ -1049,46 +1146,45 @@
new_abide.tenants[tenant.name] = new_tenant
return new_abide
- def _loadDynamicProjectData(self, config, source, project, files,
- config_repo):
- if config_repo:
+ def _loadDynamicProjectData(self, config, project, files, trusted):
+ if trusted:
branches = ['master']
fn = 'zuul.yaml'
else:
- branches = source.getProjectBranches(project)
+ branches = project.source.getProjectBranches(project)
fn = '.zuul.yaml'
for branch in branches:
incdata = None
- data = files.getFile(project.name, branch, fn)
+ data = files.getFile(project.source.connection.connection_name,
+ project.name, branch, fn)
if data:
source_context = model.SourceContext(project, branch,
- fn, config_repo)
- if config_repo:
- incdata = TenantParser._parseConfigRepoLayout(
+ fn, trusted)
+ if trusted:
+ incdata = TenantParser._parseConfigProjectLayout(
data, source_context)
else:
- incdata = TenantParser._parseProjectRepoLayout(
+ incdata = TenantParser._parseUntrustedProjectLayout(
data, source_context)
else:
- if config_repo:
+ if trusted:
incdata = project.unparsed_config
else:
incdata = project.unparsed_branch_config.get(branch)
if incdata:
config.extend(incdata)
- def createDynamicLayout(self, tenant, files, include_config_repos=False):
- if include_config_repos:
+ def createDynamicLayout(self, tenant, files,
+ include_config_projects=False):
+ if include_config_projects:
config = model.UnparsedTenantConfig()
- for source, project in tenant.config_repos:
- self._loadDynamicProjectData(config, source, project,
- files, True)
+ for project in tenant.config_projects:
+ self._loadDynamicProjectData(config, project, files, True)
else:
- config = tenant.config_repos_config.copy()
- for source, project in tenant.project_repos:
- self._loadDynamicProjectData(config, source, project,
- files, False)
+ config = tenant.config_projects_config.copy()
+ for project in tenant.untrusted_projects:
+ self._loadDynamicProjectData(config, project, files, False)
layout = model.Layout()
# NOTE: the actual pipeline objects (complete with queues and
@@ -1112,7 +1208,8 @@
layout.addSecret(SecretParser.fromYaml(layout, config_secret))
for config_job in config.jobs:
- layout.addJob(JobParser.fromYaml(tenant, layout, config_job))
+ with configuration_exceptions('job', config_job):
+ layout.addJob(JobParser.fromYaml(tenant, layout, config_job))
for config_template in config.project_templates:
layout.addProjectTemplate(ProjectTemplateParser.fromYaml(
diff --git a/zuul/connection/__init__.py b/zuul/connection/__init__.py
index 6913294..90ab39c 100644
--- a/zuul/connection/__init__.py
+++ b/zuul/connection/__init__.py
@@ -14,6 +14,7 @@
import abc
+import extras
import six
@@ -43,6 +44,26 @@
self.driver = driver
self.connection_name = connection_name
self.connection_config = connection_config
+ self.statsd = extras.try_import('statsd.statsd')
+
+ def logEvent(self, event):
+ self.log.debug(
+ 'Scheduling {driver} event from {connection}: {event}'.format(
+ driver=self.driver.name,
+ connection=self.connection_name,
+ event=event.type))
+ try:
+ if self.statsd:
+ self.statsd.incr(
+ 'zuul.event.{driver}.{event}'.format(
+ driver=self.driver.name, event=event.type))
+ self.statsd.incr(
+ 'zuul.event.{driver}.{connection}.{event}'.format(
+ driver=self.driver.name,
+ connection=self.connection_name,
+ event=event.type))
+ except:
+ self.log.exception("Exception reporting event stats")
def onLoad(self):
pass
@@ -59,3 +80,21 @@
This lets the user supply a list of change objects that are
still in use. Anything in our cache that isn't in the supplied
list should be safe to remove from the cache."""
+
+ def registerWebapp(self, webapp):
+ self.webapp = webapp
+
+ def registerHttpHandler(self, path, handler):
+ """Add connection handler for HTTP URI.
+
+ Connection can use builtin HTTP server for listening on incoming event
+ requests. The resulting path will be /connection/connection_name/path.
+ """
+ self.webapp.register_path(self._connectionPath(path), handler)
+
+ def unregisterHttpHandler(self, path):
+ """Remove the connection handler for HTTP URI."""
+ self.webapp.unregister_path(self._connectionPath(path))
+
+ def _connectionPath(self, path):
+ return '/connection/%s/%s' % (self.connection_name, path)
diff --git a/zuul/driver/__init__.py b/zuul/driver/__init__.py
index 1cc5235..0c3105d 100644
--- a/zuul/driver/__init__.py
+++ b/zuul/driver/__init__.py
@@ -68,6 +68,17 @@
"""
pass
+ def stop(self):
+ """Stop the driver from running.
+
+ This method is optional; the base implementation does nothing.
+
+ This method is called when the connection registry is stopped
+ allowing you additionally stop any running Driver computation
+ not specific to a connection.
+ """
+ pass
+
@six.add_metaclass(abc.ABCMeta)
class ConnectionInterface(object):
@@ -180,6 +191,30 @@
"""
pass
+ @abc.abstractmethod
+ def getRequireSchema(self):
+ """Get the schema for this driver's pipeline requirement filter.
+
+ This method is required by the interface.
+
+ :returns: A voluptuous schema.
+ :rtype: dict or Schema
+
+ """
+ pass
+
+ @abc.abstractmethod
+ def getRejectSchema(self):
+ """Get the schema for this driver's pipeline reject filter.
+
+ This method is required by the interface.
+
+ :returns: A voluptuous schema.
+ :rtype: dict or Schema
+
+ """
+ pass
+
@six.add_metaclass(abc.ABCMeta)
class ReporterInterface(object):
@@ -219,3 +254,27 @@
"""
pass
+
+
+@six.add_metaclass(abc.ABCMeta)
+class WrapperInterface(object):
+ """The wrapper interface to be implemented by a driver.
+
+ A driver which wraps execution of commands executed by Zuul should
+ implement this interface.
+
+ """
+
+ @abc.abstractmethod
+ def getPopen(self, **kwargs):
+ """Create and return a subprocess.Popen factory wrapped however the
+ driver sees fit.
+
+ This method is required by the interface.
+
+ :arg dict kwargs: key/values for use by driver as needed
+
+ :returns: a callable that takes the same args as subprocess.Popen
+ :rtype: Callable
+ """
+ pass
diff --git a/zuul/driver/bubblewrap/__init__.py b/zuul/driver/bubblewrap/__init__.py
new file mode 100644
index 0000000..c93e912
--- /dev/null
+++ b/zuul/driver/bubblewrap/__init__.py
@@ -0,0 +1,173 @@
+# Copyright 2012 Hewlett-Packard Development Company, L.P.
+# Copyright 2013 OpenStack Foundation
+# Copyright 2016 Red Hat, Inc.
+#
+# Licensed under the Apache License, Version 2.0 (the "License"); you may
+# not use this file except in compliance with the License. You may obtain
+# a copy of the License at
+#
+# http://www.apache.org/licenses/LICENSE-2.0
+#
+# Unless required by applicable law or agreed to in writing, software
+# distributed under the License is distributed on an "AS IS" BASIS, WITHOUT
+# WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. See the
+# License for the specific language governing permissions and limitations
+# under the License.
+
+import argparse
+import grp
+import logging
+import os
+import pwd
+import subprocess
+import sys
+
+from six.moves import shlex_quote
+
+from zuul.driver import (Driver, WrapperInterface)
+
+
+class WrappedPopen(object):
+ def __init__(self, command, passwd_r, group_r):
+ self.command = command
+ self.passwd_r = passwd_r
+ self.group_r = group_r
+
+ def __call__(self, args, *sub_args, **kwargs):
+ try:
+ args = self.command + args
+ if kwargs.get('close_fds') or sys.version_info.major >= 3:
+ # The default in py3 is close_fds=True, so we need to pass
+ # our open fds in. However, this can only work right in
+ # py3.2 or later due to the lack of 'pass_fds' in prior
+ # versions. So until we are py3 only we can only bwrap
+ # things that are close_fds=False
+ pass_fds = list(kwargs.get('pass_fds', []))
+ for fd in (self.passwd_r, self.group_r):
+ if fd not in pass_fds:
+ pass_fds.append(fd)
+ kwargs['pass_fds'] = pass_fds
+ proc = subprocess.Popen(args, *sub_args, **kwargs)
+ finally:
+ self.__del__()
+ return proc
+
+ def __del__(self):
+ if self.passwd_r:
+ try:
+ os.close(self.passwd_r)
+ except OSError:
+ pass
+ self.passwd_r = None
+ if self.group_r:
+ try:
+ os.close(self.group_r)
+ except OSError:
+ pass
+ self.group_r = None
+
+
+class BubblewrapDriver(Driver, WrapperInterface):
+ name = 'bubblewrap'
+ log = logging.getLogger("zuul.BubblewrapDriver")
+
+ bwrap_command = [
+ 'bwrap',
+ '--dir', '/tmp',
+ '--tmpfs', '/tmp',
+ '--dir', '/var',
+ '--dir', '/var/tmp',
+ '--dir', '/run/user/{uid}',
+ '--ro-bind', '/usr', '/usr',
+ '--ro-bind', '/lib', '/lib',
+ '--ro-bind', '/lib64', '/lib64',
+ '--ro-bind', '/bin', '/bin',
+ '--ro-bind', '/sbin', '/sbin',
+ '--ro-bind', '/etc/resolv.conf', '/etc/resolv.conf',
+ '--ro-bind', '{ansible_dir}', '{ansible_dir}',
+ '--ro-bind', '{ssh_auth_sock}', '{ssh_auth_sock}',
+ '--dir', '{work_dir}',
+ '--bind', '{work_dir}', '{work_dir}',
+ '--dev', '/dev',
+ '--dir', '{user_home}',
+ '--chdir', '/',
+ '--unshare-all',
+ '--share-net',
+ '--uid', '{uid}',
+ '--gid', '{gid}',
+ '--file', '{uid_fd}', '/etc/passwd',
+ '--file', '{gid_fd}', '/etc/group',
+ ]
+
+ def reconfigure(self, tenant):
+ pass
+
+ def stop(self):
+ pass
+
+ def getPopen(self, **kwargs):
+ # Set zuul_dir if it was not passed in
+ if 'zuul_dir' in kwargs:
+ zuul_dir = kwargs['zuul_dir']
+ else:
+ zuul_python_dir = os.path.dirname(sys.executable)
+ # We want the dir directly above bin to get the whole venv
+ zuul_dir = os.path.normpath(os.path.join(zuul_python_dir, '..'))
+
+ bwrap_command = list(self.bwrap_command)
+ if not zuul_dir.startswith('/usr'):
+ bwrap_command.extend(['--ro-bind', zuul_dir, zuul_dir])
+
+ # Need users and groups
+ uid = os.getuid()
+ passwd = pwd.getpwuid(uid)
+ passwd_bytes = b':'.join(
+ ['{}'.format(x).encode('utf8') for x in passwd])
+ (passwd_r, passwd_w) = os.pipe()
+ os.write(passwd_w, passwd_bytes)
+ os.close(passwd_w)
+
+ gid = os.getgid()
+ group = grp.getgrgid(gid)
+ group_bytes = b':'.join(
+ ['{}'.format(x).encode('utf8') for x in group])
+ group_r, group_w = os.pipe()
+ os.write(group_w, group_bytes)
+ os.close(group_w)
+
+ kwargs = dict(kwargs) # Don't update passed in dict
+ kwargs['uid'] = uid
+ kwargs['gid'] = gid
+ kwargs['uid_fd'] = passwd_r
+ kwargs['gid_fd'] = group_r
+ kwargs['user_home'] = passwd.pw_dir
+ command = [x.format(**kwargs) for x in bwrap_command]
+
+ self.log.debug("Bubblewrap command: %s",
+ " ".join(shlex_quote(c) for c in command))
+
+ wrapped_popen = WrappedPopen(command, passwd_r, group_r)
+
+ return wrapped_popen
+
+
+def main(args=None):
+ driver = BubblewrapDriver()
+
+ parser = argparse.ArgumentParser()
+ parser.add_argument('work_dir')
+ parser.add_argument('ansible_dir')
+ parser.add_argument('run_args', nargs='+')
+ cli_args = parser.parse_args()
+
+ ssh_auth_sock = os.environ.get('SSH_AUTH_SOCK')
+
+ popen = driver.getPopen(work_dir=cli_args.work_dir,
+ ansible_dir=cli_args.ansible_dir,
+ ssh_auth_sock=ssh_auth_sock)
+ x = popen(cli_args.run_args)
+ x.wait()
+
+
+if __name__ == '__main__':
+ main()
diff --git a/zuul/driver/gerrit/__init__.py b/zuul/driver/gerrit/__init__.py
index 3bc371e..76ab5b7 100644
--- a/zuul/driver/gerrit/__init__.py
+++ b/zuul/driver/gerrit/__init__.py
@@ -14,10 +14,10 @@
from zuul.driver import Driver, ConnectionInterface, TriggerInterface
from zuul.driver import SourceInterface, ReporterInterface
-import gerritconnection
-import gerrittrigger
-import gerritsource
-import gerritreporter
+from zuul.driver.gerrit import gerritconnection
+from zuul.driver.gerrit import gerrittrigger
+from zuul.driver.gerrit import gerritsource
+from zuul.driver.gerrit import gerritreporter
class GerritDriver(Driver, ConnectionInterface, TriggerInterface,
@@ -41,3 +41,9 @@
def getReporterSchema(self):
return gerritreporter.getSchema()
+
+ def getRequireSchema(self):
+ return gerritsource.getRequireSchema()
+
+ def getRejectSchema(self):
+ return gerritsource.getRejectSchema()
diff --git a/zuul/driver/gerrit/gerritconnection.py b/zuul/driver/gerrit/gerritconnection.py
index f8d47d2..a1d97e7 100644
--- a/zuul/driver/gerrit/gerritconnection.py
+++ b/zuul/driver/gerrit/gerritconnection.py
@@ -20,14 +20,16 @@
import time
from six.moves import queue as Queue
from six.moves import urllib
+from six.moves import shlex_quote
import paramiko
import logging
import pprint
import voluptuous as v
from zuul.connection import BaseConnection
-from zuul.model import TriggerEvent, Project, Change, Ref
+from zuul.model import Ref
from zuul import exceptions
+from zuul.driver.gerrit.gerritmodel import GerritChange, GerritTriggerEvent
# Walk the change dependency tree to find a cycle
@@ -72,10 +74,11 @@
# should always be a constant number of seconds behind Gerrit.
now = time.time()
time.sleep(max((ts + self.delay) - now, 0.0))
- event = TriggerEvent()
+ event = GerritTriggerEvent()
event.type = data.get('type')
event.trigger_name = 'gerrit'
change = data.get('change')
+ event.project_hostname = self.connection.canonical_hostname
if change:
event.project_name = change.get('project')
event.branch = change.get('branch')
@@ -140,6 +143,7 @@
self.connection._getChange(event.change_number,
event.patch_number,
refresh=True)
+ self.connection.logEvent(event)
self.connection.sched.addEvent(event)
def run(self):
@@ -170,11 +174,15 @@
self._stopped = False
def _read(self, fd):
- l = fd.readline()
- data = json.loads(l)
- self.log.debug("Received data from Gerrit event stream: \n%s" %
- pprint.pformat(data))
- self.gerrit_connection.addEvent(data)
+ while True:
+ l = fd.readline()
+ data = json.loads(l)
+ self.log.debug("Received data from Gerrit event stream: \n%s" %
+ pprint.pformat(data))
+ self.gerrit_connection.addEvent(data)
+ # Continue until all the lines received are consumed
+ if fd._pos == fd._realpos:
+ break
def _listen(self, stdout, stderr):
poll = select.poll()
@@ -268,11 +276,13 @@
self._change_cache = {}
self.projects = {}
self.gerrit_event_connector = None
+ self.source = driver.getSource(self)
def getProject(self, name):
- if name not in self.projects:
- self.projects[name] = Project(name, self.connection_name)
- return self.projects[name]
+ return self.projects.get(name)
+
+ def addProject(self, project):
+ self.projects[project.name] = project
def maintainCache(self, relevant):
# This lets the user supply a list of change objects that are
@@ -290,14 +300,14 @@
change = self._getChange(event.change_number, event.patch_number,
refresh=refresh)
elif event.ref:
- project = self.getProject(event.project_name)
+ project = self.source.getProject(event.project_name)
change = Ref(project)
change.ref = event.ref
change.oldrev = event.oldrev
change.newrev = event.newrev
change.url = self._getGitwebUrl(project, sha=event.newrev)
else:
- project = self.getProject(event.project_name)
+ project = self.source.getProject(event.project_name)
change = Ref(project)
branch = event.branch or 'master'
change.ref = 'refs/heads/%s' % branch
@@ -313,7 +323,7 @@
if change and not refresh:
return change
if not change:
- change = Change(None)
+ change = GerritChange(None)
change.number = number
change.patchset = patchset
key = '%s,%s' % (change.number, change.patchset)
@@ -375,7 +385,7 @@
if 'project' not in data:
raise exceptions.ChangeNotFound(change.number, change.patchset)
- change.project = self.getProject(data['project'])
+ change.project = self.source.getProject(data['project'])
change.branch = data['branch']
change.url = data['url']
max_ps = 0
@@ -523,16 +533,16 @@
# Wait for the ref to show up in the repo
start = time.time()
while time.time() - start < self.replication_timeout:
- sha = self.getRefSha(project.name, ref)
+ sha = self.getRefSha(project, ref)
if old_sha != sha:
return True
time.sleep(self.replication_retry_interval)
return False
- def getRefSha(self, project_name, ref):
+ def getRefSha(self, project, ref):
refs = {}
try:
- refs = self.getInfoRefs(project_name)
+ refs = self.getInfoRefs(project)
except:
self.log.exception("Exception looking for ref %s" %
ref)
@@ -594,7 +604,7 @@
return changes
def getProjectBranches(self, project):
- refs = self.getInfoRefs(project.name)
+ refs = self.getInfoRefs(project)
heads = [str(k[len('refs/heads/'):]) for k in refs.keys()
if k.startswith('refs/heads/')]
return heads
@@ -611,12 +621,12 @@
def review(self, project, change, message, action={}):
cmd = 'gerrit review --project %s' % project
if message:
- cmd += ' --message "%s"' % message
+ cmd += ' --message %s' % shlex_quote(message)
for key, val in action.items():
if val is True:
cmd += ' --%s' % key
else:
- cmd += ' --%s %s' % (key, val)
+ cmd += ' --label %s=%s' % (key, val)
cmd += ' %s' % change
out, err = self._ssh(cmd)
return err
@@ -716,21 +726,21 @@
if stdin_data:
stdin.write(stdin_data)
- out = stdout.read()
+ out = stdout.read().decode('utf-8')
self.log.debug("SSH received stdout:\n%s" % out)
ret = stdout.channel.recv_exit_status()
self.log.debug("SSH exit status: %s" % ret)
- err = stderr.read()
+ err = stderr.read().decode('utf-8')
self.log.debug("SSH received stderr:\n%s" % err)
if ret:
raise Exception("Gerrit error executing %s" % command)
return (out, err)
- def getInfoRefs(self, project_name):
+ def getInfoRefs(self, project):
url = "%s/p/%s/info/refs?service=git-upload-pack" % (
- self.baseurl, project_name)
+ self.baseurl, project.name)
try:
data = urllib.request.urlopen(url).read()
except:
diff --git a/zuul/driver/gerrit/gerritmodel.py b/zuul/driver/gerrit/gerritmodel.py
new file mode 100644
index 0000000..818d260
--- /dev/null
+++ b/zuul/driver/gerrit/gerritmodel.py
@@ -0,0 +1,358 @@
+# Copyright 2017 Red Hat, Inc.
+#
+# Licensed under the Apache License, Version 2.0 (the "License"); you may
+# not use this file except in compliance with the License. You may obtain
+# a copy of the License at
+#
+# http://www.apache.org/licenses/LICENSE-2.0
+#
+# Unless required by applicable law or agreed to in writing, software
+# distributed under the License is distributed on an "AS IS" BASIS, WITHOUT
+# WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. See the
+# License for the specific language governing permissions and limitations
+# under the License.
+
+import copy
+import re
+import time
+
+from zuul.model import EventFilter, RefFilter
+from zuul.model import Change, TriggerEvent
+from zuul.driver.util import time_to_seconds
+
+
+EMPTY_GIT_REF = '0' * 40 # git sha of all zeros, used during creates/deletes
+
+
+def normalize_category(name):
+ name = name.lower()
+ return re.sub(' ', '-', name)
+
+
+class GerritChange(Change):
+ def __init__(self, project):
+ super(GerritChange, self).__init__(project)
+ self.approvals = []
+
+
+class GerritTriggerEvent(TriggerEvent):
+ """Incoming event from an external system."""
+ def __init__(self):
+ super(GerritTriggerEvent, self).__init__()
+ self.approvals = []
+
+ def __repr__(self):
+ ret = '<GerritTriggerEvent %s %s' % (self.type,
+ self.canonical_project_name)
+
+ if self.branch:
+ ret += " %s" % self.branch
+ if self.change_number:
+ ret += " %s,%s" % (self.change_number, self.patch_number)
+ if self.approvals:
+ ret += ' ' + ', '.join(
+ ['%s:%s' % (a['type'], a['value']) for a in self.approvals])
+ ret += '>'
+
+ return ret
+
+ def isPatchsetCreated(self):
+ return 'patchset-created' == self.type
+
+ def isChangeAbandoned(self):
+ return 'change-abandoned' == self.type
+
+
+class GerritApprovalFilter(object):
+ def __init__(self, required_approvals=[], reject_approvals=[]):
+ self._required_approvals = copy.deepcopy(required_approvals)
+ self.required_approvals = self._tidy_approvals(required_approvals)
+ self._reject_approvals = copy.deepcopy(reject_approvals)
+ self.reject_approvals = self._tidy_approvals(reject_approvals)
+
+ def _tidy_approvals(self, approvals):
+ for a in approvals:
+ for k, v in a.items():
+ if k == 'username':
+ a['username'] = re.compile(v)
+ elif k in ['email', 'email-filter']:
+ a['email'] = re.compile(v)
+ elif k == 'newer-than':
+ a[k] = time_to_seconds(v)
+ elif k == 'older-than':
+ a[k] = time_to_seconds(v)
+ if 'email-filter' in a:
+ del a['email-filter']
+ return approvals
+
+ def _match_approval_required_approval(self, rapproval, approval):
+ # Check if the required approval and approval match
+ if 'description' not in approval:
+ return False
+ now = time.time()
+ by = approval.get('by', {})
+ for k, v in rapproval.items():
+ if k == 'username':
+ if (not v.search(by.get('username', ''))):
+ return False
+ elif k == 'email':
+ if (not v.search(by.get('email', ''))):
+ return False
+ elif k == 'newer-than':
+ t = now - v
+ if (approval['grantedOn'] < t):
+ return False
+ elif k == 'older-than':
+ t = now - v
+ if (approval['grantedOn'] >= t):
+ return False
+ else:
+ if not isinstance(v, list):
+ v = [v]
+ if (normalize_category(approval['description']) != k or
+ int(approval['value']) not in v):
+ return False
+ return True
+
+ def matchesApprovals(self, change):
+ if self.required_approvals or self.reject_approvals:
+ if not hasattr(change, 'number'):
+ # Not a change, no reviews
+ return False
+ if (self.required_approvals and not change.approvals
+ or self.reject_approvals and not change.approvals):
+ # A change with no approvals can not match
+ return False
+
+ # TODO(jhesketh): If we wanted to optimise this slightly we could
+ # analyse both the REQUIRE and REJECT filters by looping over the
+ # approvals on the change and keeping track of what we have checked
+ # rather than needing to loop on the change approvals twice
+ return (self.matchesRequiredApprovals(change) and
+ self.matchesNoRejectApprovals(change))
+
+ def matchesRequiredApprovals(self, change):
+ # Check if any approvals match the requirements
+ for rapproval in self.required_approvals:
+ matches_rapproval = False
+ for approval in change.approvals:
+ if self._match_approval_required_approval(rapproval, approval):
+ # We have a matching approval so this requirement is
+ # fulfilled
+ matches_rapproval = True
+ break
+ if not matches_rapproval:
+ return False
+ return True
+
+ def matchesNoRejectApprovals(self, change):
+ # Check to make sure no approvals match a reject criteria
+ for rapproval in self.reject_approvals:
+ for approval in change.approvals:
+ if self._match_approval_required_approval(rapproval, approval):
+ # A reject approval has been matched, so we reject
+ # immediately
+ return False
+ # To get here no rejects can have been matched so we should be good to
+ # queue
+ return True
+
+
+class GerritEventFilter(EventFilter, GerritApprovalFilter):
+ def __init__(self, trigger, types=[], branches=[], refs=[],
+ event_approvals={}, comments=[], emails=[], usernames=[],
+ required_approvals=[], reject_approvals=[],
+ ignore_deletes=True):
+
+ EventFilter.__init__(self, trigger)
+
+ GerritApprovalFilter.__init__(self,
+ required_approvals=required_approvals,
+ reject_approvals=reject_approvals)
+
+ self._types = types
+ self._branches = branches
+ self._refs = refs
+ self._comments = comments
+ self._emails = emails
+ self._usernames = usernames
+ self.types = [re.compile(x) for x in types]
+ self.branches = [re.compile(x) for x in branches]
+ self.refs = [re.compile(x) for x in refs]
+ self.comments = [re.compile(x) for x in comments]
+ self.emails = [re.compile(x) for x in emails]
+ self.usernames = [re.compile(x) for x in usernames]
+ self.event_approvals = event_approvals
+ self.ignore_deletes = ignore_deletes
+
+ def __repr__(self):
+ ret = '<GerritEventFilter'
+
+ if self._types:
+ ret += ' types: %s' % ', '.join(self._types)
+ if self._branches:
+ ret += ' branches: %s' % ', '.join(self._branches)
+ if self._refs:
+ ret += ' refs: %s' % ', '.join(self._refs)
+ if self.ignore_deletes:
+ ret += ' ignore_deletes: %s' % self.ignore_deletes
+ if self.event_approvals:
+ ret += ' event_approvals: %s' % ', '.join(
+ ['%s:%s' % a for a in self.event_approvals.items()])
+ if self.required_approvals:
+ ret += ' required_approvals: %s' % ', '.join(
+ ['%s' % a for a in self._required_approvals])
+ if self.reject_approvals:
+ ret += ' reject_approvals: %s' % ', '.join(
+ ['%s' % a for a in self._reject_approvals])
+ if self._comments:
+ ret += ' comments: %s' % ', '.join(self._comments)
+ if self._emails:
+ ret += ' emails: %s' % ', '.join(self._emails)
+ if self._usernames:
+ ret += ' usernames: %s' % ', '.join(self._usernames)
+ ret += '>'
+
+ return ret
+
+ def matches(self, event, change):
+ # event types are ORed
+ matches_type = False
+ for etype in self.types:
+ if etype.match(event.type):
+ matches_type = True
+ if self.types and not matches_type:
+ return False
+
+ # branches are ORed
+ matches_branch = False
+ for branch in self.branches:
+ if branch.match(event.branch):
+ matches_branch = True
+ if self.branches and not matches_branch:
+ return False
+
+ # refs are ORed
+ matches_ref = False
+ if event.ref is not None:
+ for ref in self.refs:
+ if ref.match(event.ref):
+ matches_ref = True
+ if self.refs and not matches_ref:
+ return False
+ if self.ignore_deletes and event.newrev == EMPTY_GIT_REF:
+ # If the updated ref has an empty git sha (all 0s),
+ # then the ref is being deleted
+ return False
+
+ # comments are ORed
+ matches_comment_re = False
+ for comment_re in self.comments:
+ if (event.comment is not None and
+ comment_re.search(event.comment)):
+ matches_comment_re = True
+ if self.comments and not matches_comment_re:
+ return False
+
+ # We better have an account provided by Gerrit to do
+ # email filtering.
+ if event.account is not None:
+ account_email = event.account.get('email')
+ # emails are ORed
+ matches_email_re = False
+ for email_re in self.emails:
+ if (account_email is not None and
+ email_re.search(account_email)):
+ matches_email_re = True
+ if self.emails and not matches_email_re:
+ return False
+
+ # usernames are ORed
+ account_username = event.account.get('username')
+ matches_username_re = False
+ for username_re in self.usernames:
+ if (account_username is not None and
+ username_re.search(account_username)):
+ matches_username_re = True
+ if self.usernames and not matches_username_re:
+ return False
+
+ # approvals are ANDed
+ for category, value in self.event_approvals.items():
+ matches_approval = False
+ for eapp in event.approvals:
+ if (normalize_category(eapp['description']) == category and
+ int(eapp['value']) == int(value)):
+ matches_approval = True
+ if not matches_approval:
+ return False
+
+ # required approvals are ANDed (reject approvals are ORed)
+ if not self.matchesApprovals(change):
+ return False
+
+ return True
+
+
+class GerritRefFilter(RefFilter, GerritApprovalFilter):
+ def __init__(self, connection_name, open=None, current_patchset=None,
+ statuses=[], required_approvals=[],
+ reject_approvals=[]):
+ RefFilter.__init__(self, connection_name)
+
+ GerritApprovalFilter.__init__(self,
+ required_approvals=required_approvals,
+ reject_approvals=reject_approvals)
+
+ self.open = open
+ self.current_patchset = current_patchset
+ self.statuses = statuses
+
+ def __repr__(self):
+ ret = '<GerritRefFilter'
+
+ ret += ' connection_name: %s' % self.connection_name
+ if self.open is not None:
+ ret += ' open: %s' % self.open
+ if self.current_patchset is not None:
+ ret += ' current-patchset: %s' % self.current_patchset
+ if self.statuses:
+ ret += ' statuses: %s' % ', '.join(self.statuses)
+ if self.required_approvals:
+ ret += (' required-approvals: %s' %
+ str(self.required_approvals))
+ if self.reject_approvals:
+ ret += (' reject-approvals: %s' %
+ str(self.reject_approvals))
+ ret += '>'
+
+ return ret
+
+ def matches(self, change):
+ if self.open is not None:
+ # if a "change" has no number, it's not a change, but a push
+ # and cannot possibly pass this test.
+ if hasattr(change, 'number'):
+ if self.open != change.open:
+ return False
+ else:
+ return False
+
+ if self.current_patchset is not None:
+ # if a "change" has no number, it's not a change, but a push
+ # and cannot possibly pass this test.
+ if hasattr(change, 'number'):
+ if self.current_patchset != change.is_current_patchset:
+ return False
+ else:
+ return False
+
+ if self.statuses:
+ if change.status not in self.statuses:
+ return False
+
+ # required approvals are ANDed (reject approvals are ORed)
+ if not self.matchesApprovals(change):
+ return False
+
+ return True
diff --git a/zuul/driver/gerrit/gerritreporter.py b/zuul/driver/gerrit/gerritreporter.py
index 0ade355..f8e8b03 100644
--- a/zuul/driver/gerrit/gerritreporter.py
+++ b/zuul/driver/gerrit/gerritreporter.py
@@ -15,7 +15,7 @@
import logging
import voluptuous as v
-
+from zuul.driver.gerrit.gerritsource import GerritSource
from zuul.reporter import BaseReporter
@@ -25,15 +25,26 @@
name = 'gerrit'
log = logging.getLogger("zuul.GerritReporter")
- def report(self, source, pipeline, item):
+ def report(self, pipeline, item):
"""Send a message to gerrit."""
+
+ # If the source is no GerritSource we cannot report anything here.
+ if not isinstance(item.change.project.source, GerritSource):
+ return
+
+ # For supporting several Gerrit connections we also must filter by
+ # the canonical hostname.
+ if item.change.project.source.connection.canonical_hostname != \
+ self.connection.canonical_hostname:
+ return
+
message = self._formatItemReport(pipeline, item)
self.log.debug("Report change %s, params %s, message: %s" %
(item.change, self.config, message))
changeid = '%s,%s' % (item.change.number, item.change.patchset)
- item.change._ref_sha = source.getRefSha(
- item.change.project.name, 'refs/heads/' + item.change.branch)
+ item.change._ref_sha = item.change.project.source.getRefSha(
+ item.change.project, 'refs/heads/' + item.change.branch)
return self.connection.review(item.change.project.name, changeid,
message, self.config)
diff --git a/zuul/driver/gerrit/gerritsource.py b/zuul/driver/gerrit/gerritsource.py
index 2271cde..4571cc1 100644
--- a/zuul/driver/gerrit/gerritsource.py
+++ b/zuul/driver/gerrit/gerritsource.py
@@ -13,7 +13,11 @@
# under the License.
import logging
+import voluptuous as vs
from zuul.source import BaseSource
+from zuul.model import Project
+from zuul.driver.gerrit.gerritmodel import GerritRefFilter
+from zuul.driver.util import scalar_or_list, to_list
class GerritSource(BaseSource):
@@ -41,7 +45,11 @@
return self.connection.getChange(event, refresh)
def getProject(self, name):
- return self.connection.getProject(name)
+ p = self.connection.getProject(name)
+ if not p:
+ p = Project(name, self)
+ self.connection.addProject(p)
+ return p
def getProjectOpenChanges(self, project):
return self.connection.getProjectOpenChanges(project)
@@ -54,3 +62,43 @@
def _getGitwebUrl(self, project, sha=None):
return self.connection._getGitwebUrl(project, sha)
+
+ def getRequireFilters(self, config):
+ f = GerritRefFilter(
+ connection_name=self.connection.connection_name,
+ open=config.get('open'),
+ current_patchset=config.get('current-patchset'),
+ statuses=to_list(config.get('status')),
+ required_approvals=to_list(config.get('approval')),
+ )
+ return [f]
+
+ def getRejectFilters(self, config):
+ f = GerritRefFilter(
+ connection_name=self.connection.connection_name,
+ reject_approvals=to_list(config.get('approval')),
+ )
+ return [f]
+
+
+approval = vs.Schema({'username': str,
+ 'email-filter': str,
+ 'email': str,
+ 'older-than': str,
+ 'newer-than': str,
+ }, extra=vs.ALLOW_EXTRA)
+
+
+def getRequireSchema():
+ require = {'approval': scalar_or_list(approval),
+ 'open': bool,
+ 'current-patchset': bool,
+ 'status': scalar_or_list(str)}
+
+ return require
+
+
+def getRejectSchema():
+ reject = {'approval': scalar_or_list(approval)}
+
+ return reject
diff --git a/zuul/driver/gerrit/gerrittrigger.py b/zuul/driver/gerrit/gerrittrigger.py
index 70c65fd..706b7df 100644
--- a/zuul/driver/gerrit/gerrittrigger.py
+++ b/zuul/driver/gerrit/gerrittrigger.py
@@ -14,8 +14,9 @@
import logging
import voluptuous as v
-from zuul.model import EventFilter
from zuul.trigger import BaseTrigger
+from zuul.driver.gerrit.gerritmodel import GerritEventFilter
+from zuul.driver.util import scalar_or_list, to_list
class GerritTrigger(BaseTrigger):
@@ -23,43 +24,36 @@
log = logging.getLogger("zuul.GerritTrigger")
def getEventFilters(self, trigger_conf):
- def toList(item):
- if not item:
- return []
- if isinstance(item, list):
- return item
- return [item]
-
efilters = []
- for trigger in toList(trigger_conf):
+ for trigger in to_list(trigger_conf):
approvals = {}
- for approval_dict in toList(trigger.get('approval')):
+ for approval_dict in to_list(trigger.get('approval')):
for key, val in approval_dict.items():
approvals[key] = val
# Backwards compat for *_filter versions of these args
- comments = toList(trigger.get('comment'))
+ comments = to_list(trigger.get('comment'))
if not comments:
- comments = toList(trigger.get('comment_filter'))
- emails = toList(trigger.get('email'))
+ comments = to_list(trigger.get('comment_filter'))
+ emails = to_list(trigger.get('email'))
if not emails:
- emails = toList(trigger.get('email_filter'))
- usernames = toList(trigger.get('username'))
+ emails = to_list(trigger.get('email_filter'))
+ usernames = to_list(trigger.get('username'))
if not usernames:
- usernames = toList(trigger.get('username_filter'))
+ usernames = to_list(trigger.get('username_filter'))
ignore_deletes = trigger.get('ignore-deletes', True)
- f = EventFilter(
+ f = GerritEventFilter(
trigger=self,
- types=toList(trigger['event']),
- branches=toList(trigger.get('branch')),
- refs=toList(trigger.get('ref')),
+ types=to_list(trigger['event']),
+ branches=to_list(trigger.get('branch')),
+ refs=to_list(trigger.get('ref')),
event_approvals=approvals,
comments=comments,
emails=emails,
usernames=usernames,
required_approvals=(
- toList(trigger.get('require-approval'))
+ to_list(trigger.get('require-approval'))
),
- reject_approvals=toList(
+ reject_approvals=to_list(
trigger.get('reject-approval')
),
ignore_deletes=ignore_deletes
@@ -80,8 +74,6 @@
def getSchema():
- def toList(x):
- return v.Any([x], x)
variable_dict = v.Schema(dict)
approval = v.Schema({'username': str,
@@ -93,25 +85,25 @@
gerrit_trigger = {
v.Required('event'):
- toList(v.Any('patchset-created',
- 'draft-published',
- 'change-abandoned',
- 'change-restored',
- 'change-merged',
- 'comment-added',
- 'ref-updated')),
- 'comment_filter': toList(str),
- 'comment': toList(str),
- 'email_filter': toList(str),
- 'email': toList(str),
- 'username_filter': toList(str),
- 'username': toList(str),
- 'branch': toList(str),
- 'ref': toList(str),
+ scalar_or_list(v.Any('patchset-created',
+ 'draft-published',
+ 'change-abandoned',
+ 'change-restored',
+ 'change-merged',
+ 'comment-added',
+ 'ref-updated')),
+ 'comment_filter': scalar_or_list(str),
+ 'comment': scalar_or_list(str),
+ 'email_filter': scalar_or_list(str),
+ 'email': scalar_or_list(str),
+ 'username_filter': scalar_or_list(str),
+ 'username': scalar_or_list(str),
+ 'branch': scalar_or_list(str),
+ 'ref': scalar_or_list(str),
'ignore-deletes': bool,
- 'approval': toList(variable_dict),
- 'require-approval': toList(approval),
- 'reject-approval': toList(approval),
+ 'approval': scalar_or_list(variable_dict),
+ 'require-approval': scalar_or_list(approval),
+ 'reject-approval': scalar_or_list(approval),
}
return gerrit_trigger
diff --git a/zuul/driver/git/__init__.py b/zuul/driver/git/__init__.py
index abedf6a..0faa036 100644
--- a/zuul/driver/git/__init__.py
+++ b/zuul/driver/git/__init__.py
@@ -13,8 +13,8 @@
# under the License.
from zuul.driver import Driver, ConnectionInterface, SourceInterface
-import gitconnection
-import gitsource
+from zuul.driver.git import gitconnection
+from zuul.driver.git import gitsource
class GitDriver(Driver, ConnectionInterface, SourceInterface):
@@ -25,3 +25,9 @@
def getSource(self, connection):
return gitsource.GitSource(self, connection)
+
+ def getRequireSchema(self):
+ return {}
+
+ def getRejectSchema(self):
+ return {}
diff --git a/zuul/driver/git/gitconnection.py b/zuul/driver/git/gitconnection.py
index 67f195c..ca88d3f 100644
--- a/zuul/driver/git/gitconnection.py
+++ b/zuul/driver/git/gitconnection.py
@@ -19,7 +19,6 @@
import voluptuous as v
from zuul.connection import BaseConnection
-from zuul.model import Project
class GitConnection(BaseConnection):
@@ -44,9 +43,10 @@
self.projects = {}
def getProject(self, name):
- if name not in self.projects:
- self.projects[name] = Project(name, self.connection_name)
- return self.projects[name]
+ return self.projects.get(name)
+
+ def addProject(self, project):
+ self.projects[project.name] = project
def getProjectBranches(self, project):
# TODO(jeblair): implement; this will need to handle local or
diff --git a/zuul/driver/git/gitsource.py b/zuul/driver/git/gitsource.py
index 076e8b7..61a328e 100644
--- a/zuul/driver/git/gitsource.py
+++ b/zuul/driver/git/gitsource.py
@@ -14,6 +14,7 @@
import logging
from zuul.source import BaseSource
+from zuul.model import Project
class GitSource(BaseSource):
@@ -38,7 +39,11 @@
raise NotImplemented()
def getProject(self, name):
- return self.connection.getProject(name)
+ p = self.connection.getProject(name)
+ if not p:
+ p = Project(name, self)
+ self.connection.addProject(p)
+ return p
def getProjectBranches(self, project):
return self.connection.getProjectBranches(project)
@@ -48,3 +53,9 @@
def getProjectOpenChanges(self, project):
raise NotImplemented()
+
+ def getRequireFilters(self, config):
+ return []
+
+ def getRejectFilters(self, config):
+ return []
diff --git a/zuul/driver/github/__init__.py b/zuul/driver/github/__init__.py
new file mode 100644
index 0000000..f75e907
--- /dev/null
+++ b/zuul/driver/github/__init__.py
@@ -0,0 +1,49 @@
+# Copyright 2017 IBM Corp.
+#
+# Licensed under the Apache License, Version 2.0 (the "License"); you may
+# not use this file except in compliance with the License. You may obtain
+# a copy of the License at
+#
+# http://www.apache.org/licenses/LICENSE-2.0
+#
+# Unless required by applicable law or agreed to in writing, software
+# distributed under the License is distributed on an "AS IS" BASIS, WITHOUT
+# WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. See the
+# License for the specific language governing permissions and limitations
+# under the License.
+
+from zuul.driver import Driver, ConnectionInterface, TriggerInterface
+from zuul.driver import SourceInterface
+from zuul.driver.github import githubconnection
+from zuul.driver.github import githubtrigger
+from zuul.driver.github import githubsource
+from zuul.driver.github import githubreporter
+
+
+class GithubDriver(Driver, ConnectionInterface, TriggerInterface,
+ SourceInterface):
+ name = 'github'
+
+ def getConnection(self, name, config):
+ return githubconnection.GithubConnection(self, name, config)
+
+ def getTrigger(self, connection, config=None):
+ return githubtrigger.GithubTrigger(self, connection, config)
+
+ def getSource(self, connection):
+ return githubsource.GithubSource(self, connection)
+
+ def getReporter(self, connection, config=None):
+ return githubreporter.GithubReporter(self, connection, config)
+
+ def getTriggerSchema(self):
+ return githubtrigger.getSchema()
+
+ def getReporterSchema(self):
+ return githubreporter.getSchema()
+
+ def getRequireSchema(self):
+ return githubsource.getRequireSchema()
+
+ def getRejectSchema(self):
+ return githubsource.getRejectSchema()
diff --git a/zuul/driver/github/githubconnection.py b/zuul/driver/github/githubconnection.py
new file mode 100644
index 0000000..6a3c09e
--- /dev/null
+++ b/zuul/driver/github/githubconnection.py
@@ -0,0 +1,779 @@
+# Copyright 2015 Hewlett-Packard Development Company, L.P.
+#
+# Licensed under the Apache License, Version 2.0 (the "License"); you may
+# not use this file except in compliance with the License. You may obtain
+# a copy of the License at
+#
+# http://www.apache.org/licenses/LICENSE-2.0
+#
+# Unless required by applicable law or agreed to in writing, software
+# distributed under the License is distributed on an "AS IS" BASIS, WITHOUT
+# WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. See the
+# License for the specific language governing permissions and limitations
+# under the License.
+
+import collections
+import datetime
+import logging
+import hmac
+import hashlib
+import time
+
+import cachecontrol
+from cachecontrol.cache import DictCache
+import iso8601
+import jwt
+import requests
+import webob
+import webob.dec
+import voluptuous as v
+import github3
+from github3.exceptions import MethodNotAllowed
+
+from zuul.connection import BaseConnection
+from zuul.model import Ref
+from zuul.exceptions import MergeFailure
+from zuul.driver.github.githubmodel import PullRequest, GithubTriggerEvent
+
+ACCESS_TOKEN_URL = 'https://api.github.com/installations/%s/access_tokens'
+PREVIEW_JSON_ACCEPT = 'application/vnd.github.machine-man-preview+json'
+
+
+class UTC(datetime.tzinfo):
+ """UTC"""
+
+ def utcoffset(self, dt):
+ return datetime.timedelta(0)
+
+ def tzname(self, dt):
+ return "UTC"
+
+ def dst(self, dt):
+ return datetime.timedelta(0)
+
+
+utc = UTC()
+
+
+class GithubWebhookListener():
+
+ log = logging.getLogger("zuul.GithubWebhookListener")
+
+ def __init__(self, connection):
+ self.connection = connection
+
+ def handle_request(self, path, tenant_name, request):
+ if request.method != 'POST':
+ self.log.debug("Only POST method is allowed.")
+ raise webob.exc.HTTPMethodNotAllowed(
+ 'Only POST method is allowed.')
+
+ self.log.debug("Github Webhook Received.")
+
+ self._validate_signature(request)
+
+ self.__dispatch_event(request)
+
+ def __dispatch_event(self, request):
+ try:
+ event = request.headers['X-Github-Event']
+ self.log.debug("X-Github-Event: " + event)
+ except KeyError:
+ self.log.debug("Request headers missing the X-Github-Event.")
+ raise webob.exc.HTTPBadRequest('Please specify a X-Github-Event '
+ 'header.')
+
+ try:
+ method = getattr(self, '_event_' + event)
+ except AttributeError:
+ message = "Unhandled X-Github-Event: {0}".format(event)
+ self.log.debug(message)
+ raise webob.exc.HTTPBadRequest(message)
+
+ try:
+ json_body = request.json_body
+ except:
+ message = 'Exception deserializing JSON body'
+ self.log.exception(message)
+ raise webob.exc.HTTPBadRequest(message)
+
+ # If there's any installation mapping information in the body then
+ # update the project mapping before any requests are made.
+ installation_id = json_body.get('installation', {}).get('id')
+ project_name = json_body.get('repository', {}).get('full_name')
+
+ if installation_id and project_name:
+ old_id = self.connection.installation_map.get(project_name)
+
+ if old_id and old_id != installation_id:
+ msg = "Unexpected installation_id change for %s. %d -> %d."
+ self.log.warning(msg, project_name, old_id, installation_id)
+
+ self.connection.installation_map[project_name] = installation_id
+
+ try:
+ event = method(json_body)
+ except:
+ self.log.exception('Exception when handling event:')
+ event = None
+
+ if event:
+ event.project_hostname = self.connection.canonical_hostname
+ self.connection.logEvent(event)
+ self.connection.sched.addEvent(event)
+
+ def _event_push(self, body):
+ base_repo = body.get('repository')
+
+ event = GithubTriggerEvent()
+ event.trigger_name = 'github'
+ event.project_name = base_repo.get('full_name')
+ event.type = 'push'
+
+ event.ref = body.get('ref')
+ event.oldrev = body.get('before')
+ event.newrev = body.get('after')
+
+        ref_parts = event.ref.split('/')  # i.e., ['refs', 'heads', 'master']
+
+ if ref_parts[1] == "heads":
+ # necessary for the scheduler to match against particular branches
+ event.branch = ref_parts[2]
+
+ return event
+
+ def _event_pull_request(self, body):
+ action = body.get('action')
+ pr_body = body.get('pull_request')
+
+ event = self._pull_request_to_event(pr_body)
+ event.account = self._get_sender(body)
+
+ event.type = 'pull_request'
+ if action == 'opened':
+ event.action = 'opened'
+ elif action == 'synchronize':
+ event.action = 'changed'
+ elif action == 'closed':
+ event.action = 'closed'
+ elif action == 'reopened':
+ event.action = 'reopened'
+ elif action == 'labeled':
+ event.action = 'labeled'
+ event.label = body['label']['name']
+ elif action == 'unlabeled':
+ event.action = 'unlabeled'
+ event.label = body['label']['name']
+ else:
+ return None
+
+ return event
+
+ def _event_issue_comment(self, body):
+ """Handles pull request comments"""
+ action = body.get('action')
+ if action != 'created':
+ return
+ pr_body = self._issue_to_pull_request(body)
+ number = body.get('issue').get('number')
+ project_name = body.get('repository').get('full_name')
+ pr_body = self.connection.getPull(project_name, number)
+ if pr_body is None:
+ return
+
+ event = self._pull_request_to_event(pr_body)
+ event.account = self._get_sender(body)
+ event.comment = body.get('comment').get('body')
+ event.type = 'pull_request'
+ event.action = 'comment'
+ return event
+
+ def _event_pull_request_review(self, body):
+ """Handles pull request reviews"""
+ pr_body = body.get('pull_request')
+ if pr_body is None:
+ return
+
+ review = body.get('review')
+ if review is None:
+ return
+
+ event = self._pull_request_to_event(pr_body)
+ event.state = review.get('state')
+ event.account = self._get_sender(body)
+ event.type = 'pull_request_review'
+ event.action = body.get('action')
+ return event
+
+ def _event_status(self, body):
+ action = body.get('action')
+ if action == 'pending':
+ return
+ pr_body = self.connection.getPullBySha(body['sha'])
+ if pr_body is None:
+ return
+
+ event = self._pull_request_to_event(pr_body)
+ event.account = self._get_sender(body)
+ event.type = 'pull_request'
+ event.action = 'status'
+        # The webhook payload reports the status author under 'sender',
+        # while the REST API reports it under 'creator'.  Copy it so the
+        # rest of the code can always look in 'creator'.
+ body['creator'] = body['sender']
+ event.status = "%s:%s:%s" % _status_as_tuple(body)
+ return event
+
+ def _issue_to_pull_request(self, body):
+ number = body.get('issue').get('number')
+ project_name = body.get('repository').get('full_name')
+ pr_body = self.connection.getPull(project_name, number)
+ if pr_body is None:
+ self.log.debug('Pull request #%s not found in project %s' %
+ (number, project_name))
+ return pr_body
+
+ def _validate_signature(self, request):
+ secret = self.connection.connection_config.get('webhook_token', None)
+ if secret is None:
+ return True
+
+ body = request.body
+ try:
+ request_signature = request.headers['X-Hub-Signature']
+ except KeyError:
+ raise webob.exc.HTTPUnauthorized(
+ 'Please specify a X-Hub-Signature header with secret.')
+
+ payload_signature = 'sha1=' + hmac.new(secret,
+ body,
+ hashlib.sha1).hexdigest()
+
+ self.log.debug("Payload Signature: {0}".format(str(payload_signature)))
+ self.log.debug("Request Signature: {0}".format(str(request_signature)))
+ if str(payload_signature) != str(request_signature):
+ raise webob.exc.HTTPUnauthorized(
+ 'Request signature does not match calculated payload '
+ 'signature. Check that secret is correct.')
+
+ return True
+
+ def _pull_request_to_event(self, pr_body):
+ event = GithubTriggerEvent()
+ event.trigger_name = 'github'
+
+ base = pr_body.get('base')
+ base_repo = base.get('repo')
+ head = pr_body.get('head')
+
+ event.project_name = base_repo.get('full_name')
+ event.change_number = pr_body.get('number')
+ event.change_url = self.connection.getPullUrl(event.project_name,
+ event.change_number)
+ event.updated_at = pr_body.get('updated_at')
+ event.branch = base.get('ref')
+ event.refspec = "refs/pull/" + str(pr_body.get('number')) + "/head"
+ event.patch_number = head.get('sha')
+
+ event.title = pr_body.get('title')
+
+ return event
+
+ def _get_sender(self, body):
+ login = body.get('sender').get('login')
+ if login:
+ return self.connection.getUser(login)
+
+
+class GithubUser(collections.Mapping):
+ log = logging.getLogger('zuul.GithubUser')
+
+ def __init__(self, github, username):
+ self._github = github
+ self._username = username
+ self._data = None
+
+ def __getitem__(self, key):
+ if self._data is None:
+ self._data = self._init_data()
+ return self._data[key]
+
+ def __iter__(self):
+ return iter(self._data)
+
+ def __len__(self):
+ return len(self._data)
+
+ def _init_data(self):
+ user = self._github.user(self._username)
+ log_rate_limit(self.log, self._github)
+ data = {
+ 'username': user.login,
+ 'name': user.name,
+ 'email': user.email
+ }
+ return data
+
+
+class GithubConnection(BaseConnection):
+ driver_name = 'github'
+ log = logging.getLogger("connection.github")
+ payload_path = 'payload'
+
+ def __init__(self, driver, connection_name, connection_config):
+ super(GithubConnection, self).__init__(
+ driver, connection_name, connection_config)
+ self._change_cache = {}
+ self.projects = {}
+ self.git_ssh_key = self.connection_config.get('sshkey')
+ self.git_host = self.connection_config.get('git_host', 'github.com')
+ self.canonical_hostname = self.connection_config.get(
+ 'canonical_hostname', self.git_host)
+ self.source = driver.getSource(self)
+
+ self._github = None
+ self.app_id = None
+ self.app_key = None
+
+ self.installation_map = {}
+ self.installation_token_cache = {}
+
+ # NOTE(jamielennox): Better here would be to cache to memcache or file
+ # or something external - but zuul already sucks at restarting so in
+ # memory probably doesn't make this much worse.
+ self.cache_adapter = cachecontrol.CacheControlAdapter(
+ DictCache(),
+ cache_etags=True)
+
+ def onLoad(self):
+ webhook_listener = GithubWebhookListener(self)
+ self.registerHttpHandler(self.payload_path,
+ webhook_listener.handle_request)
+ self._authenticateGithubAPI()
+
+ def onStop(self):
+ self.unregisterHttpHandler(self.payload_path)
+
+ def _createGithubClient(self):
+ if self.git_host != 'github.com':
+ url = 'https://%s/' % self.git_host
+ github = github3.GitHubEnterprise(url)
+ else:
+ github = github3.GitHub()
+
+ # anything going through requests to http/s goes through cache
+ github.session.mount('http://', self.cache_adapter)
+ github.session.mount('https://', self.cache_adapter)
+ return github
+
+ def _authenticateGithubAPI(self):
+ config = self.connection_config
+
+ api_token = config.get('api_token')
+
+ app_id = config.get('app_id')
+ app_key = None
+ app_key_file = config.get('app_key')
+
+ self._github = self._createGithubClient()
+
+ if api_token:
+ self._github.login(token=api_token)
+
+ if app_key_file:
+ try:
+ with open(app_key_file, 'r') as f:
+ app_key = f.read()
+ except IOError:
+ m = "Failed to open app key file for reading: %s"
+ self.log.error(m, app_key_file)
+
+ if (app_id or app_key) and \
+ not (app_id and app_key):
+ self.log.warning("You must provide an app_id and "
+ "app_key to use installation based "
+ "authentication")
+
+ return
+
+ if app_id:
+ self.app_id = int(app_id)
+ if app_key:
+ self.app_key = app_key
+
+ def _get_installation_key(self, project, user_id=None):
+ installation_id = self.installation_map.get(project)
+
+ if not installation_id:
+ self.log.error("No installation ID available for project %s",
+ project)
+ return ''
+
+ now = datetime.datetime.now(utc)
+ token, expiry = self.installation_token_cache.get(installation_id,
+ (None, None))
+
+ if ((not expiry) or (not token) or (now >= expiry)):
+ expiry = now + datetime.timedelta(minutes=5)
+
+ data = {'iat': now, 'exp': expiry, 'iss': self.app_id}
+ app_token = jwt.encode(data,
+ self.app_key,
+ algorithm='RS256')
+
+ url = ACCESS_TOKEN_URL % installation_id
+ headers = {'Accept': PREVIEW_JSON_ACCEPT,
+ 'Authorization': 'Bearer %s' % app_token}
+ json_data = {'user_id': user_id} if user_id else None
+
+ response = requests.post(url, headers=headers, json=json_data)
+ response.raise_for_status()
+
+ data = response.json()
+
+ expiry = iso8601.parse_date(data['expires_at'])
+ expiry -= datetime.timedelta(minutes=2)
+ token = data['token']
+
+ self.installation_token_cache[installation_id] = (token, expiry)
+
+ return token
+
+ def getGithubClient(self,
+ project=None,
+ user_id=None,
+ use_app=True):
+ # if you're authenticating for a project and you're an integration then
+ # you need to use the installation specific token. There are some
+ # operations that are not yet supported by integrations so
+ # use_app lets you use api_key auth.
+ if use_app and project and self.app_id:
+ github = self._createGithubClient()
+ github.login(token=self._get_installation_key(project, user_id))
+ return github
+
+ # if we're using api_key authentication then this is already token
+ # authenticated, if not then anonymous is the best we have.
+ return self._github
+
+ def maintainCache(self, relevant):
+ for key, change in self._change_cache.items():
+ if change not in relevant:
+ del self._change_cache[key]
+
+ def getChange(self, event):
+ """Get the change representing an event."""
+
+ project = self.source.getProject(event.project_name)
+ if event.change_number:
+ change = PullRequest(event.project_name)
+ change.project = project
+ change.number = event.change_number
+ change.refspec = event.refspec
+ change.branch = event.branch
+ change.url = event.change_url
+ change.updated_at = self._ghTimestampToDate(event.updated_at)
+ change.patchset = event.patch_number
+ change.files = self.getPullFileNames(project, change.number)
+ change.title = event.title
+ change.status = self._get_statuses(project, event.patch_number)
+ change.reviews = self.getPullReviews(project, change.number)
+ change.source_event = event
+ change.open = self.getPullOpen(event.project_name, change.number)
+ change.is_current_patchset = self.getIsCurrent(event.project_name,
+ change.number,
+ event.patch_number)
+ elif event.ref:
+ change = Ref(project)
+ change.ref = event.ref
+ change.oldrev = event.oldrev
+ change.newrev = event.newrev
+ change.url = self.getGitwebUrl(project, sha=event.newrev)
+ change.source_event = event
+ else:
+ change = Ref(project)
+ return change
+
+ def getGitUrl(self, project):
+ if self.git_ssh_key:
+ return 'ssh://git@%s/%s.git' % (self.git_host, project)
+
+ if self.app_id:
+ installation_key = self._get_installation_key(project)
+ return 'https://x-access-token:%s@%s/%s' % (installation_key,
+ self.git_host,
+ project)
+
+ return 'https://%s/%s' % (self.git_host, project)
+
+ def getGitwebUrl(self, project, sha=None):
+ url = 'https://%s/%s' % (self.git_host, project)
+ if sha is not None:
+ url += '/commit/%s' % sha
+ return url
+
+ def getProject(self, name):
+ return self.projects.get(name)
+
+ def addProject(self, project):
+ self.projects[project.name] = project
+
+ def getProjectBranches(self, project):
+ github = self.getGithubClient()
+ owner, proj = project.name.split('/')
+ repository = github.repository(owner, proj)
+ branches = [branch.name for branch in repository.branches()]
+ log_rate_limit(self.log, github)
+ return branches
+
+ def getPullUrl(self, project, number):
+ return '%s/pull/%s' % (self.getGitwebUrl(project), number)
+
+ def getPull(self, project_name, number):
+ github = self.getGithubClient(project_name)
+ owner, proj = project_name.split('/')
+ pr = github.pull_request(owner, proj, number).as_dict()
+ log_rate_limit(self.log, github)
+ return pr
+
+ def canMerge(self, change, allow_needs):
+ # This API call may get a false (null) while GitHub is calculating
+ # if it can merge. The github3.py library will just return that as
+ # false. This could lead to false negatives.
+ # Additionally, this only checks if the PR code could merge
+ # cleanly to the target branch. It does not evaluate any branch
+ # protection merge requirements (such as reviews and status states)
+ # At some point in the future this may be available through the API
+ # or we can fetch the branch protection settings and evaluate within
+ # Zuul whether or not those protections have been met
+ # For now, just send back a True value.
+ return True
+
+ def getPullBySha(self, sha):
+ query = '%s type:pr is:open' % sha
+ pulls = []
+ github = self.getGithubClient()
+ for issue in github.search_issues(query=query):
+ pr_url = issue.issue.pull_request().as_dict().get('url')
+ if not pr_url:
+ continue
+            # the search result does not identify the project, so parse the PR URL
+ owner, project, _, number = pr_url.split('/')[4:]
+ github = self.getGithubClient("%s/%s" % (owner, project))
+ pr = github.pull_request(owner, project, number)
+ if pr.head.sha != sha:
+ continue
+ if pr.as_dict() in pulls:
+ continue
+ pulls.append(pr.as_dict())
+
+ log_rate_limit(self.log, github)
+ if len(pulls) > 1:
+ raise Exception('Multiple pulls found with head sha %s' % sha)
+
+ if len(pulls) == 0:
+ return None
+ return pulls.pop()
+
+ def getPullFileNames(self, project, number):
+ github = self.getGithubClient(project)
+ owner, proj = project.name.split('/')
+ filenames = [f.filename for f in
+ github.pull_request(owner, proj, number).files()]
+ log_rate_limit(self.log, github)
+ return filenames
+
+ def getPullReviews(self, project, number):
+ owner, proj = project.name.split('/')
+
+ revs = self._getPullReviews(owner, proj, number)
+
+ reviews = {}
+ for rev in revs:
+ user = rev.get('user').get('login')
+ review = {
+ 'by': {
+ 'username': user,
+ 'email': rev.get('user').get('email'),
+ },
+ 'grantedOn': int(time.mktime(self._ghTimestampToDate(
+ rev.get('submitted_at')))),
+ }
+
+ review['type'] = rev.get('state').lower()
+ review['submitted_at'] = rev.get('submitted_at')
+
+            # Determine the user's rights; anyone able to leave a review has at least 'read'
+ review['permission'] = 'read'
+ permission = self.getRepoPermission(project.name, user)
+ if permission == 'write':
+ review['permission'] = 'write'
+ if permission == 'admin':
+ review['permission'] = 'admin'
+
+ if user not in reviews:
+ reviews[user] = review
+ else:
+ # if there are multiple reviews per user, keep the newest
+ # note that this breaks the ability to set the 'older-than'
+ # option on a review requirement.
+ if review['grantedOn'] > reviews[user]['grantedOn']:
+ reviews[user] = review
+
+ return reviews.values()
+
+ def _getPullReviews(self, owner, project, number):
+ # make a list out of the reviews so that we complete our
+ # API transaction
+ # reviews are not yet supported by integrations, use api_key:
+ # https://platform.github.community/t/api-endpoint-for-pr-reviews/409
+ github = self.getGithubClient("%s/%s" % (owner, project),
+ use_app=False)
+ reviews = [review.as_dict() for review in
+ github.pull_request(owner, project, number).reviews()]
+
+ log_rate_limit(self.log, github)
+ return reviews
+
+ def getUser(self, login):
+ return GithubUser(self.getGithubClient(), login)
+
+ def getUserUri(self, login):
+ return 'https://%s/%s' % (self.git_host, login)
+
+ def getRepoPermission(self, project, login):
+ github = self.getGithubClient(project)
+ owner, proj = project.split('/')
+        # github3.py does not expose the collaborator-permission endpoint,
+        # so call it directly with the required preview media type header.
+ headers = {'Accept': 'application/vnd.github.korra-preview'}
+
+        # NOTE(review): passes the full 'owner/repo' string as the repo arg; should this be 'proj'?
+ repository = github.repository(owner, project)
+ # Build up a URL
+ url = repository._build_url('collaborators', login, 'permission',
+ base_url=repository._api)
+ # Get the data
+ perms = repository._get(url, headers=headers)
+
+ log_rate_limit(self.log, github)
+
+ # no known user, maybe deleted since review?
+ if perms.status_code == 404:
+ return 'none'
+
+ # get permissions from the data
+ return perms.json()['permission']
+
+ def commentPull(self, project, pr_number, message):
+ github = self.getGithubClient(project)
+ owner, proj = project.split('/')
+ repository = github.repository(owner, proj)
+ pull_request = repository.issue(pr_number)
+ pull_request.create_comment(message)
+ log_rate_limit(self.log, github)
+
+ def mergePull(self, project, pr_number, commit_message='', sha=None):
+ github = self.getGithubClient(project)
+ owner, proj = project.split('/')
+ pull_request = github.pull_request(owner, proj, pr_number)
+ try:
+ result = pull_request.merge(commit_message=commit_message, sha=sha)
+ except MethodNotAllowed as e:
+ raise MergeFailure('Merge was not successful due to mergeability'
+ ' conflict, original error is %s' % e)
+ log_rate_limit(self.log, github)
+ if not result:
+ raise Exception('Pull request was not merged')
+
+ def getCommitStatuses(self, project, sha):
+ github = self.getGithubClient(project)
+ owner, proj = project.split('/')
+ repository = github.repository(owner, proj)
+ commit = repository.commit(sha)
+ # make a list out of the statuses so that we complete our
+ # API transaction
+ statuses = [status.as_dict() for status in commit.statuses()]
+
+ log_rate_limit(self.log, github)
+ return statuses
+
+ def setCommitStatus(self, project, sha, state, url='', description='',
+ context=''):
+ github = self.getGithubClient(project)
+ owner, proj = project.split('/')
+ repository = github.repository(owner, proj)
+ repository.create_status(sha, state, url, description, context)
+ log_rate_limit(self.log, github)
+
+ def labelPull(self, project, pr_number, label):
+ github = self.getGithubClient(project)
+ owner, proj = project.split('/')
+ pull_request = github.issue(owner, proj, pr_number)
+ pull_request.add_labels(label)
+ log_rate_limit(self.log, github)
+
+ def unlabelPull(self, project, pr_number, label):
+ github = self.getGithubClient(project)
+ owner, proj = project.split('/')
+ pull_request = github.issue(owner, proj, pr_number)
+ pull_request.remove_label(label)
+ log_rate_limit(self.log, github)
+
+ def getPullOpen(self, project, number):
+ pr = self.getPull(project, number)
+ return pr.get('state') == 'open'
+
+ def getIsCurrent(self, project, number, sha):
+ pr = self.getPull(project, number)
+ return pr.get('head').get('sha') == sha
+
+ def _ghTimestampToDate(self, timestamp):
+ return time.strptime(timestamp, '%Y-%m-%dT%H:%M:%SZ')
+
+ def _get_statuses(self, project, sha):
+ # A ref can have more than one status from each context,
+ # however the API returns them in order, newest first.
+ # So we can keep track of which contexts we've already seen
+ # and throw out the rest. Our unique key is based on
+ # the user and the context, since context is free form and anybody
+ # can put whatever they want there. We want to ensure we track it
+ # by user, so that we can require/trigger by user too.
+ seen = []
+ statuses = []
+ for status in self.getCommitStatuses(project.name, sha):
+ stuple = _status_as_tuple(status)
+ if "%s:%s" % (stuple[0], stuple[1]) not in seen:
+ statuses.append("%s:%s:%s" % stuple)
+ seen.append("%s:%s" % (stuple[0], stuple[1]))
+
+ return statuses
+
+
+def _status_as_tuple(status):
+ """Translate a status into a tuple of user, context, state"""
+
+ creator = status.get('creator')
+ if not creator:
+ user = "Unknown"
+ else:
+ user = creator.get('login')
+ context = status.get('context')
+ state = status.get('state')
+ return (user, context, state)
+
+
+def log_rate_limit(log, github):
+ try:
+ rate_limit = github.rate_limit()
+ remaining = rate_limit['resources']['core']['remaining']
+ reset = rate_limit['resources']['core']['reset']
+ except:
+ return
+ log.debug('GitHub API rate limit remaining: %s reset: %s' %
+ (remaining, reset))
+
+
+def getSchema():
+ github_connection = v.Any(str, v.Schema({}, extra=True))
+ return github_connection
diff --git a/zuul/driver/github/githubmodel.py b/zuul/driver/github/githubmodel.py
new file mode 100644
index 0000000..9516097
--- /dev/null
+++ b/zuul/driver/github/githubmodel.py
@@ -0,0 +1,327 @@
+# Copyright 2015 Hewlett-Packard Development Company, L.P.
+# Copyright 2017 IBM Corp.
+# Copyright 2017 Red Hat, Inc.
+#
+# Licensed under the Apache License, Version 2.0 (the "License"); you may
+# not use this file except in compliance with the License. You may obtain
+# a copy of the License at
+#
+# http://www.apache.org/licenses/LICENSE-2.0
+#
+# Unless required by applicable law or agreed to in writing, software
+# distributed under the License is distributed on an "AS IS" BASIS, WITHOUT
+# WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. See the
+# License for the specific language governing permissions and limitations
+# under the License.
+
+import copy
+import re
+import time
+
+from zuul.model import Change, TriggerEvent, EventFilter, RefFilter
+from zuul.driver.util import time_to_seconds
+
+
+EMPTY_GIT_REF = '0' * 40 # git sha of all zeros, used during creates/deletes
+
+
+class PullRequest(Change):
+ def __init__(self, project):
+ super(PullRequest, self).__init__(project)
+ self.updated_at = None
+ self.title = None
+ self.reviews = []
+
+ def isUpdateOf(self, other):
+ if (hasattr(other, 'number') and self.number == other.number and
+ hasattr(other, 'patchset') and self.patchset != other.patchset and
+ hasattr(other, 'updated_at') and
+ self.updated_at > other.updated_at):
+ return True
+ return False
+
+
+class GithubTriggerEvent(TriggerEvent):
+ def __init__(self):
+ super(GithubTriggerEvent, self).__init__()
+ self.title = None
+ self.label = None
+ self.unlabel = None
+
+ def isPatchsetCreated(self):
+ if self.type == 'pull_request':
+ return self.action in ['opened', 'changed']
+ return False
+
+ def isChangeAbandoned(self):
+ if self.type == 'pull_request':
+ return 'closed' == self.action
+ return False
+
+
+class GithubCommonFilter(object):
+ def __init__(self, required_reviews=[], required_statuses=[]):
+ self._required_reviews = copy.deepcopy(required_reviews)
+ self.required_reviews = self._tidy_reviews(required_reviews)
+ self.required_statuses = required_statuses
+
+ def _tidy_reviews(self, reviews):
+ for r in reviews:
+ for k, v in r.items():
+ if k == 'username':
+ r['username'] = re.compile(v)
+ elif k == 'email':
+ r['email'] = re.compile(v)
+ elif k == 'newer-than':
+ r[k] = time_to_seconds(v)
+ elif k == 'older-than':
+ r[k] = time_to_seconds(v)
+ return reviews
+
+ def _match_review_required_review(self, rreview, review):
+ # Check if the required review and review match
+ now = time.time()
+ by = review.get('by', {})
+ for k, v in rreview.items():
+ if k == 'username':
+ if (not v.search(by.get('username', ''))):
+ return False
+ elif k == 'email':
+ if (not v.search(by.get('email', ''))):
+ return False
+ elif k == 'newer-than':
+ t = now - v
+ if (review['grantedOn'] < t):
+ return False
+ elif k == 'older-than':
+ t = now - v
+ if (review['grantedOn'] >= t):
+ return False
+ elif k == 'type':
+ if review['type'] != v:
+ return False
+ elif k == 'permission':
+ # If permission is read, we've matched. You must have read
+ # to provide a review. Write or admin permission is different.
+ if v != 'read':
+ if review['permission'] != v:
+ return False
+ return True
+
+ def matchesReviews(self, change):
+ if self.required_reviews:
+ if not hasattr(change, 'number'):
+ # not a PR, no reviews
+ return False
+ if not change.reviews:
+ # No reviews means no matching
+ return False
+
+ return self.matchesRequiredReviews(change)
+
+ def matchesRequiredReviews(self, change):
+ for rreview in self.required_reviews:
+ matches_review = False
+ for review in change.reviews:
+ if self._match_review_required_review(rreview, review):
+ # Consider matched if any review matches
+ matches_review = True
+ break
+ if not matches_review:
+ return False
+ return True
+
+ def matchesRequiredStatuses(self, change):
+ # statuses are ORed
+ # A PR head can have multiple statuses on it. If the change
+ # statuses and the filter statuses are a null intersection, there
+ # are no matches and we return false
+ if self.required_statuses:
+ if not hasattr(change, 'number'):
+ # not a PR, no status
+ return False
+ if set(change.status).isdisjoint(set(self.required_statuses)):
+ return False
+ return True
+
+
+class GithubEventFilter(EventFilter, GithubCommonFilter):
+ def __init__(self, trigger, types=[], branches=[], refs=[],
+ comments=[], actions=[], labels=[], unlabels=[],
+ states=[], statuses=[], required_statuses=[],
+ ignore_deletes=True):
+
+ EventFilter.__init__(self, trigger)
+
+ GithubCommonFilter.__init__(self, required_statuses=required_statuses)
+
+ self._types = types
+ self._branches = branches
+ self._refs = refs
+ self._comments = comments
+ self.types = [re.compile(x) for x in types]
+ self.branches = [re.compile(x) for x in branches]
+ self.refs = [re.compile(x) for x in refs]
+ self.comments = [re.compile(x) for x in comments]
+ self.actions = actions
+ self.labels = labels
+ self.unlabels = unlabels
+ self.states = states
+ self.statuses = statuses
+ self.required_statuses = required_statuses
+ self.ignore_deletes = ignore_deletes
+
+ def __repr__(self):
+ ret = '<GithubEventFilter'
+
+ if self._types:
+ ret += ' types: %s' % ', '.join(self._types)
+ if self._branches:
+ ret += ' branches: %s' % ', '.join(self._branches)
+ if self._refs:
+ ret += ' refs: %s' % ', '.join(self._refs)
+ if self.ignore_deletes:
+ ret += ' ignore_deletes: %s' % self.ignore_deletes
+ if self._comments:
+ ret += ' comments: %s' % ', '.join(self._comments)
+ if self.actions:
+ ret += ' actions: %s' % ', '.join(self.actions)
+ if self.labels:
+ ret += ' labels: %s' % ', '.join(self.labels)
+ if self.unlabels:
+ ret += ' unlabels: %s' % ', '.join(self.unlabels)
+ if self.states:
+ ret += ' states: %s' % ', '.join(self.states)
+ if self.statuses:
+ ret += ' statuses: %s' % ', '.join(self.statuses)
+ if self.required_statuses:
+ ret += ' required_statuses: %s' % ', '.join(self.required_statuses)
+ ret += '>'
+
+ return ret
+
+ def matches(self, event, change):
+ # event types are ORed
+ matches_type = False
+ for etype in self.types:
+ if etype.match(event.type):
+ matches_type = True
+ if self.types and not matches_type:
+ return False
+
+ # branches are ORed
+ matches_branch = False
+ for branch in self.branches:
+ if branch.match(event.branch):
+ matches_branch = True
+ if self.branches and not matches_branch:
+ return False
+
+ # refs are ORed
+ matches_ref = False
+ if event.ref is not None:
+ for ref in self.refs:
+ if ref.match(event.ref):
+ matches_ref = True
+ if self.refs and not matches_ref:
+ return False
+ if self.ignore_deletes and event.newrev == EMPTY_GIT_REF:
+ # If the updated ref has an empty git sha (all 0s),
+ # then the ref is being deleted
+ return False
+
+ # comments are ORed
+ matches_comment_re = False
+ for comment_re in self.comments:
+ if (event.comment is not None and
+ comment_re.search(event.comment)):
+ matches_comment_re = True
+ if self.comments and not matches_comment_re:
+ return False
+
+ # actions are ORed
+ matches_action = False
+ for action in self.actions:
+ if (event.action == action):
+ matches_action = True
+ if self.actions and not matches_action:
+ return False
+
+ # labels are ORed
+ if self.labels and event.label not in self.labels:
+ return False
+
+ # unlabels are ORed
+ if self.unlabels and event.unlabel not in self.unlabels:
+ return False
+
+ # states are ORed
+ if self.states and event.state not in self.states:
+ return False
+
+ # statuses are ORed
+ if self.statuses and event.status not in self.statuses:
+ return False
+
+ if not self.matchesRequiredStatuses(change):
+ return False
+
+ return True
+
+
+class GithubRefFilter(RefFilter, GithubCommonFilter):
+ def __init__(self, connection_name, statuses=[], required_reviews=[],
+ open=None, current_patchset=None):
+ RefFilter.__init__(self, connection_name)
+
+ GithubCommonFilter.__init__(self, required_reviews=required_reviews,
+ required_statuses=statuses)
+ self.statuses = statuses
+ self.open = open
+ self.current_patchset = current_patchset
+
+ def __repr__(self):
+ ret = '<GithubRefFilter'
+
+ ret += ' connection_name: %s' % self.connection_name
+ if self.statuses:
+ ret += ' statuses: %s' % ', '.join(self.statuses)
+ if self.required_reviews:
+ ret += (' required-reviews: %s' %
+ str(self.required_reviews))
+ if self.open:
+ ret += ' open: %s' % self.open
+ if self.current_patchset:
+ ret += ' current-patchset: %s' % self.current_patchset
+
+ ret += '>'
+
+ return ret
+
+ def matches(self, change):
+ if not self.matchesRequiredStatuses(change):
+ return False
+
+ if self.open is not None:
+ # if a "change" has no number, it's not a change, but a push
+ # and cannot possibly pass this test.
+ if hasattr(change, 'number'):
+ if self.open != change.open:
+ return False
+ else:
+ return False
+
+ if self.current_patchset is not None:
+ # if a "change" has no number, it's not a change, but a push
+ # and cannot possibly pass this test.
+ if hasattr(change, 'number'):
+ if self.current_patchset != change.is_current_patchset:
+ return False
+ else:
+ return False
+
+ # required reviews are ANDed
+ if not self.matchesReviews(change):
+ return False
+
+ return True
diff --git a/zuul/driver/github/githubreporter.py b/zuul/driver/github/githubreporter.py
new file mode 100644
index 0000000..fc3b64d
--- /dev/null
+++ b/zuul/driver/github/githubreporter.py
@@ -0,0 +1,168 @@
+# Copyright 2015 Puppet Labs
+#
+# Licensed under the Apache License, Version 2.0 (the "License"); you may
+# not use this file except in compliance with the License. You may obtain
+# a copy of the License at
+#
+# http://www.apache.org/licenses/LICENSE-2.0
+#
+# Unless required by applicable law or agreed to in writing, software
+# distributed under the License is distributed on an "AS IS" BASIS, WITHOUT
+# WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. See the
+# License for the specific language governing permissions and limitations
+# under the License.
+
+import logging
+import voluptuous as v
+import time
+
+from zuul.reporter import BaseReporter
+from zuul.exceptions import MergeFailure
+from zuul.driver.util import scalar_or_list
+
+
class GithubReporter(BaseReporter):
    """Sends off reports to Github."""

    name = 'github'
    log = logging.getLogger("zuul.GithubReporter")

    def __init__(self, driver, connection, config=None):
        super(GithubReporter, self).__init__(driver, connection, config)
        # Reporter actions, all read from the pipeline reporter config:
        # commit status to set (None means "do not set one"), whether to
        # comment, whether to merge, and labels to add/remove.
        self._commit_status = self.config.get('status', None)
        self._create_comment = self.config.get('comment', True)
        self._merge = self.config.get('merge', False)
        self._labels = self.config.get('label', [])
        if not isinstance(self._labels, list):
            self._labels = [self._labels]
        self._unlabels = self.config.get('unlabel', [])
        if not isinstance(self._unlabels, list):
            self._unlabels = [self._unlabels]

    def report(self, pipeline, item):
        """Comment on PR and set commit status."""
        if self._create_comment:
            self.addPullComment(pipeline, item)
        # Only set a commit status when one is configured and the change
        # carries a patchset (the head sha); pushes have none.
        if (self._commit_status is not None and
            hasattr(item.change, 'patchset') and
            item.change.patchset is not None):
            self.setPullStatus(pipeline, item)
        # Only pull requests (changes with a number) can be merged.
        if (self._merge and
            hasattr(item.change, 'number')):
            self.mergePull(item)
            if not item.change.is_merged:
                # mergePull gave up after retrying; explain on the PR.
                msg = self._formatItemReportMergeFailure(pipeline, item)
                self.addPullComment(pipeline, item, msg)
        if self._labels or self._unlabels:
            self.setLabels(item)

    def addPullComment(self, pipeline, item, comment=None):
        """Post a comment on the pull request.

        If *comment* is None the standard item report is posted.
        """
        message = comment or self._formatItemReport(pipeline, item)
        project = item.change.project.name
        pr_number = item.change.number
        self.log.debug(
            'Reporting change %s, params %s, message: %s' %
            (item.change, self.config, message))
        self.connection.commentPull(project, pr_number, message)

    def setPullStatus(self, pipeline, item):
        """Set the configured commit status on the change's head sha."""
        project = item.change.project.name
        # For github changes the patchset field is used as the sha the
        # status is attached to.
        sha = item.change.patchset
        context = '%s/%s' % (pipeline.layout.tenant.name, pipeline.name)
        state = self._commit_status

        # Prefer the reporter-level status-url, falling back to the
        # scheduler-wide zuul.status_url option if present.
        url_pattern = self.config.get('status-url')
        if not url_pattern:
            sched_config = self.connection.sched.config
            if sched_config.has_option('zuul', 'status_url'):
                url_pattern = sched_config.get('zuul', 'status_url')
        url = item.formatUrlPattern(url_pattern) if url_pattern else ''

        description = ''
        if pipeline.description:
            description = pipeline.description

        self.log.debug(
            'Reporting change %s, params %s, status:\n'
            'context: %s, state: %s, description: %s, url: %s' %
            (item.change, self.config, context, state,
             description, url))

        self.connection.setCommitStatus(
            project, sha, state, url, description, context)

    def mergePull(self, item):
        """Merge the pull request via the API, retrying once on failure.

        Sets item.change.is_merged on success; on repeated failure a
        warning is logged and the change is left unmerged.
        """
        project = item.change.project.name
        pr_number = item.change.number
        sha = item.change.patchset
        self.log.debug('Reporting change %s, params %s, merging via API' %
                       (item.change, self.config))
        message = self._formatMergeMessage(item.change)

        # Two attempts with a short pause between them.
        for i in [1, 2]:
            try:
                self.connection.mergePull(project, pr_number, message, sha)
                item.change.is_merged = True
                return
            except MergeFailure:
                self.log.exception(
                    'Merge attempt of change %s %s/2 failed.' %
                    (item.change, i), exc_info=True)
                if i == 1:
                    time.sleep(2)
        self.log.warning(
            'Merge of change %s failed after 2 attempts, giving up' %
            item.change)

    def setLabels(self, item):
        """Add and remove the configured labels on the pull request."""
        project = item.change.project.name
        pr_number = item.change.number
        if self._labels:
            self.log.debug('Reporting change %s, params %s, labels:\n%s' %
                           (item.change, self.config, self._labels))
            for label in self._labels:
                self.connection.labelPull(project, pr_number, label)
        if self._unlabels:
            self.log.debug('Reporting change %s, params %s, unlabels:\n%s' %
                           (item.change, self.config, self._unlabels))
            for label in self._unlabels:
                self.connection.unlabelPull(project, pr_number, label)

    def _formatMergeMessage(self, change):
        """Build the merge commit message: title plus a Reviewed-by trailer
        for the approving account, when one is attached to the event."""
        message = ''

        if change.title:
            message += change.title

        account = change.source_event.account
        if not account:
            return message

        username = account['username']
        name = account['name']
        email = account['email']
        message += '\n\nReviewed-by: '

        if name:
            message += name
        if email:
            if name:
                message += ' '
            message += '<' + email + '>'
        if name or email:
            message += '\n '
        message += self.connection.getUserUri(username)

        return message
+
+
def getSchema():
    """Return the voluptuous schema for github reporter configuration."""
    # 'status' is restricted to the three commit states the GitHub
    # status API understands.
    return v.Schema({
        'status': v.Any('pending', 'success', 'failure'),
        'status-url': str,
        'comment': bool,
        'merge': bool,
        'label': scalar_or_list(str),
        'unlabel': scalar_or_list(str)
    })
diff --git a/zuul/driver/github/githubsource.py b/zuul/driver/github/githubsource.py
new file mode 100644
index 0000000..1350b10
--- /dev/null
+++ b/zuul/driver/github/githubsource.py
@@ -0,0 +1,130 @@
+# Copyright 2014 Puppet Labs Inc
+#
+# Licensed under the Apache License, Version 2.0 (the "License"); you may
+# not use this file except in compliance with the License. You may obtain
+# a copy of the License at
+#
+# http://www.apache.org/licenses/LICENSE-2.0
+#
+# Unless required by applicable law or agreed to in writing, software
+# distributed under the License is distributed on an "AS IS" BASIS, WITHOUT
+# WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. See the
+# License for the specific language governing permissions and limitations
+# under the License.
+
+import logging
+import time
+import voluptuous as v
+
+from zuul.source import BaseSource
+from zuul.model import Project
+from zuul.driver.github.githubmodel import GithubRefFilter
+from zuul.driver.util import scalar_or_list, to_list
+
+
class GithubSource(BaseSource):
    """Change source backed by a GitHub connection.

    Most operations delegate directly to the underlying connection
    object; a few Gerrit-oriented interface methods are unsupported.
    """

    name = 'github'
    log = logging.getLogger("zuul.source.GithubSource")

    def __init__(self, driver, connection, config=None):
        hostname = connection.canonical_hostname
        super(GithubSource, self).__init__(driver, connection,
                                           hostname, config)

    def getRefSha(self, project, ref):
        """Return a sha for a given project ref."""
        raise NotImplementedError()

    def waitForRefSha(self, project, ref, old_sha=''):
        """Block until a ref shows up in a given project."""
        raise NotImplementedError()

    def isMerged(self, change, head=None):
        """Determine if change is merged."""
        if not change.number:
            # Not a pull request, considering merged.
            return True
        return change.is_merged

    def canMerge(self, change, allow_needs):
        """Determine if change can merge."""

        if not change.number:
            # Not a pull request; nothing prevents it from merging.
            return True
        return self.connection.canMerge(change, allow_needs)

    def postConfig(self):
        """Called after configuration has been processed."""
        pass

    def getChange(self, event):
        """Return the change corresponding to a trigger event."""
        return self.connection.getChange(event)

    def getProject(self, name):
        """Return the named project, registering it with the connection
        on first use."""
        p = self.connection.getProject(name)
        if not p:
            p = Project(name, self)
            self.connection.addProject(p)
        return p

    def getProjectBranches(self, project):
        """Return the list of branches for a project."""
        return self.connection.getProjectBranches(project)

    def getProjectOpenChanges(self, project):
        """Get the open changes for a project."""
        raise NotImplementedError()

    def updateChange(self, change, history=None):
        """Update information for a change."""
        raise NotImplementedError()

    def getGitUrl(self, project):
        """Get the git url for a project."""
        return self.connection.getGitUrl(project)

    def getGitwebUrl(self, project, sha=None):
        """Get the git-web url for a project."""
        return self.connection.getGitwebUrl(project, sha)

    def getPullFiles(self, project, number):
        """Get filenames of the pull request"""
        return self.connection.getPullFileNames(project, number)

    def _ghTimestampToDate(self, timestamp):
        # GitHub timestamps are ISO-8601 in UTC ("Z" suffixed).
        return time.strptime(timestamp, '%Y-%m-%dT%H:%M:%SZ')

    def getRequireFilters(self, config):
        """Build the pipeline requirement filters from 'require' config."""
        f = GithubRefFilter(
            connection_name=self.connection.connection_name,
            statuses=to_list(config.get('status')),
            required_reviews=to_list(config.get('review')),
            open=config.get('open'),
            current_patchset=config.get('current-patchset'),
        )
        return [f]

    def getRejectFilters(self, config):
        # No reject filters are implemented for github yet.
        return []
+
+
# Schema for a single 'review' requirement entry; shared by the
# require and reject schemas below.
review = v.Schema({'username': str,
                   'email': str,
                   'older-than': str,
                   'newer-than': str,
                   'type': str,
                   'permission': v.Any('read', 'write', 'admin'),
                   })
+
+
def getRequireSchema():
    """Return the 'require' pipeline-requirement schema for github."""
    return {
        'status': scalar_or_list(str),
        'review': scalar_or_list(review),
        'open': bool,
        'current-patchset': bool,
    }
+
+
def getRejectSchema():
    """Return the 'reject' pipeline-requirement schema for github."""
    return {'review': scalar_or_list(review)}
diff --git a/zuul/driver/github/githubtrigger.py b/zuul/driver/github/githubtrigger.py
new file mode 100644
index 0000000..328879d
--- /dev/null
+++ b/zuul/driver/github/githubtrigger.py
@@ -0,0 +1,67 @@
+# Copyright 2015 Hewlett-Packard Development Company, L.P.
+#
+# Licensed under the Apache License, Version 2.0 (the "License"); you may
+# not use this file except in compliance with the License. You may obtain
+# a copy of the License at
+#
+# http://www.apache.org/licenses/LICENSE-2.0
+#
+# Unless required by applicable law or agreed to in writing, software
+# distributed under the License is distributed on an "AS IS" BASIS, WITHOUT
+# WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. See the
+# License for the specific language governing permissions and limitations
+# under the License.
+
+import logging
+import voluptuous as v
+from zuul.trigger import BaseTrigger
+from zuul.driver.github.githubmodel import GithubEventFilter
+from zuul.driver.util import scalar_or_list, to_list
+
+
class GithubTrigger(BaseTrigger):
    """Trigger plugin reacting to GitHub webhook events."""

    name = 'github'
    log = logging.getLogger("zuul.trigger.GithubTrigger")

    def getEventFilters(self, trigger_config):
        """Translate trigger configuration entries into event filters."""
        filters = []
        for conf in to_list(trigger_config):
            filters.append(GithubEventFilter(
                trigger=self,
                types=to_list(conf['event']),
                actions=to_list(conf.get('action')),
                branches=to_list(conf.get('branch')),
                refs=to_list(conf.get('ref')),
                comments=to_list(conf.get('comment')),
                labels=to_list(conf.get('label')),
                unlabels=to_list(conf.get('unlabel')),
                states=to_list(conf.get('state')),
                statuses=to_list(conf.get('status')),
                required_statuses=to_list(conf.get('require-status'))))
        return filters

    def onPullRequest(self, payload):
        # Webhook payload handling lives in the connection; nothing to
        # do at the trigger level.
        pass
+
+
def getSchema():
    """Return the voluptuous schema for github trigger configuration."""
    return {
        v.Required('event'):
            scalar_or_list(v.Any('pull_request',
                                 'pull_request_review',
                                 'push')),
        'action': scalar_or_list(str),
        'branch': scalar_or_list(str),
        'ref': scalar_or_list(str),
        'comment': scalar_or_list(str),
        'label': scalar_or_list(str),
        'unlabel': scalar_or_list(str),
        'state': scalar_or_list(str),
        'require-status': scalar_or_list(str),
        'status': scalar_or_list(str),
    }
diff --git a/zuul/driver/nullwrap/__init__.py b/zuul/driver/nullwrap/__init__.py
new file mode 100644
index 0000000..ebcd1da
--- /dev/null
+++ b/zuul/driver/nullwrap/__init__.py
@@ -0,0 +1,28 @@
+# Copyright 2012 Hewlett-Packard Development Company, L.P.
+# Copyright 2013 OpenStack Foundation
+# Copyright 2016 Red Hat, Inc.
+#
+# Licensed under the Apache License, Version 2.0 (the "License"); you may
+# not use this file except in compliance with the License. You may obtain
+# a copy of the License at
+#
+# http://www.apache.org/licenses/LICENSE-2.0
+#
+# Unless required by applicable law or agreed to in writing, software
+# distributed under the License is distributed on an "AS IS" BASIS, WITHOUT
+# WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. See the
+# License for the specific language governing permissions and limitations
+# under the License.
+
+import logging
+import subprocess
+
+from zuul.driver import (Driver, WrapperInterface)
+
+
class NullwrapDriver(Driver, WrapperInterface):
    """Wrapper driver that performs no sandboxing.

    Implements the WrapperInterface by handing back the plain
    subprocess.Popen, i.e. commands run directly on the host.
    """

    name = 'nullwrap'
    log = logging.getLogger("zuul.NullwrapDriver")

    def getPopen(self, **kwargs):
        # kwargs are accepted for interface compatibility with other
        # wrapper drivers (e.g. bubblewrap) but are intentionally unused.
        return subprocess.Popen
diff --git a/zuul/driver/smtp/__init__.py b/zuul/driver/smtp/__init__.py
index 0745644..b914c81 100644
--- a/zuul/driver/smtp/__init__.py
+++ b/zuul/driver/smtp/__init__.py
@@ -13,8 +13,8 @@
# under the License.
from zuul.driver import Driver, ConnectionInterface, ReporterInterface
-import smtpconnection
-import smtpreporter
+from zuul.driver.smtp import smtpconnection
+from zuul.driver.smtp import smtpreporter
class SMTPDriver(Driver, ConnectionInterface, ReporterInterface):
diff --git a/zuul/driver/smtp/smtpreporter.py b/zuul/driver/smtp/smtpreporter.py
index dd618ef..35eb69f 100644
--- a/zuul/driver/smtp/smtpreporter.py
+++ b/zuul/driver/smtp/smtpreporter.py
@@ -24,7 +24,7 @@
name = 'smtp'
log = logging.getLogger("zuul.SMTPReporter")
- def report(self, source, pipeline, item):
+ def report(self, pipeline, item):
"""Send the compiled report message via smtp."""
message = self._formatItemReport(pipeline, item)
diff --git a/zuul/driver/sql/__init__.py b/zuul/driver/sql/__init__.py
index a5f8923..3748e47 100644
--- a/zuul/driver/sql/__init__.py
+++ b/zuul/driver/sql/__init__.py
@@ -13,8 +13,8 @@
# under the License.
from zuul.driver import Driver, ConnectionInterface, ReporterInterface
-import sqlconnection
-import sqlreporter
+from zuul.driver.sql import sqlconnection
+from zuul.driver.sql import sqlreporter
class SQLDriver(Driver, ConnectionInterface, ReporterInterface):
diff --git a/zuul/driver/sql/alembic_reporter/env.py b/zuul/driver/sql/alembic_reporter/env.py
index 56a5b7e..4542a22 100644
--- a/zuul/driver/sql/alembic_reporter/env.py
+++ b/zuul/driver/sql/alembic_reporter/env.py
@@ -64,6 +64,7 @@
with context.begin_transaction():
context.run_migrations()
+
if context.is_offline_mode():
run_migrations_offline()
else:
diff --git a/zuul/driver/sql/sqlconnection.py b/zuul/driver/sql/sqlconnection.py
index 4b1b1a2..e478d33 100644
--- a/zuul/driver/sql/sqlconnection.py
+++ b/zuul/driver/sql/sqlconnection.py
@@ -43,6 +43,8 @@
self.engine = sa.create_engine(self.dburi)
self._migrate()
self._setup_tables()
+ self.zuul_buildset_table, self.zuul_build_table \
+ = self._setup_tables()
self.tables_established = True
except sa.exc.NoSuchModuleError:
self.log.exception(
@@ -68,10 +70,11 @@
alembic.command.upgrade(config, 'head')
- def _setup_tables(self):
+ @staticmethod
+ def _setup_tables():
metadata = sa.MetaData()
- self.zuul_buildset_table = sa.Table(
+ zuul_buildset_table = sa.Table(
BUILDSET_TABLE, metadata,
sa.Column('id', sa.Integer, primary_key=True),
sa.Column('zuul_ref', sa.String(255)),
@@ -84,7 +87,7 @@
sa.Column('message', sa.TEXT()),
)
- self.zuul_build_table = sa.Table(
+ zuul_build_table = sa.Table(
BUILD_TABLE, metadata,
sa.Column('id', sa.Integer, primary_key=True),
sa.Column('buildset_id', sa.Integer,
@@ -99,6 +102,8 @@
sa.Column('node_name', sa.String(255)),
)
+ return zuul_buildset_table, zuul_build_table
+
def getSchema():
sql_connection = v.Any(str, v.Schema(dict))
diff --git a/zuul/driver/sql/sqlreporter.py b/zuul/driver/sql/sqlreporter.py
index d6e547d..349abe8 100644
--- a/zuul/driver/sql/sqlreporter.py
+++ b/zuul/driver/sql/sqlreporter.py
@@ -31,7 +31,7 @@
# TODO(jeblair): document this is stored as NULL if unspecified
self.result_score = config.get('score', None)
- def report(self, source, pipeline, item):
+ def report(self, pipeline, item):
"""Create an entry into a database."""
if not self.connection.tables_established:
@@ -39,13 +39,16 @@
return
with self.connection.engine.begin() as conn:
+ change = getattr(item.change, 'number', '')
+ patchset = getattr(item.change, 'patchset', '')
+ refspec = getattr(item.change, 'refspec', item.change.newrev)
buildset_ins = self.connection.zuul_buildset_table.insert().values(
zuul_ref=item.current_build_set.ref,
pipeline=item.pipeline.name,
project=item.change.project.name,
- change=item.change.number,
- patchset=item.change.patchset,
- ref=item.change.refspec,
+ change=change,
+ patchset=patchset,
+ ref=refspec,
score=self.result_score,
message=self._formatItemReport(
pipeline, item, with_jobs=False),
diff --git a/zuul/driver/timer/__init__.py b/zuul/driver/timer/__init__.py
index 3ce0b8d..cdaea74 100644
--- a/zuul/driver/timer/__init__.py
+++ b/zuul/driver/timer/__init__.py
@@ -20,8 +20,8 @@
from apscheduler.triggers.cron import CronTrigger
from zuul.driver import Driver, TriggerInterface
-from zuul.model import TriggerEvent
-import timertrigger
+from zuul.driver.timer import timertrigger
+from zuul.driver.timer.timermodel import TimerTriggerEvent
class TimerDriver(Driver, TriggerInterface):
@@ -38,6 +38,10 @@
def reconfigure(self, tenant):
self._removeJobs(tenant)
+ if not self.apsched:
+ # Handle possible reuse of the driver without connection objects.
+ self.apsched = BackgroundScheduler()
+ self.apsched.start()
self._addJobs(tenant)
def _removeJobs(self, tenant):
@@ -76,16 +80,20 @@
def _onTrigger(self, tenant, pipeline_name, timespec):
for project_name in tenant.layout.project_configs.keys():
- event = TriggerEvent()
+ project_hostname, project_name = project_name.split('/', 1)
+ event = TimerTriggerEvent()
event.type = 'timer'
event.timespec = timespec
event.forced_pipeline = pipeline_name
+ event.project_hostname = project_hostname
event.project_name = project_name
self.log.debug("Adding event %s" % event)
self.sched.addEvent(event)
def stop(self):
- self.apsched.shutdown()
+ if self.apsched:
+ self.apsched.shutdown()
+ self.apsched = None
def getTrigger(self, connection_name, config=None):
return timertrigger.TimerTrigger(self, config)
diff --git a/zuul/driver/timer/timermodel.py b/zuul/driver/timer/timermodel.py
new file mode 100644
index 0000000..d6f1415
--- /dev/null
+++ b/zuul/driver/timer/timermodel.py
@@ -0,0 +1,62 @@
+# Copyright 2017 Red Hat, Inc.
+#
+# Licensed under the Apache License, Version 2.0 (the "License"); you may
+# not use this file except in compliance with the License. You may obtain
+# a copy of the License at
+#
+# http://www.apache.org/licenses/LICENSE-2.0
+#
+# Unless required by applicable law or agreed to in writing, software
+# distributed under the License is distributed on an "AS IS" BASIS, WITHOUT
+# WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. See the
+# License for the specific language governing permissions and limitations
+# under the License.
+
+import re
+
+from zuul.model import EventFilter, TriggerEvent
+
+
class TimerEventFilter(EventFilter):
    """EventFilter matching timer trigger events by type and timespec."""

    def __init__(self, trigger, types=None, timespecs=None):
        EventFilter.__init__(self, trigger)

        # Normalize instead of using mutable default arguments ([]),
        # which would be shared across instances.
        types = types if types is not None else []
        self._types = types
        self.types = [re.compile(x) for x in types]
        self.timespecs = timespecs if timespecs is not None else []

    def __repr__(self):
        ret = '<TimerEventFilter'

        if self._types:
            ret += ' types: %s' % ', '.join(self._types)
        if self.timespecs:
            ret += ' timespecs: %s' % ', '.join(self.timespecs)
        ret += '>'

        return ret

    def matches(self, event, change):
        """Return True if the event matches; empty filter lists match all."""
        # event types are ORed
        if self.types and not any(
                etype.match(event.type) for etype in self.types):
            return False

        # timespecs are ORed
        if self.timespecs and event.timespec not in self.timespecs:
            return False

        return True
+
+
class TimerTriggerEvent(TriggerEvent):
    """TriggerEvent subclass carrying the timespec that fired."""

    def __init__(self):
        super(TimerTriggerEvent, self).__init__()
        # The cron-style time specification that caused this event.
        self.timespec = None
diff --git a/zuul/driver/timer/timertrigger.py b/zuul/driver/timer/timertrigger.py
index b0f282c..81b41a1 100644
--- a/zuul/driver/timer/timertrigger.py
+++ b/zuul/driver/timer/timertrigger.py
@@ -15,26 +15,20 @@
import voluptuous as v
-from zuul.model import EventFilter
from zuul.trigger import BaseTrigger
+from zuul.driver.timer.timermodel import TimerEventFilter
+from zuul.driver.util import to_list
class TimerTrigger(BaseTrigger):
name = 'timer'
def getEventFilters(self, trigger_conf):
- def toList(item):
- if not item:
- return []
- if isinstance(item, list):
- return item
- return [item]
-
efilters = []
- for trigger in toList(trigger_conf):
- f = EventFilter(trigger=self,
- types=['timer'],
- timespecs=toList(trigger['time']))
+ for trigger in to_list(trigger_conf):
+ f = TimerEventFilter(trigger=self,
+ types=['timer'],
+ timespecs=to_list(trigger['time']))
efilters.append(f)
diff --git a/zuul/driver/util.py b/zuul/driver/util.py
new file mode 100644
index 0000000..902ce76
--- /dev/null
+++ b/zuul/driver/util.py
@@ -0,0 +1,43 @@
+# Copyright 2017 Red Hat, Inc.
+#
+# Licensed under the Apache License, Version 2.0 (the "License"); you may
+# not use this file except in compliance with the License. You may obtain
+# a copy of the License at
+#
+# http://www.apache.org/licenses/LICENSE-2.0
+#
+# Unless required by applicable law or agreed to in writing, software
+# distributed under the License is distributed on an "AS IS" BASIS, WITHOUT
+# WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. See the
+# License for the specific language governing permissions and limitations
+# under the License.
+
+# Utility methods to promote consistent configuration among drivers.
+
+import voluptuous as vs
+
+
def time_to_seconds(s):
    """Convert a duration string such as '30s', '5m', '2h', '1d' or '1w'
    to an integer number of seconds.

    Raises Exception when the suffix is not recognized.
    """
    multipliers = {
        's': 1,
        'm': 60,
        'h': 60 * 60,
        'd': 24 * 60 * 60,
        'w': 7 * 24 * 60 * 60,
    }
    unit = s[-1:]
    if unit in multipliers:
        return int(s[:-1]) * multipliers[unit]
    raise Exception("Unable to parse time value: %s" % s)
+
+
def scalar_or_list(x):
    # Voluptuous schema fragment accepting either a single x or a list
    # of x, so config authors may write scalars where lists are allowed.
    return vs.Any([x], x)
+
+
def to_list(item):
    """Normalize a config value to a list.

    Lists pass through unchanged; falsy values (None, '', 0, empty
    containers) become []; any other scalar is wrapped in a list.
    """
    if isinstance(item, list):
        return item
    return [item] if item else []
diff --git a/zuul/driver/zuul/__init__.py b/zuul/driver/zuul/__init__.py
index 47ccec0..08612dc 100644
--- a/zuul/driver/zuul/__init__.py
+++ b/zuul/driver/zuul/__init__.py
@@ -15,9 +15,9 @@
import logging
from zuul.driver import Driver, TriggerInterface
-from zuul.model import TriggerEvent
+from zuul.driver.zuul.zuulmodel import ZuulTriggerEvent
-import zuultrigger
+from zuul.driver.zuul import zuultrigger
PARENT_CHANGE_ENQUEUED = 'parent-change-enqueued'
PROJECT_CHANGE_MERGED = 'project-change-merged'
@@ -73,9 +73,10 @@
self._createProjectChangeMergedEvent(open_change)
def _createProjectChangeMergedEvent(self, change):
- event = TriggerEvent()
+ event = ZuulTriggerEvent()
event.type = PROJECT_CHANGE_MERGED
event.trigger_name = self.name
+ event.project_hostname = change.project.canonical_hostname
event.project_name = change.project.name
event.change_number = change.number
event.branch = change.branch
@@ -93,10 +94,11 @@
self._createParentChangeEnqueuedEvent(needs, pipeline)
def _createParentChangeEnqueuedEvent(self, change, pipeline):
- event = TriggerEvent()
+ event = ZuulTriggerEvent()
event.type = PARENT_CHANGE_ENQUEUED
event.trigger_name = self.name
event.pipeline_name = pipeline.name
+ event.project_hostname = change.project.canonical_hostname
event.project_name = change.project.name
event.change_number = change.number
event.branch = change.branch
diff --git a/zuul/driver/zuul/zuulmodel.py b/zuul/driver/zuul/zuulmodel.py
new file mode 100644
index 0000000..036f6d2
--- /dev/null
+++ b/zuul/driver/zuul/zuulmodel.py
@@ -0,0 +1,63 @@
+# Copyright 2017 Red Hat, Inc.
+#
+# Licensed under the Apache License, Version 2.0 (the "License"); you may
+# not use this file except in compliance with the License. You may obtain
+# a copy of the License at
+#
+# http://www.apache.org/licenses/LICENSE-2.0
+#
+# Unless required by applicable law or agreed to in writing, software
+# distributed under the License is distributed on an "AS IS" BASIS, WITHOUT
+# WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. See the
+# License for the specific language governing permissions and limitations
+# under the License.
+
+import re
+
+from zuul.model import EventFilter, TriggerEvent
+
+
class ZuulEventFilter(EventFilter):
    """EventFilter matching zuul-internal trigger events by type and
    pipeline name."""

    def __init__(self, trigger, types=None, pipelines=None):
        EventFilter.__init__(self, trigger)

        # Normalize instead of using mutable default arguments ([]),
        # which would be shared across instances.
        types = types if types is not None else []
        pipelines = pipelines if pipelines is not None else []
        self._types = types
        self._pipelines = pipelines
        self.types = [re.compile(x) for x in types]
        self.pipelines = [re.compile(x) for x in pipelines]

    def __repr__(self):
        ret = '<ZuulEventFilter'

        if self._types:
            ret += ' types: %s' % ', '.join(self._types)
        if self._pipelines:
            ret += ' pipelines: %s' % ', '.join(self._pipelines)
        ret += '>'

        return ret

    def matches(self, event, change):
        """Return True if the event matches; empty filter lists match all."""
        # event types are ORed
        if self.types and not any(
                etype.match(event.type) for etype in self.types):
            return False

        # pipelines are ORed
        if self.pipelines and not any(
                epipe.match(event.pipeline_name) for epipe in self.pipelines):
            return False

        return True
+
+
class ZuulTriggerEvent(TriggerEvent):
    """TriggerEvent subclass for zuul-internal events."""

    def __init__(self):
        super(ZuulTriggerEvent, self).__init__()
        # Pipeline the event refers to (set for parent-change-enqueued).
        self.pipeline_name = None
diff --git a/zuul/driver/zuul/zuultrigger.py b/zuul/driver/zuul/zuultrigger.py
index c0c2fb3..628687e 100644
--- a/zuul/driver/zuul/zuultrigger.py
+++ b/zuul/driver/zuul/zuultrigger.py
@@ -15,8 +15,9 @@
import logging
import voluptuous as v
-from zuul.model import EventFilter
from zuul.trigger import BaseTrigger
+from zuul.driver.zuul.zuulmodel import ZuulEventFilter
+from zuul.driver.util import scalar_or_list, to_list
class ZuulTrigger(BaseTrigger):
@@ -29,25 +30,12 @@
self._handle_project_change_merged_events = False
def getEventFilters(self, trigger_conf):
- def toList(item):
- if not item:
- return []
- if isinstance(item, list):
- return item
- return [item]
-
efilters = []
- for trigger in toList(trigger_conf):
- f = EventFilter(
+ for trigger in to_list(trigger_conf):
+ f = ZuulEventFilter(
trigger=self,
- types=toList(trigger['event']),
- pipelines=toList(trigger.get('pipeline')),
- required_approvals=(
- toList(trigger.get('require-approval'))
- ),
- reject_approvals=toList(
- trigger.get('reject-approval')
- ),
+ types=to_list(trigger['event']),
+ pipelines=to_list(trigger.get('pipeline')),
)
efilters.append(f)
@@ -55,9 +43,6 @@
def getSchema():
- def toList(x):
- return v.Any([x], x)
-
approval = v.Schema({'username': str,
'email-filter': str,
'email': str,
@@ -67,11 +52,11 @@
zuul_trigger = {
v.Required('event'):
- toList(v.Any('parent-change-enqueued',
- 'project-change-merged')),
- 'pipeline': toList(str),
- 'require-approval': toList(approval),
- 'reject-approval': toList(approval),
+ scalar_or_list(v.Any('parent-change-enqueued',
+ 'project-change-merged')),
+ 'pipeline': scalar_or_list(str),
+ 'require-approval': scalar_or_list(approval),
+ 'reject-approval': scalar_or_list(approval),
}
return zuul_trigger
diff --git a/zuul/executor/ansiblelaunchserver.py b/zuul/executor/ansiblelaunchserver.py
index 0202bdd..18762b2 100644
--- a/zuul/executor/ansiblelaunchserver.py
+++ b/zuul/executor/ansiblelaunchserver.py
@@ -59,7 +59,7 @@
return bool(x)
-class LaunchGearWorker(gear.Worker):
+class LaunchGearWorker(gear.TextWorker):
def __init__(self, *args, **kw):
self.__launch_server = kw.pop('launch_server')
super(LaunchGearWorker, self).__init__(*args, **kw)
@@ -71,7 +71,7 @@
return super(LaunchGearWorker, self).handleNoop(packet)
-class NodeGearWorker(gear.Worker):
+class NodeGearWorker(gear.TextWorker):
MASS_DO = 101
def sendMassDo(self, functions):
diff --git a/zuul/executor/client.py b/zuul/executor/client.py
index 461af0b..cf8d973 100644
--- a/zuul/executor/client.py
+++ b/zuul/executor/client.py
@@ -24,45 +24,6 @@
from zuul.model import Build
-def make_merger_item(item):
- # Create a dictionary with all info about the item needed by
- # the merger.
- number = None
- patchset = None
- oldrev = None
- newrev = None
- refspec = None
- if hasattr(item.change, 'number'):
- number = item.change.number
- patchset = item.change.patchset
- refspec = item.change.refspec
- branch = item.change.branch
- elif hasattr(item.change, 'newrev'):
- oldrev = item.change.oldrev
- newrev = item.change.newrev
- branch = item.change.ref
- else:
- oldrev = None
- newrev = None
- branch = None
- connection_name = item.pipeline.source.connection.connection_name
- project = item.change.project.name
-
- return dict(project=project,
- url=item.pipeline.source.getGitUrl(
- item.change.project),
- connection_name=connection_name,
- merge_mode=item.current_build_set.getMergeMode(project),
- refspec=refspec,
- branch=branch,
- ref=item.current_build_set.ref,
- number=number,
- patchset=patchset,
- oldrev=oldrev,
- newrev=newrev,
- )
-
-
class GearmanCleanup(threading.Thread):
""" A thread that checks to see if outstanding builds have
completed without reporting back. """
@@ -208,7 +169,9 @@
self.log.debug("Function %s is not registered" % name)
return False
- def execute(self, job, item, pipeline, dependent_items=[]):
+ def execute(self, job, item, pipeline, dependent_items=[],
+ merger_items=[]):
+ tenant = pipeline.layout.tenant
uuid = str(uuid4().hex)
self.log.info(
"Execute job %s (uuid: %s) on nodes %s for change %s "
@@ -217,14 +180,22 @@
item.current_build_set.getJobNodeSet(job.name),
item.change,
[x.change for x in dependent_items]))
+
dependent_items = dependent_items[:]
dependent_items.reverse()
+ all_items = dependent_items + [item]
+
# TODOv3(jeblair): This ansible vars data structure will
# replace the environment variables below.
+ project = dict(
+ name=item.change.project.name,
+ canonical_hostname=item.change.project.canonical_hostname,
+ canonical_name=item.change.project.canonical_name)
+
zuul_params = dict(uuid=uuid,
pipeline=pipeline.name,
job=job.name,
- project=item.change.project.name,
+ project=project,
tags=' '.join(sorted(job.tags)))
if hasattr(item.change, 'branch'):
@@ -243,7 +214,7 @@
changes_str = '^'.join(
['%s:%s:%s' % (i.change.project.name, i.change.branch,
i.change.refspec)
- for i in dependent_items + [item]])
+ for i in all_items])
params['ZUUL_BRANCH'] = item.change.branch
params['ZUUL_CHANGES'] = changes_str
params['ZUUL_REF'] = ('refs/zuul/%s/%s' %
@@ -253,7 +224,7 @@
zuul_changes = ' '.join(['%s,%s' % (i.change.number,
i.change.patchset)
- for i in dependent_items + [item]])
+ for i in all_items])
params['ZUUL_CHANGE_IDS'] = zuul_changes
params['ZUUL_CHANGE'] = str(item.change.number)
params['ZUUL_PATCHSET'] = str(item.change.patchset)
@@ -286,13 +257,16 @@
# ZUUL_OLDREV
# ZUUL_NEWREV
- all_items = dependent_items + [item]
- merger_items = map(make_merger_item, all_items)
-
params['job'] = job.name
params['timeout'] = job.timeout
params['items'] = merger_items
params['projects'] = []
+ if hasattr(item.change, 'branch'):
+ params['branch'] = item.change.branch
+ else:
+ params['branch'] = None
+ params['override_branch'] = job.override_branch
+ params['repo_state'] = item.current_build_set.repo_state
if job.name != 'noop':
params['playbooks'] = [x.toDict() for x in job.run]
@@ -300,8 +274,9 @@
params['post_playbooks'] = [x.toDict() for x in job.post_run]
params['roles'] = [x.toDict() for x in job.roles]
+ nodeset = item.current_build_set.getJobNodeSet(job.name)
nodes = []
- for node in item.current_build_set.getJobNodeSet(job.name).getNodes():
+ for node in nodeset.getNodes():
nodes.append(dict(name=node.name, image=node.image,
az=node.az,
host_keys=node.host_keys,
@@ -311,26 +286,44 @@
public_ipv6=node.public_ipv6,
public_ipv4=node.public_ipv4))
params['nodes'] = nodes
+ params['groups'] = [group.toDict() for group in nodeset.getGroups()]
params['vars'] = copy.deepcopy(job.variables)
if job.auth:
for secret in job.auth.secrets:
params['vars'][secret.name] = copy.deepcopy(secret.secret_data)
params['vars']['zuul'] = zuul_params
projects = set()
- if job.repos:
- for repo in job.repos:
- project = item.pipeline.source.getProject(repo)
+
+ def make_project_dict(project, override_branch=None):
+ project_config = item.current_build_set.layout.project_configs.get(
+ project.canonical_name, None)
+ if project_config:
+ project_default_branch = project_config.default_branch
+ else:
+ project_default_branch = 'master'
+ connection = project.source.connection
+ return dict(connection=connection.connection_name,
+ name=project.name,
+ canonical_name=project.canonical_name,
+ override_branch=override_branch,
+ default_branch=project_default_branch)
+
+ if job.required_projects:
+ for job_project in job.required_projects.values():
+ (trusted, project) = tenant.getProject(
+ job_project.project_name)
+ if project is None:
+ raise Exception("Unknown project %s" %
+ (job_project.project_name,))
params['projects'].append(
- dict(name=repo,
- url=item.pipeline.source.getGitUrl(project)))
+ make_project_dict(project,
+ job_project.override_branch))
projects.add(project)
for item in all_items:
if item.change.project not in projects:
- params['projects'].append(
- dict(name=item.change.project.name,
- url=item.pipeline.source.getGitUrl(
- item.change.project)))
- projects.add(item.change.project)
+ project = item.change.project
+ params['projects'].append(make_project_dict(project))
+ projects.add(project)
build = Build(job, uuid)
build.parameters = params
@@ -339,8 +332,8 @@
self.sched.onBuildCompleted(build, 'SUCCESS')
return build
- gearman_job = gear.Job('executor:execute', json.dumps(params),
- unique=uuid)
+ gearman_job = gear.TextJob('executor:execute', json.dumps(params),
+ unique=uuid)
build.__gearman_job = gearman_job
build.__gearman_manager = None
self.builds[uuid] = build
@@ -469,7 +462,7 @@
job.connection.sendAdminRequest(req, timeout=300)
self.log.debug("Response to cancel build %s request: %s" %
(build, req.response.strip()))
- if req.response.startswith("OK"):
+ if req.response.startswith(b"OK"):
try:
del self.builds[job.unique]
except:
@@ -483,8 +476,8 @@
(build,))
stop_uuid = str(uuid4().hex)
data = dict(uuid=build.__gearman_job.unique)
- stop_job = gear.Job("executor:stop:%s" % build.__gearman_manager,
- json.dumps(data), unique=stop_uuid)
+ stop_job = gear.TextJob("executor:stop:%s" % build.__gearman_manager,
+ json.dumps(data), unique=stop_uuid)
self.meta_jobs[stop_uuid] = stop_job
self.log.debug("Submitting stop job: %s", stop_job)
self.gearman.submitJob(stop_job, precedence=gear.PRECEDENCE_HIGH,
diff --git a/zuul/executor/server.py b/zuul/executor/server.py
index 5aa9700..8d2d577 100644
--- a/zuul/executor/server.py
+++ b/zuul/executor/server.py
@@ -27,14 +27,10 @@
from zuul.lib.yamlutil import yaml
import gear
-import git
from six.moves import shlex_quote
import zuul.merger.merger
-import zuul.ansible.action
-import zuul.ansible.callback
-import zuul.ansible.library
-import zuul.ansible.lookup
+import zuul.ansible
from zuul.lib import commandsocket
COMMANDS = ['stop', 'pause', 'unpause', 'graceful', 'verbose',
@@ -46,7 +42,8 @@
self.timeout = timeout
self.function = function
self.args = args
- self.thread = threading.Thread(target=self._run)
+ self.thread = threading.Thread(target=self._run,
+ name='executor-watchdog')
self.thread.daemon = True
self.timed_out = None
@@ -56,7 +53,11 @@
if self._running:
self.timed_out = True
self.function(*self.args)
- self.timed_out = False
+ else:
+ # Only set timed_out to false if we aren't _running
+ # anymore. This means that we stopped running not because
+ # of a timeout but because normal execution ended.
+ self.timed_out = False
def start(self):
self._running = True
@@ -66,11 +67,6 @@
def stop(self):
self._running = False
-# TODOv3(mordred): put git repos in a hierarchy that includes source
-# hostname, eg: git.openstack.org/openstack/nova. Also, configure
-# sources to have an alias, so that the review.openstack.org source
-# repos end up in git.openstack.org.
-
class JobDirPlaybook(object):
def __init__(self, root):
@@ -79,8 +75,88 @@
self.path = None
+class SshAgent(object):
+ log = logging.getLogger("zuul.ExecutorServer")
+
+ def __init__(self):
+ self.env = {}
+ self.ssh_agent = None
+
+ def start(self):
+ if self.ssh_agent:
+ return
+ with open('/dev/null', 'r+') as devnull:
+ ssh_agent = subprocess.Popen(['ssh-agent'], close_fds=True,
+ stdout=subprocess.PIPE,
+ stderr=devnull,
+ stdin=devnull)
+ (output, _) = ssh_agent.communicate()
+ output = output.decode('utf8')
+ for line in output.split("\n"):
+ if '=' in line:
+ line = line.split(";", 1)[0]
+ (key, value) = line.split('=')
+ self.env[key] = value
+ self.log.info('Started SSH Agent, {}'.format(self.env))
+
+ def stop(self):
+ if 'SSH_AGENT_PID' in self.env:
+ try:
+ os.kill(int(self.env['SSH_AGENT_PID']), signal.SIGTERM)
+ except OSError:
+ self.log.exception(
+ 'Problem sending SIGTERM to agent {}'.format(self.env))
+ self.log.info('Sent SIGTERM to SSH Agent, {}'.format(self.env))
+ self.env = {}
+
+ def add(self, key_path):
+ env = os.environ.copy()
+ env.update(self.env)
+ key_path = os.path.expanduser(key_path)
+ self.log.debug('Adding SSH Key {}'.format(key_path))
+ output = ''
+ try:
+ output = subprocess.check_output(['ssh-add', key_path], env=env,
+ stderr=subprocess.PIPE)
+ except subprocess.CalledProcessError:
+ self.log.error('ssh-add failed: {}'.format(output))
+ raise
+ self.log.info('Added SSH Key {}'.format(key_path))
+
+ def remove(self, key_path):
+ env = os.environ.copy()
+ env.update(self.env)
+ key_path = os.path.expanduser(key_path)
+ self.log.debug('Removing SSH Key {}'.format(key_path))
+ subprocess.check_output(['ssh-add', '-d', key_path], env=env,
+ stderr=subprocess.PIPE)
+ self.log.info('Removed SSH Key {}'.format(key_path))
+
+ def list(self):
+ if 'SSH_AUTH_SOCK' not in self.env:
+ return None
+ env = os.environ.copy()
+ env.update(self.env)
+ result = []
+ for line in subprocess.Popen(['ssh-add', '-L'], env=env,
+ stdout=subprocess.PIPE).stdout:
+ line = line.decode('utf8')
+ if line.strip() == 'The agent has no identities.':
+ break
+ result.append(line.strip())
+ return result
+
+
class JobDir(object):
- def __init__(self, root=None, keep=False):
+ def __init__(self, root, keep, build_uuid):
+ '''
+ :param str root: Root directory for the individual job directories.
+ Can be None to use the default system temp root directory.
+ :param bool keep: If True, do not delete the job directory.
+ :param str build_uuid: The unique build UUID. If supplied, this will
+ be used as the temp job directory name. Using this will help the
+ log streaming daemon find job logs.
+ '''
# root
# ansible
# trusted.cfg
@@ -89,7 +165,12 @@
# src
# logs
self.keep = keep
- self.root = tempfile.mkdtemp(dir=root)
+ if root:
+ tmpdir = root
+ else:
+ tmpdir = tempfile.gettempdir()
+ self.root = os.path.join(tmpdir, build_uuid)
+ os.mkdir(self.root, 0o700)
# Work
self.work_root = os.path.join(self.root, 'work')
os.makedirs(self.work_root)
@@ -101,14 +182,14 @@
self.ansible_root = os.path.join(self.root, 'ansible')
os.makedirs(self.ansible_root)
self.known_hosts = os.path.join(self.ansible_root, 'known_hosts')
- self.inventory = os.path.join(self.ansible_root, 'inventory')
- self.vars = os.path.join(self.ansible_root, 'vars.yaml')
+ self.inventory = os.path.join(self.ansible_root, 'inventory.yaml')
self.playbooks = [] # The list of candidate playbooks
self.playbook = None # A pointer to the candidate we have chosen
self.pre_playbooks = []
self.post_playbooks = []
self.roles = []
- self.roles_path = []
+ self.trusted_roles_path = []
+ self.untrusted_roles_path = []
self.untrusted_config = os.path.join(
self.ansible_root, 'untrusted.cfg')
self.trusted_config = os.path.join(self.ansible_root, 'trusted.cfg')
@@ -142,6 +223,10 @@
count = len(self.roles)
root = os.path.join(self.ansible_root, 'role_%i' % (count,))
os.makedirs(root)
+ trusted = os.path.join(root, 'trusted')
+ os.makedirs(trusted)
+ untrusted = os.path.join(root, 'untrusted')
+ os.makedirs(untrusted)
self.roles.append(root)
return root
@@ -157,13 +242,14 @@
class UpdateTask(object):
- def __init__(self, project, url):
- self.project = project
- self.url = url
+ def __init__(self, connection_name, project_name):
+ self.connection_name = connection_name
+ self.project_name = project_name
self.event = threading.Event()
def __eq__(self, other):
- if other.project == self.project:
+ if (other and other.connection_name == self.connection_name and
+ other.project_name == self.project_name):
return True
return False
@@ -213,6 +299,57 @@
self.condition.release()
+def _copy_ansible_files(python_module, target_dir):
+ library_path = os.path.dirname(os.path.abspath(python_module.__file__))
+ for fn in os.listdir(library_path):
+ if fn == "__pycache__":
+ continue
+ full_path = os.path.join(library_path, fn)
+ if os.path.isdir(full_path):
+ shutil.copytree(full_path, os.path.join(target_dir, fn))
+ else:
+ shutil.copy(os.path.join(library_path, fn), target_dir)
+
+
+def make_inventory_dict(nodes, groups, all_vars):
+
+ hosts = {}
+ for node in nodes:
+ hosts[node['name']] = node['host_vars']
+
+ inventory = {
+ 'all': {
+ 'hosts': hosts,
+ 'vars': all_vars,
+ }
+ }
+
+ for group in groups:
+ group_hosts = {}
+ for node_name in group['nodes']:
+ # children is a dict with None as values because we don't have
+            # any per-group variables. If we did, None would be a dict
+ # with the per-group variables
+ group_hosts[node_name] = None
+ inventory[group['name']] = {'hosts': group_hosts}
+
+ return inventory
+
+
+class ExecutorMergeWorker(gear.TextWorker):
+ def __init__(self, executor_server, *args, **kw):
+ self.zuul_executor_server = executor_server
+ super(ExecutorMergeWorker, self).__init__(*args, **kw)
+
+ def handleNoop(self, packet):
+ # Wait until the update queue is empty before responding
+ while self.zuul_executor_server.update_queue.qsize():
+ time.sleep(1)
+
+ with self.zuul_executor_server.merger_lock:
+ super(ExecutorMergeWorker, self).handleNoop(packet)
+
+
class ExecutorServer(object):
log = logging.getLogger("zuul.ExecutorServer")
@@ -225,6 +362,7 @@
# perhaps hostname+pid.
self.hostname = socket.gethostname()
self.zuul_url = config.get('merger', 'zuul_url')
+ self.merger_lock = threading.Lock()
self.command_map = dict(
stop=self.stop,
pause=self.pause,
@@ -255,6 +393,13 @@
else:
self.merge_name = None
+ if self.config.has_option('executor', 'untrusted_wrapper'):
+ untrusted_wrapper_name = self.config.get(
+ 'executor', 'untrusted_wrapper').split()
+ else:
+ untrusted_wrapper_name = 'bubblewrap'
+ self.untrusted_wrapper = connections.drivers[untrusted_wrapper_name]
+
self.connections = connections
# This merger and its git repos are used to maintain
# up-to-date copies of all the repos that are used by jobs, as
@@ -271,46 +416,38 @@
path = os.path.join(state_dir, 'executor.socket')
self.command_socket = commandsocket.CommandSocket(path)
ansible_dir = os.path.join(state_dir, 'ansible')
- self.library_dir = os.path.join(ansible_dir, 'library')
- if not os.path.exists(self.library_dir):
- os.makedirs(self.library_dir)
- self.action_dir = os.path.join(ansible_dir, 'action')
- if not os.path.exists(self.action_dir):
- os.makedirs(self.action_dir)
+ self.ansible_dir = ansible_dir
- self.callback_dir = os.path.join(ansible_dir, 'callback')
- if not os.path.exists(self.callback_dir):
- os.makedirs(self.callback_dir)
+ zuul_dir = os.path.join(ansible_dir, 'zuul')
+ plugin_dir = os.path.join(zuul_dir, 'ansible')
- self.lookup_dir = os.path.join(ansible_dir, 'lookup')
- if not os.path.exists(self.lookup_dir):
- os.makedirs(self.lookup_dir)
+ if not os.path.exists(plugin_dir):
+ os.makedirs(plugin_dir)
- library_path = os.path.dirname(os.path.abspath(
- zuul.ansible.library.__file__))
- for fn in os.listdir(library_path):
- shutil.copy(os.path.join(library_path, fn), self.library_dir)
+ self.library_dir = os.path.join(plugin_dir, 'library')
+ self.action_dir = os.path.join(plugin_dir, 'action')
+ self.callback_dir = os.path.join(plugin_dir, 'callback')
+ self.lookup_dir = os.path.join(plugin_dir, 'lookup')
- action_path = os.path.dirname(os.path.abspath(
- zuul.ansible.action.__file__))
- for fn in os.listdir(action_path):
- shutil.copy(os.path.join(action_path, fn), self.action_dir)
+ _copy_ansible_files(zuul.ansible, plugin_dir)
- callback_path = os.path.dirname(os.path.abspath(
- zuul.ansible.callback.__file__))
- for fn in os.listdir(callback_path):
- shutil.copy(os.path.join(callback_path, fn), self.callback_dir)
-
- lookup_path = os.path.dirname(os.path.abspath(
- zuul.ansible.lookup.__file__))
- for fn in os.listdir(lookup_path):
- shutil.copy(os.path.join(lookup_path, fn), self.lookup_dir)
+ # We're copying zuul.ansible.* into a directory we are going
+ # to add to pythonpath, so our plugins can "import
+ # zuul.ansible". But we're not installing all of zuul, so
+ # create a __init__.py file for the stub "zuul" module.
+ with open(os.path.join(zuul_dir, '__init__.py'), 'w'):
+ pass
self.job_workers = {}
- def _getMerger(self, root):
+ def _getMerger(self, root, logger=None):
+ if root != self.merge_root:
+ cache_root = self.merge_root
+ else:
+ cache_root = None
return zuul.merger.merger.Merger(root, self.connections,
- self.merge_email, self.merge_name)
+ self.merge_email, self.merge_name,
+ cache_root, logger)
def start(self):
self._running = True
@@ -320,10 +457,13 @@
port = self.config.get('gearman', 'port')
else:
port = 4730
- self.worker = gear.Worker('Zuul Executor Server')
- self.worker.addServer(server, port)
+ self.merger_worker = ExecutorMergeWorker(self, 'Zuul Executor Merger')
+ self.merger_worker.addServer(server, port)
+ self.executor_worker = gear.TextWorker('Zuul Executor Server')
+ self.executor_worker.addServer(server, port)
self.log.debug("Waiting for server")
- self.worker.waitForServer()
+ self.merger_worker.waitForServer()
+ self.executor_worker.waitForServer()
self.log.debug("Registering")
self.register()
@@ -337,23 +477,35 @@
self.update_thread = threading.Thread(target=self._updateLoop)
self.update_thread.daemon = True
self.update_thread.start()
- self.thread = threading.Thread(target=self.run)
- self.thread.daemon = True
- self.thread.start()
+ self.merger_thread = threading.Thread(target=self.run_merger)
+ self.merger_thread.daemon = True
+ self.merger_thread.start()
+ self.executor_thread = threading.Thread(target=self.run_executor)
+ self.executor_thread.daemon = True
+ self.executor_thread.start()
def register(self):
- self.worker.registerFunction("executor:execute")
- self.worker.registerFunction("executor:stop:%s" % self.hostname)
- self.worker.registerFunction("merger:merge")
- self.worker.registerFunction("merger:cat")
+ self.executor_worker.registerFunction("executor:execute")
+ self.executor_worker.registerFunction("executor:stop:%s" %
+ self.hostname)
+ self.merger_worker.registerFunction("merger:merge")
+ self.merger_worker.registerFunction("merger:cat")
def stop(self):
self.log.debug("Stopping")
self._running = False
- self.worker.shutdown()
self._command_running = False
self.command_socket.stop()
self.update_queue.put(None)
+
+ for job_worker in list(self.job_workers.values()):
+ try:
+ job_worker.stop()
+ except Exception:
+ self.log.exception("Exception sending stop command "
+ "to worker:")
+ self.merger_worker.shutdown()
+ self.executor_worker.shutdown()
self.log.debug("Stopped")
def pause(self):
@@ -378,12 +530,13 @@
def join(self):
self.update_thread.join()
- self.thread.join()
+ self.merger_thread.join()
+ self.executor_thread.join()
def runCommand(self):
while self._command_running:
try:
- command = self.command_socket.get()
+ command = self.command_socket.get().decode('utf8')
if command != '_stop':
self.command_map[command]()
except Exception:
@@ -402,31 +555,27 @@
if task is None:
# We are asked to stop
return
- self.log.info("Updating repo %s from %s" % (task.project, task.url))
- self.merger.updateRepo(task.project, task.url)
- self.log.debug("Finished updating repo %s from %s" %
- (task.project, task.url))
+ with self.merger_lock:
+ self.log.info("Updating repo %s/%s" % (
+ task.connection_name, task.project_name))
+ self.merger.updateRepo(task.connection_name, task.project_name)
+ self.log.debug("Finished updating repo %s/%s" %
+ (task.connection_name, task.project_name))
task.setComplete()
- def update(self, project, url):
+ def update(self, connection_name, project_name):
# Update a repository in the main merger
- task = UpdateTask(project, url)
+ task = UpdateTask(connection_name, project_name)
task = self.update_queue.put(task)
return task
- def run(self):
- self.log.debug("Starting executor listener")
+ def run_merger(self):
+ self.log.debug("Starting merger listener")
while self._running:
try:
- job = self.worker.getJob()
+ job = self.merger_worker.getJob()
try:
- if job.name == 'executor:execute':
- self.log.debug("Got execute job: %s" % job.unique)
- self.executeJob(job)
- elif job.name.startswith('executor:stop'):
- self.log.debug("Got stop job: %s" % job.unique)
- self.stopJob(job)
- elif job.name == 'merger:cat':
+ if job.name == 'merger:cat':
self.log.debug("Got cat job: %s" % job.unique)
self.cat(job)
elif job.name == 'merger:merge':
@@ -437,7 +586,32 @@
job.sendWorkFail()
except Exception:
self.log.exception("Exception while running job")
- job.sendWorkException(traceback.format_exc())
+ job.sendWorkException(
+ traceback.format_exc().encode('utf8'))
+ except gear.InterruptedError:
+ pass
+ except Exception:
+ self.log.exception("Exception while getting job")
+
+ def run_executor(self):
+ self.log.debug("Starting executor listener")
+ while self._running:
+ try:
+ job = self.executor_worker.getJob()
+ try:
+ if job.name == 'executor:execute':
+ self.log.debug("Got execute job: %s" % job.unique)
+ self.executeJob(job)
+ elif job.name.startswith('executor:stop'):
+ self.log.debug("Got stop job: %s" % job.unique)
+ self.stopJob(job)
+ else:
+ self.log.error("Unable to handle job %s" % job.name)
+ job.sendWorkFail()
+ except Exception:
+ self.log.exception("Exception while running job")
+ job.sendWorkException(
+ traceback.format_exc().encode('utf8'))
except gear.InterruptedError:
pass
except Exception:
@@ -469,10 +643,11 @@
def cat(self, job):
args = json.loads(job.arguments)
- task = self.update(args['project'], args['url'])
+ task = self.update(args['connection'], args['project'])
task.wait()
- files = self.merger.getFiles(args['project'], args['url'],
- args['branch'], args['files'])
+ with self.merger_lock:
+ files = self.merger.getFiles(args['connection'], args['project'],
+ args['branch'], args['files'])
result = dict(updated=True,
files=files,
zuul_url=self.zuul_url)
@@ -480,28 +655,35 @@
def merge(self, job):
args = json.loads(job.arguments)
- ret = self.merger.mergeChanges(args['items'], args.get('files'))
+ with self.merger_lock:
+ ret = self.merger.mergeChanges(args['items'], args.get('files'),
+ args.get('repo_state'))
result = dict(merged=(ret is not None),
zuul_url=self.zuul_url)
- if args.get('files'):
- if ret:
- result['commit'], result['files'] = ret
- else:
- result['commit'], result['files'] = (None, None)
+ if ret is None:
+ result['commit'] = result['files'] = result['repo_state'] = None
else:
- result['commit'] = ret
+ (result['commit'], result['files'], result['repo_state'],
+ recent) = ret
job.sendWorkComplete(json.dumps(result))
-class AnsibleJob(object):
- log = logging.getLogger("zuul.AnsibleJob")
+class AnsibleJobLogAdapter(logging.LoggerAdapter):
+ def process(self, msg, kwargs):
+ msg, kwargs = super(AnsibleJobLogAdapter, self).process(msg, kwargs)
+ msg = '[build: %s] %s' % (kwargs['extra']['job'], msg)
+ return msg, kwargs
+
+class AnsibleJob(object):
RESULT_NORMAL = 1
RESULT_TIMED_OUT = 2
RESULT_UNREACHABLE = 3
RESULT_ABORTED = 4
def __init__(self, executor_server, job):
+ logger = logging.getLogger("zuul.AnsibleJob")
+ self.log = AnsibleJobLogAdapter(logger, {'job': job.unique})
self.executor_server = executor_server
self.job = job
self.jobdir = None
@@ -509,6 +691,8 @@
self.proc_lock = threading.Lock()
self.running = False
self.aborted = False
+ self.thread = None
+ self.ssh_agent = None
if self.executor_server.config.has_option(
'executor', 'private_key_file'):
@@ -516,8 +700,11 @@
'executor', 'private_key_file')
else:
self.private_key_file = '~/.ssh/id_rsa'
+ self.ssh_agent = SshAgent()
def run(self):
+ self.ssh_agent.start()
+ self.ssh_agent.add(self.private_key_file)
self.running = True
self.thread = threading.Thread(target=self.execute)
self.thread.start()
@@ -525,12 +712,14 @@
def stop(self):
self.aborted = True
self.abortRunningProc()
- self.thread.join()
+ if self.thread:
+ self.thread.join()
def execute(self):
try:
- self.jobdir = JobDir(root=self.executor_server.jobdir_root,
- keep=self.executor_server.keep_jobdir)
+ self.jobdir = JobDir(self.executor_server.jobdir_root,
+ self.executor_server.keep_jobdir,
+ str(self.job.unique))
self._execute()
except Exception:
self.log.exception("Exception while executing job")
@@ -545,6 +734,11 @@
self.executor_server.finishJob(self.job.unique)
except Exception:
self.log.exception("Error finalizing job thread:")
+ if self.ssh_agent:
+ try:
+ self.ssh_agent.stop()
+ except Exception:
+ self.log.exception("Error stopping SSH agent:")
def _execute(self):
self.log.debug("Job %s: beginning" % (self.job.unique,))
@@ -556,36 +750,49 @@
tasks = []
for project in args['projects']:
self.log.debug("Job %s: updating project %s" %
- (self.job.unique, project['name']))
+ (self.job.unique, project))
tasks.append(self.executor_server.update(
- project['name'], project['url']))
+ project['connection'], project['name']))
for task in tasks:
task.wait()
self.log.debug("Job %s: git updates complete" % (self.job.unique,))
+ merger = self.executor_server._getMerger(self.jobdir.src_root,
+ self.log)
+ repos = {}
for project in args['projects']:
- self.log.debug("Cloning %s" % (project['name'],))
- repo = git.Repo.clone_from(
- os.path.join(self.executor_server.merge_root,
- project['name']),
- os.path.join(self.jobdir.src_root,
- project['name']))
- repo.remotes.origin.config_writer.set('url', project['url'])
+ self.log.debug("Cloning %s/%s" % (project['connection'],
+ project['name'],))
+ repo = merger.getRepo(project['connection'],
+ project['name'])
+ repos[project['canonical_name']] = repo
merge_items = [i for i in args['items'] if i.get('refspec')]
if merge_items:
- commit = self.doMergeChanges(merge_items)
- if not commit:
+ if not self.doMergeChanges(merger, merge_items,
+ args['repo_state']):
# There was a merge conflict and we have already sent
# a work complete result, don't run any jobs
return
- else:
- commit = args['items'][-1]['newrev'] # noqa
+
+ for project in args['projects']:
+ repo = repos[project['canonical_name']]
+ self.checkoutBranch(repo,
+ project['name'],
+ args['branch'],
+ args['override_branch'],
+ project['override_branch'],
+ project['default_branch'])
+
+ # Delete the origin remote from each repo we set up since
+ # it will not be valid within the jobs.
+ for repo in repos.values():
+ repo.deleteRemote('origin')
# is the playbook in a repo that we have already prepared?
- self.preparePlaybookRepos(args)
+ trusted, untrusted = self.preparePlaybookRepos(args)
- self.prepareRoles(args)
+ self.prepareRoles(args, trusted, untrusted)
# TODOv3: Ansible the ansible thing here.
self.prepareAnsibleFiles(args)
@@ -618,14 +825,42 @@
result = dict(result=result)
self.job.sendWorkComplete(json.dumps(result))
- def doMergeChanges(self, items):
- # Get a merger in order to update the repos involved in this job.
- merger = self.executor_server._getMerger(self.jobdir.src_root)
- commit = merger.mergeChanges(items) # noqa
- if not commit: # merge conflict
+ def doMergeChanges(self, merger, items, repo_state):
+ ret = merger.mergeChanges(items, repo_state=repo_state)
+ if not ret: # merge conflict
result = dict(result='MERGER_FAILURE')
self.job.sendWorkComplete(json.dumps(result))
- return commit
+ return False
+ recent = ret[3]
+ for key, commit in recent.items():
+ (connection, project, branch) = key
+ repo = merger.getRepo(connection, project)
+ repo.setRef('refs/heads/' + branch, commit)
+ return True
+
+ def checkoutBranch(self, repo, project_name, zuul_branch,
+ job_branch, project_override_branch,
+ project_default_branch):
+ branches = repo.getBranches()
+ if project_override_branch in branches:
+ self.log.info("Checking out %s project override branch %s",
+ project_name, project_override_branch)
+ repo.checkoutLocalBranch(project_override_branch)
+ elif job_branch in branches:
+ self.log.info("Checking out %s job branch %s",
+ project_name, job_branch)
+ repo.checkoutLocalBranch(job_branch)
+ elif zuul_branch and zuul_branch in branches:
+ self.log.info("Checking out %s zuul branch %s",
+ project_name, zuul_branch)
+ repo.checkoutLocalBranch(zuul_branch)
+ elif project_default_branch in branches:
+ self.log.info("Checking out %s project default branch %s",
+ project_name, project_default_branch)
+ repo.checkoutLocalBranch(project_default_branch)
+ else:
+ raise Exception("Project %s does not have the default branch %s" %
+ (project_name, project_default_branch))
def runPlaybooks(self, args):
result = None
@@ -720,15 +955,24 @@
return None
def preparePlaybookRepos(self, args):
+ trusted = untrusted = False
for playbook in args['pre_playbooks']:
jobdir_playbook = self.jobdir.addPrePlaybook()
self.preparePlaybookRepo(jobdir_playbook, playbook,
args, required=True)
+ if playbook['trusted']:
+ trusted = True
+ else:
+ untrusted = True
for playbook in args['playbooks']:
jobdir_playbook = self.jobdir.addPlaybook()
self.preparePlaybookRepo(jobdir_playbook, playbook,
args, required=False)
+ if playbook['trusted']:
+ trusted = True
+ else:
+ untrusted = True
if jobdir_playbook.path is not None:
self.jobdir.playbook = jobdir_playbook
break
@@ -739,6 +983,11 @@
jobdir_playbook = self.jobdir.addPostPlaybook()
self.preparePlaybookRepo(jobdir_playbook, playbook,
args, required=True)
+ if playbook['trusted']:
+ trusted = True
+ else:
+ untrusted = True
+ return (trusted, untrusted)
def preparePlaybookRepo(self, jobdir_playbook, playbook, args, required):
self.log.debug("Prepare playbook repo for %s" % (playbook,))
@@ -748,17 +997,16 @@
source = self.executor_server.connections.getSource(
playbook['connection'])
project = source.getProject(playbook['project'])
- # TODO(jeblair): construct the url in the merger itself
- url = source.getGitUrl(project)
if not playbook['trusted']:
# This is a project repo, so it is safe to use the already
# checked out version (from speculative merging) of the
# playbook
for i in args['items']:
- if (i['connection_name'] == playbook['connection'] and
+ if (i['connection'] == playbook['connection'] and
i['project'] == playbook['project']):
# We already have this repo prepared
path = os.path.join(self.jobdir.src_root,
+ project.canonical_hostname,
project.name,
playbook['path'])
jobdir_playbook.path = self.findPlaybook(
@@ -770,10 +1018,13 @@
# the stack of changes we are testing, so check out the branch
# tip into a dedicated space.
- merger = self.executor_server._getMerger(jobdir_playbook.root)
- merger.checkoutBranch(project.name, url, playbook['branch'])
+ merger = self.executor_server._getMerger(jobdir_playbook.root,
+ self.log)
+ merger.checkoutBranch(playbook['connection'], project.name,
+ playbook['branch'])
path = os.path.join(jobdir_playbook.root,
+ project.canonical_hostname,
project.name,
playbook['path'])
jobdir_playbook.path = self.findPlaybook(
@@ -781,11 +1032,11 @@
required=required,
trusted=playbook['trusted'])
- def prepareRoles(self, args):
+ def prepareRoles(self, args, trusted, untrusted):
for role in args['roles']:
if role['type'] == 'zuul':
root = self.jobdir.addRole()
- self.prepareZuulRole(args, role, root)
+ self.prepareZuulRole(args, role, root, trusted, untrusted)
def findRole(self, path, trusted=False):
d = os.path.join(path, 'tasks')
@@ -808,71 +1059,117 @@
self._blockPluginDirs(os.path.join(path, entry))
return path
- def prepareZuulRole(self, args, role, root):
+ def prepareZuulRole(self, args, role, root, trusted, untrusted):
self.log.debug("Prepare zuul role for %s" % (role,))
# Check out the role repo if needed
source = self.executor_server.connections.getSource(
role['connection'])
project = source.getProject(role['project'])
- # TODO(jeblair): construct the url in the merger itself
- url = source.getGitUrl(project)
- role_repo = None
- if not role['trusted']:
- # This is a project repo, so it is safe to use the already
- # checked out version (from speculative merging) of the
- # role
+ untrusted_role_repo = None
+ trusted_role_repo = None
+ trusted_root = os.path.join(root, 'trusted')
+ untrusted_root = os.path.join(root, 'untrusted')
+ name = role['target_name']
+
+ if untrusted:
+ # There is at least one untrusted playbook. For that
+ # case, use the already checked out version (from
+ # speculative merging) of the role.
for i in args['items']:
- if (i['connection_name'] == role['connection'] and
+ if (i['connection'] == role['connection'] and
i['project'] == role['project']):
# We already have this repo prepared;
# copy it into location.
path = os.path.join(self.jobdir.src_root,
+ project.canonical_hostname,
project.name)
- link = os.path.join(root, role['name'])
+ # The name of the symlink is the requested name of
+ # the role (which may be the repo name or may be
+ # something else; this can come into play if this
+ # is a bare role).
+ link = os.path.join(untrusted_root, name)
+ link = os.path.realpath(link)
+ if not link.startswith(os.path.realpath(untrusted_root)):
+ raise Exception("Invalid role name %s", name)
os.symlink(path, link)
- role_repo = link
+ untrusted_role_repo = link
break
- # The role repo is either a config repo, or it isn't in
- # the stack of changes we are testing, so check out the branch
- # tip into a dedicated space.
+ if trusted or not untrusted_role_repo:
+ # There is at least one trusted playbook which will need a
+ # trusted checkout of the role, or the role did not appear
+ # in the dependency chain for the change (in which case,
+ # there is no existing untrusted checkout of it). Check
+ # out the branch tip into a dedicated space.
+ merger = self.executor_server._getMerger(trusted_root,
+ self.log)
+ merger.checkoutBranch(role['connection'], project.name,
+ 'master')
+ orig_repo_path = os.path.join(trusted_root,
+ project.canonical_hostname,
+ project.name)
+ if name != project.name:
+ # The requested name of the role is not the same as
+ # the project name, so rename the git repo as the
+ # requested name. It is the only item in this
+ # directory, so we don't need to worry about
+ # collisions.
+ target = os.path.join(trusted_root,
+ project.canonical_hostname,
+ name)
+ target = os.path.realpath(target)
+ if not target.startswith(os.path.realpath(trusted_root)):
+ raise Exception("Invalid role name %s", name)
+ os.rename(orig_repo_path, target)
+ trusted_role_repo = target
+ else:
+ trusted_role_repo = orig_repo_path
- if not role_repo:
- merger = self.executor_server._getMerger(root)
- merger.checkoutBranch(project.name, url, 'master')
- role_repo = os.path.join(root, project.name)
+ if not untrusted_role_repo:
+ # In the case that there was no untrusted checkout,
+ # use the trusted checkout.
+ untrusted_role_repo = trusted_role_repo
+ untrusted_root = trusted_root
- role_path = self.findRole(role_repo, trusted=role['trusted'])
- if role_path is None:
- # In the case of a bare role, add the containing directory
- role_path = root
- self.jobdir.roles_path.append(role_path)
+ if untrusted:
+ untrusted_role_path = self.findRole(untrusted_role_repo,
+ trusted=False)
+ if untrusted_role_path is None:
+ # In the case of a bare role, add the containing directory
+ untrusted_role_path = os.path.join(untrusted_root,
+ project.canonical_hostname)
+ self.jobdir.untrusted_roles_path.append(untrusted_role_path)
+
+ if trusted:
+ trusted_role_path = self.findRole(trusted_role_repo,
+ trusted=True)
+ if trusted_role_path is None:
+ # In the case of a bare role, add the containing directory
+ trusted_role_path = os.path.join(trusted_root,
+ project.canonical_hostname)
+ self.jobdir.trusted_roles_path.append(trusted_role_path)
def prepareAnsibleFiles(self, args):
- keys = []
- with open(self.jobdir.inventory, 'w') as inventory:
- for item in self.getHostList(args):
- inventory.write(item['name'])
- for k, v in item['host_vars'].items():
- inventory.write(' %s=%s' % (k, v))
- inventory.write('\n')
- for key in item['host_keys']:
- keys.append(key)
+ all_vars = dict(args['vars'])
+ all_vars['zuul']['executor'] = dict(
+ hostname=self.executor_server.hostname,
+ src_root=self.jobdir.src_root,
+ log_root=self.jobdir.log_root)
+
+ nodes = self.getHostList(args)
+ inventory = make_inventory_dict(nodes, args['groups'], all_vars)
+
+ with open(self.jobdir.inventory, 'w') as inventory_yaml:
+ inventory_yaml.write(
+ yaml.safe_dump(inventory, default_flow_style=False))
with open(self.jobdir.known_hosts, 'w') as known_hosts:
- for key in keys:
- known_hosts.write('%s\n' % key)
+ for node in nodes:
+ for key in node['host_keys']:
+ known_hosts.write('%s\n' % key)
- with open(self.jobdir.vars, 'w') as vars_yaml:
- zuul_vars = dict(args['vars'])
- zuul_vars['zuul']['executor'] = dict(
- hostname=self.executor_server.hostname,
- src_root=self.jobdir.src_root,
- log_root=self.jobdir.log_root)
- vars_yaml.write(
- yaml.safe_dump(zuul_vars, default_flow_style=False))
self.writeAnsibleConfig(self.jobdir.untrusted_config)
self.writeAnsibleConfig(self.jobdir.trusted_config, trusted=True)
@@ -890,9 +1187,6 @@
config.write('gathering = explicit\n')
config.write('library = %s\n'
% self.executor_server.library_dir)
- if self.jobdir.roles_path:
- config.write('roles_path = %s\n' %
- ':'.join(self.jobdir.roles_path))
config.write('command_warnings = False\n')
config.write('callback_plugins = %s\n'
% self.executor_server.callback_dir)
@@ -905,6 +1199,12 @@
% self.executor_server.action_dir)
config.write('lookup_plugins = %s\n'
% self.executor_server.lookup_dir)
+ roles_path = self.jobdir.untrusted_roles_path
+ else:
+ roles_path = self.jobdir.trusted_roles_path
+
+ if roles_path:
+ config.write('roles_path = %s\n' % ':'.join(roles_path))
# On trusted jobs, we want to prevent the printing of args,
# since trusted jobs might have access to secrets that they may
@@ -947,12 +1247,26 @@
def runAnsible(self, cmd, timeout, trusted=False):
env_copy = os.environ.copy()
+ env_copy.update(self.ssh_agent.env)
env_copy['LOGNAME'] = 'zuul'
+ pythonpath = env_copy.get('PYTHONPATH')
+ if pythonpath:
+ pythonpath = [pythonpath]
+ else:
+ pythonpath = []
+ pythonpath = [self.executor_server.ansible_dir] + pythonpath
+ env_copy['PYTHONPATH'] = os.path.pathsep.join(pythonpath)
if trusted:
config_file = self.jobdir.trusted_config
+ popen = subprocess.Popen
else:
config_file = self.jobdir.untrusted_config
+ driver = self.executor_server.untrusted_wrapper
+ popen = driver.getPopen(
+ work_dir=self.jobdir.root,
+ ansible_dir=self.executor_server.ansible_dir,
+ ssh_auth_sock=env_copy.get('SSH_AUTH_SOCK'))
env_copy['ANSIBLE_CONFIG'] = config_file
@@ -961,7 +1275,7 @@
return (self.RESULT_ABORTED, None)
self.log.debug("Ansible command: ANSIBLE_CONFIG=%s %s",
config_file, " ".join(shlex_quote(c) for c in cmd))
- self.proc = subprocess.Popen(
+ self.proc = popen(
cmd,
cwd=self.jobdir.work_root,
stdout=subprocess.PIPE,
@@ -1009,12 +1323,10 @@
else:
verbose = '-v'
- cmd = ['ansible-playbook', playbook.path]
+ cmd = ['ansible-playbook', verbose, playbook.path]
if success is not None:
cmd.extend(['-e', 'success=%s' % str(bool(success))])
- cmd.extend(['-e@%s' % self.jobdir.vars, verbose])
-
return self.runAnsible(
cmd=cmd, timeout=timeout, trusted=playbook.trusted)
diff --git a/zuul/lib/cloner.py b/zuul/lib/cloner.py
index bec8ebe..3070be6 100644
--- a/zuul/lib/cloner.py
+++ b/zuul/lib/cloner.py
@@ -102,7 +102,8 @@
new_repo = git.Repo.clone_from(git_cache, dest)
self.log.info("Updating origin remote in repo %s to %s",
project, git_upstream)
- new_repo.remotes.origin.config_writer.set('url', git_upstream)
+ new_repo.remotes.origin.config_writer.set('url',
+ git_upstream).release()
else:
self.log.info("Creating repo %s from upstream %s",
project, git_upstream)
diff --git a/zuul/lib/commandsocket.py b/zuul/lib/commandsocket.py
index 1b7fed9..ae62204 100644
--- a/zuul/lib/commandsocket.py
+++ b/zuul/lib/commandsocket.py
@@ -18,7 +18,7 @@
import os
import socket
import threading
-import Queue
+from six.moves import queue
class CommandSocket(object):
@@ -27,7 +27,7 @@
def __init__(self, path):
self.running = False
self.path = path
- self.queue = Queue.Queue()
+ self.queue = queue.Queue()
def start(self):
self.running = True
@@ -46,14 +46,14 @@
self.running = False
s = socket.socket(socket.AF_UNIX, socket.SOCK_STREAM)
s.connect(self.path)
- s.sendall('_stop\n')
+ s.sendall(b'_stop\n')
# The command '_stop' will be ignored by our listener, so
# directly inject it into the queue so that consumers of this
# class which are waiting in .get() are awakened. They can
# either handle '_stop' or just ignore the unknown command and
# then check to see if they should continue to run before
# re-entering their loop.
- self.queue.put('_stop')
+ self.queue.put(b'_stop')
self.socket_thread.join()
def _socketListener(self):
@@ -61,10 +61,10 @@
try:
s, addr = self.socket.accept()
self.log.debug("Accepted socket connection %s" % (s,))
- buf = ''
+ buf = b''
while True:
buf += s.recv(1)
- if buf[-1] == '\n':
+ if buf[-1:] == b'\n':
break
buf = buf.strip()
self.log.debug("Received %s from socket" % (buf,))
@@ -72,7 +72,7 @@
# Because we use '_stop' internally to wake up a
# waiting thread, don't allow it to actually be
# injected externally.
- if buf != '_stop':
+ if buf != b'_stop':
self.queue.put(buf)
except Exception:
self.log.exception("Exception in socket handler")
diff --git a/zuul/lib/connections.py b/zuul/lib/connections.py
index 9964ba9..79d78f4 100644
--- a/zuul/lib/connections.py
+++ b/zuul/lib/connections.py
@@ -18,10 +18,14 @@
import zuul.driver.zuul
import zuul.driver.gerrit
import zuul.driver.git
+import zuul.driver.github
import zuul.driver.smtp
import zuul.driver.timer
import zuul.driver.sql
+import zuul.driver.bubblewrap
+import zuul.driver.nullwrap
from zuul.connection import BaseConnection
+from zuul.driver import SourceInterface
class DefaultConnection(BaseConnection):
@@ -40,9 +44,12 @@
self.registerDriver(zuul.driver.zuul.ZuulDriver())
self.registerDriver(zuul.driver.gerrit.GerritDriver())
self.registerDriver(zuul.driver.git.GitDriver())
+ self.registerDriver(zuul.driver.github.GithubDriver())
self.registerDriver(zuul.driver.smtp.SMTPDriver())
self.registerDriver(zuul.driver.timer.TimerDriver())
self.registerDriver(zuul.driver.sql.SQLDriver())
+ self.registerDriver(zuul.driver.bubblewrap.BubblewrapDriver())
+ self.registerDriver(zuul.driver.nullwrap.NullwrapDriver())
def registerDriver(self, driver):
if driver.name in self.drivers:
@@ -58,6 +65,13 @@
if load:
connection.onLoad()
+ def registerWebapp(self, webapp):
+ for driver_name, driver in self.drivers.items():
+ if hasattr(driver, 'registerWebapp'):
+ driver.registerWebapp(webapp)
+ for connection_name, connection in self.connections.items():
+ connection.registerWebapp(webapp)
+
def reconfigureDrivers(self, tenant):
for driver in self.drivers.values():
if hasattr(driver, 'reconfigure'):
@@ -66,10 +80,11 @@
def stop(self):
for connection_name, connection in self.connections.items():
connection.onStop()
+ for driver in self.drivers.values():
+ driver.stop()
- def configure(self, config):
+ def configure(self, config, source_only=False):
# Register connections from the config
- # TODO(jhesketh): import connection modules dynamically
connections = {}
for section_name in config.sections():
@@ -90,6 +105,13 @@
% (con_config['driver'], con_name))
driver = self.drivers[con_driver]
+
+ # The merger and the reporter only needs source driver.
+ # This makes sure Reporter like the SQLDriver are only created by
+ # the scheduler process
+ if source_only and not isinstance(driver, SourceInterface):
+ continue
+
connection = driver.getConnection(con_name, con_config)
connections[con_name] = connection
@@ -120,10 +142,11 @@
# Create default connections for drivers which need no
# connection information (e.g., 'timer' or 'zuul').
- for driver in self.drivers.values():
- if not hasattr(driver, 'getConnection'):
- connections[driver.name] = DefaultConnection(
- driver, driver.name, {})
+ if not source_only:
+ for driver in self.drivers.values():
+ if not hasattr(driver, 'getConnection'):
+ connections[driver.name] = DefaultConnection(
+ driver, driver.name, {})
self.connections = connections
diff --git a/zuul/lib/log_streamer.py b/zuul/lib/log_streamer.py
new file mode 100644
index 0000000..de072b6
--- /dev/null
+++ b/zuul/lib/log_streamer.py
@@ -0,0 +1,236 @@
+#!/usr/bin/env python
+
+# Copyright (c) 2016 IBM Corp.
+# Copyright 2017 Red Hat, Inc.
+#
+# Licensed under the Apache License, Version 2.0 (the "License"); you may
+# not use this file except in compliance with the License. You may obtain
+# a copy of the License at
+#
+# http://www.apache.org/licenses/LICENSE-2.0
+#
+# Unless required by applicable law or agreed to in writing, software
+# distributed under the License is distributed on an "AS IS" BASIS, WITHOUT
+# WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. See the
+# License for the specific language governing permissions and limitations
+# under the License.
+
+import os
+import os.path
+import pwd
+import re
+import select
+import socket
+import threading
+import time
+
+try:
+ import SocketServer as ss # python 2.x
+except ImportError:
+ import socketserver as ss # python 3
+
+
class Log(object):
    """An open log file paired with the stat information captured at open.

    The streaming code compares the saved inode and size against fresh
    ``os.stat`` results to detect log rotation or truncation.
    """

    def __init__(self, path):
        self.path = path
        # NOTE(review): opened in text mode with the platform default
        # encoding; the reader re-encodes chunks as UTF-8 before sending.
        # The handle is closed (best-effort) by the streaming caller.
        self.file = open(path)
        self.stat = os.stat(path)
        # Size at open time; updated by the follower as it reads.
        self.size = self.stat.st_size
+
+
class RequestHandler(ss.BaseRequestHandler):
    '''
    Class to handle a single log streaming request.

    The log streaming code was blatantly stolen from zuul_console.py. Only
    the (class/method/attribute) names were changed to protect the innocent.
    '''

    # Maximum bytes accepted for the request line (the build UUID).
    MAX_REQUEST_LEN = 1024
    # Seconds to wait for the client to send a complete request line.
    REQUEST_TIMEOUT = 10

    def get_command(self):
        '''Read the request line (a build UUID) from the client.

        :returns: the received text up to, but not including, the first
            newline.
        :raises Exception: on timeout, socket error event, or an
            over-long request.
        '''
        poll = select.poll()
        bitmask = (select.POLLIN | select.POLLERR |
                   select.POLLHUP | select.POLLNVAL)
        poll.register(self.request, bitmask)
        buffer = b''
        ret = None
        start = time.time()
        while True:
            elapsed = time.time() - start
            timeout = max(self.REQUEST_TIMEOUT - elapsed, 0)
            if not timeout:
                raise Exception("Timeout while waiting for input")
            # poll() takes its timeout in *milliseconds*; passing the raw
            # seconds value busy-looped at ~100Hz until the wall-clock
            # timeout above fired.
            for fd, event in poll.poll(int(timeout * 1000)):
                if event & select.POLLIN:
                    buffer += self.request.recv(self.MAX_REQUEST_LEN)
                else:
                    raise Exception("Received error event")
            if len(buffer) >= self.MAX_REQUEST_LEN:
                raise Exception("Request too long")
            try:
                ret = buffer.decode('utf-8')
                x = ret.find('\n')
                # NOTE(review): x > 0 means a request consisting of a
                # bare newline keeps waiting until the timeout fires.
                if x > 0:
                    return ret[:x]
            except UnicodeDecodeError:
                # Partial multi-byte sequence; keep reading.
                pass

    def handle(self):
        '''Per-connection entry point: validate the UUID and stream.'''
        build_uuid = self.get_command()
        build_uuid = build_uuid.rstrip()

        # validate build ID
        if not re.match("[0-9A-Fa-f]+$", build_uuid):
            msg = 'Build ID %s is not valid' % build_uuid
            self.request.sendall(msg.encode("utf-8"))
            return

        job_dir = os.path.join(self.server.jobdir_root, build_uuid)
        if not os.path.exists(job_dir):
            msg = 'Build ID %s not found' % build_uuid
            self.request.sendall(msg.encode("utf-8"))
            return

        # check if log file exists
        log_file = os.path.join(job_dir, 'ansible', 'ansible_log.txt')
        if not os.path.exists(log_file):
            msg = 'Log not found for build ID %s' % build_uuid
            self.request.sendall(msg.encode("utf-8"))
            return

        self.stream_log(log_file)

    def stream_log(self, log_file):
        '''Stream the log forever, re-opening it whenever it rotates.

        Returns only when the client disconnects.
        '''
        log = None
        while True:
            if log is not None:
                try:
                    log.file.close()
                except Exception:
                    # Best-effort close of the previous generation;
                    # narrowed from a bare except so signals still
                    # propagate.
                    pass
            # Wait for the file to become openable.
            while True:
                log = self.chunk_log(log_file)
                if log:
                    break
                time.sleep(0.5)
            while True:
                if self.follow_log(log):
                    # Log was rotated/truncated; re-open and re-send.
                    break
                else:
                    # Client went away; we're done.
                    return

    def chunk_log(self, log_file):
        '''Open the log and send its current contents.

        :returns: the open ``Log``, or None if the file could not be
            opened (caller retries).
        '''
        try:
            log = Log(log_file)
        except Exception:
            return
        while True:
            chunk = log.file.read(4096)
            if not chunk:
                break
            # sendall() rather than send(): a partial send() would
            # silently drop log data.
            self.request.sendall(chunk.encode('utf-8'))
        return log

    def follow_log(self, log):
        '''Tail an open log.

        :returns: True when the file appears rotated/truncated (caller
            should re-open), False when the client has disconnected.
        '''
        while True:
            # As long as we have unread data, keep reading/sending
            while True:
                chunk = log.file.read(4096)
                if chunk:
                    self.request.sendall(chunk.encode('utf-8'))
                else:
                    break

            # At this point, we are waiting for more data to be written
            time.sleep(0.5)

            # Check to see if the remote end has sent any data, if so,
            # discard
            r, w, e = select.select([self.request], [], [self.request], 0)
            if self.request in e:
                return False
            if self.request in r:
                ret = self.request.recv(1024)
                # Discard anything read, if input is eof, it has
                # disconnected.
                if not ret:
                    return False

            # See if the file has been truncated
            try:
                st = os.stat(log.path)
                if (st.st_ino != log.stat.st_ino or
                        st.st_size < log.size):
                    return True
            except Exception:
                # File removed out from under us; treat like rotation.
                return True
            log.size = st.st_size
+
+
class CustomForkingTCPServer(ss.ForkingTCPServer):
    '''
    Custom version that allows us to drop privileges after port binding.
    '''
    def __init__(self, *args, **kwargs):
        # Pop our custom keyword arguments *before* delegating: the base
        # class __init__ binds and activates the socket, which invokes
        # our server_bind() override below, and that override already
        # needs self.user to be set.
        self.user = kwargs.pop('user')
        self.jobdir_root = kwargs.pop('jobdir_root')
        ss.ForkingTCPServer.__init__(self, *args, **kwargs)

    def change_privs(self):
        '''
        Drop our privileges to the zuul user.
        '''
        # Only meaningful when started as root (e.g. to bind a
        # privileged port such as 79); otherwise a no-op.
        if os.getuid() != 0:
            return
        pw = pwd.getpwnam(self.user)
        # Clear supplementary groups, then switch gid before uid (the
        # reverse order would lose the permission to change groups).
        os.setgroups([])
        os.setgid(pw.pw_gid)
        os.setuid(pw.pw_uid)
        os.umask(0o022)

    def server_bind(self):
        # Bind (possibly as root), then immediately drop privileges for
        # everything that follows, including the forked handlers.
        self.allow_reuse_address = True
        ss.ForkingTCPServer.server_bind(self)
        if self.user:
            self.change_privs()

    def server_close(self):
        '''
        Overridden from base class to shutdown the socket immediately.
        '''
        try:
            self.socket.shutdown(socket.SHUT_RD)
            self.socket.close()
        except socket.error as e:
            # If it's already closed, don't error.
            if e.errno == socket.EBADF:
                return
            raise
+
+
class LogStreamer(object):
    '''
    Class implementing log streaming over the finger daemon port.

    Construction binds the listening socket (possibly before privileges
    are dropped by the server) and begins serving in a daemon thread so
    the caller regains control immediately.
    '''

    def __init__(self, user, host, port, jobdir_root):
        '''
        :param user: account to drop privileges to (falsy to keep the
            current user).
        :param host: address to bind to.
        :param port: TCP port to listen on.
        :param jobdir_root: root directory containing per-build job dirs.
        '''
        self.server = CustomForkingTCPServer((host, port),
                                             RequestHandler,
                                             user=user,
                                             jobdir_root=jobdir_root)

        # We start the actual serving within a thread so we can return to
        # the owner.
        self.thd = threading.Thread(target=self.server.serve_forever)
        self.thd.daemon = True
        self.thd.start()

    def stop(self):
        '''Stop serving and close the listening socket (idempotent).'''
        # Thread.isAlive() is the deprecated camelCase alias (removed in
        # Python 3.9); is_alive() has existed since 2.6 and is the
        # portable spelling.
        if self.thd.is_alive():
            self.server.shutdown()
            self.server.server_close()
diff --git a/zuul/manager/__init__.py b/zuul/manager/__init__.py
index 9507d15..4005b01 100644
--- a/zuul/manager/__init__.py
+++ b/zuul/manager/__init__.py
@@ -54,7 +54,6 @@
def _postConfig(self, layout):
self.log.info("Configured Pipeline Manager %s" % self.pipeline.name)
- self.log.info(" Source: %s" % self.pipeline.source)
self.log.info(" Requirements:")
for f in self.changeish_filters:
self.log.info(" %s" % f)
@@ -150,16 +149,14 @@
try:
self.log.info("Reporting start, action %s item %s" %
(self.pipeline.start_actions, item))
- ret = self.sendReport(self.pipeline.start_actions,
- self.pipeline.source, item)
+ ret = self.sendReport(self.pipeline.start_actions, item)
if ret:
self.log.error("Reporting item start %s received: %s" %
(item, ret))
except:
self.log.exception("Exception while reporting start:")
- def sendReport(self, action_reporters, source, item,
- message=None):
+ def sendReport(self, action_reporters, item, message=None):
"""Sends the built message off to configured reporters.
Takes the action_reporters, item, message and extra options and
@@ -168,7 +165,7 @@
report_errors = []
if len(action_reporters) > 0:
for reporter in action_reporters:
- ret = reporter.report(source, self.pipeline, item)
+ ret = reporter.report(self.pipeline, item)
if ret:
report_errors.append(ret)
if len(report_errors) == 0:
@@ -192,17 +189,6 @@
def getFailingDependentItems(self, item):
return None
- def getDependentItems(self, item):
- orig_item = item
- items = []
- while item.item_ahead:
- items.append(item.item_ahead)
- item = item.item_ahead
- self.log.info("Change %s depends on changes %s" %
- (orig_item.change,
- [x.change for x in items]))
- return items
-
def getItemForChange(self, change):
for item in self.pipeline.getAllItems():
if item.change.equals(change):
@@ -296,6 +282,10 @@
if not ignore_requirements:
for f in self.changeish_filters:
+ if f.connection_name != change.project.connection_name:
+ self.log.debug("Filter %s skipped for change %s due "
+ "to mismatched connections" % (f, change))
+ continue
if not f.matches(change):
self.log.debug("Change %s does not match pipeline "
"requirement %s" % (change, f))
@@ -326,9 +316,7 @@
item.enqueue_time = enqueue_time
item.live = live
self.reportStats(item)
- if not quiet:
- if len(self.pipeline.start_actions) > 0:
- self.reportStart(item)
+ item.quiet = quiet
self.enqueueChangesBehind(change, quiet, ignore_requirements,
change_queue)
zuul_driver = self.sched.connections.drivers['zuul']
@@ -364,7 +352,7 @@
def _executeJobs(self, item, jobs):
self.log.debug("Executing jobs for change %s" % item.change)
- dependent_items = self.getDependentItems(item)
+ build_set = item.current_build_set
for job in jobs:
self.log.debug("Found job %s for change %s" % (job, item.change))
try:
@@ -372,7 +360,8 @@
self.sched.nodepool.useNodeSet(nodeset)
build = self.sched.executor.execute(job, item,
self.pipeline,
- dependent_items)
+ build_set.dependent_items,
+ build_set.merger_items)
self.log.debug("Adding build %s of job %s to item %s" %
(build, job, item))
item.addBuild(build)
@@ -426,7 +415,7 @@
build.result = 'CANCELED'
canceled = True
canceled_jobs.add(build.job.name)
- for jobname, nodeset in old_build_set.nodesets.items()[:]:
+ for jobname, nodeset in list(old_build_set.nodesets.items()):
if jobname in canceled_jobs:
continue
self.sched.nodepool.returnNodeSet(nodeset)
@@ -437,40 +426,6 @@
canceled = True
return canceled
- def _makeMergerItem(self, item):
- # Create a dictionary with all info about the item needed by
- # the merger.
- number = None
- patchset = None
- refspec = None
- branch = None
- oldrev = None
- newrev = None
- if hasattr(item.change, 'number'):
- number = item.change.number
- patchset = item.change.patchset
- refspec = item.change.refspec
- branch = item.change.branch
- elif hasattr(item.change, 'newrev'):
- oldrev = item.change.oldrev
- newrev = item.change.newrev
- connection_name = self.pipeline.source.connection.connection_name
-
- project = item.change.project.name
- return dict(project=project,
- url=self.pipeline.source.getGitUrl(
- item.change.project),
- connection_name=connection_name,
- merge_mode=item.current_build_set.getMergeMode(project),
- refspec=refspec,
- branch=branch,
- ref=item.current_build_set.ref,
- number=number,
- patchset=patchset,
- oldrev=oldrev,
- newrev=newrev,
- )
-
def _loadDynamicLayout(self, item):
# Load layout
# Late import to break an import loop
@@ -487,14 +442,14 @@
loader.createDynamicLayout(
item.pipeline.layout.tenant,
build_set.files,
- include_config_repos=True)
+ include_config_projects=True)
# Then create the config a second time but without changes
# to config repos so that we actually use this config.
layout = loader.createDynamicLayout(
item.pipeline.layout.tenant,
build_set.files,
- include_config_repos=False)
+ include_config_projects=False)
except zuul.configloader.ConfigurationSyntaxError as e:
self.log.info("Configuration syntax error "
"in dynamic layout %s" %
@@ -536,16 +491,11 @@
self.log.debug("Scheduling merge for item %s (files: %s)" %
(item, files))
- dependent_items = self.getDependentItems(item)
- dependent_items.reverse()
- all_items = dependent_items + [item]
- merger_items = map(self._makeMergerItem, all_items)
build_set = item.current_build_set
build_set.merge_state = build_set.PENDING
- self.sched.merger.mergeChanges(merger_items,
- item.current_build_set,
- files,
- self.pipeline.precedence)
+ self.sched.merger.mergeChanges(build_set.merger_items,
+ item.current_build_set, files,
+ precedence=self.pipeline.precedence)
return False
def prepareItem(self, item):
@@ -556,6 +506,10 @@
build_set.setConfiguration()
if build_set.merge_state == build_set.NEW:
return self.scheduleMerge(item, ['zuul.yaml', '.zuul.yaml'])
+ if build_set.merge_state == build_set.PENDING:
+ return False
+ if build_set.unable_to_merge:
+ return False
if build_set.config_error:
return False
return True
@@ -628,6 +582,14 @@
self.cancelJobs(item)
if actionable:
ready = self.prepareItem(item) and self.prepareJobs(item)
+ # Starting jobs reporting should only be done once if there are
+ # jobs to run for this item.
+ if ready and len(self.pipeline.start_actions) > 0 \
+ and len(item.job_graph.jobs) > 0 \
+ and not item.reported_start \
+ and not item.quiet:
+ self.reportStart(item)
+ item.reported_start = True
if item.current_build_set.unable_to_merge:
failing_reasons.append("it has a merge conflict")
if item.current_build_set.config_error:
@@ -714,6 +676,7 @@
if event.merged:
build_set.commit = event.commit
build_set.files.setFiles(event.files)
+ build_set.repo_state = event.repo_state
elif event.updated:
build_set.commit = item.change.newrev
if not build_set.commit:
@@ -742,9 +705,9 @@
if self.changes_merge:
succeeded = item.didAllJobsSucceed()
merged = item.reported
+ source = item.change.project.source
if merged:
- merged = self.pipeline.source.isMerged(item.change,
- item.change.branch)
+ merged = source.isMerged(item.change, item.change.branch)
self.log.info("Reported change %s status: all-succeeded: %s, "
"merged: %s" % (item.change, succeeded, merged))
change_queue = item.queue
@@ -763,13 +726,25 @@
zuul_driver = self.sched.connections.drivers['zuul']
tenant = self.pipeline.layout.tenant
- zuul_driver.onChangeMerged(tenant, item.change,
- self.pipeline.source)
+ zuul_driver.onChangeMerged(tenant, item.change, source)
def _reportItem(self, item):
self.log.debug("Reporting change %s" % item.change)
ret = True # Means error as returned by trigger.report
- if item.getConfigError():
+
+ # In the case of failure, we may not hove completed an initial
+ # merge which would get the layout for this item, so in order
+ # to determine whether this item's project is in this
+ # pipeline, use the dynamic layout if available, otherwise,
+ # fall back to the current static layout as a best
+ # approximation.
+ layout = item.layout or self.pipeline.layout
+
+ if not layout.hasProject(item.change.project):
+ self.log.debug("Project %s not in pipeline %s for change %s" % (
+ item.change.project, self.pipeline, item.change))
+ actions = []
+ elif item.getConfigError():
self.log.debug("Invalid config for change %s" % item.change)
# TODOv3(jeblair): consider a new reporter action for this
actions = self.pipeline.merge_failure_actions
@@ -777,9 +752,12 @@
elif item.didMergerFail():
actions = self.pipeline.merge_failure_actions
item.setReportedResult('MERGER_FAILURE')
+ elif item.wasDequeuedNeedingChange():
+ actions = self.pipeline.failure_actions
+ item.setReportedResult('FAILURE')
elif not item.getJobs():
# We don't send empty reports with +1
- self.log.debug("No jobs for change %s" % item.change)
+ self.log.debug("No jobs for change %s" % (item.change,))
actions = []
elif item.didAllJobsSucceed():
self.log.debug("success %s" % (self.pipeline.success_actions))
@@ -790,7 +768,7 @@
actions = self.pipeline.failure_actions
item.setReportedResult('FAILURE')
self.pipeline._consecutive_failures += 1
- if self.pipeline._disabled:
+ if layout.hasProject(item.change.project) and self.pipeline._disabled:
actions = self.pipeline.disabled_actions
# Check here if we should disable so that we only use the disabled
# reporters /after/ the last disable_at failure is still reported as
@@ -802,7 +780,7 @@
try:
self.log.info("Reporting item %s, actions: %s" %
(item, actions))
- ret = self.sendReport(actions, self.pipeline.source, item)
+ ret = self.sendReport(actions, item)
if ret:
self.log.error("Reporting item %s received: %s" %
(item, ret))
diff --git a/zuul/manager/dependent.py b/zuul/manager/dependent.py
index 4c48568..6c56a30 100644
--- a/zuul/manager/dependent.py
+++ b/zuul/manager/dependent.py
@@ -38,13 +38,14 @@
self.log.debug("Building shared change queues")
change_queues = {}
project_configs = self.pipeline.layout.project_configs
+ tenant = self.pipeline.layout.tenant
for project_config in project_configs.values():
project_pipeline_config = project_config.pipelines.get(
self.pipeline.name)
if project_pipeline_config is None:
continue
- project = self.pipeline.source.getProject(project_config.name)
+ (trusted, project) = tenant.getProject(project_config.name)
queue_name = project_pipeline_config.queue_name
if queue_name and queue_name in change_queues:
change_queue = change_queues[queue_name]
@@ -78,16 +79,17 @@
self.pipeline.getQueue(change.project))
def isChangeReadyToBeEnqueued(self, change):
- if not self.pipeline.source.canMerge(change,
- self.getSubmitAllowNeeds()):
+ source = change.project.source
+ if not source.canMerge(change, self.getSubmitAllowNeeds()):
self.log.debug("Change %s can not merge, ignoring" % change)
return False
return True
def enqueueChangesBehind(self, change, quiet, ignore_requirements,
change_queue):
- to_enqueue = []
self.log.debug("Checking for changes needing %s:" % change)
+ to_enqueue = []
+ source = change.project.source
if not hasattr(change, 'needed_by_changes'):
self.log.debug(" %s does not support dependencies" % type(change))
return
@@ -99,8 +101,7 @@
(other_change, other_change.project,
change_queue))
continue
- if self.pipeline.source.canMerge(other_change,
- self.getSubmitAllowNeeds()):
+ if source.canMerge(other_change, self.getSubmitAllowNeeds()):
self.log.debug(" Change %s needs %s and is ready to merge" %
(other_change, change))
to_enqueue.append(other_change)
@@ -130,6 +131,7 @@
def checkForChangesNeededBy(self, change, change_queue):
self.log.debug("Checking for changes needed by %s:" % change)
+ source = change.project.source
# Return true if okay to proceed enqueing this change,
# false if the change should not be enqueued.
if not hasattr(change, 'needs_changes'):
@@ -163,8 +165,7 @@
self.log.debug(" Needed change is already ahead "
"in the queue")
continue
- if self.pipeline.source.canMerge(needed_change,
- self.getSubmitAllowNeeds()):
+ if source.canMerge(needed_change, self.getSubmitAllowNeeds()):
self.log.debug(" Change %s is needed" % needed_change)
if needed_change not in changes_needed:
changes_needed.append(needed_change)
diff --git a/zuul/merger/client.py b/zuul/merger/client.py
index 990d33e..c98f20e 100644
--- a/zuul/merger/client.py
+++ b/zuul/merger/client.py
@@ -56,7 +56,7 @@
self.__merge_client.onBuildCompleted(job)
-class MergeJob(gear.Job):
+class MergeJob(gear.TextJob):
def __init__(self, *args, **kw):
super(MergeJob, self).__init__(*args, **kw)
self.__event = threading.Event()
@@ -107,22 +107,17 @@
timeout=300)
return job
- def mergeChanges(self, items, build_set, files=None,
+ def mergeChanges(self, items, build_set, files=None, repo_state=None,
precedence=zuul.model.PRECEDENCE_NORMAL):
data = dict(items=items,
- files=files)
+ files=files,
+ repo_state=repo_state)
self.submitJob('merger:merge', data, build_set, precedence)
- def updateRepo(self, project, url, build_set,
- precedence=zuul.model.PRECEDENCE_NORMAL):
- data = dict(project=project,
- url=url)
- self.submitJob('merger:update', data, build_set, precedence)
-
- def getFiles(self, project, url, branch, files,
+ def getFiles(self, connection_name, project_name, branch, files,
precedence=zuul.model.PRECEDENCE_HIGH):
- data = dict(project=project,
- url=url,
+ data = dict(connection=connection_name,
+ project=project_name,
branch=branch,
files=files)
job = self.submitJob('merger:cat', data, None, precedence)
@@ -135,6 +130,7 @@
updated = data.get('updated', False)
commit = data.get('commit')
files = data.get('files', {})
+ repo_state = data.get('repo_state', {})
job.files = files
self.log.info("Merge %s complete, merged: %s, updated: %s, "
"commit: %s" %
@@ -142,7 +138,8 @@
job.setComplete()
if job.build_set:
self.sched.onMergeCompleted(job.build_set, zuul_url,
- merged, updated, commit, files)
+ merged, updated, commit, files,
+ repo_state)
# The test suite expects the job to be removed from the
# internal account after the wake flag is set.
self.jobs.remove(job)
diff --git a/zuul/merger/merger.py b/zuul/merger/merger.py
index d07a95b..6cfd904 100644
--- a/zuul/merger/merger.py
+++ b/zuul/merger/merger.py
@@ -14,6 +14,7 @@
# under the License.
import git
+import gitdb
import os
import logging
@@ -41,13 +42,17 @@
class Repo(object):
- log = logging.getLogger("zuul.Repo")
-
- def __init__(self, remote, local, email, username):
+ def __init__(self, remote, local, email, username,
+ cache_path=None, logger=None):
+ if logger is None:
+ self.log = logging.getLogger("zuul.Repo")
+ else:
+ self.log = logger
self.remote_url = remote
self.local_path = local
self.email = email
self.username = username
+ self.cache_path = cache_path
self._initialized = False
try:
self._ensure_cloned()
@@ -59,18 +64,32 @@
if self._initialized and repo_is_cloned:
return
# If the repo does not exist, clone the repo.
+ rewrite_url = False
if not repo_is_cloned:
self.log.debug("Cloning from %s to %s" % (self.remote_url,
self.local_path))
- git.Repo.clone_from(self.remote_url, self.local_path)
+ if self.cache_path:
+ git.Repo.clone_from(self.cache_path, self.local_path)
+ rewrite_url = True
+ else:
+ git.Repo.clone_from(self.remote_url, self.local_path)
repo = git.Repo(self.local_path)
- if self.email:
- repo.config_writer().set_value('user', 'email',
- self.email)
- if self.username:
- repo.config_writer().set_value('user', 'name',
- self.username)
- repo.config_writer().write()
+ # Create local branches corresponding to all the remote branches
+ if not repo_is_cloned:
+ origin = repo.remotes.origin
+ for ref in origin.refs:
+ if ref.remote_head == 'HEAD':
+ continue
+ repo.create_head(ref.remote_head, ref, force=True)
+ with repo.config_writer() as config_writer:
+ if self.email:
+ config_writer.set_value('user', 'email', self.email)
+ if self.username:
+ config_writer.set_value('user', 'name', self.username)
+ config_writer.write()
+ if rewrite_url:
+ with repo.remotes.origin.config_writer as config_writer:
+ config_writer.set('url', self.remote_url)
self._initialized = True
def isInitialized(self):
@@ -118,6 +137,10 @@
origin = repo.remotes.origin
return branch in origin.refs
+ def getBranches(self):
+ repo = self.createRepoObject()
+ return [x.name for x in repo.heads]
+
def getCommitFromRef(self, refname):
repo = self.createRepoObject()
if refname not in repo.refs:
@@ -125,6 +148,31 @@
ref = repo.refs[refname]
return ref.commit
+ def getRefs(self):
+ repo = self.createRepoObject()
+ return repo.refs
+
+ def setRef(self, path, hexsha, repo=None):
+ if repo is None:
+ repo = self.createRepoObject()
+ binsha = gitdb.util.to_bin_sha(hexsha)
+ obj = git.objects.Object.new_from_sha(repo, binsha)
+ self.log.debug("Create reference %s", path)
+ git.refs.Reference.create(repo, path, obj, force=True)
+
+ def setRefs(self, refs):
+ repo = self.createRepoObject()
+ current_refs = {}
+ for ref in repo.refs:
+ current_refs[ref.path] = ref
+ unseen = set(current_refs.keys())
+ for path, hexsha in refs.items():
+ self.setRef(path, hexsha, repo)
+ unseen.discard(path)
+ for path in unseen:
+ self.log.debug("Delete reference %s", path)
+ git.refs.SymbolicReference.delete(repo, ref.path)
+
def checkout(self, ref):
repo = self.createRepoObject()
self.log.debug("Checking out %s" % ref)
@@ -132,6 +180,13 @@
reset_repo_to_head(repo)
return repo.head.commit
+ def checkoutLocalBranch(self, branch):
+ repo = self.createRepoObject()
+ # Perform a hard reset before checking out so that we clean up
+ # anything that might be left over from a merge.
+ reset_repo_to_head(repo)
+ repo.heads[branch].checkout()
+
def cherryPick(self, ref):
repo = self.createRepoObject()
self.log.debug("Cherry-picking %s" % ref)
@@ -200,16 +255,24 @@
tree = repo.commit(commit).tree
for fn in files:
if fn in tree:
- ret[fn] = tree[fn].data_stream.read()
+ ret[fn] = tree[fn].data_stream.read().decode('utf8')
else:
ret[fn] = None
return ret
+ def deleteRemote(self, remote):
+ repo = self.createRepoObject()
+ repo.delete_remote(repo.remotes[remote])
+
class Merger(object):
- log = logging.getLogger("zuul.Merger")
-
- def __init__(self, working_root, connections, email, username):
+ def __init__(self, working_root, connections, email, username,
+ cache_root=None, logger=None):
+ self.logger = logger
+ if logger is None:
+ self.log = logging.getLogger("zuul.Merger")
+ else:
+ self.log = logger
self.repos = {}
self.working_root = working_root
if not os.path.exists(working_root):
@@ -217,6 +280,7 @@
self.connections = connections
self.email = email
self.username = username
+ self.cache_root = cache_root
def _get_ssh_cmd(self, connection_name):
sshkey = self.connections.connections.get(connection_name).\
@@ -234,49 +298,88 @@
elif 'GIT_SSH' in os.environ:
del os.environ['GIT_SSH']
- def addProject(self, project, url):
+ def _addProject(self, hostname, project_name, url):
repo = None
+ key = '/'.join([hostname, project_name])
try:
- path = os.path.join(self.working_root, project)
- repo = Repo(url, path, self.email, self.username)
+ path = os.path.join(self.working_root, hostname, project_name)
+ if self.cache_root:
+ cache_path = os.path.join(self.cache_root, hostname,
+ project_name)
+ else:
+ cache_path = None
+ repo = Repo(url, path, self.email, self.username, cache_path,
+ self.logger)
- self.repos[project] = repo
+ self.repos[key] = repo
except Exception:
- self.log.exception("Unable to add project %s" % project)
+ self.log.exception("Unable to add project %s/%s" %
+ (hostname, project_name))
return repo
- def getRepo(self, project, url):
- if project in self.repos:
- return self.repos[project]
+ def getRepo(self, connection_name, project_name):
+ source = self.connections.getSource(connection_name)
+ project = source.getProject(project_name)
+ hostname = project.canonical_hostname
+ url = source.getGitUrl(project)
+ key = '/'.join([hostname, project_name])
+ if key in self.repos:
+ return self.repos[key]
if not url:
- raise Exception("Unable to set up repo for project %s"
- " without a url" % (project,))
- return self.addProject(project, url)
+ raise Exception("Unable to set up repo for project %s/%s"
+ " without a url" %
+ (connection_name, project_name,))
+ return self._addProject(hostname, project_name, url)
- def updateRepo(self, project, url):
+ def updateRepo(self, connection_name, project_name):
# TODOv3(jhesketh): Reimplement
# da90a50b794f18f74de0e2c7ec3210abf79dda24 after merge..
# Likely we'll handle connection context per projects differently.
# self._setGitSsh()
- repo = self.getRepo(project, url)
+ repo = self.getRepo(connection_name, project_name)
try:
- self.log.info("Updating local repository %s", project)
+ self.log.info("Updating local repository %s/%s",
+ connection_name, project_name)
repo.reset()
except Exception:
- self.log.exception("Unable to update %s", project)
+ self.log.exception("Unable to update %s/%s",
+ connection_name, project_name)
- def checkoutBranch(self, project, url, branch):
- repo = self.getRepo(project, url)
- if repo.hasBranch(branch):
- self.log.info("Checking out branch %s of %s" % (branch, project))
- head = repo.getBranchHead(branch)
- repo.checkout(head)
- else:
- raise Exception("Project %s does not have branch %s" %
- (project, branch))
+ def checkoutBranch(self, connection_name, project_name, branch):
+ self.log.info("Checking out %s/%s branch %s",
+ connection_name, project_name, branch)
+ repo = self.getRepo(connection_name, project_name)
+ repo.checkoutLocalBranch(branch)
+
+ def _saveRepoState(self, connection_name, project_name, repo,
+ repo_state, recent):
+ projects = repo_state.setdefault(connection_name, {})
+ project = projects.setdefault(project_name, {})
+ for ref in repo.getRefs():
+ if ref.path.startswith('refs/zuul/'):
+ continue
+ if ref.path.startswith('refs/remotes/'):
+ continue
+ if ref.path.startswith('refs/heads/'):
+ branch = ref.path[len('refs/heads/'):]
+ key = (connection_name, project_name, branch)
+ if key not in recent:
+ recent[key] = ref.object
+ project[ref.path] = ref.object.hexsha
+
+ def _restoreRepoState(self, connection_name, project_name, repo,
+ repo_state):
+ projects = repo_state.get(connection_name, {})
+ project = projects.get(project_name, {})
+ if not project:
+ # We don't have a state for this project.
+ return
+ self.log.debug("Restore repo state for project %s/%s",
+ connection_name, project_name)
+ repo.setRefs(project)
def _mergeChange(self, item, ref):
- repo = self.getRepo(item['project'], item['url'])
+ repo = self.getRepo(item['connection'], item['project'])
try:
repo.checkout(ref)
except Exception:
@@ -304,27 +407,13 @@
return commit
- def _mergeItem(self, item, recent):
- self.log.debug("Processing refspec %s for project %s / %s ref %s" %
- (item['refspec'], item['project'], item['branch'],
- item['ref']))
- repo = self.getRepo(item['project'], item['url'])
- key = (item['project'], item['branch'])
+ def _mergeItem(self, item, recent, repo_state):
+ self.log.debug("Processing refspec %s for project %s/%s / %s ref %s" %
+ (item['refspec'], item['connection'],
+ item['project'], item['branch'], item['ref']))
+ repo = self.getRepo(item['connection'], item['project'])
+ key = (item['connection'], item['project'], item['branch'])
- # See if we have a commit for this change already in this repo
- zuul_ref = item['branch'] + '/' + item['ref']
- with repo.createRepoObject().git.custom_environment(
- GIT_SSH_COMMAND=self._get_ssh_cmd(item['connection_name'])):
- commit = repo.getCommitFromRef(zuul_ref)
- if commit:
- self.log.debug(
- "Found commit %s for ref %s" % (commit, zuul_ref))
- # Store this as the most recent commit for this
- # project-branch
- recent[key] = commit
- return commit
-
- self.log.debug("Unable to find commit for ref %s" % (zuul_ref,))
# We need to merge the change
# Get the most recent commit for this project-branch
base = recent.get(key)
@@ -337,12 +426,19 @@
except Exception:
self.log.exception("Unable to reset repo %s" % repo)
return None
+ self._restoreRepoState(item['connection'], item['project'], repo,
+ repo_state)
+
base = repo.getBranchHead(item['branch'])
+ # Save the repo state so that later mergers can repeat
+ # this process.
+ self._saveRepoState(item['connection'], item['project'], repo,
+ repo_state, recent)
else:
self.log.debug("Found base commit %s for %s" % (base, key,))
# Merge the change
with repo.createRepoObject().git.custom_environment(
- GIT_SSH_COMMAND=self._get_ssh_cmd(item['connection_name'])):
+ GIT_SSH_COMMAND=self._get_ssh_cmd(item['connection'])):
commit = self._mergeChange(item, base)
if not commit:
return None
@@ -351,21 +447,27 @@
# Set the Zuul ref for this item to point to the most recent
# commits of each project-branch
for key, mrc in recent.items():
- project, branch = key
+ connection, project, branch = key
+ zuul_ref = None
try:
- repo = self.getRepo(project, None)
+ repo = self.getRepo(connection, project)
zuul_ref = branch + '/' + item['ref']
- repo.createZuulRef(zuul_ref, mrc)
+ if not repo.getCommitFromRef(zuul_ref):
+ repo.createZuulRef(zuul_ref, mrc)
except Exception:
self.log.exception("Unable to set zuul ref %s for "
"item %s" % (zuul_ref, item))
return None
return commit
- def mergeChanges(self, items, files=None):
+ def mergeChanges(self, items, files=None, repo_state=None):
+ # connection+project+branch -> commit
recent = {}
commit = None
read_files = []
+ # connection -> project -> ref -> commit
+ if repo_state is None:
+ repo_state = {}
for item in items:
if item.get("number") and item.get("patchset"):
self.log.debug("Merging for change %s,%s." %
@@ -373,19 +475,22 @@
elif item.get("newrev") and item.get("oldrev"):
self.log.debug("Merging for rev %s with oldrev %s." %
(item["newrev"], item["oldrev"]))
- commit = self._mergeItem(item, recent)
+ commit = self._mergeItem(item, recent, repo_state)
if not commit:
return None
if files:
- repo = self.getRepo(item['project'], item['url'])
+ repo = self.getRepo(item['connection'], item['project'])
repo_files = repo.getFiles(files, commit=commit)
- read_files.append(dict(project=item['project'],
- branch=item['branch'],
- files=repo_files))
- if files:
- return commit.hexsha, read_files
- return commit.hexsha
+ read_files.append(dict(
+ connection=item['connection'],
+ project=item['project'],
+ branch=item['branch'],
+ files=repo_files))
+ ret_recent = {}
+ for k, v in recent.items():
+ ret_recent[k] = v.hexsha
+ return commit.hexsha, read_files, repo_state, ret_recent
- def getFiles(self, project, url, branch, files):
- repo = self.getRepo(project, url)
+ def getFiles(self, connection_name, project_name, branch, files):
+ repo = self.getRepo(connection_name, project_name)
return repo.getFiles(files, branch=branch)
diff --git a/zuul/merger/server.py b/zuul/merger/server.py
index 540105e..1a32f96 100644
--- a/zuul/merger/server.py
+++ b/zuul/merger/server.py
@@ -54,7 +54,7 @@
port = self.config.get('gearman', 'port')
else:
port = 4730
- self.worker = gear.Worker('Zuul Merger')
+ self.worker = gear.TextWorker('Zuul Merger')
self.worker.addServer(server, port)
self.log.debug("Waiting for server")
self.worker.waitForServer()
@@ -67,7 +67,6 @@
def register(self):
self.worker.registerFunction("merger:merge")
- self.worker.registerFunction("merger:update")
self.worker.registerFunction("merger:cat")
def stop(self):
@@ -88,9 +87,6 @@
if job.name == 'merger:merge':
self.log.debug("Got merge job: %s" % job.unique)
self.merge(job)
- elif job.name == 'merger:update':
- self.log.debug("Got update job: %s" % job.unique)
- self.update(job)
elif job.name == 'merger:cat':
self.log.debug("Got cat job: %s" % job.unique)
self.cat(job)
@@ -107,30 +103,21 @@
def merge(self, job):
args = json.loads(job.arguments)
- ret = self.merger.mergeChanges(args['items'], args.get('files'))
+ ret = self.merger.mergeChanges(args['items'], args.get('files'),
+ args.get('repo_state'))
result = dict(merged=(ret is not None),
zuul_url=self.zuul_url)
- if args.get('files'):
- if ret:
- result['commit'], result['files'] = ret
- else:
- result['commit'], result['files'] = (None, None)
+ if ret is None:
+ result['commit'] = result['files'] = result['repo_state'] = None
else:
- result['commit'] = ret
- job.sendWorkComplete(json.dumps(result))
-
- def update(self, job):
- args = json.loads(job.arguments)
- self.merger.updateRepo(args['project'],
- args['url'])
- result = dict(updated=True,
- zuul_url=self.zuul_url)
+ (result['commit'], result['files'], result['repo_state'],
+ recent) = ret
job.sendWorkComplete(json.dumps(result))
def cat(self, job):
args = json.loads(job.arguments)
- self.merger.updateRepo(args['project'], args['url'])
- files = self.merger.getFiles(args['project'], args['url'],
+ self.merger.updateRepo(args['connection'], args['project'])
+ files = self.merger.getFiles(args['connection'], args['project'],
args['branch'], args['files'])
result = dict(updated=True,
files=files,
diff --git a/zuul/model.py b/zuul/model.py
index d12abf4..6ad34ff 100644
--- a/zuul/model.py
+++ b/zuul/model.py
@@ -14,10 +14,8 @@
import abc
import copy
-
import logging
import os
-import re
import struct
import time
from uuid import uuid4
@@ -29,8 +27,6 @@
'ordereddict.OrderedDict'])
-EMPTY_GIT_REF = '0' * 40 # git sha of all zeros, used during creates/deletes
-
MERGER_MERGE = 1 # "git merge"
MERGER_MERGE_RESOLVE = 2 # "git merge -s resolve"
MERGER_CHERRY_PICK = 3 # "git cherry-pick"
@@ -79,25 +75,6 @@
STATE_DELETING])
-def time_to_seconds(s):
- if s.endswith('s'):
- return int(s[:-1])
- if s.endswith('m'):
- return int(s[:-1]) * 60
- if s.endswith('h'):
- return int(s[:-1]) * 60 * 60
- if s.endswith('d'):
- return int(s[:-1]) * 24 * 60 * 60
- if s.endswith('w'):
- return int(s[:-1]) * 7 * 24 * 60 * 60
- raise Exception("Unable to parse time value: %s" % s)
-
-
-def normalizeCategory(name):
- name = name.lower()
- return re.sub(' ', '-', name)
-
-
class Attributes(object):
"""A class to hold attributes for string formatting."""
@@ -106,11 +83,7 @@
class Pipeline(object):
- """A configuration that ties triggers, reporters, managers and sources.
-
- Source
- Where changes should come from. It is a named connection to
- an external service defined in zuul.conf
+ """A configuration that ties together triggers, reporters and managers
Trigger
A description of which events should be processed
@@ -136,7 +109,6 @@
self.manager = None
self.queues = []
self.precedence = PRECEDENCE_NORMAL
- self.source = None
self.triggers = []
self.start_actions = []
self.success_actions = []
@@ -351,9 +323,12 @@
# This makes a Project instance a unique identifier for a given
# project from a given source.
- def __init__(self, name, connection_name, foreign=False):
+ def __init__(self, name, source, foreign=False):
self.name = name
- self.connection_name = connection_name
+ self.source = source
+ self.connection_name = source.connection.connection_name
+ self.canonical_hostname = source.canonical_hostname
+ self.canonical_name = source.canonical_hostname + '/' + name
# foreign projects are those referenced in dependencies
# of layout projects, this should matter
# when deciding whether to enqueue their changes
@@ -435,6 +410,37 @@
self._keys = keys
+class Group(object):
+ """A logical group of nodes for use by a job.
+
+ A Group is a named set of node names that will be provided to
+ jobs in the inventory to describe logical units where some subset of tasks
+ run.
+ """
+
+ def __init__(self, name, nodes):
+ self.name = name
+ self.nodes = nodes
+
+ def __repr__(self):
+ return '<Group %s %s>' % (self.name, str(self.nodes))
+
+ def __ne__(self, other):
+ return not self.__eq__(other)
+
+ def __eq__(self, other):
+ if not isinstance(other, Group):
+ return False
+ return (self.name == other.name and
+ self.nodes == other.nodes)
+
+ def toDict(self):
+ return {
+ 'name': self.name,
+ 'nodes': self.nodes
+ }
+
+
class NodeSet(object):
"""A set of nodes.
@@ -448,6 +454,7 @@
def __init__(self, name=None):
self.name = name or ''
self.nodes = OrderedDict()
+ self.groups = OrderedDict()
def __ne__(self, other):
return not self.__eq__(other)
@@ -462,6 +469,8 @@
n = NodeSet(self.name)
for name, node in self.nodes.items():
n.addNode(Node(node.name, node.image))
+ for name, group in self.groups.items():
+ n.addGroup(Group(group.name, group.nodes[:]))
return n
def addNode(self, node):
@@ -470,14 +479,22 @@
self.nodes[node.name] = node
def getNodes(self):
- return self.nodes.values()
+ return list(self.nodes.values())
+
+ def addGroup(self, group):
+ if group.name in self.groups:
+ raise Exception("Duplicate group in %s" % (self,))
+ self.groups[group.name] = group
+
+ def getGroups(self):
+ return list(self.groups.values())
def __repr__(self):
if self.name:
name = self.name + ' '
else:
name = ''
- return '<NodeSet %s%s>' % (name, self.nodes)
+ return '<NodeSet %s%s%s>' % (name, self.nodes, self.groups)
class NodeRequest(object):
@@ -493,9 +510,10 @@
self.stat = None
self.uid = uuid4().hex
self.id = None
- # Zuul internal failure flag (not stored in ZK so it's not
+ # Zuul internal flags (not stored in ZK so they are not
# overwritten).
self.failed = False
+ self.canceled = False
@property
def fulfilled(self):
@@ -676,22 +694,22 @@
class ZuulRole(Role):
"""A reference to an ansible role in a Zuul project."""
- def __init__(self, target_name, connection_name, project_name, trusted):
+ def __init__(self, target_name, connection_name, project_name):
super(ZuulRole, self).__init__(target_name)
self.connection_name = connection_name
self.project_name = project_name
- self.trusted = trusted
def __repr__(self):
return '<ZuulRole %s %s>' % (self.project_name, self.target_name)
+ __hash__ = object.__hash__
+
def __eq__(self, other):
if not isinstance(other, ZuulRole):
return False
return (super(ZuulRole, self).__eq__(other) and
self.connection_name == other.connection_name,
- self.project_name == other.project_name,
- self.trusted == other.trusted)
+ self.project_name == other.project_name)
def toDict(self):
# Render to a dict to use in passing json to the executor
@@ -699,7 +717,6 @@
d['type'] = 'zuul'
d['connection'] = self.connection_name
d['project'] = self.project_name
- d['trusted'] = self.trusted
return d
@@ -777,8 +794,9 @@
attempts=3,
final=False,
roles=frozenset(),
- repos=frozenset(),
+ required_projects={},
allowed_projects=None,
+ override_branch=None,
)
# These are generally internal attributes which are not
@@ -814,6 +832,8 @@
return False
return True
+ __hash__ = object.__hash__
+
def __str__(self):
return self.name
@@ -843,6 +863,11 @@
Job._deepUpdate(v, other_vars)
self.variables = v
+ def updateProjects(self, other_projects):
+ required_projects = self.required_projects
+ Job._deepUpdate(required_projects, other_projects)
+ self.required_projects = required_projects
+
@staticmethod
def _deepUpdate(a, b):
# Merge nested dictionaries if possible, otherwise, overwrite
@@ -894,7 +919,8 @@
"%s=%s with variant %s" % (
repr(self), k, other._get(k),
repr(other)))
- if k not in set(['pre_run', 'post_run', 'roles', 'variables']):
+ if k not in set(['pre_run', 'post_run', 'roles', 'variables',
+ 'required_projects']):
setattr(self, k, copy.deepcopy(other._get(k)))
# Don't set final above so that we don't trip an error halfway
@@ -910,6 +936,8 @@
self.roles = self.roles.union(other.roles)
if other._get('variables') is not None:
self.updateVariables(other.variables)
+ if other._get('required_projects') is not None:
+ self.updateProjects(other.required_projects)
for k in self.context_attributes:
if (other._get(k) is not None and
@@ -937,6 +965,14 @@
return True
+class JobProject(object):
+ """ A reference to a project from a job. """
+
+ def __init__(self, project_name, override_branch=None):
+ self.project_name = project_name
+ self.override_branch = override_branch
+
+
class JobList(object):
""" A list of jobs in a project's pipeline. """
@@ -952,7 +988,7 @@
def inheritFrom(self, other):
for jobname, jobs in other.jobs.items():
if jobname in self.jobs:
- self.jobs[jobname].append(jobs)
+ self.jobs[jobname].extend(jobs)
else:
self.jobs[jobname] = jobs
@@ -991,7 +1027,7 @@
raise
def getJobs(self):
- return self.jobs.values() # Report in the order of the layout config
+ return list(self.jobs.values()) # Report in the order of layout cfg
def _getDirectDependentJobs(self, parent_job):
ret = set()
@@ -1106,20 +1142,23 @@
"""
def __init__(self):
- self.projects = {}
+ self.connections = {}
def __repr__(self):
- return '<RepoFiles %s>' % self.projects
+ return '<RepoFiles %s>' % self.connections
def setFiles(self, items):
- self.projects = {}
+ self.hostnames = {}
for item in items:
- project = self.projects.setdefault(item['project'], {})
+ connection = self.connections.setdefault(
+ item['connection'], {})
+ project = connection.setdefault(item['project'], {})
branch = project.setdefault(item['branch'], {})
branch.update(item['files'])
- def getFile(self, project, branch, fn):
- return self.projects.get(project, {}).get(branch, {}).get(fn)
+ def getFile(self, connection_name, project_name, branch, fn):
+ host = self.connections.get(connection_name, {})
+ return host.get(project_name, {}).get(branch, {}).get(fn)
class BuildSet(object):
@@ -1151,7 +1190,6 @@
def __init__(self, item):
self.item = item
- self.other_changes = []
self.builds = {}
self.result = None
self.next_build_set = None
@@ -1159,6 +1197,8 @@
self.ref = None
self.commit = None
self.zuul_url = None
+ self.dependent_items = None
+ self.merger_items = None
self.unable_to_merge = False
self.config_error = None # None or an error message string.
self.failing_reasons = []
@@ -1166,6 +1206,7 @@
self.nodesets = {} # job -> nodeset
self.node_requests = {} # job -> reqs
self.files = RepoFiles()
+ self.repo_state = {}
self.layout = None
self.tries = {}
@@ -1179,13 +1220,19 @@
# The change isn't enqueued until after it's created
# so we don't know what the other changes ahead will be
# until jobs start.
- if not self.other_changes:
+ if self.dependent_items is None:
+ items = []
next_item = self.item.item_ahead
while next_item:
- self.other_changes.append(next_item.change)
+ items.append(next_item)
next_item = next_item.item_ahead
+ self.dependent_items = items
if not self.ref:
self.ref = 'Z' + uuid4().hex
+ if self.merger_items is None:
+ items = [self.item] + self.dependent_items
+ items.reverse()
+ self.merger_items = [i.makeMergerItem() for i in items]
def getStateName(self, state_num):
return self.states_map.get(
@@ -1205,7 +1252,7 @@
return self.builds.get(job_name)
def getBuilds(self):
- keys = self.builds.keys()
+ keys = list(self.builds.keys())
keys.sort()
return [self.builds.get(x) for x in keys]
@@ -1234,12 +1281,33 @@
del self.node_requests[job_name]
def getTries(self, job_name):
- return self.tries.get(job_name)
+ return self.tries.get(job_name, 0)
- def getMergeMode(self, job_name):
- if not self.layout or job_name not in self.layout.project_configs:
- return MERGER_MERGE_RESOLVE
- return self.layout.project_configs[job_name].merge_mode
+ def getMergeMode(self):
+ # We may be called before this build set has a shadow layout
+ # (ie, we are called to perform the merge to create that
+ # layout). It's possible that the change we are merging will
+ # update the merge-mode for the project, but there's not much
+ # we can do about that here. Instead, do the best we can by
+ # using the nearest shadow layout to determine the merge mode,
+ # or if that fails, the current live layout, or if that fails,
+ # use the default: merge-resolve.
+ item = self.item
+ layout = None
+ while item:
+ layout = item.current_build_set.layout
+ if layout:
+ break
+ item = item.item_ahead
+ if not layout:
+ layout = self.item.pipeline.layout
+ if layout:
+ project = self.item.change.project
+ project_config = layout.project_configs.get(
+ project.canonical_name)
+ if project_config:
+ return project_config.merge_mode
+ return MERGER_MERGE_RESOLVE
class QueueItem(object):
@@ -1249,6 +1317,7 @@
holds the current `BuildSet` as well as all previous `BuildSets` that were
produced for this `QueueItem`.
"""
+ log = logging.getLogger("zuul.QueueItem")
def __init__(self, queue, change):
self.pipeline = queue.pipeline
@@ -1263,6 +1332,8 @@
self.enqueue_time = None
self.dequeue_time = None
self.reported = False
+ self.reported_start = False
+ self.quiet = False
self.active = False # Whether an item is within an active window
self.live = True # Whether an item is intended to be processed at all
self.layout = None # This item's shadow layout
@@ -1370,6 +1441,9 @@
def getConfigError(self):
return self.current_build_set.config_error
+ def wasDequeuedNeedingChange(self):
+ return self.dequeued_needing_change
+
def isHoldingFollowingChanges(self):
if not self.live:
return False
@@ -1506,6 +1580,35 @@
fakebuild.result = 'SKIPPED'
self.addBuild(fakebuild)
+ def formatUrlPattern(self, url_pattern, job=None, build=None):
+ url = None
+ # Produce safe versions of objects which may be useful in
+ # result formatting, but don't allow users to crawl through
+ # the entire data structure where they might be able to access
+ # secrets, etc.
+ safe_change = self.change.getSafeAttributes()
+ safe_pipeline = self.pipeline.getSafeAttributes()
+ safe_job = job.getSafeAttributes() if job else {}
+ safe_build = build.getSafeAttributes() if build else {}
+ try:
+ url = url_pattern.format(change=safe_change,
+ pipeline=safe_pipeline,
+ job=safe_job,
+ build=safe_build)
+ except KeyError as e:
+ self.log.error("Error while formatting url for job %s: unknown "
+ "key %s in pattern %s"
+ % (job, e.args[0], url_pattern))
+ except AttributeError as e:
+ self.log.error("Error while formatting url for job %s: unknown "
+ "attribute %s in pattern %s"
+ % (job, e.args[0], url_pattern))
+ except Exception:
+ self.log.exception("Error while formatting url for job %s with "
+ "pattern %s:" % (job, url_pattern))
+
+ return url
+
def formatJobResult(self, job):
build = self.current_build_set.getBuild(job.name)
result = build.result
@@ -1521,22 +1624,8 @@
if job.failure_url:
pattern = job.failure_url
url = None
- # Produce safe versions of objects which may be useful in
- # result formatting, but don't allow users to crawl through
- # the entire data structure where they might be able to access
- # secrets, etc.
- safe_change = self.change.getSafeAttributes()
- safe_pipeline = self.pipeline.getSafeAttributes()
- safe_job = job.getSafeAttributes()
- safe_build = build.getSafeAttributes()
if pattern:
- try:
- url = pattern.format(change=safe_change,
- pipeline=safe_pipeline,
- job=safe_job,
- build=safe_build)
- except Exception:
- pass # FIXME: log this or something?
+ url = self.formatUrlPattern(pattern, job, build)
if not url:
url = build.url or job.name
return (result, url)
@@ -1673,6 +1762,43 @@
ret += '\n'
return ret
+ def makeMergerItem(self):
+ # Create a dictionary with all info about the item needed by
+ # the merger.
+ number = None
+ patchset = None
+ oldrev = None
+ newrev = None
+ refspec = None
+ if hasattr(self.change, 'number'):
+ number = self.change.number
+ patchset = self.change.patchset
+ refspec = self.change.refspec
+ branch = self.change.branch
+ elif hasattr(self.change, 'newrev'):
+ oldrev = self.change.oldrev
+ newrev = self.change.newrev
+ branch = self.change.ref
+ else:
+ oldrev = None
+ newrev = None
+ branch = None
+ source = self.change.project.source
+ connection_name = source.connection.connection_name
+ project = self.change.project
+
+ return dict(project=project.name,
+ connection=connection_name,
+ merge_mode=self.current_build_set.getMergeMode(),
+ refspec=refspec,
+ branch=branch,
+ ref=self.current_build_set.ref,
+ number=number,
+ patchset=patchset,
+ oldrev=oldrev,
+ newrev=newrev,
+ )
+
class Ref(object):
"""An existing state of a Project."""
@@ -1751,11 +1877,12 @@
self.can_merge = False
self.is_merged = False
self.failed_to_merge = False
- self.approvals = []
self.open = None
self.status = None
self.owner = None
+ self.source_event = None
+
def _id(self):
return '%s,%s' % (self.number, self.patchset)
@@ -1765,7 +1892,7 @@
def getBasePath(self):
if hasattr(self, 'refspec'):
return "%s/%s/%s" % (
- self.number[-2:], self.number, self.patchset)
+ str(self.number)[-2:], self.number, self.patchset)
return super(Change, self).getBasePath()
def equals(self, other):
@@ -1805,11 +1932,13 @@
class TriggerEvent(object):
"""Incoming event from an external system."""
def __init__(self):
+ # TODO(jeblair): further reduce this list
self.data = None
# common
self.type = None
# For management events (eg: enqueue / promote)
self.tenant_name = None
+ self.project_hostname = None
self.project_name = None
self.trigger_name = None
# Representation of the user account that performed the event.
@@ -1819,333 +1948,51 @@
self.change_url = None
self.patch_number = None
self.refspec = None
- self.approvals = []
self.branch = None
self.comment = None
+ self.state = None
# ref-updated
self.ref = None
self.oldrev = None
self.newrev = None
- # timer
- self.timespec = None
- # zuultrigger
- self.pipeline_name = None
# For events that arrive with a destination pipeline (eg, from
# an admin command, etc):
self.forced_pipeline = None
- def __repr__(self):
- ret = '<TriggerEvent %s %s' % (self.type, self.project_name)
+ @property
+ def canonical_project_name(self):
+ return self.project_hostname + '/' + self.project_name
- if self.branch:
- ret += " %s" % self.branch
- if self.change_number:
- ret += " %s,%s" % (self.change_number, self.patch_number)
- if self.approvals:
- ret += ' ' + ', '.join(
- ['%s:%s' % (a['type'], a['value']) for a in self.approvals])
- ret += '>'
+ def isPatchsetCreated(self):
+ return False
- return ret
+ def isChangeAbandoned(self):
+ return False
class BaseFilter(object):
"""Base Class for filtering which Changes and Events to process."""
- def __init__(self, required_approvals=[], reject_approvals=[]):
- self._required_approvals = copy.deepcopy(required_approvals)
- self.required_approvals = self._tidy_approvals(required_approvals)
- self._reject_approvals = copy.deepcopy(reject_approvals)
- self.reject_approvals = self._tidy_approvals(reject_approvals)
-
- def _tidy_approvals(self, approvals):
- for a in approvals:
- for k, v in a.items():
- if k == 'username':
- a['username'] = re.compile(v)
- elif k in ['email', 'email-filter']:
- a['email'] = re.compile(v)
- elif k == 'newer-than':
- a[k] = time_to_seconds(v)
- elif k == 'older-than':
- a[k] = time_to_seconds(v)
- if 'email-filter' in a:
- del a['email-filter']
- return approvals
-
- def _match_approval_required_approval(self, rapproval, approval):
- # Check if the required approval and approval match
- if 'description' not in approval:
- return False
- now = time.time()
- by = approval.get('by', {})
- for k, v in rapproval.items():
- if k == 'username':
- if (not v.search(by.get('username', ''))):
- return False
- elif k == 'email':
- if (not v.search(by.get('email', ''))):
- return False
- elif k == 'newer-than':
- t = now - v
- if (approval['grantedOn'] < t):
- return False
- elif k == 'older-than':
- t = now - v
- if (approval['grantedOn'] >= t):
- return False
- else:
- if not isinstance(v, list):
- v = [v]
- if (normalizeCategory(approval['description']) != k or
- int(approval['value']) not in v):
- return False
- return True
-
- def matchesApprovals(self, change):
- if (self.required_approvals and not change.approvals
- or self.reject_approvals and not change.approvals):
- # A change with no approvals can not match
- return False
-
- # TODO(jhesketh): If we wanted to optimise this slightly we could
- # analyse both the REQUIRE and REJECT filters by looping over the
- # approvals on the change and keeping track of what we have checked
- # rather than needing to loop on the change approvals twice
- return (self.matchesRequiredApprovals(change) and
- self.matchesNoRejectApprovals(change))
-
- def matchesRequiredApprovals(self, change):
- # Check if any approvals match the requirements
- for rapproval in self.required_approvals:
- matches_rapproval = False
- for approval in change.approvals:
- if self._match_approval_required_approval(rapproval, approval):
- # We have a matching approval so this requirement is
- # fulfilled
- matches_rapproval = True
- break
- if not matches_rapproval:
- return False
- return True
-
- def matchesNoRejectApprovals(self, change):
- # Check to make sure no approvals match a reject criteria
- for rapproval in self.reject_approvals:
- for approval in change.approvals:
- if self._match_approval_required_approval(rapproval, approval):
- # A reject approval has been matched, so we reject
- # immediately
- return False
- # To get here no rejects can have been matched so we should be good to
- # queue
- return True
+ pass
class EventFilter(BaseFilter):
"""Allows a Pipeline to only respond to certain events."""
- def __init__(self, trigger, types=[], branches=[], refs=[],
- event_approvals={}, comments=[], emails=[], usernames=[],
- timespecs=[], required_approvals=[], reject_approvals=[],
- pipelines=[], ignore_deletes=True):
- super(EventFilter, self).__init__(
- required_approvals=required_approvals,
- reject_approvals=reject_approvals)
+ def __init__(self, trigger):
+ super(EventFilter, self).__init__()
self.trigger = trigger
- self._types = types
- self._branches = branches
- self._refs = refs
- self._comments = comments
- self._emails = emails
- self._usernames = usernames
- self._pipelines = pipelines
- self.types = [re.compile(x) for x in types]
- self.branches = [re.compile(x) for x in branches]
- self.refs = [re.compile(x) for x in refs]
- self.comments = [re.compile(x) for x in comments]
- self.emails = [re.compile(x) for x in emails]
- self.usernames = [re.compile(x) for x in usernames]
- self.pipelines = [re.compile(x) for x in pipelines]
- self.event_approvals = event_approvals
- self.timespecs = timespecs
- self.ignore_deletes = ignore_deletes
- def __repr__(self):
- ret = '<EventFilter'
-
- if self._types:
- ret += ' types: %s' % ', '.join(self._types)
- if self._pipelines:
- ret += ' pipelines: %s' % ', '.join(self._pipelines)
- if self._branches:
- ret += ' branches: %s' % ', '.join(self._branches)
- if self._refs:
- ret += ' refs: %s' % ', '.join(self._refs)
- if self.ignore_deletes:
- ret += ' ignore_deletes: %s' % self.ignore_deletes
- if self.event_approvals:
- ret += ' event_approvals: %s' % ', '.join(
- ['%s:%s' % a for a in self.event_approvals.items()])
- if self.required_approvals:
- ret += ' required_approvals: %s' % ', '.join(
- ['%s' % a for a in self._required_approvals])
- if self.reject_approvals:
- ret += ' reject_approvals: %s' % ', '.join(
- ['%s' % a for a in self._reject_approvals])
- if self._comments:
- ret += ' comments: %s' % ', '.join(self._comments)
- if self._emails:
- ret += ' emails: %s' % ', '.join(self._emails)
- if self._usernames:
- ret += ' username_filters: %s' % ', '.join(self._usernames)
- if self.timespecs:
- ret += ' timespecs: %s' % ', '.join(self.timespecs)
- ret += '>'
-
- return ret
-
- def matches(self, event, change):
- # event types are ORed
- matches_type = False
- for etype in self.types:
- if etype.match(event.type):
- matches_type = True
- if self.types and not matches_type:
- return False
-
- # pipelines are ORed
- matches_pipeline = False
- for epipe in self.pipelines:
- if epipe.match(event.pipeline_name):
- matches_pipeline = True
- if self.pipelines and not matches_pipeline:
- return False
-
- # branches are ORed
- matches_branch = False
- for branch in self.branches:
- if branch.match(event.branch):
- matches_branch = True
- if self.branches and not matches_branch:
- return False
-
- # refs are ORed
- matches_ref = False
- if event.ref is not None:
- for ref in self.refs:
- if ref.match(event.ref):
- matches_ref = True
- if self.refs and not matches_ref:
- return False
- if self.ignore_deletes and event.newrev == EMPTY_GIT_REF:
- # If the updated ref has an empty git sha (all 0s),
- # then the ref is being deleted
- return False
-
- # comments are ORed
- matches_comment_re = False
- for comment_re in self.comments:
- if (event.comment is not None and
- comment_re.search(event.comment)):
- matches_comment_re = True
- if self.comments and not matches_comment_re:
- return False
-
- # We better have an account provided by Gerrit to do
- # email filtering.
- if event.account is not None:
- account_email = event.account.get('email')
- # emails are ORed
- matches_email_re = False
- for email_re in self.emails:
- if (account_email is not None and
- email_re.search(account_email)):
- matches_email_re = True
- if self.emails and not matches_email_re:
- return False
-
- # usernames are ORed
- account_username = event.account.get('username')
- matches_username_re = False
- for username_re in self.usernames:
- if (account_username is not None and
- username_re.search(account_username)):
- matches_username_re = True
- if self.usernames and not matches_username_re:
- return False
-
- # approvals are ANDed
- for category, value in self.event_approvals.items():
- matches_approval = False
- for eapproval in event.approvals:
- if (normalizeCategory(eapproval['description']) == category and
- int(eapproval['value']) == int(value)):
- matches_approval = True
- if not matches_approval:
- return False
-
- # required approvals are ANDed (reject approvals are ORed)
- if not self.matchesApprovals(change):
- return False
-
- # timespecs are ORed
- matches_timespec = False
- for timespec in self.timespecs:
- if (event.timespec == timespec):
- matches_timespec = True
- if self.timespecs and not matches_timespec:
- return False
-
+ def matches(self, event, ref):
+ # TODO(jeblair): consider removing ref argument
return True
-class ChangeishFilter(BaseFilter):
+class RefFilter(BaseFilter):
"""Allows a Manager to only enqueue Changes that meet certain criteria."""
- def __init__(self, open=None, current_patchset=None,
- statuses=[], required_approvals=[],
- reject_approvals=[]):
- super(ChangeishFilter, self).__init__(
- required_approvals=required_approvals,
- reject_approvals=reject_approvals)
- self.open = open
- self.current_patchset = current_patchset
- self.statuses = statuses
-
- def __repr__(self):
- ret = '<ChangeishFilter'
-
- if self.open is not None:
- ret += ' open: %s' % self.open
- if self.current_patchset is not None:
- ret += ' current-patchset: %s' % self.current_patchset
- if self.statuses:
- ret += ' statuses: %s' % ', '.join(self.statuses)
- if self.required_approvals:
- ret += (' required_approvals: %s' %
- str(self.required_approvals))
- if self.reject_approvals:
- ret += (' reject_approvals: %s' %
- str(self.reject_approvals))
- ret += '>'
-
- return ret
+ def __init__(self, connection_name):
+ super(RefFilter, self).__init__()
+ self.connection_name = connection_name
def matches(self, change):
- if self.open is not None:
- if self.open != change.open:
- return False
-
- if self.current_patchset is not None:
- if self.current_patchset != change.is_current_patchset:
- return False
-
- if self.statuses:
- if change.status not in self.statuses:
- return False
-
- # required approvals are ANDed (reject approvals are ORed)
- if not self.matchesApprovals(change):
- return False
-
return True
@@ -2162,6 +2009,7 @@
def __init__(self, name):
self.name = name
self.merge_mode = None
+ self.default_branch = None
self.pipelines = {}
self.private_key_file = None
@@ -2193,7 +2041,7 @@
raise Exception("Configuration item dictionaries must have "
"a single key (when parsing %s)" %
(conf,))
- key, value = item.items()[0]
+ key, value = list(item.items())[0]
if key == 'tenant':
self.tenants.append(value)
else:
@@ -2251,7 +2099,7 @@
raise Exception("Configuration item dictionaries must have "
"a single key (when parsing %s)" %
(conf,))
- key, value = item.items()[0]
+ key, value = list(item.items())[0]
if key == 'project':
name = value['name']
self.projects.setdefault(name, []).append(value)
@@ -2393,7 +2241,7 @@
def createJobGraph(self, item):
project_config = self.project_configs.get(
- item.change.project.name, None)
+ item.change.project.canonical_name, None)
ret = JobGraph()
# NOTE(pabelanger): It is possible for a foreign project not to have a
# configured pipeline, if so return an empty JobGraph.
@@ -2403,6 +2251,9 @@
self._createJobGraph(item, project_job_list, ret)
return ret
+ def hasProject(self, project):
+ return project.canonical_name in self.project_configs
+
class Semaphore(object):
def __init__(self, name, max=1):
@@ -2500,51 +2351,88 @@
# The unparsed configuration from the main zuul config for
# this tenant.
self.unparsed_config = None
- # The list of repos from which we will read main
- # configuration. (source, project)
- self.config_repos = []
- # The unparsed config from those repos.
- self.config_repos_config = None
- # The list of projects from which we will read in-repo
- # configuration. (source, project)
- self.project_repos = []
- # The unparsed config from those repos.
- self.project_repos_config = None
- # A mapping of source -> {config_repos: {}, project_repos: {}}
- self.sources = {}
-
+ # The list of projects from which we will read full
+ # configuration.
+ self.config_projects = []
+ # The unparsed config from those projects.
+ self.config_projects_config = None
+ # The list of projects from which we will read untrusted
+ # in-repo configuration.
+ self.untrusted_projects = []
+ # The unparsed config from those projects.
+ self.untrusted_projects_config = None
self.semaphore_handler = SemaphoreHandler()
- def addConfigRepo(self, source, project):
- sd = self.sources.setdefault(source.name,
- {'config_repos': {},
- 'project_repos': {}})
- sd['config_repos'][project.name] = project
+ # A mapping of project names to projects. project_name ->
+ # VALUE where VALUE is a further dictionary of
+ # canonical_hostname -> Project.
+ self.projects = {}
+ self.canonical_hostnames = set()
- def addProjectRepo(self, source, project):
- sd = self.sources.setdefault(source.name,
- {'config_repos': {},
- 'project_repos': {}})
- sd['project_repos'][project.name] = project
+ def _addProject(self, project):
+ """Add a project to the project index
- def getRepo(self, source, project_name):
- """Get a project given a source and project name
+ :arg Project project: The project to add.
+ """
+ self.canonical_hostnames.add(project.canonical_hostname)
+ hostname_dict = self.projects.setdefault(project.name, {})
+ if project.canonical_hostname in hostname_dict:
+ raise Exception("Project %s is already in project index" %
+ (project,))
+ hostname_dict[project.canonical_hostname] = project
- Returns a tuple (trusted, project) or (None, None) if the
- project is not found.
+ def getProject(self, name):
+ """Return a project given its name.
- Trusted indicates the project is a config repo.
+ :arg str name: The name of the project. It may be fully
+ qualified (E.g., "git.example.com/subpath/project") or it
+ may contain only the project name (E.g.,
+ "subpath/project").
+
+ :returns: A tuple (trusted, project) or (None, None) if the
+ project is not found or ambiguous. The "trusted" boolean
+ indicates whether or not the project is trusted by this
+ tenant.
+ :rtype: (bool, Project)
"""
-
- sd = self.sources.get(source)
- if not sd:
+ path = name.split('/', 1)
+ if path[0] in self.canonical_hostnames:
+ hostname = path[0]
+ project_name = path[1]
+ else:
+ hostname = None
+ project_name = name
+ hostname_dict = self.projects.get(project_name)
+ project = None
+ if hostname_dict:
+ if hostname:
+ project = hostname_dict.get(hostname)
+ else:
+ values = list(hostname_dict.values())
+ if len(values) == 1:
+ project = values[0]
+ else:
+ raise Exception("Project name '%s' is ambiguous, "
+ "please fully qualify the project "
+ "with a hostname" % (name,))
+ if project is None:
return (None, None)
- if project_name in sd['config_repos']:
- return (True, sd['config_repos'][project_name])
- if project_name in sd['project_repos']:
- return (False, sd['project_repos'][project_name])
- return (None, None)
+ if project in self.config_projects:
+ return (True, project)
+ if project in self.untrusted_projects:
+ return (False, project)
+ # This should never happen:
+ raise Exception("Project %s is neither trusted nor untrusted" %
+ (project,))
+
+ def addConfigProject(self, project):
+ self.config_projects.append(project)
+ self._addProject(project)
+
+ def addUntrustedProject(self, project):
+ self.untrusted_projects.append(project)
+ self._addProject(project)
class Abide(object):
@@ -2565,7 +2453,7 @@
def load(self):
if not os.path.exists(self.path):
return
- with open(self.path) as f:
+ with open(self.path, 'rb') as f:
data = struct.unpack(self.format, f.read())
version = data[0]
if version != self.version:
@@ -2581,7 +2469,7 @@
data.extend(self.failure_times)
data.extend(self.results)
data = struct.pack(self.format, *data)
- with open(tmpfile, 'w') as f:
+ with open(tmpfile, 'wb') as f:
f.write(data)
os.rename(tmpfile, self.path)
diff --git a/zuul/nodepool.py b/zuul/nodepool.py
index e94b950..8f6489c 100644
--- a/zuul/nodepool.py
+++ b/zuul/nodepool.py
@@ -38,11 +38,11 @@
def cancelRequest(self, request):
self.log.info("Canceling node request %s" % (request,))
if request.uid in self.requests:
+ request.canceled = True
try:
self.sched.zk.deleteNodeRequest(request)
except Exception:
self.log.exception("Error deleting node request:")
- del self.requests[request.uid]
def useNodeSet(self, nodeset):
self.log.info("Setting nodeset %s in use" % (nodeset,))
@@ -98,6 +98,10 @@
if request.uid not in self.requests:
return False
+ if request.canceled:
+ del self.requests[request.uid]
+ return False
+
if request.state in (model.STATE_FULFILLED, model.STATE_FAILED):
self.log.info("Node request %s %s" % (request, request.state))
@@ -119,6 +123,11 @@
self.log.info("Accepting node request %s" % (request,))
+ if request.canceled:
+ self.log.info("Ignoring canceled node request %s" % (request,))
+ # The request was already deleted when it was canceled
+ return
+
locked = False
if request.fulfilled:
# If the request suceeded, try to lock the nodes.
diff --git a/zuul/reporter/__init__.py b/zuul/reporter/__init__.py
index 5e25e7c..9c8e953 100644
--- a/zuul/reporter/__init__.py
+++ b/zuul/reporter/__init__.py
@@ -37,7 +37,7 @@
self._action = action
@abc.abstractmethod
- def report(self, source, pipeline, item):
+ def report(self, pipeline, item):
"""Send the compiled report message."""
def getSubmitAllowNeeds(self):
@@ -74,7 +74,12 @@
return ret
def _formatItemReportStart(self, pipeline, item, with_jobs=True):
- return pipeline.start_message.format(pipeline=pipeline)
+ status_url = ''
+ if self.connection.sched.config.has_option('zuul', 'status_url'):
+ status_url = self.connection.sched.config.get('zuul',
+ 'status_url')
+ return pipeline.start_message.format(pipeline=pipeline,
+ status_url=status_url)
def _formatItemReportSuccess(self, pipeline, item, with_jobs=True):
msg = pipeline.success_message
diff --git a/zuul/rpcclient.py b/zuul/rpcclient.py
index 9d81520..d980992 100644
--- a/zuul/rpcclient.py
+++ b/zuul/rpcclient.py
@@ -35,9 +35,9 @@
def submitJob(self, name, data):
self.log.debug("Submitting job %s with data %s" % (name, data))
- job = gear.Job(name,
- json.dumps(data),
- unique=str(time.time()))
+ job = gear.TextJob(name,
+ json.dumps(data),
+ unique=str(time.time()))
self.gearman.submitJob(job, timeout=300)
self.log.debug("Waiting for job completion")
diff --git a/zuul/rpclistener.py b/zuul/rpclistener.py
index 0fb557c..6508e84 100644
--- a/zuul/rpclistener.py
+++ b/zuul/rpclistener.py
@@ -38,7 +38,7 @@
port = self.config.get('gearman', 'port')
else:
port = 4730
- self.worker = gear.Worker('Zuul RPC Listener')
+ self.worker = gear.TextWorker('Zuul RPC Listener')
self.worker.addServer(server, port)
self.worker.waitForServer()
self.register()
@@ -98,9 +98,10 @@
if tenant:
event.tenant_name = args['tenant']
- project = tenant.layout.project_configs.get(args['project'])
+ (trusted, project) = tenant.getProject(args['project'])
if project:
- event.project_name = args['project']
+ event.project_hostname = project.canonical_hostname
+ event.project_name = project.name
else:
errors += 'Invalid project: %s\n' % (args['project'],)
@@ -119,15 +120,15 @@
else:
errors += 'Invalid tenant: %s\n' % (args['tenant'],)
- return (args, event, errors, pipeline, project)
+ return (args, event, errors, project)
def handle_enqueue(self, job):
- (args, event, errors, pipeline, project) = self._common_enqueue(job)
+ (args, event, errors, project) = self._common_enqueue(job)
if not errors:
event.change_number, event.patch_number = args['change'].split(',')
try:
- pipeline.source.getChange(event, project)
+ project.source.getChange(event, project)
except Exception:
errors += 'Invalid change: %s\n' % (args['change'],)
@@ -138,7 +139,7 @@
job.sendWorkComplete()
def handle_enqueue_ref(self, job):
- (args, event, errors, pipeline, project) = self._common_enqueue(job)
+ (args, event, errors, project) = self._common_enqueue(job)
if not errors:
event.ref = args['ref']
diff --git a/zuul/scheduler.py b/zuul/scheduler.py
index 0fa1763..61f1e5f 100644
--- a/zuul/scheduler.py
+++ b/zuul/scheduler.py
@@ -138,16 +138,18 @@
:arg bool merged: Whether the merge succeeded (changes with refs).
:arg bool updated: Whether the repo was updated (changes without refs).
:arg str commit: The SHA of the merged commit (changes with refs).
+ :arg dict repo_state: The starting repo state before the merge.
"""
def __init__(self, build_set, zuul_url, merged, updated, commit,
- files):
+ files, repo_state):
self.build_set = build_set
self.zuul_url = zuul_url
self.merged = merged
self.updated = updated
self.commit = commit
self.files = files
+ self.repo_state = repo_state
class NodesProvisionedEvent(ResultEvent):
@@ -232,10 +234,11 @@
self.stopConnections()
self.wake_event.set()
- def registerConnections(self, connections, load=True):
+ def registerConnections(self, connections, webapp, load=True):
# load: whether or not to trigger the onLoad for the connection. This
# is useful for not doing a full load during layout validation.
self.connections = connections
+ self.connections.registerWebapp(webapp)
self.connections.registerScheduler(self, load)
def stopConnections(self):
@@ -255,11 +258,6 @@
def addEvent(self, event):
self.log.debug("Adding trigger event: %s" % event)
- try:
- if self.statsd:
- self.statsd.incr('gerrit.event.%s' % event.type)
- except:
- self.log.exception("Exception reporting event stats")
self.trigger_event_queue.put(event)
self.wake_event.set()
self.log.debug("Done adding trigger event: %s" % event)
@@ -315,11 +313,11 @@
self.log.debug("Done adding complete event for build: %s" % build)
def onMergeCompleted(self, build_set, zuul_url, merged, updated,
- commit, files):
+ commit, files, repo_state):
self.log.debug("Adding merge complete event for build set: %s" %
build_set)
event = MergeCompletedEvent(build_set, zuul_url, merged,
- updated, commit, files)
+ updated, commit, files, repo_state)
self.result_event_queue.put(event)
self.wake_event.set()
@@ -486,6 +484,41 @@
finally:
self.layout_lock.release()
+ def _reenqueueGetProject(self, tenant, item):
+ project = item.change.project
+ # Attempt to get the same project as the one passed in. If
+ # the project is now found on a different connection, return
+ # the new version of the project. If it is no longer
+ # available (due to a connection being removed), return None.
+ (trusted, new_project) = tenant.getProject(project.canonical_name)
+ if new_project:
+ return new_project
+ # If this is a non-live item we may be looking at a
+ # "foreign" project, ie, one which is not defined in the
+ # config but is constructed ad-hoc to satisfy a
+ # cross-repo-dependency. Find the corresponding live item
+ # and use its source.
+ child = item
+ while child and not child.live:
+ # This assumes that the queue does not branch behind this
+ # item, which is currently true for non-live items; if
+ # that changes, this traversal will need to be more
+ # complex.
+ if child.items_behind:
+ child = child.items_behind[0]
+ else:
+ child = None
+ if child is item:
+ return None
+ if child and child.live:
+ (child_trusted, child_project) = tenant.getProject(
+ child.change.project.canonical_name)
+ if child_project:
+ source = child_project.source
+ new_project = source.getProject(project.name)
+ return new_project
+ return None
+
def _reenqueueTenant(self, old_tenant, tenant):
for name, new_pipeline in tenant.layout.pipelines.items():
old_pipeline = old_tenant.layout.pipelines.get(name)
@@ -501,15 +534,15 @@
for item in shared_queue.queue:
if not item.item_ahead:
last_head = item
- item.item_ahead = None
- item.items_behind = []
item.pipeline = None
item.queue = None
- project_name = item.change.project.name
- item.change.project = new_pipeline.source.getProject(
- project_name)
- if new_pipeline.manager.reEnqueueItem(item,
- last_head):
+ item.change.project = self._reenqueueGetProject(
+ tenant, item)
+ item.item_ahead = None
+ item.items_behind = []
+ if (item.change.project and
+ new_pipeline.manager.reEnqueueItem(item,
+ last_head)):
for build in item.current_build_set.getBuilds():
new_job = item.getJob(build.job.name)
if new_job:
@@ -553,7 +586,6 @@
# TODOv3(jeblair): remove postconfig calls?
for pipeline in tenant.layout.pipelines.values():
- pipeline.source.postConfig()
for trigger in pipeline.triggers:
trigger.postConfig(pipeline)
for reporter in pipeline.actions:
@@ -611,9 +643,9 @@
def _doEnqueueEvent(self, event):
tenant = self.abide.tenants.get(event.tenant_name)
- project = tenant.layout.project_configs.get(event.project_name)
+ (trusted, project) = tenant.getProject(event.project_name)
pipeline = tenant.layout.pipelines[event.forced_pipeline]
- change = pipeline.source.getChange(event, project)
+ change = project.source.getChange(event, project)
self.log.debug("Event %s for change %s was directly assigned "
"to pipeline %s" % (event, change, self))
pipeline.manager.addChange(change, ignore_requirements=True)
@@ -702,34 +734,31 @@
event = self.trigger_event_queue.get()
self.log.debug("Processing trigger event %s" % event)
try:
+ full_project_name = ('/'.join([event.project_hostname,
+ event.project_name]))
for tenant in self.abide.tenants.values():
- reconfigured_tenant = False
+ (trusted, project) = tenant.getProject(full_project_name)
+ if project is None:
+ continue
+ try:
+ change = project.source.getChange(event)
+ except exceptions.ChangeNotFound as e:
+ self.log.debug("Unable to get change %s from "
+ "source %s",
+ e.change, project.source)
+ continue
+ if (event.type == 'change-merged' and
+ hasattr(change, 'files') and
+ change.updatesConfig()):
+ # The change that just landed updates the config.
+ # Clear out cached data for this project and
+ # perform a reconfiguration.
+ change.project.unparsed_config = None
+ self.reconfigureTenant(tenant)
for pipeline in tenant.layout.pipelines.values():
- # Get the change even if the project is unknown to
- # us for the use of updating the cache if there is
- # another change depending on this foreign one.
- try:
- change = pipeline.source.getChange(event)
- except exceptions.ChangeNotFound as e:
- self.log.debug("Unable to get change %s from "
- "source %s (most likely looking "
- "for a change from another "
- "connection trigger)",
- e.change, pipeline.source)
- continue
- if (event.type == 'change-merged' and
- hasattr(change, 'files') and
- not reconfigured_tenant and
- change.updatesConfig()):
- # The change that just landed updates the config.
- # Clear out cached data for this project and
- # perform a reconfiguration.
- change.project.unparsed_config = None
- self.reconfigureTenant(tenant)
- reconfigured_tenant = True
- if event.type == 'patchset-created':
+ if event.isPatchsetCreated():
pipeline.manager.removeOldVersionsOfChange(change)
- elif event.type == 'change-abandoned':
+ elif event.isChangeAbandoned():
pipeline.manager.removeAbandonedChange(change)
if pipeline.manager.eventMatches(event, change):
pipeline.manager.addChange(change)
@@ -839,6 +868,8 @@
build_set = request.build_set
self.nodepool.acceptNodes(request)
+ if request.canceled:
+ return
if build_set is not build_set.item.current_build_set:
self.log.warning("Build set %s is not current" % (build_set,))
diff --git a/zuul/source/__init__.py b/zuul/source/__init__.py
index f0eeba6..68baf0e 100644
--- a/zuul/source/__init__.py
+++ b/zuul/source/__init__.py
@@ -69,3 +69,13 @@
@abc.abstractmethod
def getProjectBranches(self, project):
"""Get branches for a project"""
+
+ @abc.abstractmethod
+ def getRequireFilters(self, config):
+ """Return a list of ChangeFilters for the scheduler to match against.
+ """
+
+ @abc.abstractmethod
+ def getRejectFilters(self, config):
+ """Return a list of ChangeFilters for the scheduler to match against.
+ """
diff --git a/zuul/webapp.py b/zuul/webapp.py
index 4f040fa..e4feaa0 100644
--- a/zuul/webapp.py
+++ b/zuul/webapp.py
@@ -45,6 +45,7 @@
class WebApp(threading.Thread):
log = logging.getLogger("zuul.WebApp")
+ change_path_regexp = '/status/change/(.*)$'
def __init__(self, scheduler, port=8001, cache_expiry=1,
listen_address='0.0.0.0'):
@@ -56,10 +57,16 @@
self.cache_time = 0
self.cache = {}
self.daemon = True
+ self.routes = {}
+ self._init_default_routes()
self.server = httpserver.serve(
dec.wsgify(self.app), host=self.listen_address, port=self.port,
start_loop=False)
+ def _init_default_routes(self):
+ self.register_path('/(status\.json|status)$', self.status)
+ self.register_path(self.change_path_regexp, self.change)
+
def run(self):
self.server.serve_forever()
@@ -90,14 +97,13 @@
return change['id'] == rev
return self._changes_by_func(func, tenant_name)
- def _normalize_path(self, path):
- # support legacy status.json as well as new /status
- if path == '/status.json' or path == '/status':
- return "status"
- m = re.match('/status/change/(\d+,\d+)$', path)
- if m:
- return m.group(1)
- return None
+ def register_path(self, path, handler):
+ path_re = re.compile(path)
+ self.routes[path] = (path_re, handler)
+
+ def unregister_path(self, path):
+ if self.routes.get(path):
+ del self.routes[path]
def _handle_keys(self, request, path):
m = re.match('/keys/(.*?)/(.*?).pub', path)
@@ -120,14 +126,45 @@
return response.conditional_response_app
def app(self, request):
+ # Try registered paths without a tenant_name first
+ path = request.path
+ for path_re, handler in self.routes.values():
+ if path_re.match(path):
+ return handler(path, '', request)
+
+ # Now try with a tenant_name stripped
tenant_name = request.path.split('/')[1]
path = request.path.replace('/' + tenant_name, '')
+ # Handle keys
if path.startswith('/keys'):
return self._handle_keys(request, path)
- path = self._normalize_path(path)
- if path is None:
+ for path_re, handler in self.routes.values():
+ if path_re.match(path):
+ return handler(path, tenant_name, request)
+ else:
raise webob.exc.HTTPNotFound()
+ def status(self, path, tenant_name, request):
+ def func():
+ return webob.Response(body=self.cache[tenant_name],
+ content_type='application/json',
+ charset='utf8')
+ return self._response_with_status_cache(func, tenant_name)
+
+ def change(self, path, tenant_name, request):
+ def func():
+ m = re.match(self.change_path_regexp, path)
+ change_id = m.group(1)
+ status = self._status_for_change(change_id, tenant_name)
+ if status:
+ return webob.Response(body=status,
+ content_type='application/json',
+ charset='utf8')
+ else:
+ raise webob.exc.HTTPNotFound()
+ return self._response_with_status_cache(func, tenant_name)
+
+ def _refresh_status_cache(self, tenant_name):
if (tenant_name not in self.cache or
(time.time() - self.cache_time) > self.cache_expiry):
try:
@@ -140,16 +177,10 @@
self.log.exception("Exception formatting status:")
raise
- if path == 'status':
- response = webob.Response(body=self.cache[tenant_name],
- content_type='application/json')
- else:
- status = self._status_for_change(path, tenant_name)
- if status:
- response = webob.Response(body=status,
- content_type='application/json')
- else:
- raise webob.exc.HTTPNotFound()
+ def _response_with_status_cache(self, func, tenant_name):
+ self._refresh_status_cache(tenant_name)
+
+ response = func()
response.headers['Access-Control-Allow-Origin'] = '*'
diff --git a/zuul/zk.py b/zuul/zk.py
index 5cd7bee..31b85ea 100644
--- a/zuul/zk.py
+++ b/zuul/zk.py
@@ -59,10 +59,10 @@
self._became_lost = False
def _dictToStr(self, data):
- return json.dumps(data)
+ return json.dumps(data).encode('utf8')
def _strToDict(self, data):
- return json.loads(data)
+ return json.loads(data.decode('utf8'))
def _connection_listener(self, state):
'''
@@ -168,7 +168,7 @@
if data:
data = self._strToDict(data)
node_request.updateFromDict(data)
- request_nodes = node_request.nodeset.getNodes()
+ request_nodes = list(node_request.nodeset.getNodes())
for i, nodeid in enumerate(data.get('nodes', [])):
node_path = '%s/%s' % (self.NODE_ROOT, nodeid)
node_data, node_stat = self.client.get(node_path)