Merge "Extend in-repo config update support to github" into feature/zuulv3
diff --git a/doc/source/developer/datamodel.rst b/doc/source/developer/datamodel.rst
index acb8612..c4ff4a0 100644
--- a/doc/source/developer/datamodel.rst
+++ b/doc/source/developer/datamodel.rst
@@ -8,7 +8,7 @@
 
 Pipelines have a configured
 :py:class:`~zuul.manager.PipelineManager` which controls how
-the :py:class:`Change <zuul.model.Changeish>` objects are enqueued and
+the :py:class:`Ref <zuul.model.Ref>` objects are enqueued and
 processed.
 
 There are currently two,
@@ -35,7 +35,7 @@
 .. autoclass:: zuul.model.Build
 
 The :py:class:`~zuul.manager.base.PipelineManager` enqueues each
-:py:class:`Change <zuul.model.Changeish>` into the
+:py:class:`Ref <zuul.model.Ref>` into the
 :py:class:`~zuul.model.ChangeQueue` in a :py:class:`~zuul.model.QueueItem`.
 
 .. autoclass:: zuul.model.QueueItem
diff --git a/doc/source/reporters.rst b/doc/source/reporters.rst
index e3ab947..dd053fa 100644
--- a/doc/source/reporters.rst
+++ b/doc/source/reporters.rst
@@ -44,6 +44,10 @@
   set as the commit status on github.
   ``status: 'success'``
 
+  **status-url**
+  String value for the URL to set as the link on the github status. Defaults
+  to the zuul server's status_url, or the empty string if that is unset.
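+  For example:
+  ``status-url: 'http://logs.example.com/{pipeline.name}/{change.project}/{change.number}/{change.patchset}/'``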
+
   **comment**
   Boolean value (``true`` or ``false``) that determines if the reporter should
   add a comment to the pipeline status to the github pull request. Defaults
diff --git a/etc/zuul.conf-sample b/etc/zuul.conf-sample
index bf19895..1065cec 100644
--- a/etc/zuul.conf-sample
+++ b/etc/zuul.conf-sample
@@ -18,6 +18,9 @@
 ;git_user_name=zuul
 zuul_url=http://zuul.example.com/p
 
+[executor]
+default_username=zuul
+
 [webapp]
 listen_address=0.0.0.0
 port=8001
diff --git a/tests/base.py b/tests/base.py
index b78495d..76d604f 100755
--- a/tests/base.py
+++ b/tests/base.py
@@ -69,6 +69,7 @@
 import zuul.merger.client
 import zuul.merger.merger
 import zuul.merger.server
+import zuul.model
 import zuul.nodepool
 import zuul.zk
 from zuul.exceptions import MergeFailure
@@ -531,6 +532,18 @@
     def _start_watcher_thread(self, *args, **kw):
         pass
 
+    def _uploadPack(self, project):
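+        # Fake a git upload-pack ref advertisement in pkt-line format: a
+        # capability line for HEAD, one length-prefixed line per ref (the
+        # 4-hex-digit prefix counts itself), and a terminating '0000'
+        # flush packet.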
+        ret = ('00a31270149696713ba7e06f1beb760f20d359c4abed HEAD\x00'
+               'multi_ack thin-pack side-band side-band-64k ofs-delta '
+               'shallow no-progress include-tag multi_ack_detailed no-done\n')
+        path = os.path.join(self.upstream_root, project.name)
+        repo = git.Repo(path)
+        for ref in repo.refs:
+            r = ref.object.hexsha + ' ' + ref.path + '\n'
+            ret += '%04x%s' % (len(r) + 4, r)
+        ret += '0000'
+        return ret
+
     def getGitUrl(self, project):
         return os.path.join(self.upstream_root, project.name)
 
@@ -1039,28 +1052,6 @@
                 (self.result, self.name, self.uuid, self.changes))
 
 
-class FakeURLOpener(object):
-    def __init__(self, upstream_root, url):
-        self.upstream_root = upstream_root
-        self.url = url
-
-    def read(self):
-        res = urllib.parse.urlparse(self.url)
-        path = res.path
-        project = '/'.join(path.split('/')[2:-2])
-        ret = '001e# service=git-upload-pack\n'
-        ret += ('000000a31270149696713ba7e06f1beb760f20d359c4abed HEAD\x00'
-                'multi_ack thin-pack side-band side-band-64k ofs-delta '
-                'shallow no-progress include-tag multi_ack_detailed no-done\n')
-        path = os.path.join(self.upstream_root, project)
-        repo = git.Repo(path)
-        for ref in repo.refs:
-            r = ref.object.hexsha + ' ' + ref.path + '\n'
-            ret += '%04x%s' % (len(r) + 4, r)
-        ret += '0000'
-        return ret
-
-
 class FakeStatsd(threading.Thread):
     def __init__(self):
         threading.Thread.__init__(self)
@@ -1920,14 +1911,6 @@
         self.configure_connections()
         self.sched.registerConnections(self.connections, self.webapp)
 
-        def URLOpenerFactory(*args, **kw):
-            if isinstance(args[0], urllib.request.Request):
-                return old_urlopen(*args, **kw)
-            return FakeURLOpener(self.upstream_root, *args, **kw)
-
-        old_urlopen = urllib.request.urlopen
-        urllib.request.urlopen = URLOpenerFactory
-
         self.executor_server = RecordingExecutorServer(
             self.config, self.connections,
             jobdir_root=self.test_root,
@@ -2450,6 +2433,12 @@
         jobs = filter(lambda x: x.result == result, jobs)
         return len(list(jobs))
 
+    def getBuildByName(self, name):
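+        # Return the currently running build for the named job; raise if
+        # no such build is in progress.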
+        for build in self.builds:
+            if build.name == name:
+                return build
+        raise Exception("Unable to find build %s" % name)
+
     def getJobFromHistory(self, name, project=None):
         for job in self.history:
             if (job.name == name and
diff --git a/tests/fixtures/config/inventory/git/common-config/playbooks/group-inventory.yaml b/tests/fixtures/config/inventory/git/common-config/playbooks/group-inventory.yaml
new file mode 100644
index 0000000..f679dce
--- /dev/null
+++ b/tests/fixtures/config/inventory/git/common-config/playbooks/group-inventory.yaml
@@ -0,0 +1,2 @@
+- hosts: all
+  tasks: []
diff --git a/tests/fixtures/config/inventory/git/common-config/playbooks/single-inventory.yaml b/tests/fixtures/config/inventory/git/common-config/playbooks/single-inventory.yaml
new file mode 100644
index 0000000..f679dce
--- /dev/null
+++ b/tests/fixtures/config/inventory/git/common-config/playbooks/single-inventory.yaml
@@ -0,0 +1,2 @@
+- hosts: all
+  tasks: []
diff --git a/tests/fixtures/config/inventory/git/common-config/zuul.yaml b/tests/fixtures/config/inventory/git/common-config/zuul.yaml
new file mode 100644
index 0000000..184bd80
--- /dev/null
+++ b/tests/fixtures/config/inventory/git/common-config/zuul.yaml
@@ -0,0 +1,42 @@
+- pipeline:
+    name: check
+    manager: independent
+    allow-secrets: true
+    trigger:
+      gerrit:
+        - event: patchset-created
+    success:
+      gerrit:
+        verified: 1
+    failure:
+      gerrit:
+        verified: -1
+
+- nodeset:
+    name: nodeset1
+    nodes:
+      - name: controller
+        image: controller-image
+      - name: compute1
+        image: compute-image
+      - name: compute2
+        image: compute-image
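+    # Nodes may be collected into named groups (exercised by the
+    # group-inventory job below).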
+    groups:
+      - name: ceph-osd
+        nodes:
+          - controller
+      - name: ceph-monitor
+        nodes:
+          - controller
+          - compute1
+          - compute2
+
+- job:
+    name: single-inventory
+    nodes:
+      - name: ubuntu-xenial
+        image: ubuntu-xenial
+
+- job:
+    name: group-inventory
+    nodes: nodeset1
diff --git a/tests/fixtures/config/inventory/git/org_project/.zuul.yaml b/tests/fixtures/config/inventory/git/org_project/.zuul.yaml
new file mode 100644
index 0000000..26310a0
--- /dev/null
+++ b/tests/fixtures/config/inventory/git/org_project/.zuul.yaml
@@ -0,0 +1,6 @@
+- project:
+    name: org/project
+    check:
+      jobs:
+        - single-inventory
+        - group-inventory
diff --git a/tests/fixtures/config/inventory/git/org_project/README b/tests/fixtures/config/inventory/git/org_project/README
new file mode 100644
index 0000000..9daeafb
--- /dev/null
+++ b/tests/fixtures/config/inventory/git/org_project/README
@@ -0,0 +1 @@
+test
diff --git a/tests/fixtures/config/inventory/main.yaml b/tests/fixtures/config/inventory/main.yaml
new file mode 100644
index 0000000..208e274
--- /dev/null
+++ b/tests/fixtures/config/inventory/main.yaml
@@ -0,0 +1,8 @@
+- tenant:
+    name: tenant-one
+    source:
+      gerrit:
+        config-projects:
+          - common-config
+        untrusted-projects:
+          - org/project
diff --git a/tests/fixtures/layouts/repo-checkout-four-project.yaml b/tests/fixtures/layouts/repo-checkout-four-project.yaml
new file mode 100644
index 0000000..392931a
--- /dev/null
+++ b/tests/fixtures/layouts/repo-checkout-four-project.yaml
@@ -0,0 +1,81 @@
+- pipeline:
+    name: check
+    manager: independent
+    trigger:
+      gerrit:
+        - event: patchset-created
+    success:
+      gerrit:
+        verified: 1
+    failure:
+      gerrit:
+        verified: -1
+
+- pipeline:
+    name: gate
+    manager: dependent
+    success-message: Build succeeded (gate).
+    trigger:
+      gerrit:
+        - event: comment-added
+          approval:
+            - approved: 1
+    success:
+      gerrit:
+        verified: 2
+        submit: true
+    failure:
+      gerrit:
+        verified: -2
+    start:
+      gerrit:
+        verified: 0
+    precedence: high
+
+- job:
+    name: integration
+    required-projects:
+      - org/project1
+      - org/project2
+      - org/project3
+      - org/project4
+
+- project:
+    name: org/project1
+    check:
+      jobs:
+        - integration
+    gate:
+      queue: integrated
+      jobs:
+        - integration
+
+- project:
+    name: org/project2
+    check:
+      jobs:
+        - integration
+    gate:
+      queue: integrated
+      jobs:
+        - integration
+
+- project:
+    name: org/project3
+    check:
+      jobs:
+        - integration
+    gate:
+      queue: integrated
+      jobs:
+        - integration
+
+- project:
+    name: org/project4
+    check:
+      jobs:
+        - integration
+    gate:
+      queue: integrated
+      jobs:
+        - integration
diff --git a/tests/fixtures/layouts/repo-checkout-no-timer.yaml b/tests/fixtures/layouts/repo-checkout-no-timer.yaml
new file mode 100644
index 0000000..2b65850
--- /dev/null
+++ b/tests/fixtures/layouts/repo-checkout-no-timer.yaml
@@ -0,0 +1,20 @@
+- pipeline:
+    name: periodic
+    manager: independent
+    # Trigger is required, set it to one that is a noop
+    # during tests that check the timer trigger.
+    trigger:
+      gerrit:
+        - event: ref-updated
+
+- job:
+    name: integration
+    override-branch: stable/havana
+    required-projects:
+      - org/project1
+
+- project:
+    name: org/project1
+    periodic:
+      jobs:
+        - integration
diff --git a/tests/fixtures/layouts/repo-checkout-post.yaml b/tests/fixtures/layouts/repo-checkout-post.yaml
new file mode 100644
index 0000000..9698289
--- /dev/null
+++ b/tests/fixtures/layouts/repo-checkout-post.yaml
@@ -0,0 +1,25 @@
+- pipeline:
+    name: post
+    manager: independent
+    trigger:
+      gerrit:
+        - event: ref-updated
+          ref: ^(?!refs/).*$
+
+- job:
+    name: integration
+    required-projects:
+      - org/project1
+      - org/project2
+
+- project:
+    name: org/project1
+    post:
+      jobs:
+        - integration
+
+- project:
+    name: org/project2
+    post:
+      jobs:
+        - integration
diff --git a/tests/fixtures/layouts/repo-checkout-six-project.yaml b/tests/fixtures/layouts/repo-checkout-six-project.yaml
new file mode 100644
index 0000000..93a64ea
--- /dev/null
+++ b/tests/fixtures/layouts/repo-checkout-six-project.yaml
@@ -0,0 +1,104 @@
+- pipeline:
+    name: check
+    manager: independent
+    trigger:
+      gerrit:
+        - event: patchset-created
+    success:
+      gerrit:
+        verified: 1
+    failure:
+      gerrit:
+        verified: -1
+
+- pipeline:
+    name: gate
+    manager: dependent
+    success-message: Build succeeded (gate).
+    trigger:
+      gerrit:
+        - event: comment-added
+          approval:
+            - approved: 1
+    success:
+      gerrit:
+        verified: 2
+        submit: true
+    failure:
+      gerrit:
+        verified: -2
+    start:
+      gerrit:
+        verified: 0
+    precedence: high
+
+- job:
+    name: integration
+    required-projects:
+      - org/project1
+      - org/project2
+      - org/project3
+      - name: org/project4
+        override-branch: master
+      - org/project5
+      - org/project6
+
+- project:
+    name: org/project1
+    check:
+      jobs:
+        - integration
+    gate:
+      queue: integrated
+      jobs:
+        - integration
+
+- project:
+    name: org/project2
+    check:
+      jobs:
+        - integration
+    gate:
+      queue: integrated
+      jobs:
+        - integration
+
+- project:
+    name: org/project3
+    check:
+      jobs:
+        - integration
+    gate:
+      queue: integrated
+      jobs:
+        - integration
+
+- project:
+    name: org/project4
+    check:
+      jobs:
+        - integration
+    gate:
+      queue: integrated
+      jobs:
+        - integration
+
+- project:
+    name: org/project5
+    check:
+      jobs:
+        - integration
+    gate:
+      queue: integrated
+      jobs:
+        - integration
+
+- project:
+    name: org/project6
+    check:
+      jobs:
+        - integration
+    gate:
+      queue: integrated
+      jobs:
+        - integration
diff --git a/tests/fixtures/layouts/repo-checkout-timer.yaml b/tests/fixtures/layouts/repo-checkout-timer.yaml
new file mode 100644
index 0000000..d5917d1
--- /dev/null
+++ b/tests/fixtures/layouts/repo-checkout-timer.yaml
@@ -0,0 +1,18 @@
+- pipeline:
+    name: periodic
+    manager: independent
+    trigger:
+      timer:
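+        # Fires every second so the periodic test sees builds quickly.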
+        - time: '* * * * * */1'
+
+- job:
+    name: integration
+    override-branch: stable/havana
+    required-projects:
+      - org/project1
+
+- project:
+    name: org/project1
+    periodic:
+      jobs:
+        - integration
diff --git a/tests/fixtures/layouts/reporting-github.yaml b/tests/fixtures/layouts/reporting-github.yaml
index c939f39..8dd35b0 100644
--- a/tests/fixtures/layouts/reporting-github.yaml
+++ b/tests/fixtures/layouts/reporting-github.yaml
@@ -29,6 +29,7 @@
       github:
         comment: false
         status: 'success'
+        status-url: http://logs.example.com/{pipeline.name}/{change.project}/{change.number}/{change.patchset}/
     failure:
       github:
         comment: false
diff --git a/tests/fixtures/zuul-github-driver.conf b/tests/fixtures/zuul-github-driver.conf
index ab34619..dfa813d 100644
--- a/tests/fixtures/zuul-github-driver.conf
+++ b/tests/fixtures/zuul-github-driver.conf
@@ -3,7 +3,7 @@
 
 [zuul]
 job_name_in_report=true
-status_url=http://zuul.example.com/status
+status_url=http://zuul.example.com/status/#{change.number},{change.patchset}
 
 [merger]
 git_dir=/tmp/zuul-test/git
diff --git a/tests/unit/test_cloner.py b/tests/unit/test_cloner.py
deleted file mode 100644
index 430f24e..0000000
--- a/tests/unit/test_cloner.py
+++ /dev/null
@@ -1,727 +0,0 @@
-#!/usr/bin/env python
-
-# Copyright 2012 Hewlett-Packard Development Company, L.P.
-# Copyright 2014 Wikimedia Foundation Inc.
-#
-# Licensed under the Apache License, Version 2.0 (the "License"); you may
-# not use this file except in compliance with the License. You may obtain
-# a copy of the License at
-#
-#      http://www.apache.org/licenses/LICENSE-2.0
-#
-# Unless required by applicable law or agreed to in writing, software
-# distributed under the License is distributed on an "AS IS" BASIS, WITHOUT
-# WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. See the
-# License for the specific language governing permissions and limitations
-# under the License.
-
-import logging
-import os
-import shutil
-import time
-from unittest import skip
-
-import git
-
-import zuul.lib.cloner
-
-from tests.base import ZuulTestCase, simple_layout
-
-
-class TestCloner(ZuulTestCase):
-    tenant_config_file = 'config/single-tenant/main.yaml'
-
-    log = logging.getLogger("zuul.test.cloner")
-
-    @skip("Disabled for early v3 development")
-    def test_cache_dir(self):
-        projects = ['org/project1', 'org/project2']
-        cache_root = os.path.join(self.test_root, "cache")
-        for project in projects:
-            upstream_repo_path = os.path.join(self.upstream_root, project)
-            cache_repo_path = os.path.join(cache_root, project)
-            git.Repo.clone_from(upstream_repo_path, cache_repo_path)
-
-        self.worker.hold_jobs_in_build = True
-        A = self.fake_gerrit.addFakeChange('org/project1', 'master', 'A')
-        A.addApproval('CRVW', 2)
-        self.fake_gerrit.addEvent(A.addApproval('APRV', 1))
-
-        self.waitUntilSettled()
-
-        self.assertEquals(1, len(self.builds), "One build is running")
-
-        B = self.fake_gerrit.addFakeChange('org/project2', 'master', 'B')
-        B.setMerged()
-
-        upstream = self.getUpstreamRepos(projects)
-        states = [{
-            'org/project1': self.builds[0].parameters['ZUUL_COMMIT'],
-            'org/project2': str(upstream['org/project2'].commit('master')),
-        }]
-
-        for number, build in enumerate(self.builds):
-            self.log.debug("Build parameters: %s", build.parameters)
-            cloner = zuul.lib.cloner.Cloner(
-                git_base_url=self.upstream_root,
-                projects=projects,
-                workspace=self.workspace_root,
-                zuul_project=build.parameters.get('ZUUL_PROJECT', None),
-                zuul_branch=build.parameters['ZUUL_BRANCH'],
-                zuul_ref=build.parameters['ZUUL_REF'],
-                zuul_url=self.src_root,
-                cache_dir=cache_root,
-            )
-            cloner.execute()
-            work = self.getWorkspaceRepos(projects)
-            state = states[number]
-
-            for project in projects:
-                self.assertEquals(state[project],
-                                  str(work[project].commit('HEAD')),
-                                  'Project %s commit for build %s should '
-                                  'be correct' % (project, number))
-
-        work = self.getWorkspaceRepos(projects)
-        # project1 is the zuul_project so the origin should be set to the
-        # zuul_url since that is the most up to date.
-        cache_repo_path = os.path.join(cache_root, 'org/project1')
-        self.assertNotEqual(
-            work['org/project1'].remotes.origin.url,
-            cache_repo_path,
-            'workspace repo origin should not be the cache'
-        )
-        zuul_url_repo_path = os.path.join(self.git_root, 'org/project1')
-        self.assertEqual(
-            work['org/project1'].remotes.origin.url,
-            zuul_url_repo_path,
-            'workspace repo origin should be the zuul url'
-        )
-
-        # project2 is not the zuul_project so the origin should be set
-        # to upstream since that is the best we can do
-        cache_repo_path = os.path.join(cache_root, 'org/project2')
-        self.assertNotEqual(
-            work['org/project2'].remotes.origin.url,
-            cache_repo_path,
-            'workspace repo origin should not be the cache'
-        )
-        upstream_repo_path = os.path.join(self.upstream_root, 'org/project2')
-        self.assertEqual(
-            work['org/project2'].remotes.origin.url,
-            upstream_repo_path,
-            'workspace repo origin should be the upstream url'
-        )
-
-        self.worker.hold_jobs_in_build = False
-        self.worker.release()
-        self.waitUntilSettled()
-
-    @simple_layout('layouts/repo-checkout-two-project.yaml')
-    def test_one_branch(self):
-        self.executor_server.hold_jobs_in_build = True
-
-        p1 = 'review.example.com/org/project1'
-        p2 = 'review.example.com/org/project2'
-        projects = [p1, p2]
-        A = self.fake_gerrit.addFakeChange('org/project1', 'master', 'A')
-        B = self.fake_gerrit.addFakeChange('org/project2', 'master', 'B')
-        A.addApproval('code-review', 2)
-        B.addApproval('code-review', 2)
-        self.fake_gerrit.addEvent(A.addApproval('approved', 1))
-        self.fake_gerrit.addEvent(B.addApproval('approved', 1))
-
-        self.waitUntilSettled()
-
-        self.assertEquals(2, len(self.builds), "Two builds are running")
-
-        upstream = self.getUpstreamRepos(projects)
-        states = [
-            {p1: self.builds[0].parameters['ZUUL_COMMIT'],
-             p2: str(upstream[p2].commit('master')),
-             },
-            {p1: self.builds[0].parameters['ZUUL_COMMIT'],
-             p2: self.builds[1].parameters['ZUUL_COMMIT'],
-             },
-        ]
-
-        for number, build in enumerate(self.builds):
-            self.log.debug("Build parameters: %s", build.parameters)
-            work = build.getWorkspaceRepos(projects)
-            state = states[number]
-
-            for project in projects:
-                self.assertEquals(state[project],
-                                  str(work[project].commit('HEAD')),
-                                  'Project %s commit for build %s should '
-                                  'be correct' % (project, number))
-
-        self.executor_server.hold_jobs_in_build = False
-        self.executor_server.release()
-        self.waitUntilSettled()
-
-    @skip("Disabled for early v3 development")
-    def test_multi_branch(self):
-        self.worker.hold_jobs_in_build = True
-        projects = ['org/project1', 'org/project2',
-                    'org/project3', 'org/project4']
-
-        self.create_branch('org/project2', 'stable/havana')
-        self.create_branch('org/project4', 'stable/havana')
-        A = self.fake_gerrit.addFakeChange('org/project1', 'master', 'A')
-        B = self.fake_gerrit.addFakeChange('org/project2', 'stable/havana',
-                                           'B')
-        C = self.fake_gerrit.addFakeChange('org/project3', 'master', 'C')
-        A.addApproval('CRVW', 2)
-        B.addApproval('CRVW', 2)
-        C.addApproval('CRVW', 2)
-        self.fake_gerrit.addEvent(A.addApproval('APRV', 1))
-        self.fake_gerrit.addEvent(B.addApproval('APRV', 1))
-        self.fake_gerrit.addEvent(C.addApproval('APRV', 1))
-
-        self.waitUntilSettled()
-
-        self.assertEquals(3, len(self.builds), "Three builds are running")
-
-        upstream = self.getUpstreamRepos(projects)
-        states = [
-            {'org/project1': self.builds[0].parameters['ZUUL_COMMIT'],
-             'org/project2': str(upstream['org/project2'].commit('master')),
-             'org/project3': str(upstream['org/project3'].commit('master')),
-             'org/project4': str(upstream['org/project4'].
-                                 commit('master')),
-             },
-            {'org/project1': self.builds[0].parameters['ZUUL_COMMIT'],
-             'org/project2': self.builds[1].parameters['ZUUL_COMMIT'],
-             'org/project3': str(upstream['org/project3'].commit('master')),
-             'org/project4': str(upstream['org/project4'].
-                                 commit('stable/havana')),
-             },
-            {'org/project1': self.builds[0].parameters['ZUUL_COMMIT'],
-             'org/project2': str(upstream['org/project2'].commit('master')),
-             'org/project3': self.builds[2].parameters['ZUUL_COMMIT'],
-             'org/project4': str(upstream['org/project4'].
-                                 commit('master')),
-             },
-        ]
-
-        for number, build in enumerate(self.builds):
-            self.log.debug("Build parameters: %s", build.parameters)
-            cloner = zuul.lib.cloner.Cloner(
-                git_base_url=self.upstream_root,
-                projects=projects,
-                workspace=self.workspace_root,
-                zuul_project=build.parameters.get('ZUUL_PROJECT', None),
-                zuul_branch=build.parameters['ZUUL_BRANCH'],
-                zuul_ref=build.parameters['ZUUL_REF'],
-                zuul_url=self.src_root,
-            )
-            cloner.execute()
-            work = self.getWorkspaceRepos(projects)
-            state = states[number]
-
-            for project in projects:
-                self.assertEquals(state[project],
-                                  str(work[project].commit('HEAD')),
-                                  'Project %s commit for build %s should '
-                                  'be correct' % (project, number))
-            shutil.rmtree(self.workspace_root)
-
-        self.worker.hold_jobs_in_build = False
-        self.worker.release()
-        self.waitUntilSettled()
-
-    @skip("Disabled for early v3 development")
-    def test_upgrade(self):
-        # Simulates an upgrade test
-        self.worker.hold_jobs_in_build = True
-        projects = ['org/project1', 'org/project2', 'org/project3',
-                    'org/project4', 'org/project5', 'org/project6']
-
-        self.create_branch('org/project2', 'stable/havana')
-        self.create_branch('org/project3', 'stable/havana')
-        self.create_branch('org/project4', 'stable/havana')
-        self.create_branch('org/project5', 'stable/havana')
-        A = self.fake_gerrit.addFakeChange('org/project1', 'master', 'A')
-        B = self.fake_gerrit.addFakeChange('org/project2', 'master', 'B')
-        C = self.fake_gerrit.addFakeChange('org/project3', 'stable/havana',
-                                           'C')
-        D = self.fake_gerrit.addFakeChange('org/project3', 'master', 'D')
-        E = self.fake_gerrit.addFakeChange('org/project4', 'stable/havana',
-                                           'E')
-        A.addApproval('CRVW', 2)
-        B.addApproval('CRVW', 2)
-        C.addApproval('CRVW', 2)
-        D.addApproval('CRVW', 2)
-        E.addApproval('CRVW', 2)
-        self.fake_gerrit.addEvent(A.addApproval('APRV', 1))
-        self.fake_gerrit.addEvent(B.addApproval('APRV', 1))
-        self.fake_gerrit.addEvent(C.addApproval('APRV', 1))
-        self.fake_gerrit.addEvent(D.addApproval('APRV', 1))
-        self.fake_gerrit.addEvent(E.addApproval('APRV', 1))
-
-        self.waitUntilSettled()
-
-        self.assertEquals(5, len(self.builds), "Five builds are running")
-
-        # Check the old side of the upgrade first
-        upstream = self.getUpstreamRepos(projects)
-        states = [
-            {'org/project1': self.builds[0].parameters['ZUUL_COMMIT'],
-             'org/project2': str(upstream['org/project2'].commit(
-                                 'stable/havana')),
-             'org/project3': str(upstream['org/project3'].commit(
-                                 'stable/havana')),
-             'org/project4': str(upstream['org/project4'].commit(
-                                 'stable/havana')),
-             'org/project5': str(upstream['org/project5'].commit(
-                                 'stable/havana')),
-             'org/project6': str(upstream['org/project6'].commit('master')),
-             },
-            {'org/project1': self.builds[0].parameters['ZUUL_COMMIT'],
-             'org/project2': str(upstream['org/project2'].commit(
-                                 'stable/havana')),
-             'org/project3': str(upstream['org/project3'].commit(
-                                 'stable/havana')),
-             'org/project4': str(upstream['org/project4'].commit(
-                                 'stable/havana')),
-             'org/project5': str(upstream['org/project5'].commit(
-                                 'stable/havana')),
-             'org/project6': str(upstream['org/project6'].commit('master')),
-             },
-            {'org/project1': self.builds[0].parameters['ZUUL_COMMIT'],
-             'org/project2': str(upstream['org/project2'].commit(
-                                 'stable/havana')),
-             'org/project3': self.builds[2].parameters['ZUUL_COMMIT'],
-             'org/project4': str(upstream['org/project4'].commit(
-                                 'stable/havana')),
-
-             'org/project5': str(upstream['org/project5'].commit(
-                                 'stable/havana')),
-             'org/project6': str(upstream['org/project6'].commit('master')),
-             },
-            {'org/project1': self.builds[0].parameters['ZUUL_COMMIT'],
-             'org/project2': str(upstream['org/project2'].commit(
-                                 'stable/havana')),
-             'org/project3': self.builds[2].parameters['ZUUL_COMMIT'],
-             'org/project4': str(upstream['org/project4'].commit(
-                                 'stable/havana')),
-             'org/project5': str(upstream['org/project5'].commit(
-                                 'stable/havana')),
-             'org/project6': str(upstream['org/project6'].commit('master')),
-             },
-            {'org/project1': self.builds[0].parameters['ZUUL_COMMIT'],
-             'org/project2': str(upstream['org/project2'].commit(
-                                 'stable/havana')),
-             'org/project3': self.builds[2].parameters['ZUUL_COMMIT'],
-             'org/project4': self.builds[4].parameters['ZUUL_COMMIT'],
-             'org/project5': str(upstream['org/project5'].commit(
-                                 'stable/havana')),
-             'org/project6': str(upstream['org/project6'].commit('master')),
-             },
-        ]
-
-        for number, build in enumerate(self.builds):
-            self.log.debug("Build parameters: %s", build.parameters)
-            cloner = zuul.lib.cloner.Cloner(
-                git_base_url=self.upstream_root,
-                projects=projects,
-                workspace=self.workspace_root,
-                zuul_project=build.parameters.get('ZUUL_PROJECT', None),
-                zuul_branch=build.parameters['ZUUL_BRANCH'],
-                zuul_ref=build.parameters['ZUUL_REF'],
-                zuul_url=self.src_root,
-                branch='stable/havana',  # Old branch for upgrade
-            )
-            cloner.execute()
-            work = self.getWorkspaceRepos(projects)
-            state = states[number]
-
-            for project in projects:
-                self.assertEquals(state[project],
-                                  str(work[project].commit('HEAD')),
-                                  'Project %s commit for build %s should '
-                                  'be correct on old side of upgrade' %
-                                  (project, number))
-            shutil.rmtree(self.workspace_root)
-
-        # Check the new side of the upgrade
-        states = [
-            {'org/project1': self.builds[0].parameters['ZUUL_COMMIT'],
-             'org/project2': str(upstream['org/project2'].commit('master')),
-             'org/project3': str(upstream['org/project3'].commit('master')),
-             'org/project4': str(upstream['org/project4'].commit('master')),
-             'org/project5': str(upstream['org/project5'].commit('master')),
-             'org/project6': str(upstream['org/project6'].commit('master')),
-             },
-            {'org/project1': self.builds[0].parameters['ZUUL_COMMIT'],
-             'org/project2': self.builds[1].parameters['ZUUL_COMMIT'],
-             'org/project3': str(upstream['org/project3'].commit('master')),
-             'org/project4': str(upstream['org/project4'].commit('master')),
-             'org/project5': str(upstream['org/project5'].commit('master')),
-             'org/project6': str(upstream['org/project6'].commit('master')),
-             },
-            {'org/project1': self.builds[0].parameters['ZUUL_COMMIT'],
-             'org/project2': self.builds[1].parameters['ZUUL_COMMIT'],
-             'org/project3': str(upstream['org/project3'].commit('master')),
-             'org/project4': str(upstream['org/project4'].commit('master')),
-             'org/project5': str(upstream['org/project5'].commit('master')),
-             'org/project6': str(upstream['org/project6'].commit('master')),
-             },
-            {'org/project1': self.builds[0].parameters['ZUUL_COMMIT'],
-             'org/project2': self.builds[1].parameters['ZUUL_COMMIT'],
-             'org/project3': self.builds[3].parameters['ZUUL_COMMIT'],
-             'org/project4': str(upstream['org/project4'].commit('master')),
-             'org/project5': str(upstream['org/project5'].commit('master')),
-             'org/project6': str(upstream['org/project6'].commit('master')),
-             },
-            {'org/project1': self.builds[0].parameters['ZUUL_COMMIT'],
-             'org/project2': self.builds[1].parameters['ZUUL_COMMIT'],
-             'org/project3': self.builds[3].parameters['ZUUL_COMMIT'],
-             'org/project4': str(upstream['org/project4'].commit('master')),
-             'org/project5': str(upstream['org/project5'].commit('master')),
-             'org/project6': str(upstream['org/project6'].commit('master')),
-             },
-        ]
-
-        for number, build in enumerate(self.builds):
-            self.log.debug("Build parameters: %s", build.parameters)
-            cloner = zuul.lib.cloner.Cloner(
-                git_base_url=self.upstream_root,
-                projects=projects,
-                workspace=self.workspace_root,
-                zuul_project=build.parameters.get('ZUUL_PROJECT', None),
-                zuul_branch=build.parameters['ZUUL_BRANCH'],
-                zuul_ref=build.parameters['ZUUL_REF'],
-                zuul_url=self.src_root,
-                branch='master',  # New branch for upgrade
-            )
-            cloner.execute()
-            work = self.getWorkspaceRepos(projects)
-            state = states[number]
-
-            for project in projects:
-                self.assertEquals(state[project],
-                                  str(work[project].commit('HEAD')),
-                                  'Project %s commit for build %s should '
-                                  'be correct on old side of upgrade' %
-                                  (project, number))
-            shutil.rmtree(self.workspace_root)
-
-        self.worker.hold_jobs_in_build = False
-        self.worker.release()
-        self.waitUntilSettled()
-
-    @skip("Disabled for early v3 development")
-    def test_project_override(self):
-        self.worker.hold_jobs_in_build = True
-        projects = ['org/project1', 'org/project2', 'org/project3',
-                    'org/project4', 'org/project5', 'org/project6']
-
-        self.create_branch('org/project3', 'stable/havana')
-        self.create_branch('org/project4', 'stable/havana')
-        self.create_branch('org/project6', 'stable/havana')
-        A = self.fake_gerrit.addFakeChange('org/project1', 'master', 'A')
-        B = self.fake_gerrit.addFakeChange('org/project1', 'master', 'B')
-        C = self.fake_gerrit.addFakeChange('org/project2', 'master', 'C')
-        D = self.fake_gerrit.addFakeChange('org/project3', 'stable/havana',
-                                           'D')
-        A.addApproval('CRVW', 2)
-        B.addApproval('CRVW', 2)
-        C.addApproval('CRVW', 2)
-        D.addApproval('CRVW', 2)
-        self.fake_gerrit.addEvent(A.addApproval('APRV', 1))
-        self.fake_gerrit.addEvent(B.addApproval('APRV', 1))
-        self.fake_gerrit.addEvent(C.addApproval('APRV', 1))
-        self.fake_gerrit.addEvent(D.addApproval('APRV', 1))
-
-        self.waitUntilSettled()
-
-        self.assertEquals(4, len(self.builds), "Four builds are running")
-
-        upstream = self.getUpstreamRepos(projects)
-        states = [
-            {'org/project1': self.builds[0].parameters['ZUUL_COMMIT'],
-             'org/project2': str(upstream['org/project2'].commit('master')),
-             'org/project3': str(upstream['org/project3'].commit('master')),
-             'org/project4': str(upstream['org/project4'].commit('master')),
-             'org/project5': str(upstream['org/project5'].commit('master')),
-             'org/project6': str(upstream['org/project6'].commit('master')),
-             },
-            {'org/project1': self.builds[1].parameters['ZUUL_COMMIT'],
-             'org/project2': str(upstream['org/project2'].commit('master')),
-             'org/project3': str(upstream['org/project3'].commit('master')),
-             'org/project4': str(upstream['org/project4'].commit('master')),
-             'org/project5': str(upstream['org/project5'].commit('master')),
-             'org/project6': str(upstream['org/project6'].commit('master')),
-             },
-            {'org/project1': self.builds[1].parameters['ZUUL_COMMIT'],
-             'org/project2': self.builds[2].parameters['ZUUL_COMMIT'],
-             'org/project3': str(upstream['org/project3'].commit('master')),
-             'org/project4': str(upstream['org/project4'].commit('master')),
-             'org/project5': str(upstream['org/project5'].commit('master')),
-             'org/project6': str(upstream['org/project6'].commit('master')),
-             },
-            {'org/project1': self.builds[1].parameters['ZUUL_COMMIT'],
-             'org/project2': self.builds[2].parameters['ZUUL_COMMIT'],
-             'org/project3': self.builds[3].parameters['ZUUL_COMMIT'],
-             'org/project4': str(upstream['org/project4'].commit('master')),
-             'org/project5': str(upstream['org/project5'].commit('master')),
-             'org/project6': str(upstream['org/project6'].commit(
-                                 'stable/havana')),
-             },
-        ]
-
-        for number, build in enumerate(self.builds):
-            self.log.debug("Build parameters: %s", build.parameters)
-            cloner = zuul.lib.cloner.Cloner(
-                git_base_url=self.upstream_root,
-                projects=projects,
-                workspace=self.workspace_root,
-                zuul_project=build.parameters.get('ZUUL_PROJECT', None),
-                zuul_branch=build.parameters['ZUUL_BRANCH'],
-                zuul_ref=build.parameters['ZUUL_REF'],
-                zuul_url=self.src_root,
-                project_branches={'org/project4': 'master'},
-            )
-            cloner.execute()
-            work = self.getWorkspaceRepos(projects)
-            state = states[number]
-
-            for project in projects:
-                self.assertEquals(state[project],
-                                  str(work[project].commit('HEAD')),
-                                  'Project %s commit for build %s should '
-                                  'be correct' % (project, number))
-            shutil.rmtree(self.workspace_root)
-
-        self.worker.hold_jobs_in_build = False
-        self.worker.release()
-        self.waitUntilSettled()
-
-    @skip("Disabled for early v3 development")
-    def test_periodic(self):
-        self.worker.hold_jobs_in_build = True
-        self.create_branch('org/project', 'stable/havana')
-        self.updateConfigLayout(
-            'tests/fixtures/layout-timer.yaml')
-        self.sched.reconfigure(self.config)
-        self.registerJobs()
-
-        # The pipeline triggers every second, so we should have seen
-        # several by now.
-        time.sleep(5)
-        self.waitUntilSettled()
-
-        builds = self.builds[:]
-
-        self.worker.hold_jobs_in_build = False
-        # Stop queuing timer triggered jobs so that the assertions
-        # below don't race against more jobs being queued.
-        self.updateConfigLayout(
-            'tests/fixtures/layout-no-timer.yaml')
-        self.sched.reconfigure(self.config)
-        self.registerJobs()
-        self.worker.release()
-        self.waitUntilSettled()
-
-        projects = ['org/project']
-
-        self.assertEquals(2, len(builds), "Two builds are running")
-
-        upstream = self.getUpstreamRepos(projects)
-        states = [
-            {'org/project':
-                str(upstream['org/project'].commit('stable/havana')),
-             },
-            {'org/project':
-                str(upstream['org/project'].commit('stable/havana')),
-             },
-        ]
-
-        for number, build in enumerate(builds):
-            self.log.debug("Build parameters: %s", build.parameters)
-            cloner = zuul.lib.cloner.Cloner(
-                git_base_url=self.upstream_root,
-                projects=projects,
-                workspace=self.workspace_root,
-                zuul_project=build.parameters.get('ZUUL_PROJECT', None),
-                zuul_branch=build.parameters.get('ZUUL_BRANCH', None),
-                zuul_ref=build.parameters.get('ZUUL_REF', None),
-                zuul_url=self.src_root,
-                branch='stable/havana',
-            )
-            cloner.execute()
-            work = self.getWorkspaceRepos(projects)
-            state = states[number]
-
-            for project in projects:
-                self.assertEquals(state[project],
-                                  str(work[project].commit('HEAD')),
-                                  'Project %s commit for build %s should '
-                                  'be correct' % (project, number))
-
-            shutil.rmtree(self.workspace_root)
-
-        self.worker.hold_jobs_in_build = False
-        self.worker.release()
-        self.waitUntilSettled()
-
-    @skip("Disabled for early v3 development")
-    def test_periodic_update(self):
-        # Test that the merger correctly updates its local repository
-        # before running a periodic job.
-
-        # Prime the merger with the current state
-        A = self.fake_gerrit.addFakeChange('org/project', 'master', 'A')
-        self.fake_gerrit.addEvent(A.getPatchsetCreatedEvent(1))
-        self.waitUntilSettled()
-
-        # Merge a different change
-        B = self.fake_gerrit.addFakeChange('org/project', 'master', 'B')
-        B.setMerged()
-
-        # Start a periodic job
-        self.worker.hold_jobs_in_build = True
-        self.executor.negative_function_cache_ttl = 0
-        self.config.set('zuul', 'layout_config',
-                        'tests/fixtures/layout-timer.yaml')
-        self.sched.reconfigure(self.config)
-        self.registerJobs()
-
-        # The pipeline triggers every second, so we should have seen
-        # several by now.
-        time.sleep(5)
-        self.waitUntilSettled()
-
-        builds = self.builds[:]
-
-        self.worker.hold_jobs_in_build = False
-        # Stop queuing timer triggered jobs so that the assertions
-        # below don't race against more jobs being queued.
-        self.config.set('zuul', 'layout_config',
-                        'tests/fixtures/layout-no-timer.yaml')
-        self.sched.reconfigure(self.config)
-        self.registerJobs()
-        self.worker.release()
-        self.waitUntilSettled()
-
-        projects = ['org/project']
-
-        self.assertEquals(2, len(builds), "Two builds are running")
-
-        upstream = self.getUpstreamRepos(projects)
-        self.assertEqual(upstream['org/project'].commit('master').hexsha,
-                         B.patchsets[0]['revision'])
-        states = [
-            {'org/project':
-                str(upstream['org/project'].commit('master')),
-             },
-            {'org/project':
-                str(upstream['org/project'].commit('master')),
-             },
-        ]
-
-        for number, build in enumerate(builds):
-            self.log.debug("Build parameters: %s", build.parameters)
-            cloner = zuul.lib.cloner.Cloner(
-                git_base_url=self.upstream_root,
-                projects=projects,
-                workspace=self.workspace_root,
-                zuul_project=build.parameters.get('ZUUL_PROJECT', None),
-                zuul_branch=build.parameters.get('ZUUL_BRANCH', None),
-                zuul_ref=build.parameters.get('ZUUL_REF', None),
-                zuul_url=self.git_root,
-            )
-            cloner.execute()
-            work = self.getWorkspaceRepos(projects)
-            state = states[number]
-
-            for project in projects:
-                self.assertEquals(state[project],
-                                  str(work[project].commit('HEAD')),
-                                  'Project %s commit for build %s should '
-                                  'be correct' % (project, number))
-
-            shutil.rmtree(self.workspace_root)
-
-        self.worker.hold_jobs_in_build = False
-        self.worker.release()
-        self.waitUntilSettled()
-
-    @skip("Disabled for early v3 development")
-    def test_post_checkout(self):
-        self.worker.hold_jobs_in_build = True
-        project = "org/project1"
-
-        A = self.fake_gerrit.addFakeChange(project, 'master', 'A')
-        event = A.getRefUpdatedEvent()
-        A.setMerged()
-        self.fake_gerrit.addEvent(event)
-        self.waitUntilSettled()
-
-        build = self.builds[0]
-        state = {'org/project1': build.parameters['ZUUL_COMMIT']}
-
-        build.release()
-        self.waitUntilSettled()
-
-        cloner = zuul.lib.cloner.Cloner(
-            git_base_url=self.upstream_root,
-            projects=[project],
-            workspace=self.workspace_root,
-            zuul_project=build.parameters.get('ZUUL_PROJECT', None),
-            zuul_branch=build.parameters.get('ZUUL_BRANCH', None),
-            zuul_ref=build.parameters.get('ZUUL_REF', None),
-            zuul_newrev=build.parameters.get('ZUUL_NEWREV', None),
-            zuul_url=self.git_root,
-        )
-        cloner.execute()
-        work = self.getWorkspaceRepos([project])
-        self.assertEquals(state[project],
-                          str(work[project].commit('HEAD')),
-                          'Project %s commit for build %s should '
-                          'be correct' % (project, 0))
-        shutil.rmtree(self.workspace_root)
-
-    @skip("Disabled for early v3 development")
-    def test_post_and_master_checkout(self):
-        self.worker.hold_jobs_in_build = True
-        projects = ["org/project1", "org/project2"]
-
-        A = self.fake_gerrit.addFakeChange(projects[0], 'master', 'A')
-        event = A.getRefUpdatedEvent()
-        A.setMerged()
-        self.fake_gerrit.addEvent(event)
-        self.waitUntilSettled()
-
-        build = self.builds[0]
-        upstream = self.getUpstreamRepos(projects)
-        state = {'org/project1':
-                 build.parameters['ZUUL_COMMIT'],
-                 'org/project2':
-                 str(upstream['org/project2'].commit('master')),
-                 }
-
-        build.release()
-        self.waitUntilSettled()
-
-        cloner = zuul.lib.cloner.Cloner(
-            git_base_url=self.upstream_root,
-            projects=projects,
-            workspace=self.workspace_root,
-            zuul_project=build.parameters.get('ZUUL_PROJECT', None),
-            zuul_branch=build.parameters.get('ZUUL_BRANCH', None),
-            zuul_ref=build.parameters.get('ZUUL_REF', None),
-            zuul_newrev=build.parameters.get('ZUUL_NEWREV', None),
-            zuul_url=self.git_root,
-        )
-        cloner.execute()
-        work = self.getWorkspaceRepos(projects)
-
-        for project in projects:
-            self.assertEquals(state[project],
-                              str(work[project].commit('HEAD')),
-                              'Project %s commit for build %s should '
-                              'be correct' % (project, 0))
-        shutil.rmtree(self.workspace_root)
diff --git a/tests/unit/test_cloner_cmd.py b/tests/unit/test_cloner_cmd.py
index 2d8747f..84bd243 100644
--- a/tests/unit/test_cloner_cmd.py
+++ b/tests/unit/test_cloner_cmd.py
@@ -26,7 +26,7 @@
 
     def test_default_cache_dir_empty(self):
         self.app.parse_arguments(['base', 'repo'])
-        self.assertEqual(None, self.app.args.cache_dir)
+        self.assertIsNone(self.app.args.cache_dir)
 
     def test_default_cache_dir_environ(self):
         try:
diff --git a/tests/unit/test_executor.py b/tests/unit/test_executor.py
new file mode 100644
index 0000000..39b6070
--- /dev/null
+++ b/tests/unit/test_executor.py
@@ -0,0 +1,307 @@
+#!/usr/bin/env python
+
+# Copyright 2012 Hewlett-Packard Development Company, L.P.
+# Copyright 2014 Wikimedia Foundation Inc.
+#
+# Licensed under the Apache License, Version 2.0 (the "License"); you may
+# not use this file except in compliance with the License. You may obtain
+# a copy of the License at
+#
+#      http://www.apache.org/licenses/LICENSE-2.0
+#
+# Unless required by applicable law or agreed to in writing, software
+# distributed under the License is distributed on an "AS IS" BASIS, WITHOUT
+# WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. See the
+# License for the specific language governing permissions and limitations
+# under the License.
+
+import logging
+import time
+
+from tests.base import ZuulTestCase, simple_layout
+
+
+class TestExecutorRepos(ZuulTestCase):
+    tenant_config_file = 'config/single-tenant/main.yaml'
+
+    log = logging.getLogger("zuul.test.executor")
+
+    def assertRepoState(self, repo, state, project, build, number):
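+        # 'state' may specify: 'branch' (the branch the workspace repo
+        # should have checked out), 'commit' (the exact sha HEAD should
+        # point to), and 'present'/'absent' (changes whose commit
+        # messages must or must not appear in the history reachable
+        # from HEAD).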
+        if 'branch' in state:
+            self.assertFalse(repo.head.is_detached,
+                             'Project %s commit for build %s #%s should '
+                             'not have a detached HEAD' % (
+                                 project, build, number))
+            self.assertEquals(repo.active_branch.name,
+                              state['branch'],
+                              'Project %s commit for build %s #%s should '
+                              'be on the correct branch' % (
+                                  project, build, number))
+        if 'commit' in state:
+            self.assertEquals(state['commit'],
+                              str(repo.commit('HEAD')),
+                              'Project %s commit for build %s #%s should '
+                              'be correct' % (
+                                  project, build, number))
+        ref = repo.commit('HEAD')
+        repo_messages = set(
+            [c.message.strip() for c in repo.iter_commits(ref)])
+        if 'present' in state:
+            for change in state['present']:
+                msg = '%s-1' % change.subject
+                self.assertTrue(msg in repo_messages,
+                                'Project %s for build %s #%s should '
+                                'have change %s' % (
+                                    project, build, number, change.subject))
+        if 'absent' in state:
+            for change in state['absent']:
+                msg = '%s-1' % change.subject
+                self.assertTrue(msg not in repo_messages,
+                                'Project %s for build %s #%s should '
+                                'not have change %s' % (
+                                    project, build, number, change.subject))
+
+    def assertBuildStates(self, states, projects):
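+        # Check each held build's workspace repos against the expected
+        # state for that build, then release the builds and wait for
+        # the queue to drain.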
+        for number, build in enumerate(self.builds):
+            work = build.getWorkspaceRepos(projects)
+            state = states[number]
+
+            for project in projects:
+                self.assertRepoState(work[project], state[project],
+                                     project, build, number)
+
+        self.executor_server.hold_jobs_in_build = False
+        self.executor_server.release()
+        self.waitUntilSettled()
+
+    @simple_layout('layouts/repo-checkout-two-project.yaml')
+    def test_one_branch(self):
+        self.executor_server.hold_jobs_in_build = True
+
+        p1 = 'review.example.com/org/project1'
+        p2 = 'review.example.com/org/project2'
+        projects = [p1, p2]
+        A = self.fake_gerrit.addFakeChange('org/project1', 'master', 'A')
+        B = self.fake_gerrit.addFakeChange('org/project2', 'master', 'B')
+        A.addApproval('code-review', 2)
+        B.addApproval('code-review', 2)
+        self.fake_gerrit.addEvent(A.addApproval('approved', 1))
+        self.fake_gerrit.addEvent(B.addApproval('approved', 1))
+
+        self.waitUntilSettled()
+
+        self.assertEquals(2, len(self.builds), "Two builds are running")
+
+        upstream = self.getUpstreamRepos(projects)
+        states = [
+            {p1: dict(present=[A], absent=[B], branch='master'),
+             p2: dict(commit=str(upstream[p2].commit('master')),
+                      branch='master'),
+             },
+            {p1: dict(present=[A], absent=[B], branch='master'),
+             p2: dict(present=[B], absent=[A], branch='master'),
+             },
+        ]
+
+        self.assertBuildStates(states, projects)
+
+    @simple_layout('layouts/repo-checkout-four-project.yaml')
+    def test_multi_branch(self):
+        self.executor_server.hold_jobs_in_build = True
+
+        p1 = 'review.example.com/org/project1'
+        p2 = 'review.example.com/org/project2'
+        p3 = 'review.example.com/org/project3'
+        p4 = 'review.example.com/org/project4'
+        projects = [p1, p2, p3, p4]
+
+        self.create_branch('org/project2', 'stable/havana')
+        self.create_branch('org/project4', 'stable/havana')
+        A = self.fake_gerrit.addFakeChange('org/project1', 'master', 'A')
+        B = self.fake_gerrit.addFakeChange('org/project2', 'stable/havana',
+                                           'B')
+        C = self.fake_gerrit.addFakeChange('org/project3', 'master', 'C')
+        A.addApproval('code-review', 2)
+        B.addApproval('code-review', 2)
+        C.addApproval('code-review', 2)
+        self.fake_gerrit.addEvent(A.addApproval('approved', 1))
+        self.fake_gerrit.addEvent(B.addApproval('approved', 1))
+        self.fake_gerrit.addEvent(C.addApproval('approved', 1))
+
+        self.waitUntilSettled()
+
+        self.assertEquals(3, len(self.builds), "Three builds are running")
+
+        upstream = self.getUpstreamRepos(projects)
+        states = [
+            {p1: dict(present=[A], absent=[B, C], branch='master'),
+             p2: dict(commit=str(upstream[p2].commit('master')),
+                      branch='master'),
+             p3: dict(commit=str(upstream[p3].commit('master')),
+                      branch='master'),
+             p4: dict(commit=str(upstream[p4].commit('master')),
+                      branch='master'),
+             },
+            {p1: dict(present=[A], absent=[B, C], branch='master'),
+             p2: dict(present=[B], absent=[A, C], branch='stable/havana'),
+             p3: dict(commit=str(upstream[p3].commit('master')),
+                      branch='master'),
+             p4: dict(commit=str(upstream[p4].commit('stable/havana')),
+                      branch='stable/havana'),
+             },
+            {p1: dict(present=[A], absent=[B, C], branch='master'),
+             p2: dict(commit=str(upstream[p2].commit('master')),
+                      branch='master'),
+             p3: dict(present=[C], absent=[A, B], branch='master'),
+             p4: dict(commit=str(upstream[p4].commit('master')),
+                      branch='master'),
+             },
+        ]
+
+        self.assertBuildStates(states, projects)
+
+    @simple_layout('layouts/repo-checkout-six-project.yaml')
+    def test_project_override(self):
+        self.executor_server.hold_jobs_in_build = True
+
+        p1 = 'review.example.com/org/project1'
+        p2 = 'review.example.com/org/project2'
+        p3 = 'review.example.com/org/project3'
+        p4 = 'review.example.com/org/project4'
+        p5 = 'review.example.com/org/project5'
+        p6 = 'review.example.com/org/project6'
+        projects = [p1, p2, p3, p4, p5, p6]
+
+        self.create_branch('org/project3', 'stable/havana')
+        self.create_branch('org/project4', 'stable/havana')
+        self.create_branch('org/project6', 'stable/havana')
+        A = self.fake_gerrit.addFakeChange('org/project1', 'master', 'A')
+        B = self.fake_gerrit.addFakeChange('org/project1', 'master', 'B')
+        C = self.fake_gerrit.addFakeChange('org/project2', 'master', 'C')
+        D = self.fake_gerrit.addFakeChange('org/project3', 'stable/havana',
+                                           'D')
+        A.addApproval('code-review', 2)
+        B.addApproval('code-review', 2)
+        C.addApproval('code-review', 2)
+        D.addApproval('code-review', 2)
+        self.fake_gerrit.addEvent(A.addApproval('approved', 1))
+        self.fake_gerrit.addEvent(B.addApproval('approved', 1))
+        self.fake_gerrit.addEvent(C.addApproval('approved', 1))
+        self.fake_gerrit.addEvent(D.addApproval('approved', 1))
+
+        self.waitUntilSettled()
+
+        self.assertEquals(4, len(self.builds), "Four builds are running")
+
+        upstream = self.getUpstreamRepos(projects)
+        states = [
+            {p1: dict(present=[A], absent=[B, C, D], branch='master'),
+             p2: dict(commit=str(upstream[p2].commit('master')),
+                      branch='master'),
+             p3: dict(commit=str(upstream[p3].commit('master')),
+                      branch='master'),
+             p4: dict(commit=str(upstream[p4].commit('master')),
+                      branch='master'),
+             p5: dict(commit=str(upstream[p5].commit('master')),
+                      branch='master'),
+             p6: dict(commit=str(upstream[p6].commit('master')),
+                      branch='master'),
+             },
+            {p1: dict(present=[A, B], absent=[C, D], branch='master'),
+             p2: dict(commit=str(upstream[p2].commit('master')),
+                      branch='master'),
+             p3: dict(commit=str(upstream[p3].commit('master')),
+                      branch='master'),
+             p4: dict(commit=str(upstream[p4].commit('master')),
+                      branch='master'),
+             p5: dict(commit=str(upstream[p5].commit('master')),
+                      branch='master'),
+             p6: dict(commit=str(upstream[p6].commit('master')),
+                      branch='master'),
+             },
+            {p1: dict(present=[A, B], absent=[C, D], branch='master'),
+             p2: dict(present=[C], absent=[A, B, D], branch='master'),
+             p3: dict(commit=str(upstream[p3].commit('master')),
+                      branch='master'),
+             p4: dict(commit=str(upstream[p4].commit('master')),
+                      branch='master'),
+             p5: dict(commit=str(upstream[p5].commit('master')),
+                      branch='master'),
+             p6: dict(commit=str(upstream[p6].commit('master')),
+                      branch='master'),
+             },
+            {p1: dict(present=[A, B], absent=[C, D], branch='master'),
+             p2: dict(present=[C], absent=[A, B, D], branch='master'),
+             p3: dict(present=[D], absent=[A, B, C],
+                      branch='stable/havana'),
+             p4: dict(commit=str(upstream[p4].commit('master')),
+                      branch='master'),
+             p5: dict(commit=str(upstream[p5].commit('master')),
+                      branch='master'),
+             p6: dict(commit=str(upstream[p6].commit('stable/havana')),
+                      branch='stable/havana'),
+             },
+        ]
+
+        self.assertBuildStates(states, projects)
+
+    def test_periodic(self):
+        # This test cannot use simple_layout because it must start
+        # with a configuration which does not include a
+        # timer-triggered job so that we have an opportunity to set
+        # the hold flag before the first job.
+        self.executor_server.hold_jobs_in_build = True
+        # Start timer trigger - also org/project
+        self.commitConfigUpdate('common-config',
+                                'layouts/repo-checkout-timer.yaml')
+        self.sched.reconfigure(self.config)
+
+        p1 = 'review.example.com/org/project1'
+        projects = [p1]
+        self.create_branch('org/project1', 'stable/havana')
+
+        # The pipeline triggers every second, so we should have seen
+        # several by now.
+        time.sleep(5)
+        self.waitUntilSettled()
+
+        # Stop queuing timer triggered jobs so that the assertions
+        # below don't race against more jobs being queued.
+        self.commitConfigUpdate('common-config',
+                                'layouts/repo-checkout-no-timer.yaml')
+        self.sched.reconfigure(self.config)
+
+        self.assertEquals(1, len(self.builds), "One build is running")
+
+        upstream = self.getUpstreamRepos(projects)
+        states = [
+            {p1: dict(commit=str(upstream[p1].commit('stable/havana')),
+                      branch='stable/havana'),
+             },
+        ]
+
+        self.assertBuildStates(states, projects)
+
+    @simple_layout('layouts/repo-checkout-post.yaml')
+    def test_post_and_master_checkout(self):
+        self.executor_server.hold_jobs_in_build = True
+        p1 = "review.example.com/org/project1"
+        p2 = "review.example.com/org/project2"
+        projects = [p1, p2]
+
+        A = self.fake_gerrit.addFakeChange('org/project1', 'master', 'A')
+        event = A.getRefUpdatedEvent()
+        A.setMerged()
+        self.fake_gerrit.addEvent(event)
+        self.waitUntilSettled()
+
+        upstream = self.getUpstreamRepos(projects)
+        states = [
+            {p1: dict(commit=str(upstream[p1].commit('master')),
+                      present=[A], branch='master'),
+             p2: dict(commit=str(upstream[p2].commit('master')),
+                      absent=[A], branch='master'),
+             },
+        ]
+
+        self.assertBuildStates(states, projects)
diff --git a/tests/unit/test_github_driver.py b/tests/unit/test_github_driver.py
index 2474095..2013ee0 100644
--- a/tests/unit/test_github_driver.py
+++ b/tests/unit/test_github_driver.py
@@ -300,6 +300,9 @@
         self.assertEqual('tenant-one/reporting', report_status['context'])
         self.assertEqual('success', report_status['state'])
         self.assertEqual(2, len(A.comments))
+        report_url = ('http://logs.example.com/reporting/%s/%s/%s/' %
+                      (A.project, A.number, A.head_sha))
+        self.assertEqual(report_url, report_status['url'])
 
     @simple_layout('layouts/merging-github.yaml', driver='github')
     def test_report_pull_merge(self):
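
The new status-url reporter option exercised above is a format pattern that the queue item expands with its change, pipeline, job and build attributes via str.format (see the QueueItem.formatUrlPattern change in zuul/model.py further down). A minimal sketch of that expansion follows; the pattern string and values are assumptions, not taken from the test layout, though the placeholders mirror the attributes the test checks.

    # Sketch only: how a status-url pattern could expand.  The pattern and
    # values below are assumptions, not the layout used by the test above.
    from types import SimpleNamespace

    pattern = ('http://logs.example.com/{pipeline.name}/'
               '{change.project}/{change.number}/{change.patchset}/')
    url = pattern.format(
        change=SimpleNamespace(project='org/project', number='1',
                               patchset='abcdef0'),
        pipeline=SimpleNamespace(name='reporting'),
        job={}, build={})
    # -> http://logs.example.com/reporting/org/project/1/abcdef0/
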
diff --git a/tests/unit/test_inventory.py b/tests/unit/test_inventory.py
new file mode 100644
index 0000000..2835d30
--- /dev/null
+++ b/tests/unit/test_inventory.py
@@ -0,0 +1,82 @@
+#!/usr/bin/env python
+
+# Copyright 2017 Red Hat, Inc.
+#
+# Licensed under the Apache License, Version 2.0 (the "License"); you may
+# not use this file except in compliance with the License. You may obtain
+# a copy of the License at
+#
+#      http://www.apache.org/licenses/LICENSE-2.0
+#
+# Unless required by applicable law or agreed to in writing, software
+# distributed under the License is distributed on an "AS IS" BASIS, WITHOUT
+# WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. See the
+# License for the specific language governing permissions and limitations
+# under the License.
+
+import os
+
+import yaml
+
+from tests.base import ZuulTestCase
+
+
+class TestInventory(ZuulTestCase):
+
+    tenant_config_file = 'config/inventory/main.yaml'
+
+    def setUp(self):
+        super(TestInventory, self).setUp()
+        self.executor_server.hold_jobs_in_build = True
+        A = self.fake_gerrit.addFakeChange('org/project', 'master', 'A')
+        self.fake_gerrit.addEvent(A.getPatchsetCreatedEvent(1))
+        self.waitUntilSettled()
+
+    def _get_build_inventory(self, name):
+        build = self.getBuildByName(name)
+        inv_path = os.path.join(build.jobdir.root, 'ansible', 'inventory.yaml')
+        return yaml.safe_load(open(inv_path, 'r'))
+
+    def test_single_inventory(self):
+
+        inventory = self._get_build_inventory('single-inventory')
+
+        all_nodes = ('ubuntu-xenial',)
+        self.assertIn('all', inventory)
+        self.assertIn('hosts', inventory['all'])
+        self.assertIn('vars', inventory['all'])
+        for node_name in all_nodes:
+            self.assertIn(node_name, inventory['all']['hosts'])
+        self.assertIn('zuul', inventory['all']['vars'])
+        z_vars = inventory['all']['vars']['zuul']
+        self.assertIn('executor', z_vars)
+        self.assertIn('src_root', z_vars['executor'])
+        self.assertIn('job', z_vars)
+        self.assertEqual(z_vars['job'], 'single-inventory')
+
+        self.executor_server.release()
+        self.waitUntilSettled()
+
+    def test_group_inventory(self):
+
+        inventory = self._get_build_inventory('group-inventory')
+
+        all_nodes = ('controller', 'compute1', 'compute2')
+        self.assertIn('all', inventory)
+        self.assertIn('hosts', inventory['all'])
+        self.assertIn('vars', inventory['all'])
+        for group_name in ('ceph-osd', 'ceph-monitor'):
+            self.assertIn(group_name, inventory)
+        for node_name in all_nodes:
+            self.assertIn(node_name, inventory['all']['hosts'])
+            self.assertIn(node_name,
+                          inventory['ceph-monitor']['hosts'])
+        self.assertIn('zuul', inventory['all']['vars'])
+        z_vars = inventory['all']['vars']['zuul']
+        self.assertIn('executor', z_vars)
+        self.assertIn('src_root', z_vars['executor'])
+        self.assertIn('job', z_vars)
+        self.assertEqual(z_vars['job'], 'group-inventory')
+
+        self.executor_server.release()
+        self.waitUntilSettled()
diff --git a/tests/unit/test_model.py b/tests/unit/test_model.py
index 5f968b4..e7e53c4 100644
--- a/tests/unit/test_model.py
+++ b/tests/unit/test_model.py
@@ -106,7 +106,7 @@
         base.auth = model.AuthContext()
 
         py27 = model.Job('py27')
-        self.assertEqual(None, py27.timeout)
+        self.assertIsNone(py27.timeout)
         py27.inheritFrom(base)
         self.assertEqual(30, py27.timeout)
         self.assertEqual(['base-pre'],
@@ -115,7 +115,7 @@
                          [x.path for x in py27.run])
         self.assertEqual(['base-post'],
                          [x.path for x in py27.post_run])
-        self.assertEqual(None, py27.auth)
+        self.assertIsNone(py27.auth)
 
     def test_job_variants(self):
         # This simulates freezing a job.
@@ -433,11 +433,11 @@
             })
         layout.addJob(in_repo_job_with_inherit_false)
 
-        self.assertEqual(None, in_repo_job_without_inherit.auth)
+        self.assertIsNone(in_repo_job_without_inherit.auth)
         self.assertEqual(1, len(in_repo_job_with_inherit.auth.secrets))
         self.assertEqual(in_repo_job_with_inherit.auth.secrets[0].name,
                          'pypi-credentials')
-        self.assertEqual(None, in_repo_job_with_inherit_false.auth)
+        self.assertIsNone(in_repo_job_with_inherit_false.auth)
 
     def test_job_inheritance_job_tree(self):
         tenant = model.Tenant('tenant')
diff --git a/tests/unit/test_scheduler.py b/tests/unit/test_scheduler.py
index 9cc5e60..0ac42c1 100755
--- a/tests/unit/test_scheduler.py
+++ b/tests/unit/test_scheduler.py
@@ -63,7 +63,11 @@
         self.assertIsNone(self.getJobFromHistory('project-test2').node)
 
         # TODOv3(jeblair): we may want to report stats by tenant (also?).
-        self.assertReportedStat('gerrit.event.comment-added', value='1|c')
+        # Per-driver
+        self.assertReportedStat('zuul.event.gerrit.comment-added', value='1|c')
+        # Per-driver per-connection
+        self.assertReportedStat('zuul.event.gerrit.gerrit.comment-added',
+                                value='1|c')
         self.assertReportedStat('zuul.pipeline.gate.current_changes',
                                 value='1|g')
         self.assertReportedStat('zuul.pipeline.gate.job.project-merge.SUCCESS',
@@ -3545,9 +3549,9 @@
         self.assertEqual([], running_item['failing_reasons'])
         self.assertEqual([], running_item['items_behind'])
         self.assertEqual('https://hostname/1', running_item['url'])
-        self.assertEqual(None, running_item['item_ahead'])
+        self.assertIsNone(running_item['item_ahead'])
         self.assertEqual('org/project', running_item['project'])
-        self.assertEqual(None, running_item['remaining_time'])
+        self.assertIsNone(running_item['remaining_time'])
         self.assertEqual(True, running_item['active'])
         self.assertEqual('1,1', running_item['id'])
 
@@ -3562,7 +3566,7 @@
                 self.assertEqual(7, len(job['worker']))
                 self.assertEqual(False, job['canceled'])
                 self.assertEqual(True, job['voting'])
-                self.assertEqual(None, job['result'])
+                self.assertIsNone(job['result'])
                 self.assertEqual('gate', job['pipeline'])
                 break
 
diff --git a/tests/unit/test_v3.py b/tests/unit/test_v3.py
index 18a49db..707515a 100644
--- a/tests/unit/test_v3.py
+++ b/tests/unit/test_v3.py
@@ -328,6 +328,46 @@
         self.assertIn('not permitted to shadow', A.messages[0],
                       "A should have a syntax error reported")
 
+    def test_untrusted_pipeline_error(self):
+        in_repo_conf = textwrap.dedent(
+            """
+            - pipeline:
+                name: test
+            """)
+
+        file_dict = {'.zuul.yaml': in_repo_conf}
+        A = self.fake_gerrit.addFakeChange('org/project', 'master', 'A',
+                                           files=file_dict)
+        A.addApproval('code-review', 2)
+        self.fake_gerrit.addEvent(A.addApproval('approved', 1))
+        self.waitUntilSettled()
+
+        self.assertEqual(A.data['status'], 'NEW')
+        self.assertEqual(A.reported, 1,
+                         "A should report failure")
+        self.assertIn('Pipelines may not be defined', A.messages[0],
+                      "A should have a syntax error reported")
+
+    def test_untrusted_project_error(self):
+        in_repo_conf = textwrap.dedent(
+            """
+            - project:
+                name: org/project1
+            """)
+
+        file_dict = {'.zuul.yaml': in_repo_conf}
+        A = self.fake_gerrit.addFakeChange('org/project', 'master', 'A',
+                                           files=file_dict)
+        A.addApproval('code-review', 2)
+        self.fake_gerrit.addEvent(A.addApproval('approved', 1))
+        self.waitUntilSettled()
+
+        self.assertEqual(A.data['status'], 'NEW')
+        self.assertEqual(A.reported, 1,
+                         "A should report failure")
+        self.assertIn('the only project definition permitted', A.messages[0],
+                      "A should have a syntax error reported")
+
 
 class TestAnsible(AnsibleZuulTestCase):
     # A temporary class to hold new tests while others are disabled
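
For contrast with the two error cases added above, here is a rough sketch of an in-repo configuration the loader does accept from an untrusted project: job definitions plus a project stanza naming the project itself. The job and pipeline names are illustrative assumptions, not taken from the test fixtures.

    # Illustrative only: jobs and a self-referencing project stanza are the
    # in-repo constructs permitted for untrusted projects.  'check' and
    # 'project-test' are assumed names.
    import textwrap

    in_repo_conf = textwrap.dedent(
        """
        - job:
            name: project-test

        - project:
            name: org/project
            check:
              jobs:
                - project-test
        """)
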
diff --git a/zuul/configloader.py b/zuul/configloader.py
index c0267ed..f78e8a4 100644
--- a/zuul/configloader.py
+++ b/zuul/configloader.py
@@ -47,6 +47,16 @@
     pass
 
 
+class NodeFromGroupNotFoundError(Exception):
+    def __init__(self, nodeset, node, group):
+        message = textwrap.dedent("""\
+        In nodeset {nodeset}, the group {group} contains a
+        node named {node} which is not defined in the nodeset.""")
+        message = textwrap.fill(message.format(nodeset=nodeset,
+                                               node=node, group=group))
+        super(NodeFromGroupNotFoundError, self).__init__(message)
+
+
 class ProjectNotFoundError(Exception):
     def __init__(self, project):
         message = textwrap.dedent("""\
@@ -58,6 +68,24 @@
         super(ProjectNotFoundError, self).__init__(message)
 
 
+class PipelineNotPermittedError(Exception):
+    def __init__(self):
+        message = textwrap.dedent("""\
+        Pipelines may not be defined in untrusted repos;
+        they may only be defined in config repos.""")
+        message = textwrap.fill(message)
+        super(PipelineNotPermittedError, self).__init__(message)
+
+
+class ProjectNotPermittedError(Exception):
+    def __init__(self):
+        message = textwrap.dedent("""\
+        Within an untrusted project, the only project definition
+        permitted is that of the project itself.""")
+        message = textwrap.fill(message)
+        super(ProjectNotPermittedError, self).__init__(message)
+
+
 def indent(s):
     return '\n'.join(['  ' + x for x in s.split('\n')])
 
@@ -169,8 +197,13 @@
                 vs.Required('image'): str,
                 }
 
+        group = {vs.Required('name'): str,
+                 vs.Required('nodes'): [str]
+                 }
+
         nodeset = {vs.Required('name'): str,
                    vs.Required('nodes'): [node],
+                   'groups': [group],
                    '_source_context': model.SourceContext,
                    '_start_mark': yaml.Mark,
                    }
@@ -182,9 +215,18 @@
         with configuration_exceptions('nodeset', conf):
             NodeSetParser.getSchema()(conf)
         ns = model.NodeSet(conf['name'])
+        node_names = []
         for conf_node in as_list(conf['nodes']):
             node = model.Node(conf_node['name'], conf_node['image'])
             ns.addNode(node)
+            node_names.append(conf_node['name'])
+        for conf_group in as_list(conf.get('groups', [])):
+            for node_name in conf_group['nodes']:
+                if node_name not in node_names:
+                    raise NodeFromGroupNotFoundError(conf['name'], node_name,
+                                                     conf_group['name'])
+            group = model.Group(conf_group['name'], conf_group['nodes'])
+            ns.addGroup(group)
         return ns
 
 
@@ -562,6 +604,11 @@
 
         configs = []
         for conf in conf_list:
+            with configuration_exceptions('project', conf):
+                if not conf['_source_context'].trusted:
+                    if project != conf['_source_context'].project:
+                        raise ProjectNotPermittedError()
+
             # Make a copy since we modify this later via pop
             conf = copy.deepcopy(conf)
             conf_templates = conf.pop('templates', [])
@@ -773,12 +820,12 @@
 
         for source_name, require_config in conf.get('require', {}).items():
             source = connections.getSource(source_name)
-            manager.changeish_filters.extend(
+            manager.ref_filters.extend(
                 source.getRequireFilters(require_config))
 
         for source_name, reject_config in conf.get('reject', {}).items():
             source = connections.getSource(source_name)
-            manager.changeish_filters.extend(
+            manager.ref_filters.extend(
                 source.getRejectFilters(reject_config))
 
         for trigger_name, trigger_config in conf.get('trigger').items():
@@ -879,7 +926,7 @@
 
         key_dir = os.path.dirname(project.private_key_file)
         if not os.path.isdir(key_dir):
-            os.makedirs(key_dir)
+            os.makedirs(key_dir, 0o700)
 
         TenantParser.log.info(
             "Generating RSA keypair for project %s" % (project.name,)
@@ -896,6 +943,9 @@
         with open(project.private_key_file, 'wb') as f:
             f.write(pem_private_key)
 
+        # Ensure private key is read/write for zuul user only.
+        os.chmod(project.private_key_file, 0o600)
+
     @staticmethod
     def _loadKeys(project):
         # Check the key files specified are there
@@ -1032,10 +1082,11 @@
 
     @staticmethod
     def _parseUntrustedProjectLayout(data, source_context):
-        # TODOv3(jeblair): this should implement some rules to protect
-        # aspects of the config that should not be changed in-repo
         config = model.UnparsedTenantConfig()
         config.extend(safe_load_yaml(data, source_context))
+        if config.pipelines:
+            with configuration_exceptions('pipeline', config.pipelines[0]):
+                raise PipelineNotPermittedError()
         return config
 
     @staticmethod
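
The nodeset schema above now accepts an optional groups list, and parsing rejects any group entry that names a node not defined in the same nodeset (NodeFromGroupNotFoundError). A sketch of what such a configuration could look like; all names and the image label are illustrative.

    # Illustrative nodeset with groups.  Every name under a group's 'nodes'
    # must match a node defined in the nodeset, or parsing raises
    # NodeFromGroupNotFoundError.
    import textwrap

    nodeset_conf = textwrap.dedent(
        """
        - nodeset:
            name: ceph-cluster
            nodes:
              - name: controller
                image: ubuntu-xenial
              - name: compute1
                image: ubuntu-xenial
            groups:
              - name: ceph-monitor
                nodes:
                  - controller
                  - compute1
              - name: ceph-osd
                nodes:
                  - compute1
        """)
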
diff --git a/zuul/connection/__init__.py b/zuul/connection/__init__.py
index 49624d7..90ab39c 100644
--- a/zuul/connection/__init__.py
+++ b/zuul/connection/__init__.py
@@ -14,6 +14,7 @@
 
 import abc
 
+import extras
 import six
 
 
@@ -43,6 +44,26 @@
         self.driver = driver
         self.connection_name = connection_name
         self.connection_config = connection_config
+        self.statsd = extras.try_import('statsd.statsd')
+
+    def logEvent(self, event):
+        self.log.debug(
+            'Scheduling {driver} event from {connection}: {event}'.format(
+                driver=self.driver.name,
+                connection=self.connection_name,
+                event=event.type))
+        try:
+            if self.statsd:
+                self.statsd.incr(
+                    'zuul.event.{driver}.{event}'.format(
+                        driver=self.driver.name, event=event.type))
+                self.statsd.incr(
+                    'zuul.event.{driver}.{connection}.{event}'.format(
+                        driver=self.driver.name,
+                        connection=self.connection_name,
+                        event=event.type))
+        except:
+            self.log.exception("Exception reporting event stats")
 
     def onLoad(self):
         pass
diff --git a/zuul/driver/gerrit/gerritconnection.py b/zuul/driver/gerrit/gerritconnection.py
index 85a16fb..fa43e66 100644
--- a/zuul/driver/gerrit/gerritconnection.py
+++ b/zuul/driver/gerrit/gerritconnection.py
@@ -19,7 +19,6 @@
 import threading
 import time
 from six.moves import queue as Queue
-from six.moves import urllib
 from six.moves import shlex_quote
 import paramiko
 import logging
@@ -151,6 +150,7 @@
             self.connection._getChange(event.change_number,
                                        event.patch_number,
                                        refresh=True)
+        self.connection.logEvent(event)
         self.connection.sched.addEvent(event)
 
     def run(self):
@@ -633,7 +633,7 @@
             if val is True:
                 cmd += ' --%s' % key
             else:
-                cmd += ' --%s %s' % (key, val)
+                cmd += ' --label %s=%s' % (key, val)
         cmd += ' %s' % change
         out, err = self._ssh(cmd)
         return err
@@ -707,6 +707,11 @@
             chunk, more_changes = _query_chunk("%s %s" % (query, resume))
         return alldata
 
+    def _uploadPack(self, project_name):
+        cmd = "git-upload-pack %s" % project_name
+        out, err = self._ssh(cmd, "0000")
+        return out
+
     def _open(self):
         client = paramiko.SSHClient()
         client.load_system_host_keys()
@@ -746,19 +751,13 @@
         return (out, err)
 
     def getInfoRefs(self, project):
-        url = "%s/p/%s/info/refs?service=git-upload-pack" % (
-            self.baseurl, project.name)
         try:
-            data = urllib.request.urlopen(url).read()
+            data = self._uploadPack(project)
         except:
-            self.log.error("Cannot get references from %s" % url)
-            raise  # keeps urllib error informations
+            self.log.error("Cannot get references from %s" % project)
+            raise  # keeps error information
         ret = {}
-        read_headers = False
         read_advertisement = False
-        if data[4] != '#':
-            raise Exception("Gerrit repository does not support "
-                            "git-upload-pack")
         i = 0
         while i < len(data):
             if len(data) - i < 4:
@@ -774,10 +773,6 @@
                 raise Exception("Invalid data in info/refs")
             line = data[i:i + plen]
             i += plen
-            if not read_headers:
-                if plen == 0:
-                    read_headers = True
-                continue
             if not read_advertisement:
                 read_advertisement = True
                 continue
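
getInfoRefs() now reads the ref advertisement over the existing SSH connection (git-upload-pack) instead of the smart-HTTP endpoint, so the '# service=' header handling is removed and only the pkt-line framing is parsed (the real parser additionally skips the first advertisement line, which carries capabilities). A simplified, stand-alone sketch of that framing, a 4-hex-digit length that includes its own prefix with '0000' as the flush packet; this is an illustration, not the connection code.

    # Round-trip a ref advertisement line through git's pkt-line framing.
    def pkt_encode(lines):
        data = ''
        for line in lines:
            data += '%04x%s' % (len(line) + 4, line)
        return data + '0000'            # flush packet terminates the stream

    def pkt_decode(data):
        i = 0
        lines = []
        while i < len(data):
            plen = int(data[i:i + 4], 16)
            i += 4
            if plen == 0:               # flush packet
                continue
            plen -= 4                   # length includes the 4-digit prefix
            lines.append(data[i:i + plen])
            i += plen
        return lines

    refs = ['%s refs/heads/master\n' % ('a' * 40)]
    assert pkt_decode(pkt_encode(refs)) == refs
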
diff --git a/zuul/driver/gerrit/gerritreporter.py b/zuul/driver/gerrit/gerritreporter.py
index f8e8b03..90c95e3 100644
--- a/zuul/driver/gerrit/gerritreporter.py
+++ b/zuul/driver/gerrit/gerritreporter.py
@@ -25,7 +25,7 @@
     name = 'gerrit'
     log = logging.getLogger("zuul.GerritReporter")
 
-    def report(self, pipeline, item):
+    def report(self, item):
         """Send a message to gerrit."""
 
         # If the source is no GerritSource we cannot report anything here.
@@ -38,7 +38,7 @@
                 self.connection.canonical_hostname:
             return
 
-        message = self._formatItemReport(pipeline, item)
+        message = self._formatItemReport(item)
 
         self.log.debug("Report change %s, params %s, message: %s" %
                        (item.change, self.config, message))
diff --git a/zuul/driver/github/githubconnection.py b/zuul/driver/github/githubconnection.py
index d0557b4..659d88b 100644
--- a/zuul/driver/github/githubconnection.py
+++ b/zuul/driver/github/githubconnection.py
@@ -119,7 +119,7 @@
 
         if event:
             event.project_hostname = self.connection.canonical_hostname
-            self.log.debug('Scheduling github event: {0}'.format(event.type))
+            self.connection.logEvent(event)
             self.connection.sched.addEvent(event)
 
     def _event_push(self, body):
diff --git a/zuul/driver/github/githubreporter.py b/zuul/driver/github/githubreporter.py
index 68c6af0..29edb8a 100644
--- a/zuul/driver/github/githubreporter.py
+++ b/zuul/driver/github/githubreporter.py
@@ -39,25 +39,25 @@
         if not isinstance(self._unlabels, list):
             self._unlabels = [self._unlabels]
 
-    def report(self, pipeline, item):
+    def report(self, item):
         """Comment on PR and set commit status."""
         if self._create_comment:
-            self.addPullComment(pipeline, item)
+            self.addPullComment(item)
         if (self._commit_status is not None and
             hasattr(item.change, 'patchset') and
             item.change.patchset is not None):
-            self.setPullStatus(pipeline, item)
+            self.setPullStatus(item)
         if (self._merge and
             hasattr(item.change, 'number')):
             self.mergePull(item)
             if not item.change.is_merged:
-                msg = self._formatItemReportMergeFailure(pipeline, item)
-                self.addPullComment(pipeline, item, msg)
+                msg = self._formatItemReportMergeFailure(item)
+                self.addPullComment(item, msg)
         if self._labels or self._unlabels:
             self.setLabels(item)
 
-    def addPullComment(self, pipeline, item, comment=None):
-        message = comment or self._formatItemReport(pipeline, item)
+    def addPullComment(self, item, comment=None):
+        message = comment or self._formatItemReport(item)
         project = item.change.project.name
         pr_number = item.change.number
         self.log.debug(
@@ -65,20 +65,23 @@
             (item.change, self.config, message))
         self.connection.commentPull(project, pr_number, message)
 
-    def setPullStatus(self, pipeline, item):
+    def setPullStatus(self, item):
         project = item.change.project.name
         sha = item.change.patchset
-        context = '%s/%s' % (pipeline.layout.tenant.name, pipeline.name)
+        context = '%s/%s' % (item.pipeline.layout.tenant.name,
+                             item.pipeline.name)
         state = self._commit_status
-        url = ''
-        if self.connection.sched.config.has_option('zuul', 'status_url'):
-            base = self.connection.sched.config.get('zuul', 'status_url')
-            url = '%s/#%s,%s' % (base,
-                                 item.change.number,
-                                 item.change.patchset)
+
+        url_pattern = self.config.get('status-url')
+        if not url_pattern:
+            sched_config = self.connection.sched.config
+            if sched_config.has_option('zuul', 'status_url'):
+                url_pattern = sched_config.get('zuul', 'status_url')
+        url = item.formatUrlPattern(url_pattern) if url_pattern else ''
+
         description = ''
-        if pipeline.description:
-            description = pipeline.description
+        if item.pipeline.description:
+            description = item.pipeline.description
 
         self.log.debug(
             'Reporting change %s, params %s, status:\n'
@@ -157,6 +160,7 @@
 def getSchema():
     github_reporter = v.Schema({
         'status': v.Any('pending', 'success', 'failure'),
+        'status-url': str,
         'comment': bool,
         'merge': bool,
         'label': scalar_or_list(str),
diff --git a/zuul/driver/smtp/smtpreporter.py b/zuul/driver/smtp/smtpreporter.py
index 35eb69f..1f232e9 100644
--- a/zuul/driver/smtp/smtpreporter.py
+++ b/zuul/driver/smtp/smtpreporter.py
@@ -24,9 +24,9 @@
     name = 'smtp'
     log = logging.getLogger("zuul.SMTPReporter")
 
-    def report(self, pipeline, item):
+    def report(self, item):
         """Send the compiled report message via smtp."""
-        message = self._formatItemReport(pipeline, item)
+        message = self._formatItemReport(item)
 
         self.log.debug("Report change %s, params %s, message: %s" %
                        (item.change, self.config, message))
diff --git a/zuul/driver/sql/sqlconnection.py b/zuul/driver/sql/sqlconnection.py
index 4b1b1a2..e478d33 100644
--- a/zuul/driver/sql/sqlconnection.py
+++ b/zuul/driver/sql/sqlconnection.py
@@ -43,6 +43,7 @@
             self.engine = sa.create_engine(self.dburi)
             self._migrate()
-            self._setup_tables()
+            self.zuul_buildset_table, self.zuul_build_table \
+                = self._setup_tables()
             self.tables_established = True
         except sa.exc.NoSuchModuleError:
             self.log.exception(
@@ -68,10 +70,11 @@
 
             alembic.command.upgrade(config, 'head')
 
-    def _setup_tables(self):
+    @staticmethod
+    def _setup_tables():
         metadata = sa.MetaData()
 
-        self.zuul_buildset_table = sa.Table(
+        zuul_buildset_table = sa.Table(
             BUILDSET_TABLE, metadata,
             sa.Column('id', sa.Integer, primary_key=True),
             sa.Column('zuul_ref', sa.String(255)),
@@ -84,7 +87,7 @@
             sa.Column('message', sa.TEXT()),
         )
 
-        self.zuul_build_table = sa.Table(
+        zuul_build_table = sa.Table(
             BUILD_TABLE, metadata,
             sa.Column('id', sa.Integer, primary_key=True),
             sa.Column('buildset_id', sa.Integer,
@@ -99,6 +102,8 @@
             sa.Column('node_name', sa.String(255)),
         )
 
+        return zuul_buildset_table, zuul_build_table
+
 
 def getSchema():
     sql_connection = v.Any(str, v.Schema(dict))
diff --git a/zuul/driver/sql/sqlreporter.py b/zuul/driver/sql/sqlreporter.py
index 46d538a..5f93ce8 100644
--- a/zuul/driver/sql/sqlreporter.py
+++ b/zuul/driver/sql/sqlreporter.py
@@ -31,7 +31,7 @@
         # TODO(jeblair): document this is stored as NULL if unspecified
         self.result_score = config.get('score', None)
 
-    def report(self, pipeline, item):
+    def report(self, item):
         """Create an entry into a database."""
 
         if not self.connection.tables_established:
@@ -39,16 +39,19 @@
             return
 
         with self.connection.engine.begin() as conn:
+            change = getattr(item.change, 'number', '')
+            patchset = getattr(item.change, 'patchset', '')
+            refspec = getattr(item.change, 'refspec', item.change.newrev)
             buildset_ins = self.connection.zuul_buildset_table.insert().values(
                 zuul_ref=item.current_build_set.ref,
                 pipeline=item.pipeline.name,
                 project=item.change.project.name,
-                change=item.change.number,
-                patchset=item.change.patchset,
-                ref=item.change.refspec,
+                change=change,
+                patchset=patchset,
+                ref=refspec,
                 score=self.result_score,
                 message=self._formatItemReport(
-                    pipeline, item, with_jobs=False),
+                    item, with_jobs=False),
             )
             buildset_ins_result = conn.execute(buildset_ins)
             build_inserts = []
diff --git a/zuul/executor/client.py b/zuul/executor/client.py
index 5a1820e..cf8d973 100644
--- a/zuul/executor/client.py
+++ b/zuul/executor/client.py
@@ -274,8 +274,9 @@
             params['post_playbooks'] = [x.toDict() for x in job.post_run]
             params['roles'] = [x.toDict() for x in job.roles]
 
+        nodeset = item.current_build_set.getJobNodeSet(job.name)
         nodes = []
-        for node in item.current_build_set.getJobNodeSet(job.name).getNodes():
+        for node in nodeset.getNodes():
             nodes.append(dict(name=node.name, image=node.image,
                               az=node.az,
                               host_keys=node.host_keys,
@@ -285,6 +286,7 @@
                               public_ipv6=node.public_ipv6,
                               public_ipv4=node.public_ipv4))
         params['nodes'] = nodes
+        params['groups'] = [group.toDict() for group in nodeset.getGroups()]
         params['vars'] = copy.deepcopy(job.variables)
         if job.auth:
             for secret in job.auth.secrets:
diff --git a/zuul/executor/server.py b/zuul/executor/server.py
index fd7ebbe..f71bb92 100644
--- a/zuul/executor/server.py
+++ b/zuul/executor/server.py
@@ -182,8 +182,7 @@
         self.ansible_root = os.path.join(self.root, 'ansible')
         os.makedirs(self.ansible_root)
         self.known_hosts = os.path.join(self.ansible_root, 'known_hosts')
-        self.inventory = os.path.join(self.ansible_root, 'inventory')
-        self.vars = os.path.join(self.ansible_root, 'vars.yaml')
+        self.inventory = os.path.join(self.ansible_root, 'inventory.yaml')
         self.playbooks = []  # The list of candidate playbooks
         self.playbook = None  # A pointer to the candidate we have chosen
         self.pre_playbooks = []
@@ -312,6 +311,31 @@
                 shutil.copy(os.path.join(library_path, fn), target_dir)
 
 
+def make_inventory_dict(nodes, groups, all_vars):
+
+    hosts = {}
+    for node in nodes:
+        hosts[node['name']] = node['host_vars']
+
+    inventory = {
+        'all': {
+            'hosts': hosts,
+            'vars': all_vars,
+        }
+    }
+
+    for group in groups:
+        group_hosts = {}
+        for node_name in group['nodes']:
+            # group_hosts maps each node name to None because we don't
+            # have any per-group variables. If we did, the value would
+            # be a dict of those per-group variables.
+            group_hosts[node_name] = None
+        inventory[group['name']] = {'hosts': group_hosts}
+
+    return inventory
+
+
 class ExecutorMergeWorker(gear.TextWorker):
     def __init__(self, executor_server, *args, **kw):
         self.zuul_executor_server = executor_server
@@ -353,6 +377,12 @@
         else:
             self.merge_root = '/var/lib/zuul/executor-git'
 
+        if self.config.has_option('executor', 'default_username'):
+            self.default_username = self.config.get('executor',
+                                                    'default_username')
+        else:
+            self.default_username = 'zuul'
+
         if self.config.has_option('merger', 'git_user_email'):
             self.merge_email = self.config.get('merger', 'git_user_email')
         else:
@@ -387,12 +417,13 @@
         self.command_socket = commandsocket.CommandSocket(path)
         ansible_dir = os.path.join(state_dir, 'ansible')
         self.ansible_dir = ansible_dir
+        if os.path.exists(ansible_dir):
+            shutil.rmtree(ansible_dir)
 
         zuul_dir = os.path.join(ansible_dir, 'zuul')
         plugin_dir = os.path.join(zuul_dir, 'ansible')
 
-        if not os.path.exists(plugin_dir):
-            os.makedirs(plugin_dir)
+        os.makedirs(plugin_dir, mode=0o0755)
 
         self.library_dir = os.path.join(plugin_dir, 'library')
         self.action_dir = os.path.join(plugin_dir, 'action')
@@ -883,6 +914,7 @@
             ip = node.get('interface_ip')
             host_vars = dict(
                 ansible_host=ip,
+                ansible_user=self.executor_server.default_username,
                 nodepool_az=node.get('az'),
                 nodepool_provider=node.get('provider'),
                 nodepool_region=node.get('region'))
@@ -1121,28 +1153,24 @@
             self.jobdir.trusted_roles_path.append(trusted_role_path)
 
     def prepareAnsibleFiles(self, args):
-        keys = []
-        with open(self.jobdir.inventory, 'w') as inventory:
-            for item in self.getHostList(args):
-                inventory.write(item['name'])
-                for k, v in item['host_vars'].items():
-                    inventory.write(' %s="%s"' % (k, v))
-                inventory.write('\n')
-                for key in item['host_keys']:
-                    keys.append(key)
+        all_vars = dict(args['vars'])
+        all_vars['zuul']['executor'] = dict(
+            hostname=self.executor_server.hostname,
+            src_root=self.jobdir.src_root,
+            log_root=self.jobdir.log_root)
+
+        nodes = self.getHostList(args)
+        inventory = make_inventory_dict(nodes, args['groups'], all_vars)
+
+        with open(self.jobdir.inventory, 'w') as inventory_yaml:
+            inventory_yaml.write(
+                yaml.safe_dump(inventory, default_flow_style=False))
 
         with open(self.jobdir.known_hosts, 'w') as known_hosts:
-            for key in keys:
-                known_hosts.write('%s\n' % key)
+            for node in nodes:
+                for key in node['host_keys']:
+                    known_hosts.write('%s\n' % key)
 
-        with open(self.jobdir.vars, 'w') as vars_yaml:
-            zuul_vars = dict(args['vars'])
-            zuul_vars['zuul']['executor'] = dict(
-                hostname=self.executor_server.hostname,
-                src_root=self.jobdir.src_root,
-                log_root=self.jobdir.log_root)
-            vars_yaml.write(
-                yaml.safe_dump(zuul_vars, default_flow_style=False))
         self.writeAnsibleConfig(self.jobdir.untrusted_config)
         self.writeAnsibleConfig(self.jobdir.trusted_config, trusted=True)
 
@@ -1296,12 +1324,10 @@
         else:
             verbose = '-v'
 
-        cmd = ['ansible-playbook', playbook.path]
+        cmd = ['ansible-playbook', verbose, playbook.path]
 
         if success is not None:
             cmd.extend(['-e', 'success=%s' % str(bool(success))])
 
-        cmd.extend(['-e@%s' % self.jobdir.vars, verbose])
-
         return self.runAnsible(
             cmd=cmd, timeout=timeout, trusted=playbook.trusted)
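
prepareAnsibleFiles() now emits a single YAML inventory, with the former vars.yaml content folded into the 'all' group's vars, instead of an INI-style inventory plus a separate -e@vars.yaml argument. A small usage sketch of make_inventory_dict() under the patched module; the node names, addresses and variables below are made up.

    # Sketch: build the dict the executor would dump to inventory.yaml.
    # Assumes the patched zuul.executor.server module; data is illustrative.
    import yaml

    from zuul.executor.server import make_inventory_dict

    nodes = [
        {'name': 'controller',
         'host_vars': {'ansible_host': '192.0.2.10', 'ansible_user': 'zuul'}},
        {'name': 'compute1',
         'host_vars': {'ansible_host': '192.0.2.11', 'ansible_user': 'zuul'}},
    ]
    groups = [{'name': 'ceph-monitor', 'nodes': ['controller', 'compute1']}]
    all_vars = {'zuul': {'job': 'group-inventory',
                         'executor': {'src_root': '/tmp/src'}}}

    inventory = make_inventory_dict(nodes, groups, all_vars)
    print(yaml.safe_dump(inventory, default_flow_style=False))
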
diff --git a/zuul/lib/log_streamer.py b/zuul/lib/log_streamer.py
index 6aa51a6..de072b6 100644
--- a/zuul/lib/log_streamer.py
+++ b/zuul/lib/log_streamer.py
@@ -47,8 +47,39 @@
     the (class/method/attribute) names were changed to protect the innocent.
     '''
 
+    MAX_REQUEST_LEN = 1024
+    REQUEST_TIMEOUT = 10
+
+    def get_command(self):
+        poll = select.poll()
+        bitmask = (select.POLLIN | select.POLLERR |
+                   select.POLLHUP | select.POLLNVAL)
+        poll.register(self.request, bitmask)
+        buffer = b''
+        ret = None
+        start = time.time()
+        while True:
+            elapsed = time.time() - start
+            timeout = max(self.REQUEST_TIMEOUT - elapsed, 0)
+            if not timeout:
+                raise Exception("Timeout while waiting for input")
+            for fd, event in poll.poll(timeout):
+                if event & select.POLLIN:
+                    buffer += self.request.recv(self.MAX_REQUEST_LEN)
+                else:
+                    raise Exception("Received error event")
+            if len(buffer) >= self.MAX_REQUEST_LEN:
+                raise Exception("Request too long")
+            try:
+                ret = buffer.decode('utf-8')
+                x = ret.find('\n')
+                if x > 0:
+                    return ret[:x]
+            except UnicodeDecodeError:
+                pass
+
     def handle(self):
-        build_uuid = self.request.recv(1024).decode("utf-8")
+        build_uuid = self.get_command()
         build_uuid = build_uuid.rstrip()
 
         # validate build ID
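
With get_command() in place, the streamer expects a client to send the build UUID terminated by a newline and then reads the streamed log from the same socket. A rough client-side sketch under that assumption; the host, port and UUID are placeholders.

    # Hypothetical client for the log streamer: send '<uuid>\n', then read.
    import socket
    import sys

    def stream_log(host, port, build_uuid):
        sock = socket.create_connection((host, port), timeout=10)
        try:
            sock.sendall((build_uuid + '\n').encode('utf-8'))
            while True:
                chunk = sock.recv(4096)
                if not chunk:
                    break
                sys.stdout.write(chunk.decode('utf-8', 'replace'))
        finally:
            sock.close()
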
diff --git a/zuul/manager/__init__.py b/zuul/manager/__init__.py
index 4005b01..c3958d7 100644
--- a/zuul/manager/__init__.py
+++ b/zuul/manager/__init__.py
@@ -47,7 +47,7 @@
         self.sched = sched
         self.pipeline = pipeline
         self.event_filters = []
-        self.changeish_filters = []
+        self.ref_filters = []
 
     def __str__(self):
         return "<%s %s>" % (self.__class__.__name__, self.pipeline.name)
@@ -55,7 +55,7 @@
     def _postConfig(self, layout):
         self.log.info("Configured Pipeline Manager %s" % self.pipeline.name)
         self.log.info("  Requirements:")
-        for f in self.changeish_filters:
+        for f in self.ref_filters:
             self.log.info("    %s" % f)
         self.log.info("  Events:")
         for e in self.event_filters:
@@ -165,7 +165,7 @@
         report_errors = []
         if len(action_reporters) > 0:
             for reporter in action_reporters:
-                ret = reporter.report(self.pipeline, item)
+                ret = reporter.report(item)
                 if ret:
                     report_errors.append(ret)
             if len(report_errors) == 0:
@@ -281,7 +281,7 @@
             return False
 
         if not ignore_requirements:
-            for f in self.changeish_filters:
+            for f in self.ref_filters:
                 if f.connection_name != change.project.connection_name:
                     self.log.debug("Filter %s skipped for change %s due "
                                    "to mismatched connections" % (f, change))
diff --git a/zuul/merger/merger.py b/zuul/merger/merger.py
index 79531d9..6cfd904 100644
--- a/zuul/merger/merger.py
+++ b/zuul/merger/merger.py
@@ -182,8 +182,10 @@
 
     def checkoutLocalBranch(self, branch):
         repo = self.createRepoObject()
-        ref = repo.heads[branch].commit
-        self.checkout(ref)
+        # Perform a hard reset before checking out so that we clean up
+        # anything that might be left over from a merge.
+        reset_repo_to_head(repo)
+        repo.heads[branch].checkout()
 
     def cherryPick(self, ref):
         repo = self.createRepoObject()
@@ -350,17 +352,19 @@
         repo.checkoutLocalBranch(branch)
 
     def _saveRepoState(self, connection_name, project_name, repo,
-                       repo_state):
+                       repo_state, recent):
         projects = repo_state.setdefault(connection_name, {})
         project = projects.setdefault(project_name, {})
-        if project:
-            # We already have a state for this project.
-            return
         for ref in repo.getRefs():
-            if ref.path.startswith('refs/zuul'):
+            if ref.path.startswith('refs/zuul/'):
                 continue
-            if ref.path.startswith('refs/remotes'):
+            if ref.path.startswith('refs/remotes/'):
                 continue
+            if ref.path.startswith('refs/heads/'):
+                branch = ref.path[len('refs/heads/'):]
+                key = (connection_name, project_name, branch)
+                if key not in recent:
+                    recent[key] = ref.object
             project[ref.path] = ref.object.hexsha
 
     def _restoreRepoState(self, connection_name, project_name, repo,
@@ -429,7 +433,7 @@
             # Save the repo state so that later mergers can repeat
             # this process.
             self._saveRepoState(item['connection'], item['project'], repo,
-                                repo_state)
+                                repo_state, recent)
         else:
             self.log.debug("Found base commit %s for %s" % (base, key,))
         # Merge the change
diff --git a/zuul/model.py b/zuul/model.py
index 4a3dba6..5ff5432 100644
--- a/zuul/model.py
+++ b/zuul/model.py
@@ -410,6 +410,37 @@
         self._keys = keys
 
 
+class Group(object):
+    """A logical group of nodes for use by a job.
+
+    A Group is a named set of node names that is provided to jobs in
+    the inventory to describe the logical units on which some subset
+    of a job's tasks run.
+    """
+
+    def __init__(self, name, nodes):
+        self.name = name
+        self.nodes = nodes
+
+    def __repr__(self):
+        return '<Group %s %s>' % (self.name, str(self.nodes))
+
+    def __ne__(self, other):
+        return not self.__eq__(other)
+
+    def __eq__(self, other):
+        if not isinstance(other, Group):
+            return False
+        return (self.name == other.name and
+                self.nodes == other.nodes)
+
+    def toDict(self):
+        return {
+            'name': self.name,
+            'nodes': self.nodes
+        }
+
+
 class NodeSet(object):
     """A set of nodes.
 
@@ -423,6 +454,7 @@
     def __init__(self, name=None):
         self.name = name or ''
         self.nodes = OrderedDict()
+        self.groups = OrderedDict()
 
     def __ne__(self, other):
         return not self.__eq__(other)
@@ -437,6 +469,8 @@
         n = NodeSet(self.name)
         for name, node in self.nodes.items():
             n.addNode(Node(node.name, node.image))
+        for name, group in self.groups.items():
+            n.addGroup(Group(group.name, group.nodes[:]))
         return n
 
     def addNode(self, node):
@@ -447,12 +481,20 @@
     def getNodes(self):
         return list(self.nodes.values())
 
+    def addGroup(self, group):
+        if group.name in self.groups:
+            raise Exception("Duplicate group in %s" % (self,))
+        self.groups[group.name] = group
+
+    def getGroups(self):
+        return list(self.groups.values())
+
     def __repr__(self):
         if self.name:
             name = self.name + ' '
         else:
             name = ''
-        return '<NodeSet %s%s>' % (name, self.nodes)
+        return '<NodeSet %s%s%s>' % (name, self.nodes, self.groups)
 
 
 class NodeRequest(object):
@@ -1280,7 +1322,7 @@
     def __init__(self, queue, change):
         self.pipeline = queue.pipeline
         self.queue = queue
-        self.change = change  # a changeish
+        self.change = change  # a ref
         self.build_sets = []
         self.dequeued_needing_change = False
         self.current_build_set = BuildSet(self)
@@ -1546,8 +1588,8 @@
         # secrets, etc.
         safe_change = self.change.getSafeAttributes()
         safe_pipeline = self.pipeline.getSafeAttributes()
-        safe_job = job.getSafeAttributes()
-        safe_build = build.getSafeAttributes()
+        safe_job = job.getSafeAttributes() if job else {}
+        safe_build = build.getSafeAttributes() if build else {}
         try:
             url = url_pattern.format(change=safe_change,
                                      pipeline=safe_pipeline,
@@ -1589,15 +1631,14 @@
         return (result, url)
 
     def formatJSON(self):
-        changeish = self.change
         ret = {}
         ret['active'] = self.active
         ret['live'] = self.live
-        if hasattr(changeish, 'url') and changeish.url is not None:
-            ret['url'] = changeish.url
+        if hasattr(self.change, 'url') and self.change.url is not None:
+            ret['url'] = self.change.url
         else:
             ret['url'] = None
-        ret['id'] = changeish._id()
+        ret['id'] = self.change._id()
         if self.item_ahead:
             ret['item_ahead'] = self.item_ahead.change._id()
         else:
@@ -1605,8 +1646,8 @@
         ret['items_behind'] = [i.change._id() for i in self.items_behind]
         ret['failing_reasons'] = self.current_build_set.failing_reasons
         ret['zuul_ref'] = self.current_build_set.ref
-        if changeish.project:
-            ret['project'] = changeish.project.name
+        if self.change.project:
+            ret['project'] = self.change.project.name
         else:
             # For cross-project dependencies with the depends-on
             # project not known to zuul, the project is None
@@ -1614,8 +1655,8 @@
             ret['project'] = "Unknown Project"
         ret['enqueue_time'] = int(self.enqueue_time * 1000)
         ret['jobs'] = []
-        if hasattr(changeish, 'owner'):
-            ret['owner'] = changeish.owner
+        if hasattr(self.change, 'owner'):
+            ret['owner'] = self.change.owner
         else:
             ret['owner'] = None
         max_remaining = 0
@@ -1683,20 +1724,19 @@
         return ret
 
     def formatStatus(self, indent=0, html=False):
-        changeish = self.change
         indent_str = ' ' * indent
         ret = ''
-        if html and hasattr(changeish, 'url') and changeish.url is not None:
+        if html and getattr(self.change, 'url', None) is not None:
             ret += '%sProject %s change <a href="%s">%s</a>\n' % (
                 indent_str,
-                changeish.project.name,
-                changeish.url,
-                changeish._id())
+                self.change.project.name,
+                self.change.url,
+                self.change._id())
         else:
             ret += '%sProject %s change %s based on %s\n' % (
                 indent_str,
-                changeish.project.name,
-                changeish._id(),
+                self.change.project.name,
+                self.change._id(),
                 self.item_ahead)
         for job in self.getJobs():
             build = self.current_build_set.getBuild(job.name)
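
A short sketch of the new group plumbing in the model: groups are added to a NodeSet alongside its nodes, copied with it, and serialized via toDict() into the executor job parameters. The names below are illustrative.

    # Assumes the patched zuul.model; node and group names are made up.
    from zuul import model

    ns = model.NodeSet('ceph-cluster')
    ns.addNode(model.Node('controller', 'ubuntu-xenial'))
    ns.addNode(model.Node('compute1', 'ubuntu-xenial'))
    ns.addGroup(model.Group('ceph-monitor', ['controller', 'compute1']))

    print([g.toDict() for g in ns.getGroups()])
    # [{'name': 'ceph-monitor', 'nodes': ['controller', 'compute1']}]
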
diff --git a/zuul/reporter/__init__.py b/zuul/reporter/__init__.py
index 9c8e953..dc99c8b 100644
--- a/zuul/reporter/__init__.py
+++ b/zuul/reporter/__init__.py
@@ -37,7 +37,7 @@
         self._action = action
 
     @abc.abstractmethod
-    def report(self, pipeline, item):
+    def report(self, item):
         """Send the compiled report message."""
 
     def getSubmitAllowNeeds(self):
@@ -61,57 +61,55 @@
         }
         return format_methods[self._action]
 
-    # TODOv3(jeblair): Consider removing pipeline argument in favor of
-    # item.pipeline
-    def _formatItemReport(self, pipeline, item, with_jobs=True):
+    def _formatItemReport(self, item, with_jobs=True):
         """Format a report from the given items. Usually to provide results to
         a reporter taking free-form text."""
-        ret = self._getFormatter()(pipeline, item, with_jobs)
+        ret = self._getFormatter()(item, with_jobs)
 
-        if pipeline.footer_message:
-            ret += '\n' + pipeline.footer_message
+        if item.pipeline.footer_message:
+            ret += '\n' + item.pipeline.footer_message
 
         return ret
 
-    def _formatItemReportStart(self, pipeline, item, with_jobs=True):
+    def _formatItemReportStart(self, item, with_jobs=True):
         status_url = ''
         if self.connection.sched.config.has_option('zuul', 'status_url'):
             status_url = self.connection.sched.config.get('zuul',
                                                           'status_url')
-        return pipeline.start_message.format(pipeline=pipeline,
-                                             status_url=status_url)
+        return item.pipeline.start_message.format(pipeline=item.pipeline,
+                                                  status_url=status_url)
 
-    def _formatItemReportSuccess(self, pipeline, item, with_jobs=True):
-        msg = pipeline.success_message
+    def _formatItemReportSuccess(self, item, with_jobs=True):
+        msg = item.pipeline.success_message
         if with_jobs:
-            msg += '\n\n' + self._formatItemReportJobs(pipeline, item)
+            msg += '\n\n' + self._formatItemReportJobs(item)
         return msg
 
-    def _formatItemReportFailure(self, pipeline, item, with_jobs=True):
+    def _formatItemReportFailure(self, item, with_jobs=True):
         if item.dequeued_needing_change:
             msg = 'This change depends on a change that failed to merge.\n'
         elif item.didMergerFail():
-            msg = pipeline.merge_failure_message
+            msg = item.pipeline.merge_failure_message
         elif item.getConfigError():
             msg = item.getConfigError()
         else:
-            msg = pipeline.failure_message
+            msg = item.pipeline.failure_message
             if with_jobs:
-                msg += '\n\n' + self._formatItemReportJobs(pipeline, item)
+                msg += '\n\n' + self._formatItemReportJobs(item)
         return msg
 
-    def _formatItemReportMergeFailure(self, pipeline, item, with_jobs=True):
-        return pipeline.merge_failure_message
+    def _formatItemReportMergeFailure(self, item, with_jobs=True):
+        return item.pipeline.merge_failure_message
 
-    def _formatItemReportDisabled(self, pipeline, item, with_jobs=True):
+    def _formatItemReportDisabled(self, item, with_jobs=True):
         if item.current_build_set.result == 'SUCCESS':
-            return self._formatItemReportSuccess(pipeline, item)
+            return self._formatItemReportSuccess(item)
         elif item.current_build_set.result == 'FAILURE':
-            return self._formatItemReportFailure(pipeline, item)
+            return self._formatItemReportFailure(item)
         else:
-            return self._formatItemReport(pipeline, item)
+            return self._formatItemReport(item)
 
-    def _formatItemReportJobs(self, pipeline, item):
+    def _formatItemReportJobs(self, item):
         # Return the list of jobs portion of the report
         ret = ''
 
diff --git a/zuul/scheduler.py b/zuul/scheduler.py
index 4dc2c97..a63d270 100644
--- a/zuul/scheduler.py
+++ b/zuul/scheduler.py
@@ -259,11 +259,6 @@
 
     def addEvent(self, event):
         self.log.debug("Adding trigger event: %s" % event)
-        try:
-            if self.statsd:
-                self.statsd.incr('gerrit.event.%s' % event.type)
-        except:
-            self.log.exception("Exception reporting event stats")
         self.trigger_event_queue.put(event)
         self.wake_event.set()
         self.log.debug("Done adding trigger event: %s" % event)