Merge "Use configured github server in app mode" into feature/zuulv3
diff --git a/.zuul.yaml b/.zuul.yaml
index 041681a..7473ad3 100644
--- a/.zuul.yaml
+++ b/.zuul.yaml
@@ -36,6 +36,8 @@
             irrelevant-files:
               - zuul/cmd/migrate.py
               - playbooks/zuul-migrate/.*
+            vars:
+              sphinx_python: python3
         - tox-cover:
             irrelevant-files:
               - zuul/cmd/migrate.py
@@ -53,6 +55,8 @@
             irrelevant-files:
               - zuul/cmd/migrate.py
               - playbooks/zuul-migrate/.*
+            vars:
+              sphinx_python: python3
         - tox-pep8
         - tox-py35:
             irrelevant-files:
@@ -61,5 +65,5 @@
         - zuul-stream-functional
     post:
       jobs:
-        - publish-openstack-sphinx-docs-infra
+        - publish-openstack-sphinx-docs-infra-python3
         - publish-openstack-python-branch-tarball
diff --git a/bindep.txt b/bindep.txt
index 85254b4..3dcc3e7 100644
--- a/bindep.txt
+++ b/bindep.txt
@@ -8,7 +8,7 @@
 zookeeperd [platform:dpkg]
 build-essential [platform:dpkg]
 gcc [platform:rpm]
-graphviz [test]
+graphviz [doc]
 libssl-dev [platform:dpkg]
 openssl-devel [platform:rpm]
 libffi-dev [platform:dpkg]
diff --git a/doc/source/admin/connections.rst b/doc/source/admin/connections.rst
index 29ca3be..55ac629 100644
--- a/doc/source/admin/connections.rst
+++ b/doc/source/admin/connections.rst
@@ -55,6 +55,7 @@
 
    drivers/gerrit
    drivers/github
+   drivers/git
    drivers/smtp
    drivers/sql
    drivers/timer
diff --git a/doc/source/admin/drivers/git.rst b/doc/source/admin/drivers/git.rst
new file mode 100644
index 0000000..e0acec1
--- /dev/null
+++ b/doc/source/admin/drivers/git.rst
@@ -0,0 +1,59 @@
+:title: Git Driver
+
+Git
+===
+
+This driver can be used to load Zuul configuration from public Git repositories,
+for instance from ``openstack-infra/zuul-jobs``, which is suitable for use by
+any Zuul system. It can also be used to trigger jobs from ``ref-updated`` events
+in a pipeline.
+
+Connection Configuration
+------------------------
+
+The supported options in ``zuul.conf`` connections are:
+
+.. attr:: <git connection>
+
+   .. attr:: driver
+      :required:
+
+      .. value:: git
+
+         The connection must set ``driver=git`` for Git connections.
+
+   .. attr:: baseurl
+
+      Path to the base Git URL. Git repository names will be appended to it.
+
+   .. attr:: poll_delay
+      :default: 7200
+
+      The delay, in seconds, between two polling passes over the Git repositories.
+
+Trigger Configuration
+---------------------
+
+.. attr:: pipeline.trigger.<git source>
+
+   The dictionary passed to the Git pipeline ``trigger`` attribute
+   supports the following attributes:
+
+   .. attr:: event
+      :required:
+
+      Only ``ref-updated`` is supported.
+
+   .. attr:: ref
+
+      On ref-updated events, a ref such as ``refs/heads/master`` or
+      ``^refs/tags/.*$``. This field is treated as a regular expression,
+      and multiple refs may be listed.
+
+   .. attr:: ignore-deletes
+      :default: true
+
+      When a ref is deleted, a ref-updated event is emitted with a
+      newrev of all zeros specified. The ``ignore-deletes`` field is a
+      boolean value that describes whether or not these newrevs
+      trigger ref-updated events.
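+
+For example, a minimal pipeline that runs jobs whenever a branch is
+updated might look like the following sketch (the pipeline name and the
+connection name ``git`` are illustrative):
+
+.. code-block:: yaml
+
+   - pipeline:
+       name: post
+       manager: independent
+       trigger:
+         git:
+           - event: ref-updated
+             ref: ^refs/heads/.*$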
diff --git a/doc/source/user/config.rst b/doc/source/user/config.rst
index 173e615..fff673b 100644
--- a/doc/source/user/config.rst
+++ b/doc/source/user/config.rst
@@ -1032,11 +1032,12 @@
    The following attributes may appear in a project:
 
    .. attr:: name
-      :required:
 
       The name of the project.  If Zuul is configured with two or more
       unique projects with the same name, the canonical hostname for
       the project should be included (e.g., `git.example.com/foo`).
+      If not given, it is implicitly derived from the project in which
+      this definition appears.
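+
+      For example, a project stanza defined in a repository's own
+      ``.zuul.yaml`` may omit the name entirely (a minimal sketch; the
+      job name is illustrative):
+
+      .. code-block:: yaml
+
+         - project:
+             check:
+               jobs:
+                 - test-project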
 
    .. attr:: templates
 
@@ -1097,6 +1098,14 @@
          changes which break the others.  This is a free-form string;
          just set the same value for each group of projects.
 
+      .. attr:: debug
+
+         If this is set to `true`, Zuul will include debugging
+         information in reports it makes about items in the pipeline.
+         This should not normally be set, but in situations where it is
+         difficult to determine why Zuul did or did not run a certain
+         job, the additional information this provides may help.
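+
+         For example, to enable debug reports for a project's check
+         pipeline (a minimal sketch; the project and job names are
+         illustrative):
+
+         .. code-block:: yaml
+
+            - project:
+                name: org/project
+                check:
+                  debug: true
+                  jobs:
+                    - project-test1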
+
 .. _project-template:
 
 Project Template
@@ -1318,7 +1327,7 @@
 
 .. attr:: pragma
 
-   The pragma item currently only supports one attribute:
+   The pragma item currently supports the following attributes:
 
    .. attr:: implied-branch-matchers
 
@@ -1333,3 +1342,43 @@
 
       Note that if a job contains an explicit branch matcher, it will
       be used regardless of the value supplied here.
+
+   .. attr:: implied-branches
+
+      This is a list of regular expressions, just as
+      :attr:`job.branches`, which may be used to supply the value of
+      the implied branch matcher for all jobs in a file.
+
+      This may be useful if two projects share jobs but have
+      dissimilar branch names.  If, for example, two projects have
+      stable maintenance branches with dissimilar names, but both
+      should use the same job variants, this directive may be used to
+      indicate that all of the jobs defined in the stable branch of
+      the first project may also be used for the stable branch of the
+      other.  For example:
+
+      .. code-block:: yaml
+
+         - pragma:
+             implied-branches:
+               - stable/foo
+               - stable/bar
+
+      The above code, when added to the ``stable/foo`` branch of a
+      project, would indicate that the job variants described in that
+      file should be used not only for changes to ``stable/foo``, but
+      also for changes to ``stable/bar``, which may be in another
+      project.
+
+      Note that if a job contains an explicit branch matcher, it will
+      be used regardless of the value supplied here.
+
+      Note also that the presence of `implied-branches` does not
+      automatically set `implied-branch-matchers`.  Zuul will still
+      decide if implied branch matchers are warranted at all, using
+      the heuristics described in :attr:`job.branches`, and only use
+      the value supplied here if that is the case.  If you want to
+      declare specific implied branches on, for example, a
+      :term:`config-project` project (which normally would not use
+      implied branches), you must set `implied-branch-matchers` as
+      well.
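+
+      For example, to supply specific implied branches on a project
+      where Zuul would not otherwise add them, set both values (a
+      sketch; the branch names are illustrative):
+
+      .. code-block:: yaml
+
+         - pragma:
+             implied-branch-matchers: true
+             implied-branches:
+               - stable/foo
+               - stable/bar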
diff --git a/tests/base.py b/tests/base.py
index 69d9f55..59c0d2a 100755
--- a/tests/base.py
+++ b/tests/base.py
@@ -40,7 +40,6 @@
 import uuid
 import urllib
 
-
 import git
 import gear
 import fixtures
@@ -53,6 +52,7 @@
 from git.exc import NoSuchPathError
 import yaml
 
+import tests.fakegithub
 import zuul.driver.gerrit.gerritsource as gerritsource
 import zuul.driver.gerrit.gerritconnection as gerritconnection
 import zuul.driver.github.githubconnection as githubconnection
@@ -601,98 +601,6 @@
     _points_to_commits_only = True
 
 
-class FakeGithub(object):
-
-    class FakeUser(object):
-        def __init__(self, login):
-            self.login = login
-            self.name = "Github User"
-            self.email = "github.user@example.com"
-
-    class FakeBranch(object):
-        def __init__(self, branch='master'):
-            self.name = branch
-
-    class FakeStatus(object):
-        def __init__(self, state, url, description, context, user):
-            self._state = state
-            self._url = url
-            self._description = description
-            self._context = context
-            self._user = user
-
-        def as_dict(self):
-            return {
-                'state': self._state,
-                'url': self._url,
-                'description': self._description,
-                'context': self._context,
-                'creator': {
-                    'login': self._user
-                }
-            }
-
-    class FakeCommit(object):
-        def __init__(self):
-            self._statuses = []
-
-        def set_status(self, state, url, description, context, user):
-            status = FakeGithub.FakeStatus(
-                state, url, description, context, user)
-            # always insert a status to the front of the list, to represent
-            # the last status provided for a commit.
-            self._statuses.insert(0, status)
-
-        def statuses(self):
-            return self._statuses
-
-    class FakeRepository(object):
-        def __init__(self):
-            self._branches = [FakeGithub.FakeBranch()]
-            self._commits = {}
-
-        def branches(self, protected=False):
-            if protected:
-                # simulate there is no protected branch
-                return []
-            return self._branches
-
-        def create_status(self, sha, state, url, description, context,
-                          user='zuul'):
-            # Since we're bypassing github API, which would require a user, we
-            # default the user as 'zuul' here.
-            commit = self._commits.get(sha, None)
-            if commit is None:
-                commit = FakeGithub.FakeCommit()
-                self._commits[sha] = commit
-            commit.set_status(state, url, description, context, user)
-
-        def commit(self, sha):
-            commit = self._commits.get(sha, None)
-            if commit is None:
-                commit = FakeGithub.FakeCommit()
-                self._commits[sha] = commit
-            return commit
-
-    def __init__(self):
-        self._repos = {}
-
-    def user(self, login):
-        return self.FakeUser(login)
-
-    def repository(self, owner, proj):
-        return self._repos.get((owner, proj), None)
-
-    def repo_from_project(self, project):
-        # This is a convenience method for the tests.
-        owner, proj = project.split('/')
-        return self.repository(owner, proj)
-
-    def addProject(self, project):
-        owner, proj = project.name.split('/')
-        self._repos[(owner, proj)] = self.FakeRepository()
-
-
 class FakeGithubPullRequest(object):
 
     def __init__(self, github, number, project, branch,
@@ -1018,18 +926,18 @@
     log = logging.getLogger("zuul.test.FakeGithubConnection")
 
     def __init__(self, driver, connection_name, connection_config,
-                 upstream_root=None):
+                 changes_db=None, upstream_root=None):
         super(FakeGithubConnection, self).__init__(driver, connection_name,
                                                    connection_config)
         self.connection_name = connection_name
         self.pr_number = 0
-        self.pull_requests = []
+        self.pull_requests = changes_db
         self.statuses = {}
         self.upstream_root = upstream_root
         self.merge_failure = False
         self.merge_not_allowed_count = 0
         self.reports = []
-        self.github_client = FakeGithub()
+        self.github_client = tests.fakegithub.FakeGithub(changes_db)
 
     def getGithubClient(self,
                         project=None,
@@ -1042,7 +950,7 @@
         pull_request = FakeGithubPullRequest(
             self, self.pr_number, project, branch, subject, self.upstream_root,
             files=files, body=body)
-        self.pull_requests.append(pull_request)
+        self.pull_requests[self.pr_number] = pull_request
         return pull_request
 
     def getPushEvent(self, project, ref, old_rev=None, new_rev=None,
@@ -1089,35 +997,8 @@
         super(FakeGithubConnection, self).addProject(project)
         self.getGithubClient(project).addProject(project)
 
-    def getPull(self, project, number):
-        pr = self.pull_requests[number - 1]
-        data = {
-            'number': number,
-            'title': pr.subject,
-            'updated_at': pr.updated_at,
-            'base': {
-                'repo': {
-                    'full_name': pr.project
-                },
-                'ref': pr.branch,
-            },
-            'mergeable': True,
-            'state': pr.state,
-            'head': {
-                'sha': pr.head_sha,
-                'repo': {
-                    'full_name': pr.project
-                }
-            },
-            'files': pr.files,
-            'labels': pr.labels,
-            'merged': pr.is_merged,
-            'body': pr.body
-        }
-        return data
-
     def getPullBySha(self, sha, project):
-        prs = list(set([p for p in self.pull_requests if
+        prs = list(set([p for p in self.pull_requests.values() if
                         sha == p.head_sha and project == p.project]))
         if len(prs) > 1:
             raise Exception('Multiple pulls found with head sha: %s' % sha)
@@ -1125,12 +1006,12 @@
         return self.getPull(pr.project, pr.number)
 
     def _getPullReviews(self, owner, project, number):
-        pr = self.pull_requests[number - 1]
+        pr = self.pull_requests[number]
         return pr.reviews
 
     def getRepoPermission(self, project, login):
         owner, proj = project.split('/')
-        for pr in self.pull_requests:
+        for pr in self.pull_requests.values():
             pr_owner, pr_project = pr.project.split('/')
             if (pr_owner == owner and proj == pr_project):
                 if login in pr.writers:
@@ -1147,13 +1028,13 @@
     def commentPull(self, project, pr_number, message):
         # record that this got reported
         self.reports.append((project, pr_number, 'comment'))
-        pull_request = self.pull_requests[pr_number - 1]
+        pull_request = self.pull_requests[pr_number]
         pull_request.addComment(message)
 
     def mergePull(self, project, pr_number, commit_message='', sha=None):
         # record that this got reported
         self.reports.append((project, pr_number, 'merge'))
-        pull_request = self.pull_requests[pr_number - 1]
+        pull_request = self.pull_requests[pr_number]
         if self.merge_failure:
             raise Exception('Pull request was not merged')
         if self.merge_not_allowed_count > 0:
@@ -1173,32 +1054,15 @@
     def labelPull(self, project, pr_number, label):
         # record that this got reported
         self.reports.append((project, pr_number, 'label', label))
-        pull_request = self.pull_requests[pr_number - 1]
+        pull_request = self.pull_requests[pr_number]
         pull_request.addLabel(label)
 
     def unlabelPull(self, project, pr_number, label):
         # record that this got reported
         self.reports.append((project, pr_number, 'unlabel', label))
-        pull_request = self.pull_requests[pr_number - 1]
+        pull_request = self.pull_requests[pr_number]
         pull_request.removeLabel(label)
 
-    def _getNeededByFromPR(self, change):
-        prs = []
-        pattern = re.compile(r"Depends-On.*https://%s/%s/pull/%s" %
-                             (self.server, change.project.name,
-                              change.number))
-        for pr in self.pull_requests:
-            if not pr.body:
-                body = ''
-            else:
-                body = pr.body
-            if pattern.search(body):
-                # Get our version of a pull so that it's a dict
-                pull = self.getPull(pr.project, pr.number)
-                prs.append(pull)
-
-        return prs
-
 
 class BuildHistory(object):
     def __init__(self, **kw):
@@ -1432,7 +1296,8 @@
         self.log.debug("hostlist")
         hosts = super(RecordingAnsibleJob, self).getHostList(args)
         for host in hosts:
-            host['host_vars']['ansible_connection'] = 'local'
+            if not host['host_vars'].get('ansible_connection'):
+                host['host_vars']['ansible_connection'] = 'local'
 
         hosts.append(dict(
             name=['localhost'],
@@ -1738,6 +1603,9 @@
                     executor='fake-nodepool')
         if 'fakeuser' in node_type:
             data['username'] = 'fakeuser'
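+        # Simulate a Windows node by requesting a winrm connection type.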
+        if 'windows' in node_type:
+            data['connection_type'] = 'winrm'
+
         data = json.dumps(data).encode('utf8')
         path = self.client.create(path, data,
                                   makepath=True,
@@ -2162,6 +2030,7 @@
         # Set a changes database so multiple FakeGerrit's can report back to
         # a virtual canonical database given by the configured hostname
         self.gerrit_changes_dbs = {}
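+        # Similarly, keep a github changes database per server so that
+        # multiple FakeGithubConnections to the same server share their
+        # pull requests.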
+        self.github_changes_dbs = {}
 
         def getGerritConnection(driver, name, config):
             db = self.gerrit_changes_dbs.setdefault(config['server'], {})
@@ -2177,7 +2046,10 @@
             getGerritConnection))
 
         def getGithubConnection(driver, name, config):
+            server = config.get('server', 'github.com')
+            db = self.github_changes_dbs.setdefault(server, {})
             con = FakeGithubConnection(driver, name, config,
+                                       changes_db=db,
                                        upstream_root=self.upstream_root)
             self.event_queues.append(con.event_queue)
             setattr(self, 'fake_' + name, con)
@@ -2833,6 +2705,16 @@
                         os.path.join(FIXTURE_DIR, f.name))
         self.setupAllProjectKeys()
 
+    def addTagToRepo(self, project, name, sha):
+        path = os.path.join(self.upstream_root, project)
+        repo = git.Repo(path)
+        repo.git.tag(name, sha)
+
+    def delTagFromRepo(self, project, name):
+        path = os.path.join(self.upstream_root, project)
+        repo = git.Repo(path)
+        repo.git.tag('-d', name)
+
     def addCommitToRepo(self, project, message, files,
                         branch='master', tag=None):
         path = os.path.join(self.upstream_root, project)
diff --git a/tests/fakegithub.py b/tests/fakegithub.py
new file mode 100644
index 0000000..6fb2d66
--- /dev/null
+++ b/tests/fakegithub.py
@@ -0,0 +1,214 @@
+#!/usr/bin/env python
+
+# Copyright 2018 Red Hat, Inc.
+#
+# Licensed under the Apache License, Version 2.0 (the "License"); you may
+# not use this file except in compliance with the License. You may obtain
+# a copy of the License at
+#
+#      http://www.apache.org/licenses/LICENSE-2.0
+#
+# Unless required by applicable law or agreed to in writing, software
+# distributed under the License is distributed on an "AS IS" BASIS, WITHOUT
+# WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. See the
+# License for the specific language governing permissions and limitations
+# under the License.
+
+import re
+
+
+class FakeUser(object):
+    def __init__(self, login):
+        self.login = login
+        self.name = "Github User"
+        self.email = "github.user@example.com"
+
+
+class FakeBranch(object):
+    def __init__(self, branch='master'):
+        self.name = branch
+
+
+class FakeStatus(object):
+    def __init__(self, state, url, description, context, user):
+        self._state = state
+        self._url = url
+        self._description = description
+        self._context = context
+        self._user = user
+
+    def as_dict(self):
+        return {
+            'state': self._state,
+            'url': self._url,
+            'description': self._description,
+            'context': self._context,
+            'creator': {
+                'login': self._user
+            }
+        }
+
+
+class FakeCommit(object):
+    def __init__(self):
+        self._statuses = []
+
+    def set_status(self, state, url, description, context, user):
+        status = FakeStatus(
+            state, url, description, context, user)
+        # always insert a status at the front of the list, to represent
+        # the most recent status provided for a commit.
+        self._statuses.insert(0, status)
+
+    def statuses(self):
+        return self._statuses
+
+
+class FakeRepository(object):
+    def __init__(self):
+        self._branches = [FakeBranch()]
+        self._commits = {}
+
+    def branches(self, protected=False):
+        if protected:
+            # simulate that there are no protected branches
+            return []
+        return self._branches
+
+    def create_status(self, sha, state, url, description, context,
+                      user='zuul'):
+        # Since we're bypassing the GitHub API, which would require a user,
+        # we default the user to 'zuul' here.
+        commit = self._commits.get(sha, None)
+        if commit is None:
+            commit = FakeCommit()
+            self._commits[sha] = commit
+        commit.set_status(state, url, description, context, user)
+
+    def commit(self, sha):
+        commit = self._commits.get(sha, None)
+        if commit is None:
+            commit = FakeCommit()
+            self._commits[sha] = commit
+        return commit
+
+
+class FakeLabel(object):
+    def __init__(self, name):
+        self.name = name
+
+
+class FakeIssue(object):
+    def __init__(self, fake_pull_request):
+        self._fake_pull_request = fake_pull_request
+
+    def pull_request(self):
+        return FakePull(self._fake_pull_request)
+
+    def labels(self):
+        return [FakeLabel(l)
+                for l in self._fake_pull_request.labels]
+
+
+class FakeFile(object):
+    def __init__(self, filename):
+        self.filename = filename
+
+
+class FakePull(object):
+    def __init__(self, fake_pull_request):
+        self._fake_pull_request = fake_pull_request
+
+    def issue(self):
+        return FakeIssue(self._fake_pull_request)
+
+    def files(self):
+        return [FakeFile(fn)
+                for fn in self._fake_pull_request.files]
+
+    def as_dict(self):
+        pr = self._fake_pull_request
+        connection = pr.github
+        data = {
+            'number': pr.number,
+            'title': pr.subject,
+            'url': 'https://%s/%s/pull/%s' % (
+                connection.server, pr.project, pr.number
+            ),
+            'updated_at': pr.updated_at,
+            'base': {
+                'repo': {
+                    'full_name': pr.project
+                },
+                'ref': pr.branch,
+            },
+            'mergeable': True,
+            'state': pr.state,
+            'head': {
+                'sha': pr.head_sha,
+                'repo': {
+                    'full_name': pr.project
+                }
+            },
+            'merged': pr.is_merged,
+            'body': pr.body
+        }
+        return data
+
+
+class FakeIssueSearchResult(object):
+    def __init__(self, issue):
+        self.issue = issue
+
+
+class FakeGithub(object):
+    def __init__(self, pull_requests):
+        self._pull_requests = pull_requests
+        self._repos = {}
+
+    def user(self, login):
+        return FakeUser(login)
+
+    def repository(self, owner, proj):
+        return self._repos.get((owner, proj), None)
+
+    def repo_from_project(self, project):
+        # This is a convenience method for the tests.
+        owner, proj = project.split('/')
+        return self.repository(owner, proj)
+
+    def addProject(self, project):
+        owner, proj = project.name.split('/')
+        self._repos[(owner, proj)] = FakeRepository()
+
+    def pull_request(self, owner, project, number):
+        fake_pr = self._pull_requests[number]
+        return FakePull(fake_pr)
+
+    def search_issues(self, query):
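+        # Tokenize both the query and the pull request bodies into bare
+        # words and report any pull request whose body shares a term with
+        # the query.  This is a rough approximation of GitHub's issue
+        # search, but it is sufficient for the Depends-On lookups the
+        # tests perform.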
+        def tokenize(s):
+            return re.findall(r'[\w]+', s)
+
+        parts = tokenize(query)
+        terms = set()
+        results = []
+        for part in parts:
+            kv = part.split(':', 1)
+            if len(kv) == 2:
+                if kv[0] in ('type', 'is', 'in'):
+                    # We only perform one search now and these aren't
+                    # important; we can honor these terms later if
+                    # necessary.
+                    continue
+            terms.add(part)
+
+        for pr in self._pull_requests.values():
+            if not pr.body:
+                body = set()
+            else:
+                body = set(tokenize(pr.body))
+            if terms.intersection(body):
+                issue = FakeIssue(pr)
+                results.append(FakeIssueSearchResult(issue))
+
+        return results
diff --git a/tests/fixtures/config/git-driver/git/common-config/playbooks/project-test2.yaml b/tests/fixtures/config/git-driver/git/common-config/playbooks/project-test2.yaml
new file mode 100644
index 0000000..f679dce
--- /dev/null
+++ b/tests/fixtures/config/git-driver/git/common-config/playbooks/project-test2.yaml
@@ -0,0 +1,2 @@
+- hosts: all
+  tasks: []
diff --git a/tests/fixtures/config/git-driver/git/common-config/zuul.yaml b/tests/fixtures/config/git-driver/git/common-config/zuul.yaml
index 784b5f2..53fc210 100644
--- a/tests/fixtures/config/git-driver/git/common-config/zuul.yaml
+++ b/tests/fixtures/config/git-driver/git/common-config/zuul.yaml
@@ -19,6 +19,10 @@
     name: project-test1
     run: playbooks/project-test1.yaml
 
+- job:
+    name: project-test2
+    run: playbooks/project-test2.yaml
+
 - project:
     name: org/project
     check:
diff --git a/tests/fixtures/config/implicit-project/git/common-config/playbooks/test-common.yaml b/tests/fixtures/config/implicit-project/git/common-config/playbooks/test-common.yaml
new file mode 100644
index 0000000..f679dce
--- /dev/null
+++ b/tests/fixtures/config/implicit-project/git/common-config/playbooks/test-common.yaml
@@ -0,0 +1,2 @@
+- hosts: all
+  tasks: []
diff --git a/tests/fixtures/config/implicit-project/git/common-config/zuul.yaml b/tests/fixtures/config/implicit-project/git/common-config/zuul.yaml
new file mode 100644
index 0000000..038c412
--- /dev/null
+++ b/tests/fixtures/config/implicit-project/git/common-config/zuul.yaml
@@ -0,0 +1,57 @@
+- pipeline:
+    name: check
+    manager: independent
+    post-review: true
+    trigger:
+      gerrit:
+        - event: patchset-created
+    success:
+      gerrit:
+        Verified: 1
+    failure:
+      gerrit:
+        Verified: -1
+
+- pipeline:
+    name: gate
+    manager: dependent
+    success-message: Build succeeded (gate).
+    trigger:
+      gerrit:
+        - event: comment-added
+          approval:
+            - Approved: 1
+    success:
+      gerrit:
+        Verified: 2
+        submit: true
+    failure:
+      gerrit:
+        Verified: -2
+    start:
+      gerrit:
+        Verified: 0
+    precedence: high
+
+
+- job:
+    name: base
+    parent: null
+
+- job:
+    name: test-common
+    run: playbooks/test-common.yaml
+
+- project:
+    check:
+      jobs:
+        - test-common
+
+- project:
+    name: org/project
+    check:
+      jobs:
+        - test-common
+    gate:
+      jobs:
+        - test-common
diff --git a/tests/fixtures/config/implicit-project/git/org_project/.zuul.yaml b/tests/fixtures/config/implicit-project/git/org_project/.zuul.yaml
new file mode 100644
index 0000000..bce195c
--- /dev/null
+++ b/tests/fixtures/config/implicit-project/git/org_project/.zuul.yaml
@@ -0,0 +1,11 @@
+- job:
+    name: test-project
+    run: playbooks/test-project.yaml
+
+- project:
+    check:
+      jobs:
+        - test-project
+    gate:
+      jobs:
+        - test-project
diff --git a/tests/fixtures/config/implicit-project/git/org_project/playbooks/test-project.yaml b/tests/fixtures/config/implicit-project/git/org_project/playbooks/test-project.yaml
new file mode 100644
index 0000000..f679dce
--- /dev/null
+++ b/tests/fixtures/config/implicit-project/git/org_project/playbooks/test-project.yaml
@@ -0,0 +1,2 @@
+- hosts: all
+  tasks: []
diff --git a/tests/fixtures/config/implicit-project/main.yaml b/tests/fixtures/config/implicit-project/main.yaml
new file mode 100644
index 0000000..208e274
--- /dev/null
+++ b/tests/fixtures/config/implicit-project/main.yaml
@@ -0,0 +1,8 @@
+- tenant:
+    name: tenant-one
+    source:
+      gerrit:
+        config-projects:
+          - common-config
+        untrusted-projects:
+          - org/project
diff --git a/tests/fixtures/config/inventory/git/common-config/zuul.yaml b/tests/fixtures/config/inventory/git/common-config/zuul.yaml
index ad530a7..36789a3 100644
--- a/tests/fixtures/config/inventory/git/common-config/zuul.yaml
+++ b/tests/fixtures/config/inventory/git/common-config/zuul.yaml
@@ -38,6 +38,8 @@
         label: default-label
       - name: fakeuser
         label: fakeuser-label
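+      # windows node used to exercise the winrm connection type in tests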
+      - name: windows
+        label: windows-label
 
 - job:
     name: base
diff --git a/tests/fixtures/config/pragma-multibranch/git/common-config/zuul.yaml b/tests/fixtures/config/pragma-multibranch/git/common-config/zuul.yaml
new file mode 100644
index 0000000..dc83f9d
--- /dev/null
+++ b/tests/fixtures/config/pragma-multibranch/git/common-config/zuul.yaml
@@ -0,0 +1,61 @@
+- pipeline:
+    name: check
+    manager: independent
+    trigger:
+      gerrit:
+        - event: patchset-created
+    success:
+      gerrit:
+        Verified: 1
+    failure:
+      gerrit:
+        Verified: -1
+
+- pipeline:
+    name: gate
+    manager: dependent
+    post-review: True
+    trigger:
+      gerrit:
+        - event: comment-added
+          approval:
+            - Approved: 1
+    success:
+      gerrit:
+        Verified: 2
+        submit: true
+    failure:
+      gerrit:
+        Verified: -2
+    start:
+      gerrit:
+        Verified: 0
+    precedence: high
+
+- job:
+    name: base
+    parent: null
+
+- project:
+    name: common-config
+    check:
+      jobs: []
+    gate:
+      jobs:
+        - noop
+
+- project:
+    name: org/project1
+    check:
+      jobs: []
+    gate:
+      jobs:
+        - noop
+
+- project:
+    name: org/project2
+    check:
+      jobs: []
+    gate:
+      jobs:
+        - noop
diff --git a/tests/fixtures/config/pragma-multibranch/git/org_project1/README b/tests/fixtures/config/pragma-multibranch/git/org_project1/README
new file mode 100644
index 0000000..9daeafb
--- /dev/null
+++ b/tests/fixtures/config/pragma-multibranch/git/org_project1/README
@@ -0,0 +1 @@
+test
diff --git a/tests/fixtures/config/pragma-multibranch/git/org_project1/playbooks/test-job1.yaml b/tests/fixtures/config/pragma-multibranch/git/org_project1/playbooks/test-job1.yaml
new file mode 100644
index 0000000..f679dce
--- /dev/null
+++ b/tests/fixtures/config/pragma-multibranch/git/org_project1/playbooks/test-job1.yaml
@@ -0,0 +1,2 @@
+- hosts: all
+  tasks: []
diff --git a/tests/fixtures/config/pragma-multibranch/git/org_project1/playbooks/test-job2.yaml b/tests/fixtures/config/pragma-multibranch/git/org_project1/playbooks/test-job2.yaml
new file mode 100644
index 0000000..f679dce
--- /dev/null
+++ b/tests/fixtures/config/pragma-multibranch/git/org_project1/playbooks/test-job2.yaml
@@ -0,0 +1,2 @@
+- hosts: all
+  tasks: []
diff --git a/tests/fixtures/config/pragma-multibranch/git/org_project1/zuul.yaml b/tests/fixtures/config/pragma-multibranch/git/org_project1/zuul.yaml
new file mode 100644
index 0000000..6c8352a
--- /dev/null
+++ b/tests/fixtures/config/pragma-multibranch/git/org_project1/zuul.yaml
@@ -0,0 +1,13 @@
+- job:
+    name: test-job1
+    run: playbooks/test-job1.yaml
+
+- job:
+    name: test-job2
+    run: playbooks/test-job2.yaml
+
+- project-template:
+    name: test-template
+    check:
+      jobs:
+        - test-job1
diff --git a/tests/fixtures/config/pragma-multibranch/git/org_project2/README b/tests/fixtures/config/pragma-multibranch/git/org_project2/README
new file mode 100644
index 0000000..9daeafb
--- /dev/null
+++ b/tests/fixtures/config/pragma-multibranch/git/org_project2/README
@@ -0,0 +1 @@
+test
diff --git a/tests/fixtures/config/pragma-multibranch/git/org_project2/zuul.yaml b/tests/fixtures/config/pragma-multibranch/git/org_project2/zuul.yaml
new file mode 100644
index 0000000..748cab2
--- /dev/null
+++ b/tests/fixtures/config/pragma-multibranch/git/org_project2/zuul.yaml
@@ -0,0 +1,7 @@
+- project:
+    name: org/project2
+    templates:
+      - test-template
+    check:
+      jobs:
+        - test-job2
diff --git a/tests/fixtures/config/pragma-multibranch/main.yaml b/tests/fixtures/config/pragma-multibranch/main.yaml
new file mode 100644
index 0000000..950b117
--- /dev/null
+++ b/tests/fixtures/config/pragma-multibranch/main.yaml
@@ -0,0 +1,9 @@
+- tenant:
+    name: tenant-one
+    source:
+      gerrit:
+        config-projects:
+          - common-config
+        untrusted-projects:
+          - org/project1
+          - org/project2
diff --git a/tests/fixtures/layouts/basic-git.yaml b/tests/fixtures/layouts/basic-git.yaml
new file mode 100644
index 0000000..068d0a0
--- /dev/null
+++ b/tests/fixtures/layouts/basic-git.yaml
@@ -0,0 +1,37 @@
+- pipeline:
+    name: post
+    manager: independent
+    trigger:
+      git:
+        - event: ref-updated
+          ref: ^refs/heads/.*$
+
+- pipeline:
+    name: tag
+    manager: independent
+    trigger:
+      git:
+        - event: ref-updated
+          ref: ^refs/tags/.*$
+
+- job:
+    name: base
+    parent: null
+    run: playbooks/base.yaml
+
+- job:
+    name: post-job
+    run: playbooks/post-job.yaml
+
+- job:
+    name: tag-job
+    run: playbooks/post-job.yaml
+
+- project:
+    name: org/project
+    post:
+      jobs:
+        - post-job
+    tag:
+      jobs:
+        - tag-job
diff --git a/tests/fixtures/zuul-git-driver.conf b/tests/fixtures/zuul-git-driver.conf
index b24b0a1..23a2a62 100644
--- a/tests/fixtures/zuul-git-driver.conf
+++ b/tests/fixtures/zuul-git-driver.conf
@@ -21,6 +21,7 @@
 [connection git]
 driver=git
 baseurl=""
+poll_delay=0.1
 
 [connection outgoing_smtp]
 driver=smtp
diff --git a/tests/unit/test_git_driver.py b/tests/unit/test_git_driver.py
index 1cfadf4..b9e6c6e 100644
--- a/tests/unit/test_git_driver.py
+++ b/tests/unit/test_git_driver.py
@@ -12,7 +12,12 @@
 # License for the specific language governing permissions and limitations
 # under the License.
 
-from tests.base import ZuulTestCase
+
+import os
+import time
+import yaml
+
+from tests.base import ZuulTestCase, simple_layout
 
 
 class TestGitDriver(ZuulTestCase):
@@ -23,7 +28,7 @@
         super(TestGitDriver, self).setup_config()
         self.config.set('connection git', 'baseurl', self.upstream_root)
 
-    def test_git_driver(self):
+    def test_basic(self):
         tenant = self.sched.abide.tenants.get('tenant-one')
         # Check that we have the git source for common-config and the
         # gerrit source for the project.
@@ -40,3 +45,127 @@
         self.waitUntilSettled()
         self.assertEqual(len(self.history), 1)
         self.assertEqual(A.reported, 1)
+
+    def test_config_refreshed(self):
+        A = self.fake_gerrit.addFakeChange('org/project', 'master', 'A')
+        self.fake_gerrit.addEvent(A.getPatchsetCreatedEvent(1))
+        self.waitUntilSettled()
+        self.assertEqual(len(self.history), 1)
+        self.assertEqual(A.reported, 1)
+        self.assertEqual(self.history[0].name, 'project-test1')
+
+        # Update zuul.yaml to force a tenant reconfiguration
+        path = os.path.join(self.upstream_root, 'common-config', 'zuul.yaml')
+        with open(path, 'r') as f:
+            config = yaml.safe_load(f)
+        change = {
+            'name': 'org/project',
+            'check': {
+                'jobs': [
+                    'project-test2'
+                ]
+            }
+        }
+        config[4]['project'] = change
+        files = {'zuul.yaml': yaml.dump(config)}
+        self.addCommitToRepo(
+            'common-config', 'Change zuul.yaml configuration', files)
+
+        # Allow some time for the tenant reconfiguration to happen
+        time.sleep(2)
+        self.waitUntilSettled()
+
+        A = self.fake_gerrit.addFakeChange('org/project', 'master', 'A')
+        self.fake_gerrit.addEvent(A.getPatchsetCreatedEvent(1))
+        self.waitUntilSettled()
+        self.assertEqual(len(self.history), 2)
+        self.assertEqual(A.reported, 1)
+        # We make sure the new job has run
+        self.assertEqual(self.history[1].name, 'project-test2')
+
+        # Stop the git watcher so we can merge several commits.  We want
+        # to verify that config changes are detected for commits in the
+        # range oldrev..newrev
+        self.sched.connections.getSource('git').connection.w_pause = True
+        # Add a config change
+        change = {
+            'name': 'org/project',
+            'check': {
+                'jobs': [
+                    'project-test1'
+                ]
+            }
+        }
+        config[4]['project'] = change
+        files = {'zuul.yaml': yaml.dump(config)}
+        self.addCommitToRepo(
+            'common-config', 'Change zuul.yaml configuration', files)
+        # Add two other changes
+        self.addCommitToRepo(
+            'common-config', 'Adding f1',
+            {'f1': "Content"})
+        self.addCommitToRepo(
+            'common-config', 'Adding f2',
+            {'f2': "Content"})
+        # Restart the git watcher
+        self.sched.connections.getSource('git').connection.w_pause = False
+
+        # Allow some time for the tenant reconfiguration to happen
+        time.sleep(2)
+        self.waitUntilSettled()
+
+        A = self.fake_gerrit.addFakeChange('org/project', 'master', 'A')
+        self.fake_gerrit.addEvent(A.getPatchsetCreatedEvent(1))
+        self.waitUntilSettled()
+        self.assertEqual(len(self.history), 3)
+        self.assertEqual(A.reported, 1)
+        # We make sure the new job has run
+        self.assertEqual(self.history[2].name, 'project-test1')
+
+    def ensure_watcher_has_context(self):
+        # Make sure the watcher has read the initial ref SHAs
+        cnx = self.sched.connections.getSource('git').connection
+        delay = 0.1
+        max_delay = 1
+        while not cnx.projects_refs:
+            time.sleep(delay)
+            max_delay -= delay
+            if max_delay <= 0:
+                raise Exception("Timeout waiting for initial read")
+
+    @simple_layout('layouts/basic-git.yaml', driver='git')
+    def test_ref_updated_event(self):
+        self.ensure_watcher_has_context()
+        # Add a commit to trigger a ref-updated event
+        self.addCommitToRepo(
+            'org/project', 'A change for ref-updated', {'f1': 'Content'})
+        # Allow some time for the git watcher to detect the ref-updated event
+        time.sleep(0.2)
+        self.waitUntilSettled()
+        self.assertEqual(len(self.history), 1)
+        self.assertEqual('SUCCESS',
+                         self.getJobFromHistory('post-job').result)
+
+    @simple_layout('layouts/basic-git.yaml', driver='git')
+    def test_ref_created(self):
+        self.ensure_watcher_has_context()
+        # Tag HEAD to trigger a ref-updated event
+        self.addTagToRepo(
+            'org/project', 'atag', 'HEAD')
+        # Allow some time for the git watcher to detect the ref-updated event
+        time.sleep(0.2)
+        self.waitUntilSettled()
+        self.assertEqual(len(self.history), 1)
+        self.assertEqual('SUCCESS',
+                         self.getJobFromHistory('tag-job').result)
+
+    @simple_layout('layouts/basic-git.yaml', driver='git')
+    def test_ref_deleted(self):
+        self.ensure_watcher_has_context()
+        # Delete the default 'init' tag to trigger a ref-updated event
+        self.delTagFromRepo(
+            'org/project', 'init')
+        # Allow some time for the git watcher to detect the ref-updated event
+        time.sleep(0.2)
+        self.waitUntilSettled()
+        # Make sure no job has run, as ignore-deletes is True by default
+        self.assertEqual(len(self.history), 0)
diff --git a/tests/unit/test_inventory.py b/tests/unit/test_inventory.py
index 1c41f5f..be50447 100644
--- a/tests/unit/test_inventory.py
+++ b/tests/unit/test_inventory.py
@@ -119,5 +119,15 @@
             self.assertEqual(
                 inventory['all']['hosts'][node_name]['ansible_user'], username)
 
+            # check that each node uses the expected ansible_connection
+            if node_name == 'windows':
+                self.assertEqual(
+                    inventory['all']['hosts'][node_name]['ansible_connection'],
+                    'winrm')
+            else:
+                self.assertEqual(
+                    'local',
+                    inventory['all']['hosts'][node_name]['ansible_connection'])
+
         self.executor_server.release()
         self.waitUntilSettled()
diff --git a/tests/unit/test_scheduler.py b/tests/unit/test_scheduler.py
index aacc81e..6bbf098 100755
--- a/tests/unit/test_scheduler.py
+++ b/tests/unit/test_scheduler.py
@@ -6070,6 +6070,77 @@
         self.assertEqual(B.reported, 1)
 
 
+class TestImplicitProject(ZuulTestCase):
+    tenant_config_file = 'config/implicit-project/main.yaml'
+
+    def test_implicit_project(self):
+        # config project should work with implicit project name
+        A = self.fake_gerrit.addFakeChange('common-config', 'master', 'A')
+        self.fake_gerrit.addEvent(A.getPatchsetCreatedEvent(1))
+
+        # untrusted project should work with implicit project name
+        B = self.fake_gerrit.addFakeChange('org/project', 'master', 'A')
+        self.fake_gerrit.addEvent(B.getPatchsetCreatedEvent(1))
+
+        self.waitUntilSettled()
+
+        self.assertEqual(A.data['status'], 'NEW')
+        self.assertEqual(A.reported, 1)
+        self.assertEqual(B.data['status'], 'NEW')
+        self.assertEqual(B.reported, 1)
+        self.assertHistory([
+            dict(name='test-common', result='SUCCESS', changes='1,1'),
+            dict(name='test-common', result='SUCCESS', changes='2,1'),
+            dict(name='test-project', result='SUCCESS', changes='2,1'),
+        ], ordered=False)
+
+        # now test adding a further project in repo
+        in_repo_conf = textwrap.dedent(
+            """
+            - job:
+                name: test-project
+                run: playbooks/test-project.yaml
+            - job:
+                name: test2-project
+                run: playbooks/test-project.yaml
+
+            - project:
+                check:
+                  jobs:
+                    - test-project
+                gate:
+                  jobs:
+                    - test-project
+
+            - project:
+                check:
+                  jobs:
+                    - test2-project
+                gate:
+                  jobs:
+                    - test2-project
+
+            """)
+        file_dict = {'.zuul.yaml': in_repo_conf}
+        C = self.fake_gerrit.addFakeChange('org/project', 'master', 'A',
+                                           files=file_dict)
+        C.addApproval('Code-Review', 2)
+        self.fake_gerrit.addEvent(C.addApproval('Approved', 1))
+        self.waitUntilSettled()
+
+        # change C must be merged
+        self.assertEqual(C.data['status'], 'MERGED')
+        self.assertEqual(C.reported, 2)
+        self.assertHistory([
+            dict(name='test-common', result='SUCCESS', changes='1,1'),
+            dict(name='test-common', result='SUCCESS', changes='2,1'),
+            dict(name='test-project', result='SUCCESS', changes='2,1'),
+            dict(name='test-common', result='SUCCESS', changes='3,1'),
+            dict(name='test-project', result='SUCCESS', changes='3,1'),
+            dict(name='test2-project', result='SUCCESS', changes='3,1'),
+        ], ordered=False)
+
+
 class TestSemaphoreInRepo(ZuulTestCase):
     config_file = 'zuul-connections-gerrit-and-github.conf'
     tenant_config_file = 'config/in-repo/main.yaml'
diff --git a/tests/unit/test_streaming.py b/tests/unit/test_streaming.py
index 4bb541a..59dd8b0 100644
--- a/tests/unit/test_streaming.py
+++ b/tests/unit/test_streaming.py
@@ -82,7 +82,7 @@
         s = socket.create_connection((self.host, port))
         self.addCleanup(s.close)
 
-        req = '%s\n' % build_uuid
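+        # send the build UUID as a CRLF-terminated request line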
+        req = '%s\r\n' % build_uuid
         s.sendall(req.encode('utf-8'))
         self.test_streaming_event.set()
 
@@ -196,7 +196,7 @@
                 time.sleep(0.1)
 
         with socket.create_connection(gateway_address) as s:
-            msg = "%s\n" % build_uuid
+            msg = "%s\r\n" % build_uuid
             s.sendall(msg.encode('utf-8'))
             event.set()  # notify we are connected and req sent
             while True:
diff --git a/tests/unit/test_v3.py b/tests/unit/test_v3.py
index 1f401d0..2779e6e 100755
--- a/tests/unit/test_v3.py
+++ b/tests/unit/test_v3.py
@@ -543,11 +543,23 @@
                 name: project-test2
                 run: playbooks/project-test2.yaml
 
+            - job:
+                name: project-test3
+                run: playbooks/project-test2.yaml
+
+            # add a job by the short project name
             - project:
                 name: org/project
                 tenant-one-gate:
                   jobs:
                     - project-test2
+
+            # add a job by the canonical project name
+            - project:
+                name: review.example.com/org/project
+                tenant-one-gate:
+                  jobs:
+                    - project-test3
             """)
 
         in_repo_playbook = textwrap.dedent(
@@ -569,7 +581,9 @@
         self.assertIn('tenant-one-gate', A.messages[1],
                       "A should transit tenant-one gate")
         self.assertHistory([
-            dict(name='project-test2', result='SUCCESS', changes='1,1')])
+            dict(name='project-test2', result='SUCCESS', changes='1,1'),
+            dict(name='project-test3', result='SUCCESS', changes='1,1'),
+        ], ordered=False)
 
         self.fake_gerrit.addEvent(A.getChangeMergedEvent())
         self.waitUntilSettled()
@@ -584,7 +598,10 @@
                          'SUCCESS')
         self.assertHistory([
             dict(name='project-test2', result='SUCCESS', changes='1,1'),
-            dict(name='project-test2', result='SUCCESS', changes='2,1')])
+            dict(name='project-test3', result='SUCCESS', changes='1,1'),
+            dict(name='project-test2', result='SUCCESS', changes='2,1'),
+            dict(name='project-test3', result='SUCCESS', changes='2,1'),
+        ], ordered=False)
 
     def test_dynamic_template(self):
         # Tests that a project can't update a template in another
@@ -1552,6 +1569,32 @@
                       C.messages[0],
                       "C should have an error reported")
 
+    def test_pipeline_debug(self):
+        in_repo_conf = textwrap.dedent(
+            """
+            - job:
+                name: project-test1
+                run: playbooks/project-test1.yaml
+            - project:
+                name: org/project
+                check:
+                  debug: True
+                  jobs:
+                    - project-test1
+            """)
+
+        file_dict = {'.zuul.yaml': in_repo_conf}
+        A = self.fake_gerrit.addFakeChange('org/project', 'master', 'A',
+                                           files=file_dict)
+        self.fake_gerrit.addEvent(A.getPatchsetCreatedEvent(1))
+        self.waitUntilSettled()
+
+        self.assertEqual(A.data['status'], 'NEW')
+        self.assertEqual(A.reported, 1,
+                         "A should report success")
+        self.assertIn('Debug information:',
+                      A.messages[0], "A should have debug info")
+
 
 class TestInRepoJoin(ZuulTestCase):
     # In this config, org/project is not a member of any pipelines, so
@@ -2232,6 +2275,115 @@
             self.assertIsNone(job.branch_matcher)
 
 
+class TestPragmaMultibranch(ZuulTestCase):
+    tenant_config_file = 'config/pragma-multibranch/main.yaml'
+
+    def test_no_branch_matchers(self):
+        self.create_branch('org/project1', 'stable/pike')
+        self.create_branch('org/project2', 'stable/jewel')
+        self.fake_gerrit.addEvent(
+            self.fake_gerrit.getFakeBranchCreatedEvent(
+                'org/project1', 'stable/pike'))
+        self.fake_gerrit.addEvent(
+            self.fake_gerrit.getFakeBranchCreatedEvent(
+                'org/project2', 'stable/jewel'))
+        self.waitUntilSettled()
+        # We want the jobs defined on the stable/pike branch of
+        # project1 to apply to the stable/jewel branch of project2.
+
+        # First, without the pragma line, the jobs should not run
+        # because in project1 they have branch matchers for pike, so
+        # they will not match a jewel change.
+        B = self.fake_gerrit.addFakeChange('org/project2', 'stable/jewel', 'B')
+        self.fake_gerrit.addEvent(B.getPatchsetCreatedEvent(1))
+        self.waitUntilSettled()
+        self.assertHistory([])
+
+        # Add a pragma line to disable implied branch matchers in
+        # project1, so that the jobs and templates apply to both
+        # branches.
+        with open(os.path.join(FIXTURE_DIR,
+                               'config/pragma-multibranch/git/',
+                               'org_project1/zuul.yaml')) as f:
+            config = f.read()
+        extra_conf = textwrap.dedent(
+            """
+            - pragma:
+                implied-branch-matchers: False
+            """)
+        config = extra_conf + config
+        file_dict = {'zuul.yaml': config}
+        A = self.fake_gerrit.addFakeChange('org/project1', 'stable/pike', 'A',
+                                           files=file_dict)
+        A.addApproval('Code-Review', 2)
+        self.fake_gerrit.addEvent(A.addApproval('Approved', 1))
+        self.waitUntilSettled()
+        self.fake_gerrit.addEvent(A.getChangeMergedEvent())
+        self.waitUntilSettled()
+
+        # Now verify that when we propose a change to jewel, we get
+        # the pike/jewel jobs.
+        self.fake_gerrit.addEvent(B.getPatchsetCreatedEvent(1))
+        self.waitUntilSettled()
+        self.assertHistory([
+            dict(name='test-job1', result='SUCCESS', changes='1,1'),
+            dict(name='test-job2', result='SUCCESS', changes='1,1'),
+        ], ordered=False)
+
+    def test_supplied_branch_matchers(self):
+        self.create_branch('org/project1', 'stable/pike')
+        self.create_branch('org/project2', 'stable/jewel')
+        self.fake_gerrit.addEvent(
+            self.fake_gerrit.getFakeBranchCreatedEvent(
+                'org/project1', 'stable/pike'))
+        self.fake_gerrit.addEvent(
+            self.fake_gerrit.getFakeBranchCreatedEvent(
+                'org/project2', 'stable/jewel'))
+        self.waitUntilSettled()
+        # We want the jobs defined on the stable/pike branch of
+        # project1 to apply to the stable/jewel branch of project2.
+
+        # First, without the pragma line, the jobs should not run
+        # because in project1 they have branch matchers for pike, so
+        # they will not match a jewel change.
+        B = self.fake_gerrit.addFakeChange('org/project2', 'stable/jewel', 'B')
+        self.fake_gerrit.addEvent(B.getPatchsetCreatedEvent(1))
+        self.waitUntilSettled()
+        self.assertHistory([])
+
+        # Add a pragma line supplying explicit implied branches in
+        # project1, so that the jobs and templates apply to both
+        # branches.
+        with open(os.path.join(FIXTURE_DIR,
+                               'config/pragma-multibranch/git/',
+                               'org_project1/zuul.yaml')) as f:
+            config = f.read()
+        extra_conf = textwrap.dedent(
+            """
+            - pragma:
+                implied-branches:
+                  - stable/pike
+                  - stable/jewel
+            """)
+        config = extra_conf + config
+        file_dict = {'zuul.yaml': config}
+        A = self.fake_gerrit.addFakeChange('org/project1', 'stable/pike', 'A',
+                                           files=file_dict)
+        A.addApproval('Code-Review', 2)
+        self.fake_gerrit.addEvent(A.addApproval('Approved', 1))
+        self.waitUntilSettled()
+        self.fake_gerrit.addEvent(A.getChangeMergedEvent())
+        self.waitUntilSettled()
+        # Now verify that when we propose a change to jewel, we get
+        # the pike/jewel jobs.
+        self.fake_gerrit.addEvent(B.getPatchsetCreatedEvent(1))
+        self.waitUntilSettled()
+        self.assertHistory([
+            dict(name='test-job1', result='SUCCESS', changes='1,1'),
+            dict(name='test-job2', result='SUCCESS', changes='1,1'),
+        ], ordered=False)
+
+
 class TestBaseJobs(ZuulTestCase):
     tenant_config_file = 'config/base-jobs/main.yaml'
 
diff --git a/tests/unit/test_web.py b/tests/unit/test_web.py
new file mode 100644
index 0000000..6881a83
--- /dev/null
+++ b/tests/unit/test_web.py
@@ -0,0 +1,145 @@
+#!/usr/bin/env python
+
+# Copyright 2014 Hewlett-Packard Development Company, L.P.
+# Copyright 2014 Rackspace Australia
+#
+# Licensed under the Apache License, Version 2.0 (the "License"); you may
+# not use this file except in compliance with the License. You may obtain
+# a copy of the License at
+#
+#      http://www.apache.org/licenses/LICENSE-2.0
+#
+# Unless required by applicable law or agreed to in writing, software
+# distributed under the License is distributed on an "AS IS" BASIS, WITHOUT
+# WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. See the
+# License for the specific language governing permissions and limitations
+# under the License.
+
+import asyncio
+import threading
+import os
+import json
+import urllib
+import time
+import socket
+from unittest import skip
+
+import webob
+
+import zuul.web
+
+from tests.base import ZuulTestCase, FIXTURE_DIR
+
+
+class TestWeb(ZuulTestCase):
+    tenant_config_file = 'config/single-tenant/main.yaml'
+
+    def setUp(self):
+        super(TestWeb, self).setUp()
+        self.executor_server.hold_jobs_in_build = True
+        A = self.fake_gerrit.addFakeChange('org/project', 'master', 'A')
+        A.addApproval('Code-Review', 2)
+        self.fake_gerrit.addEvent(A.addApproval('Approved', 1))
+        B = self.fake_gerrit.addFakeChange('org/project1', 'master', 'B')
+        B.addApproval('Code-Review', 2)
+        self.fake_gerrit.addEvent(B.addApproval('Approved', 1))
+        self.waitUntilSettled()
+
+        # Start the web server
+        self.web = zuul.web.ZuulWeb(
+            listen_address='127.0.0.1', listen_port=0,
+            gear_server='127.0.0.1', gear_port=self.gearman_server.port)
+        loop = asyncio.new_event_loop()
+        loop.set_debug(True)
+        ws_thread = threading.Thread(target=self.web.run, args=(loop,))
+        ws_thread.start()
+        self.addCleanup(loop.close)
+        self.addCleanup(ws_thread.join)
+        self.addCleanup(self.web.stop)
+
+        self.host = 'localhost'
+        # Wait until web server is started
+        while True:
+            time.sleep(0.1)
+            if self.web.server is None:
+                continue
+            self.port = self.web.server.sockets[0].getsockname()[1]
+            print(self.host, self.port)
+            try:
+                with socket.create_connection((self.host, self.port)):
+                    break
+            except ConnectionRefusedError:
+                pass
+
+    def tearDown(self):
+        self.executor_server.hold_jobs_in_build = False
+        self.executor_server.release()
+        self.waitUntilSettled()
+        super(TestWeb, self).tearDown()
+
+    def test_web_status(self):
+        "Test that we can filter to only certain changes in the webapp."
+
+        req = urllib.request.Request(
+            "http://localhost:%s/tenant-one/status.json" % self.port)
+        f = urllib.request.urlopen(req)
+        data = json.loads(f.read().decode('utf8'))
+
+        self.assertIn('pipelines', data)
+
+    def test_web_bad_url(self):
+        # do we 404 correctly
+        req = urllib.request.Request(
+            "http://localhost:%s/status/foo" % self.port)
+        self.assertRaises(urllib.error.HTTPError, urllib.request.urlopen, req)
+
+    @skip("This is not supported by zuul-web")
+    def test_web_find_change(self):
+        # can we filter by change id
+        req = urllib.request.Request(
+            "http://localhost:%s/tenant-one/status/change/1,1" % self.port)
+        f = urllib.request.urlopen(req)
+        data = json.loads(f.read().decode('utf8'))
+
+        self.assertEqual(1, len(data), data)
+        self.assertEqual("org/project", data[0]['project'])
+
+        req = urllib.request.Request(
+            "http://localhost:%s/tenant-one/status/change/2,1" % self.port)
+        f = urllib.request.urlopen(req)
+        data = json.loads(f.read().decode('utf8'))
+
+        self.assertEqual(1, len(data), data)
+        self.assertEqual("org/project1", data[0]['project'], data)
+
+    def test_web_keys(self):
+        with open(os.path.join(FIXTURE_DIR, 'public.pem'), 'rb') as f:
+            public_pem = f.read()
+
+        req = urllib.request.Request(
+            "http://localhost:%s/tenant-one/org/project.pub" %
+            self.port)
+        f = urllib.request.urlopen(req)
+        self.assertEqual(f.read(), public_pem)
+
+    @skip("This may not apply to zuul-web")
+    def test_web_custom_handler(self):
+        def custom_handler(path, tenant_name, request):
+            return webob.Response(body='ok')
+
+        self.webapp.register_path('/custom', custom_handler)
+        req = urllib.request.Request(
+            "http://localhost:%s/custom" % self.port)
+        f = urllib.request.urlopen(req)
+        self.assertEqual(b'ok', f.read())
+
+        self.webapp.unregister_path('/custom')
+        self.assertRaises(urllib.error.HTTPError, urllib.request.urlopen, req)
+
+    @skip("This returns a 500")
+    def test_web_404_on_unknown_tenant(self):
+        req = urllib.request.Request(
+            "http://localhost:{}/non-tenant/status.json".format(self.port))
+        e = self.assertRaises(
+            urllib.error.HTTPError, urllib.request.urlopen, req)
+        self.assertEqual(404, e.code)
diff --git a/tools/encrypt_secret.py b/tools/encrypt_secret.py
index 2a4ea1d..c0ee9be 100755
--- a/tools/encrypt_secret.py
+++ b/tools/encrypt_secret.py
@@ -58,7 +58,7 @@
                         "to standard output.")
     args = parser.parse_args()
 
-    req = Request("%s/%s.pub" % (args.url, args.project))
+    req = Request("%s/%s.pub" % (args.url.rstrip('/'), args.project))
     pubkey = urlopen(req)
 
     if args.infile:
diff --git a/tools/github-debugging.py b/tools/github-debugging.py
new file mode 100644
index 0000000..171627a
--- /dev/null
+++ b/tools/github-debugging.py
@@ -0,0 +1,55 @@
+import github3
+import logging
+import time
+
+# This is a template with boilerplate code for debugging github issues
+
+# TODO: for real use override the following variables
+url = 'https://example.com'
+api_token = 'xxxx'
+org = 'org'
+project = 'project'
+pull_nr = 3
+
+
+# Send the logs to stderr as well
+stream_handler = logging.StreamHandler()
+
+
+logger_urllib3 = logging.getLogger('requests.packages.urllib3')
+# logger_urllib3.addHandler(stream_handler)
+logger_urllib3.setLevel(logging.DEBUG)
+
+logger = logging.getLogger('github3')
+# logger.addHandler(stream_handler)
+logger.setLevel(logging.DEBUG)
+
+
+github = github3.GitHubEnterprise(url)
+
+
+# This is the currently broken cache adapter; enable or replace it to debug
+# caching behavior
+
+# import cachecontrol
+# from cachecontrol.cache import DictCache
+# cache_adapter = cachecontrol.CacheControlAdapter(
+#             DictCache(),
+#             cache_etags=True)
+#
+# github.session.mount('http://', cache_adapter)
+# github.session.mount('https://', cache_adapter)
+
+
+github.login(token=api_token)
+
+i = 0
+while True:
+    pr = github.pull_request(org, project, pull_nr)
+    prdict = pr.as_dict()
+    issue = pr.issue()
+    labels = list(issue.labels())
+    print(labels)
+    i += 1
+    print(i)
+    time.sleep(1)
diff --git a/zuul/configloader.py b/zuul/configloader.py
index 227e352..3a7e9b9 100644
--- a/zuul/configloader.py
+++ b/zuul/configloader.py
@@ -358,6 +358,7 @@
 class PragmaParser(object):
     pragma = {
         'implied-branch-matchers': bool,
+        'implied-branches': to_list(str),
         '_source_context': model.SourceContext,
         '_start_mark': ZuulMark,
     }
@@ -372,11 +373,14 @@
             self.schema(conf)
 
         bm = conf.get('implied-branch-matchers')
-        if bm is None:
-            return
 
         source_context = conf['_source_context']
-        source_context.implied_branch_matchers = bm
+        if bm is not None:
+            source_context.implied_branch_matchers = bm
+
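+        # The implied-branches pragma supplies an explicit list of
+        # branches to use for implied branch matchers.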
+        branches = conf.get('implied-branches')
+        if branches is not None:
+            source_context.implied_branches = as_list(branches)
 
 
 class NodeSetParser(object):
@@ -528,6 +532,8 @@
         # If the user has set a pragma directive for this, use the
         # value (if unset, the value is None).
         if job.source_context.implied_branch_matchers is True:
+            if job.source_context.implied_branches is not None:
+                return job.source_context.implied_branches
             return [job.source_context.branch]
         elif job.source_context.implied_branch_matchers is False:
             return None
@@ -543,6 +549,8 @@
         if len(branches) == 1:
             return None
 
+        if job.source_context.implied_branches is not None:
+            return job.source_context.implied_branches
         return [job.source_context.branch]
 
     @staticmethod
@@ -781,7 +789,11 @@
 
         job = {str: vs.Any(str, JobParser.job_attributes)}
         job_list = [vs.Any(str, job)]
-        pipeline_contents = {'queue': str, 'jobs': job_list}
+        pipeline_contents = {
+            'queue': str,
+            'debug': bool,
+            'jobs': job_list,
+        }
 
         for p in self.layout.pipelines.values():
             project_template[p.name] = pipeline_contents
@@ -801,6 +813,7 @@
             project_pipeline = model.ProjectPipelineConfig()
             project_template.pipelines[pipeline.name] = project_pipeline
             project_pipeline.queue_name = conf_pipeline.get('queue')
+            project_pipeline.debug = conf_pipeline.get('debug')
             self.parseJobList(
                 conf_pipeline.get('jobs', []),
                 source_context, start_mark, project_pipeline.job_list)
@@ -839,7 +852,7 @@
 
     def getSchema(self):
         project = {
-            vs.Required('name'): str,
+            'name': str,
             'description': str,
             'templates': [str],
             'merge-mode': vs.Any('merge', 'merge-resolve',
@@ -851,7 +864,11 @@
 
         job = {str: vs.Any(str, JobParser.job_attributes)}
         job_list = [vs.Any(str, job)]
-        pipeline_contents = {'queue': str, 'jobs': job_list}
+        pipeline_contents = {
+            'queue': str,
+            'debug': bool,
+            'jobs': job_list
+        }
 
         for p in self.layout.pipelines.values():
             project[p.name] = pipeline_contents
@@ -912,6 +929,7 @@
         for pipeline in self.layout.pipelines.values():
             project_pipeline = model.ProjectPipelineConfig()
             queue_name = None
+            debug = False
             # For every template, iterate over the job tree and replace or
             # create the jobs in the final definition as needed.
             pipeline_defined = False
@@ -924,8 +942,12 @@
                         implied_branch)
                     if template_pipeline.queue_name:
                         queue_name = template_pipeline.queue_name
+                    if template_pipeline.debug is not None:
+                        debug = template_pipeline.debug
             if queue_name:
                 project_pipeline.queue_name = queue_name
+            if debug:
+                project_pipeline.debug = True
             if pipeline_defined:
                 project_config.pipelines[pipeline.name] = project_pipeline
         return project_config
@@ -1206,8 +1228,8 @@
                                                   tenant.config_projects,
                                                   tenant.untrusted_projects,
                                                   cached, tenant)
-        unparsed_config.extend(tenant.config_projects_config, tenant=tenant)
-        unparsed_config.extend(tenant.untrusted_projects_config, tenant=tenant)
+        unparsed_config.extend(tenant.config_projects_config, tenant)
+        unparsed_config.extend(tenant.untrusted_projects_config, tenant)
         tenant.layout = TenantParser._parseLayout(base, tenant,
                                                   unparsed_config,
                                                   scheduler,
@@ -1462,10 +1484,10 @@
                     (job.project,))
                 if job.config_project:
                     config_projects_config.extend(
-                        job.project.unparsed_config)
+                        job.project.unparsed_config, tenant)
                 else:
                     untrusted_projects_config.extend(
-                        job.project.unparsed_config)
+                        job.project.unparsed_config, tenant)
                 continue
             TenantParser.log.debug("Waiting for cat job %s" % (job,))
             job.wait()
@@ -1496,17 +1518,18 @@
                     branch = source_context.branch
                     if source_context.trusted:
                         incdata = TenantParser._parseConfigProjectLayout(
-                            job.files[fn], source_context)
-                        config_projects_config.extend(incdata)
+                            job.files[fn], source_context, tenant)
+                        config_projects_config.extend(incdata, tenant)
                     else:
                         incdata = TenantParser._parseUntrustedProjectLayout(
-                            job.files[fn], source_context)
-                        untrusted_projects_config.extend(incdata)
-                    new_project_unparsed_config[project].extend(incdata)
+                            job.files[fn], source_context, tenant)
+                        untrusted_projects_config.extend(incdata, tenant)
+                    new_project_unparsed_config[project].extend(
+                        incdata, tenant)
                     if branch in new_project_unparsed_branch_config.get(
                             project, {}):
                         new_project_unparsed_branch_config[project][branch].\
-                            extend(incdata)
+                            extend(incdata, tenant)
         # Now that we've sucessfully loaded all of the configuration,
         # cache the unparsed data on the project objects.
         for project, data in new_project_unparsed_config.items():
@@ -1518,18 +1541,18 @@
         return config_projects_config, untrusted_projects_config
 
     @staticmethod
-    def _parseConfigProjectLayout(data, source_context):
+    def _parseConfigProjectLayout(data, source_context, tenant):
         # This is the top-level configuration for a tenant.
         config = model.UnparsedTenantConfig()
         with early_configuration_exceptions(source_context):
-            config.extend(safe_load_yaml(data, source_context))
+            config.extend(safe_load_yaml(data, source_context), tenant)
         return config
 
     @staticmethod
-    def _parseUntrustedProjectLayout(data, source_context):
+    def _parseUntrustedProjectLayout(data, source_context, tenant):
         config = model.UnparsedTenantConfig()
         with early_configuration_exceptions(source_context):
-            config.extend(safe_load_yaml(data, source_context))
+            config.extend(safe_load_yaml(data, source_context), tenant)
         if config.pipelines:
             with configuration_exceptions('pipeline', config.pipelines[0]):
                 raise PipelineNotPermittedError()
@@ -1731,7 +1754,7 @@
                 else:
                     incdata = project.unparsed_branch_config.get(branch)
                 if incdata:
-                    config.extend(incdata)
+                    config.extend(incdata, tenant)
                 continue
             # Otherwise, do not use the cached config (even if the
             # files are empty as that likely means they were deleted).
@@ -1760,12 +1783,12 @@
 
                     if trusted:
                         incdata = TenantParser._parseConfigProjectLayout(
-                            data, source_context)
+                            data, source_context, tenant)
                     else:
                         incdata = TenantParser._parseUntrustedProjectLayout(
-                            data, source_context)
+                            data, source_context, tenant)
 
-                    config.extend(incdata)
+                    config.extend(incdata, tenant)
 
     def createDynamicLayout(self, tenant, files,
                             include_config_projects=False,
diff --git a/zuul/driver/gerrit/gerrittrigger.py b/zuul/driver/gerrit/gerrittrigger.py
index cfedd4e..67608ad 100644
--- a/zuul/driver/gerrit/gerrittrigger.py
+++ b/zuul/driver/gerrit/gerrittrigger.py
@@ -63,16 +63,6 @@
         return efilters
 
 
-def validate_conf(trigger_conf):
-    """Validates the layout's trigger data."""
-    events_with_ref = ('ref-updated', )
-    for event in trigger_conf:
-        if event['event'] not in events_with_ref and event.get('ref', False):
-            raise v.Invalid(
-                "The event %s does not include ref information, Zuul cannot "
-                "use ref filter 'ref: %s'" % (event['event'], event['ref']))
-
-
 def getSchema():
     variable_dict = v.Schema(dict)
 
diff --git a/zuul/driver/git/__init__.py b/zuul/driver/git/__init__.py
index 0faa036..1fe43f6 100644
--- a/zuul/driver/git/__init__.py
+++ b/zuul/driver/git/__init__.py
@@ -15,6 +15,7 @@
 from zuul.driver import Driver, ConnectionInterface, SourceInterface
 from zuul.driver.git import gitconnection
 from zuul.driver.git import gitsource
+from zuul.driver.git import gittrigger
 
 
 class GitDriver(Driver, ConnectionInterface, SourceInterface):
@@ -23,9 +24,15 @@
     def getConnection(self, name, config):
         return gitconnection.GitConnection(self, name, config)
 
+    def getTrigger(self, connection, config=None):
+        return gittrigger.GitTrigger(self, connection, config)
+
     def getSource(self, connection):
         return gitsource.GitSource(self, connection)
 
+    def getTriggerSchema(self):
+        return gittrigger.getSchema()
+
     def getRequireSchema(self):
         return {}
 
diff --git a/zuul/driver/git/gitconnection.py b/zuul/driver/git/gitconnection.py
index f93824d..03b24ca 100644
--- a/zuul/driver/git/gitconnection.py
+++ b/zuul/driver/git/gitconnection.py
@@ -13,12 +13,119 @@
 # License for the specific language governing permissions and limitations
 # under the License.
 
+import os
+import git
+import time
 import logging
 import urllib
+import threading
 
 import voluptuous as v
 
 from zuul.connection import BaseConnection
+from zuul.driver.git.gitmodel import GitTriggerEvent, EMPTY_GIT_REF
+from zuul.model import Ref, Branch
+
+
+class GitWatcher(threading.Thread):
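+    # Polls each project's refs with `git ls-remote`, compares them with
+    # the previously recorded state and emits a ref-updated trigger event
+    # for every created, deleted or updated ref.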
+    log = logging.getLogger("connection.git.GitWatcher")
+
+    def __init__(self, git_connection, baseurl, poll_delay):
+        threading.Thread.__init__(self)
+        self.daemon = True
+        self.git_connection = git_connection
+        self.baseurl = baseurl
+        self.poll_delay = poll_delay
+        self._stopped = False
+        self.projects_refs = self.git_connection.projects_refs
+
+    def compareRefs(self, project, refs):
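+        # Compare the newly fetched refs with the previously recorded
+        # state and build one trigger event per created, deleted or
+        # updated ref.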
+        partial_events = []
+        # Fetch previous refs state
+        base_refs = self.projects_refs.get(project)
+        # Create list of created refs
+        rcreateds = set(refs.keys()) - set(base_refs.keys())
+        # Create list of deleted refs
+        rdeleteds = set(base_refs.keys()) - set(refs.keys())
+        # Create the list of updated refs
+        updateds = {}
+        for ref, sha in refs.items():
+            if ref in base_refs and base_refs[ref] != sha:
+                updateds[ref] = sha
+        for ref in rcreateds:
+            event = {
+                'ref': ref,
+                'branch_created': True,
+                'oldrev': EMPTY_GIT_REF,
+                'newrev': refs[ref]
+            }
+            partial_events.append(event)
+        for ref in rdeleteds:
+            event = {
+                'ref': ref,
+                'branch_deleted': True,
+                'oldrev': base_refs[ref],
+                'newrev': EMPTY_GIT_REF
+            }
+            partial_events.append(event)
+        for ref, sha in updateds.items():
+            event = {
+                'ref': ref,
+                'branch_updated': True,
+                'oldrev': base_refs[ref],
+                'newrev': sha
+            }
+            partial_events.append(event)
+        events = []
+        for pevent in partial_events:
+            event = GitTriggerEvent()
+            event.type = 'ref-updated'
+            event.project_hostname = self.git_connection.canonical_hostname
+            event.project_name = project
+            for attr in ('ref', 'oldrev', 'newrev', 'branch_created',
+                         'branch_deleted', 'branch_updated'):
+                if attr in pevent:
+                    setattr(event, attr, pevent[attr])
+            events.append(event)
+        return events
+
+    def _run(self):
+        self.log.debug("Walk through projects refs for connection: %s" %
+                       self.git_connection.connection_name)
+        try:
+            for project in self.git_connection.projects:
+                refs = self.git_connection.lsRemote(project)
+                self.log.debug("Read refs %s for project %s" % (refs, project))
+                if not self.projects_refs.get(project):
+                    # No previous state for this project, so record it.
+                    # No event will be triggered on this iteration because
+                    # projects_refs[project] and refs are now equal.
+                    self.projects_refs[project] = refs
+                events = self.compareRefs(project, refs)
+                self.projects_refs[project] = refs
+                # Send events to the scheduler
+                for event in events:
+                    self.log.debug("Handling event: %s" % event)
+                    # Force changes cache update before passing
+                    # the event to the scheduler
+                    self.git_connection.getChange(event)
+                    self.git_connection.logEvent(event)
+                    # Pass the event to the scheduler
+                    self.git_connection.sched.addEvent(event)
+        except Exception as e:
+            self.log.debug("Unexpected issue in _run loop: %s" % str(e))
+
+    def run(self):
+        while not self._stopped:
+            if not self.git_connection.w_pause:
+                self._run()
+            else:
+                self.log.debug("Watcher is on pause")
+            # Wait poll_delay seconds before the next polling iteration
+            time.sleep(self.poll_delay)
+
+    def stop(self):
+        self._stopped = True
 
 
 class GitConnection(BaseConnection):
@@ -32,6 +139,8 @@
             raise Exception('baseurl is required for git connections in '
                             '%s' % self.connection_name)
         self.baseurl = self.connection_config.get('baseurl')
+        self.poll_delay = float(
+            self.connection_config.get('poll_delay', 3600 * 2))
         self.canonical_hostname = self.connection_config.get(
             'canonical_hostname')
         if not self.canonical_hostname:
@@ -40,7 +149,10 @@
                 self.canonical_hostname = r.hostname
             else:
                 self.canonical_hostname = 'localhost'
+        self.w_pause = False
         self.projects = {}
+        self.projects_refs = {}
+        self._change_cache = {}
+        self.watcher_thread = None
 
     def getProject(self, name):
         return self.projects.get(name)
@@ -48,15 +160,97 @@
     def addProject(self, project):
         self.projects[project.name] = project
 
+    def getChangeFilesUpdated(self, project_name, branch, tosha):
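+        # Ask the merger for the files changed between 'tosha' and the
+        # branch head, blocking until the job completes.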
+        job = self.sched.merger.getFilesChanges(
+            self.connection_name, project_name, branch, tosha)
+        self.log.debug("Waiting for fileschanges job %s" % job)
+        job.wait()
+        if not job.updated:
+            raise Exception("Fileschanges job %s failed" % job)
+        self.log.debug("Fileschanges job %s got changes on files %s" %
+                       (job, job.files))
+        return job.files
+
+    def lsRemote(self, project):
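+        # Run `git ls-remote` against the project URL and return a
+        # mapping of ref name to sha.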
+        refs = {}
+        client = git.cmd.Git()
+        output = client.ls_remote(
+            os.path.join(self.baseurl, project))
+        for line in output.splitlines():
+            sha, ref = line.split('\t')
+            if ref.startswith('refs/'):
+                refs[ref] = sha
+        return refs
+
+    def maintainCache(self, relevant):
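+        # Drop cached changes that are no longer relevant to any pipeline
+        # item so the cache does not grow without bound.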
+        remove = {}
+        for branch, refschange in self._change_cache.items():
+            for ref, change in refschange.items():
+                if change not in relevant:
+                    remove.setdefault(branch, []).append(ref)
+        for branch, refs in remove.items():
+            for ref in refs:
+                del self._change_cache[branch][ref]
+            if not self._change_cache[branch]:
+                del self._change_cache[branch]
+
+    def getChange(self, event, refresh=False):
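+        # Translate a ref-updated event into a Branch change (cached by
+        # branch and newrev) or a plain Ref change for non-branch refs.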
+        if event.ref and event.ref.startswith('refs/heads/'):
+            branch = event.ref[len('refs/heads/'):]
+            change = self._change_cache.get(branch, {}).get(event.newrev)
+            if change:
+                return change
+            project = self.getProject(event.project_name)
+            change = Branch(project)
+            change.branch = branch
+            for attr in ('ref', 'oldrev', 'newrev'):
+                setattr(change, attr, getattr(event, attr))
+            change.url = ""
+            change.files = self.getChangeFilesUpdated(
+                event.project_name, change.branch, event.oldrev)
+            self._change_cache.setdefault(branch, {})[event.newrev] = change
+        elif event.ref:
+            # catch-all ref (ie, not a branch or head)
+            project = self.getProject(event.project_name)
+            change = Ref(project)
+            for attr in ('ref', 'oldrev', 'newrev'):
+                setattr(change, attr, getattr(event, attr))
+            change.url = ""
+        else:
+            self.log.warning("Unable to get change for %s" % (event,))
+            change = None
+        return change
+
     def getProjectBranches(self, project, tenant):
-        # TODO(jeblair): implement; this will need to handle local or
-        # remote git urls.
-        return ['master']
+        refs = self.lsRemote(project.name)
+        branches = [ref[len('refs/heads/'):] for ref in
+                    refs if ref.startswith('refs/heads/')]
+        return branches
 
     def getGitUrl(self, project):
         url = '%s/%s' % (self.baseurl, project.name)
         return url
 
+    def onLoad(self):
+        self.log.debug("Starting Git Watcher")
+        self._start_watcher_thread()
+
+    def onStop(self):
+        self.log.debug("Stopping Git Watcher")
+        self._stop_watcher_thread()
+
+    def _stop_watcher_thread(self):
+        if self.watcher_thread:
+            self.watcher_thread.stop()
+            self.watcher_thread.join()
+
+    def _start_watcher_thread(self):
+        self.watcher_thread = GitWatcher(
+            self,
+            self.baseurl,
+            self.poll_delay)
+        self.watcher_thread.start()
+
 
 def getSchema():
     git_connection = v.Any(str, v.Schema(dict))
diff --git a/zuul/driver/git/gitmodel.py b/zuul/driver/git/gitmodel.py
new file mode 100644
index 0000000..5d12b36
--- /dev/null
+++ b/zuul/driver/git/gitmodel.py
@@ -0,0 +1,86 @@
+# Copyright 2017 Red Hat, Inc.
+#
+# Licensed under the Apache License, Version 2.0 (the "License"); you may
+# not use this file except in compliance with the License. You may obtain
+# a copy of the License at
+#
+#      http://www.apache.org/licenses/LICENSE-2.0
+#
+# Unless required by applicable law or agreed to in writing, software
+# distributed under the License is distributed on an "AS IS" BASIS, WITHOUT
+# WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. See the
+# License for the specific language governing permissions and limitations
+# under the License.
+
+import re
+
+from zuul.model import TriggerEvent
+from zuul.model import EventFilter
+
+
+EMPTY_GIT_REF = '0' * 40  # git sha of all zeros, used during creates/deletes
+
+
+class GitTriggerEvent(TriggerEvent):
+    """Incoming event from an external system."""
+
+    def __repr__(self):
+        ret = '<GitTriggerEvent %s %s' % (self.type,
+                                          self.project_name)
+
+        if self.branch:
+            ret += " %s" % self.branch
+        ret += " oldrev:%s" % self.oldrev
+        ret += " newrev:%s" % self.newrev
+        ret += '>'
+
+        return ret
+
+
+class GitEventFilter(EventFilter):
+    def __init__(self, trigger, types=[], refs=[],
+                 ignore_deletes=True):
+
+        super().__init__(trigger)
+
+        self._refs = refs
+        self.types = types
+        self.refs = [re.compile(x) for x in refs]
+        self.ignore_deletes = ignore_deletes
+
+    def __repr__(self):
+        ret = '<GitEventFilter'
+
+        if self.types:
+            ret += ' types: %s' % ', '.join(self.types)
+        if self._refs:
+            ret += ' refs: %s' % ', '.join(self._refs)
+        if self.ignore_deletes:
+            ret += ' ignore_deletes: %s' % self.ignore_deletes
+        ret += '>'
+
+        return ret
+
+    def matches(self, event, change):
+        # event types are ORed
+        matches_type = False
+        for etype in self.types:
+            if etype == event.type:
+                matches_type = True
+        if self.types and not matches_type:
+            return False
+
+        # refs are ORed
+        matches_ref = False
+        if event.ref is not None:
+            for ref in self.refs:
+                if ref.match(event.ref):
+                    matches_ref = True
+        if self.refs and not matches_ref:
+            return False
+        if self.ignore_deletes and event.newrev == EMPTY_GIT_REF:
+            # If the updated ref has an empty git sha (all 0s),
+            # then the ref is being deleted
+            return False
+
+        return True
diff --git a/zuul/driver/git/gitsource.py b/zuul/driver/git/gitsource.py
index 8d85c08..78ae04e 100644
--- a/zuul/driver/git/gitsource.py
+++ b/zuul/driver/git/gitsource.py
@@ -36,7 +36,7 @@
         raise NotImplemented()
 
     def getChange(self, event, refresh=False):
-        raise NotImplemented()
+        return self.connection.getChange(event, refresh)
 
     def getProject(self, name):
         p = self.connection.getProject(name)
diff --git a/zuul/driver/git/gittrigger.py b/zuul/driver/git/gittrigger.py
new file mode 100644
index 0000000..2885230
--- /dev/null
+++ b/zuul/driver/git/gittrigger.py
@@ -0,0 +1,49 @@
+# Copyright 2017 Red Hat, Inc.
+#
+# Licensed under the Apache License, Version 2.0 (the "License"); you may
+# not use this file except in compliance with the License. You may obtain
+# a copy of the License at
+#
+#      http://www.apache.org/licenses/LICENSE-2.0
+#
+# Unless required by applicable law or agreed to in writing, software
+# distributed under the License is distributed on an "AS IS" BASIS, WITHOUT
+# WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. See the
+# License for the specific language governing permissions and limitations
+# under the License.
+
+import logging
+import voluptuous as v
+from zuul.trigger import BaseTrigger
+from zuul.driver.git.gitmodel import GitEventFilter
+from zuul.driver.util import scalar_or_list, to_list
+
+
+class GitTrigger(BaseTrigger):
+    name = 'git'
+    log = logging.getLogger("zuul.GitTrigger")
+
+    def getEventFilters(self, trigger_conf):
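+        # Build one GitEventFilter per configured trigger stanza,
+        # normalizing scalar values to lists.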
+        efilters = []
+        for trigger in to_list(trigger_conf):
+            f = GitEventFilter(
+                trigger=self,
+                types=to_list(trigger['event']),
+                refs=to_list(trigger.get('ref')),
+                ignore_deletes=trigger.get(
+                    'ignore-deletes', True)
+            )
+            efilters.append(f)
+
+        return efilters
+
+
+def getSchema():
+    git_trigger = {
+        v.Required('event'):
+            scalar_or_list(v.Any('ref-updated')),
+        'ref': scalar_or_list(str),
+        'ignore-deletes': bool,
+    }
+
+    return git_trigger
diff --git a/zuul/driver/github/githubconnection.py b/zuul/driver/github/githubconnection.py
index 61ee9c6..702f6b0 100644
--- a/zuul/driver/github/githubconnection.py
+++ b/zuul/driver/github/githubconnection.py
@@ -24,6 +24,7 @@
 
 import cachecontrol
 from cachecontrol.cache import DictCache
+from cachecontrol.heuristics import BaseHeuristic
 import iso8601
 import jwt
 import requests
@@ -135,7 +136,6 @@
     """Move events from GitHub into the scheduler"""
 
     log = logging.getLogger("zuul.GithubEventConnector")
-    delay = 10.0
 
     def __init__(self, connection):
         super(GithubEventConnector, self).__init__()
@@ -151,14 +151,6 @@
         ts, json_body, event_type = self.connection.getEvent()
         if self._stopped:
             return
-        # Github can produce inconsistent data immediately after an
-        # event, So ensure that we do not deliver the event to Zuul
-        # until at least a certain amount of time has passed.  Note
-        # that if we receive several events in succession, we will
-        # only need to delay for the first event.  In essence, Zuul
-        # should always be a constant number of seconds behind Github.
-        now = time.time()
-        time.sleep(max((ts + self.delay) - now, 0.0))
 
         # If there's any installation mapping information in the body then
         # update the project mapping before any requests are made.
@@ -434,9 +426,26 @@
         # NOTE(jamielennox): Better here would be to cache to memcache or file
         # or something external - but zuul already sucks at restarting so in
         # memory probably doesn't make this much worse.
+
+        # NOTE(tobiash): Contrary to its documentation, cachecontrol does
+        # not prioritize etag caching; it doesn't even re-request until
+        # max-age has elapsed.
+        #
+        # Thus we need to add a custom caching heuristic which simply drops
+        # the cache-control header containing max-age. This way we force
+        # cachecontrol to only rely on the etag headers.
+        #
+        # http://cachecontrol.readthedocs.io/en/latest/etags.html
+        # http://cachecontrol.readthedocs.io/en/latest/custom_heuristics.html
+        class NoAgeHeuristic(BaseHeuristic):
+            def update_headers(self, response):
+                if 'cache-control' in response.headers:
+                    del response.headers['cache-control']
+
         self.cache_adapter = cachecontrol.CacheControlAdapter(
             DictCache(),
-            cache_etags=True)
+            cache_etags=True,
+            heuristic=NoAgeHeuristic())
 
         # The regex is based on the connection host. We do not yet support
         # cross-connection dependency gathering
diff --git a/zuul/executor/server.py b/zuul/executor/server.py
index 7a93f89..5a710a6 100644
--- a/zuul/executor/server.py
+++ b/zuul/executor/server.py
@@ -931,6 +931,10 @@
             if username:
                 host_vars['ansible_user'] = username
 
+            connection_type = node.get('connection_type')
+            if connection_type:
+                host_vars['ansible_connection'] = connection_type
+
             host_keys = []
             for key in node.get('host_keys'):
                 if port != 22:
@@ -1706,6 +1710,7 @@
         self.merger_worker.registerFunction("merger:merge")
         self.merger_worker.registerFunction("merger:cat")
         self.merger_worker.registerFunction("merger:refstate")
+        self.merger_worker.registerFunction("merger:fileschanges")
 
     def register_work(self):
         if self._running:
@@ -1859,6 +1864,9 @@
             elif job.name == 'merger:refstate':
                 self.log.debug("Got refstate job: %s" % job.unique)
                 self.refstate(job)
+            elif job.name == 'merger:fileschanges':
+                self.log.debug("Got fileschanges job: %s" % job.unique)
+                self.fileschanges(job)
             else:
                 self.log.error("Unable to handle job %s" % job.name)
                 job.sendWorkFail()
@@ -1970,6 +1978,19 @@
                       files=files)
         job.sendWorkComplete(json.dumps(result))
 
+    def fileschanges(self, job):
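+        # Update the repo, then report back which files changed on the
+        # branch since 'tosha'.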
+        args = json.loads(job.arguments)
+        task = self.update(args['connection'], args['project'])
+        task.wait()
+        with self.merger_lock:
+            files = self.merger.getFilesChanges(
+                args['connection'], args['project'],
+                args['branch'],
+                args['tosha'])
+        result = dict(updated=True,
+                      files=files)
+        job.sendWorkComplete(json.dumps(result))
+
     def refstate(self, job):
         args = json.loads(job.arguments)
         with self.merger_lock:
diff --git a/zuul/lib/fingergw.py b/zuul/lib/fingergw.py
index c89ed0f..b56fe04 100644
--- a/zuul/lib/fingergw.py
+++ b/zuul/lib/fingergw.py
@@ -66,11 +66,19 @@
         try:
             build_uuid = self.getCommand()
             port_location = self.rpc.get_job_log_stream_address(build_uuid)
+
+            if not port_location:
+                msg = 'Invalid build UUID %s' % build_uuid
+                self.request.sendall(msg.encode('utf-8'))
+                return
+
             self._fingerClient(
                 port_location['server'],
                 port_location['port'],
                 build_uuid,
             )
+        except BrokenPipeError:   # Client disconnect
+            return
         except Exception:
             self.log.exception('Finger request handling exception:')
             msg = 'Internal streaming error'
diff --git a/zuul/lib/log_streamer.py b/zuul/lib/log_streamer.py
index 5c894b4..c778812 100644
--- a/zuul/lib/log_streamer.py
+++ b/zuul/lib/log_streamer.py
@@ -56,8 +56,6 @@
             self.request.sendall(msg.encode("utf-8"))
             return
 
-        build_uuid = build_uuid.rstrip()
-
         # validate build ID
         if not re.match("[0-9A-Fa-f]+$", build_uuid):
             msg = 'Build ID %s is not valid' % build_uuid
diff --git a/zuul/lib/streamer_utils.py b/zuul/lib/streamer_utils.py
index 985f3c3..43bc286 100644
--- a/zuul/lib/streamer_utils.py
+++ b/zuul/lib/streamer_utils.py
@@ -60,7 +60,8 @@
                 ret = buffer.decode('utf-8')
                 x = ret.find('\n')
                 if x > 0:
-                    return ret[:x]
+                    # rstrip to remove any other unnecessary chars (e.g. \r)
+                    return ret[:x].rstrip()
             except UnicodeDecodeError:
                 pass
 
diff --git a/zuul/merger/client.py b/zuul/merger/client.py
index 2614e58..c89a6fb 100644
--- a/zuul/merger/client.py
+++ b/zuul/merger/client.py
@@ -131,6 +131,15 @@
         job = self.submitJob('merger:cat', data, None, precedence)
         return job
 
+    def getFilesChanges(self, connection_name, project_name, branch,
+                        tosha=None, precedence=zuul.model.PRECEDENCE_HIGH):
+        data = dict(connection=connection_name,
+                    project=project_name,
+                    branch=branch,
+                    tosha=tosha)
+        job = self.submitJob('merger:fileschanges', data, None, precedence)
+        return job
+
     def onBuildCompleted(self, job):
         data = getJobData(job)
         merged = data.get('merged', False)
diff --git a/zuul/merger/merger.py b/zuul/merger/merger.py
index 06ec4b2..bd4ca58 100644
--- a/zuul/merger/merger.py
+++ b/zuul/merger/merger.py
@@ -314,6 +314,18 @@
                             'utf-8')
         return ret
 
+    def getFilesChanges(self, branch, tosha=None):
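+        # Collect the files touched by the branch head and by each parent
+        # commit until 'tosha' is reached (exclusive).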
+        repo = self.createRepoObject()
+        files = set()
+        head = repo.heads[branch].commit
+        files.update(set(head.stats.files.keys()))
+        if tosha:
+            for cmt in head.iter_parents():
+                if cmt.hexsha == tosha:
+                    break
+                files.update(set(cmt.stats.files.keys()))
+        return list(files)
+
     def deleteRemote(self, remote):
         repo = self.createRepoObject()
         repo.delete_remote(repo.remotes[remote])
@@ -581,3 +593,8 @@
     def getFiles(self, connection_name, project_name, branch, files, dirs=[]):
         repo = self.getRepo(connection_name, project_name)
         return repo.getFiles(files, dirs, branch=branch)
+
+    def getFilesChanges(self, connection_name, project_name, branch,
+                        tosha=None):
+        repo = self.getRepo(connection_name, project_name)
+        return repo.getFilesChanges(branch, tosha)
diff --git a/zuul/merger/server.py b/zuul/merger/server.py
index 576d41e..aa04fc2 100644
--- a/zuul/merger/server.py
+++ b/zuul/merger/server.py
@@ -81,6 +81,7 @@
         self.worker.registerFunction("merger:merge")
         self.worker.registerFunction("merger:cat")
         self.worker.registerFunction("merger:refstate")
+        self.worker.registerFunction("merger:fileschanges")
 
     def stop(self):
         self.log.debug("Stopping")
@@ -117,6 +118,9 @@
                     elif job.name == 'merger:refstate':
                         self.log.debug("Got refstate job: %s" % job.unique)
                         self.refstate(job)
+                    elif job.name == 'merger:fileschanges':
+                        self.log.debug("Got fileschanges job: %s" % job.unique)
+                        self.fileschanges(job)
                     else:
                         self.log.error("Unable to handle job %s" % job.name)
                         job.sendWorkFail()
@@ -158,3 +162,12 @@
         result = dict(updated=True,
                       files=files)
         job.sendWorkComplete(json.dumps(result))
+
+    def fileschanges(self, job):
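+        # Same operation as the executor handler: update the repo and
+        # return the files changed on the branch since 'tosha'.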
+        args = json.loads(job.arguments)
+        self.merger.updateRepo(args['connection'], args['project'])
+        files = self.merger.getFilesChanges(
+            args['connection'], args['project'], args['branch'], args['tosha'])
+        result = dict(updated=True,
+                      files=files)
+        job.sendWorkComplete(json.dumps(result))
diff --git a/zuul/model.py b/zuul/model.py
index e53a357..16a701d 100644
--- a/zuul/model.py
+++ b/zuul/model.py
@@ -384,6 +384,7 @@
         self.private_ipv4 = None
         self.public_ipv6 = None
         self.connection_port = 22
+        self.connection_type = None
         self._keys = []
         self.az = None
         self.provider = None
@@ -641,6 +642,7 @@
         self.path = path
         self.trusted = trusted
         self.implied_branch_matchers = None
+        self.implied_branches = None
 
     def __str__(self):
         return '%s/%s@%s' % (self.project, self.path, self.branch)
@@ -1336,6 +1338,7 @@
         self.unable_to_merge = False
         self.config_error = None  # None or an error message string.
         self.failing_reasons = []
+        self.debug_messages = []
         self.merge_state = self.NEW
         self.nodesets = {}  # job -> nodeset
         self.node_requests = {}  # job -> reqs
@@ -1383,6 +1386,8 @@
         build.build_set = self
 
     def removeBuild(self, build):
+        if build.job.name not in self.builds:
+            return
         self.tries[build.job.name] += 1
         del self.builds[build.job.name]
 
@@ -1500,6 +1505,17 @@
     def setReportedResult(self, result):
         self.current_build_set.result = result
 
+    def debug(self, msg, indent=0):
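+        # Record a message on the build set's debug log, but only when
+        # debug reporting is enabled for this project-pipeline.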
+        ppc = self.layout.getProjectPipelineConfig(self.change.project,
+                                                   self.pipeline)
+        if not ppc.debug:
+            return
+        if indent:
+            indent = '  ' * indent
+        else:
+            indent = ''
+        self.current_build_set.debug_messages.append(indent + msg)
+
     def freezeJobGraph(self):
         """Find or create actual matching jobs for this item's change and
         store the resulting job tree."""
@@ -2219,6 +2235,7 @@
     def __init__(self):
         self.job_list = JobList()
         self.queue_name = None
+        self.debug = False
         self.merge_mode = None
 
 
@@ -2241,7 +2258,7 @@
 
 
 class ProjectConfig(object):
-    # Represents a project cofiguration
+    # Represents a project configuration
     def __init__(self, name, source_context=None):
         self.name = name
         # If this is a template, it will have a source_context, but
@@ -2386,7 +2403,7 @@
         r.semaphores = copy.deepcopy(self.semaphores)
         return r
 
-    def extend(self, conf, tenant=None):
+    def extend(self, conf, tenant):
         if isinstance(conf, UnparsedTenantConfig):
             self.pragmas.extend(conf.pragmas)
             self.pipelines.extend(conf.pipelines)
@@ -2394,16 +2411,14 @@
             self.project_templates.extend(conf.project_templates)
             for k, v in conf.projects.items():
                 name = k
-                # If we have the tenant add the projects to
-                # the according canonical name instead of the given project
-                # name. If it is not found, it's ok to add this to the given
-                # name. We also don't need to throw the
+                # Add the project under its corresponding canonical name
+                # instead of the given project name. If it is not found,
+                # it's ok to add this under the given name. We also don't
+                # need to throw the
                 # ProjectNotFoundException here as semantic validation occurs
                 # later where it will fail then.
-                if tenant is not None:
-                    trusted, project = tenant.getProject(k)
-                    if project is not None:
-                        name = project.canonical_name
+                trusted, project = tenant.getProject(k)
+                if project is not None:
+                    name = project.canonical_name
                 self.projects.setdefault(name, []).extend(v)
             self.nodesets.extend(conf.nodesets)
             self.secrets.extend(conf.secrets)
@@ -2420,7 +2435,12 @@
                 raise ConfigItemMultipleKeysError()
             key, value = list(item.items())[0]
             if key == 'project':
-                name = value['name']
+                name = value.get('name')
+                if not name:
+                    # No name is defined, so implicitly use the name of
+                    # the project in which this stanza appears.
+                    name = value['_source_context'].project.canonical_name
+                    value['name'] = name
                 self.projects.setdefault(name, []).append(value)
             elif key == 'job':
                 self.jobs.append(value)
@@ -2544,7 +2564,8 @@
     def addProjectConfig(self, project_config):
         self.project_configs[project_config.name] = project_config
 
-    def collectJobs(self, jobname, change, path=None, jobs=None, stack=None):
+    def collectJobs(self, item, jobname, change, path=None, jobs=None,
+                    stack=None):
         if stack is None:
             stack = []
         if jobs is None:
@@ -2553,13 +2574,20 @@
             path = []
         path.append(jobname)
         matched = False
+        indent = len(path) + 1
+        item.debug("Collecting job variants for {jobname}".format(
+            jobname=jobname), indent=indent)
         for variant in self.getJobs(jobname):
             if not variant.changeMatches(change):
                 self.log.debug("Variant %s did not match %s", repr(variant),
                                change)
+                item.debug("Variant {variant} did not match".format(
+                    variant=repr(variant)), indent=indent)
                 continue
             else:
                 self.log.debug("Variant %s matched %s", repr(variant), change)
+                item.debug("Variant {variant} matched".format(
+                    variant=repr(variant)), indent=indent)
             if not variant.isBase():
                 parent = variant.parent
                 if not jobs and parent is None:
@@ -2569,30 +2597,38 @@
             if parent and parent not in path:
                 if parent in stack:
                     raise Exception("Dependency cycle in jobs: %s" % stack)
-                self.collectJobs(parent, change, path, jobs, stack + [jobname])
+                self.collectJobs(item, parent, change, path, jobs,
+                                 stack + [jobname])
             matched = True
             jobs.append(variant)
         if not matched:
+            self.log.debug("No matching parents for job %s and change %s",
+                           jobname, change)
+            item.debug("No matching parent for {jobname}".format(
+                jobname=repr(jobname)), indent=indent)
             raise NoMatchingParentError()
         return jobs
 
     def _createJobGraph(self, item, job_list, job_graph):
         change = item.change
         pipeline = item.pipeline
+        item.debug("Freezing job graph")
         for jobname in job_list.jobs:
             # This is the final job we are constructing
             frozen_job = None
             self.log.debug("Collecting jobs %s for %s", jobname, change)
+            item.debug("Freezing job {jobname}".format(
+                jobname=jobname), indent=1)
             try:
-                variants = self.collectJobs(jobname, change)
+                variants = self.collectJobs(item, jobname, change)
             except NoMatchingParentError:
-                self.log.debug("No matching parents for job %s and change %s",
-                               jobname, change)
                 variants = None
             if not variants:
                 # A change must match at least one defined job variant
                 # (that is to say that it must match more than just
                 # the job that is defined in the tree).
+                item.debug("No matching variants for {jobname}".format(
+                    jobname=jobname), indent=2)
                 continue
             for variant in variants:
                 if frozen_job is None:
@@ -2611,12 +2647,18 @@
                     matched = True
                     self.log.debug("Pipeline variant %s matched %s",
                                    repr(variant), change)
-            else:
-                self.log.debug("Pipeline variant %s did not match %s",
-                               repr(variant), change)
+                    item.debug("Pipeline variant {variant} matched".format(
+                        variant=repr(variant)), indent=2)
+                else:
+                    self.log.debug("Pipeline variant %s did not match %s",
+                                   repr(variant), change)
+                    item.debug("Pipeline variant {variant} did not match".
+                               format(variant=repr(variant)), indent=2)
             if not matched:
                 # A change must match at least one project pipeline
                 # job variant.
+                item.debug("No matching pipeline variants for {jobname}".
+                           format(jobname=jobname), indent=2)
                 continue
             if (frozen_job.allowed_projects and
                 change.project.name not in frozen_job.allowed_projects):
diff --git a/zuul/reporter/__init__.py b/zuul/reporter/__init__.py
index 49181a7..ecf8855 100644
--- a/zuul/reporter/__init__.py
+++ b/zuul/reporter/__init__.py
@@ -64,6 +64,10 @@
         a reporter taking free-form text."""
         ret = self._getFormatter()(item, with_jobs)
 
+        if item.current_build_set.debug_messages:
+            debug = '\n  '.join(item.current_build_set.debug_messages)
+            ret += '\nDebug information:\n  ' + debug + '\n'
+
         if item.pipeline.footer_message:
             ret += '\n' + item.pipeline.footer_message
 
diff --git a/zuul/scheduler.py b/zuul/scheduler.py
index b978979..c3f2f23 100644
--- a/zuul/scheduler.py
+++ b/zuul/scheduler.py
@@ -823,8 +823,7 @@
         if self.statsd:
             self.log.debug("Statsd enabled")
         else:
-            self.log.debug("Statsd disabled because python statsd "
-                           "package not found")
+            self.log.debug("Statsd not configured")
         while True:
             self.log.debug("Run handler sleeping")
             self.wake_event.wait()
diff --git a/zuul/web/__init__.py b/zuul/web/__init__.py
index cefc922..a98a6c8 100755
--- a/zuul/web/__init__.py
+++ b/zuul/web/__init__.py
@@ -305,6 +305,7 @@
         self.listen_port = listen_port
         self.event_loop = None
         self.term = None
+        self.server = None
         self.static_cache_expiry = static_cache_expiry
         # instanciate handlers
         self.rpc = zuul.rpcclient.RPCClient(gear_server, gear_port,