Merge "Permit config shadowing" into feature/zuulv3
diff --git a/doc/source/admin/components.rst b/doc/source/admin/components.rst
index c77c0fc..a24b833 100644
--- a/doc/source/admin/components.rst
+++ b/doc/source/admin/components.rst
@@ -51,6 +51,17 @@
 **ssl_key**
   Optional: An openssl file containing the client private key in PEM format.
 
+zookeeper
+"""""""""
+
+.. NOTE: this is a white lie at this point, since only the scheduler
+   uses this; however, we expect other components to use it later, so
+   it's reasonable for admins to plan for this now.
+
+**hosts**
+  A list of zookeeper hosts for Zuul to use when communicating with
+  Nodepool.  ``hosts=zk1.example.com,zk2.example.com,zk3.example.com``
+
 
 Scheduler
 ---------
@@ -111,9 +122,13 @@
   optional value and ``1`` is used by default.
   ``status_expiry=1``
 
+**status_url**
+  URL that will be posted in Zuul comments on changes when starting
+  jobs.  Used by zuul-scheduler only.
+  ``status_url=https://zuul.example.com/status``
+
 scheduler
 """""""""
-.. TODO: rename this to 'scheduler' (currently 'zuul') and update to match these docs
 
 **tenant_config**
   Path to tenant config file.
diff --git a/doc/source/admin/drivers/gerrit.rst b/doc/source/admin/drivers/gerrit.rst
index 6cd2f3d..29e136b 100644
--- a/doc/source/admin/drivers/gerrit.rst
+++ b/doc/source/admin/drivers/gerrit.rst
@@ -35,8 +35,12 @@
 **canonical_hostname**
   The canonical hostname associated with the git repos on the Gerrit
   server.  Defaults to the value of **server**.  This is used to
-  identify repos from this connection by name and in preparing repos
-  on the filesystem for use by jobs.
+  identify projects from this connection by name and in preparing
+  repos on the filesystem for use by jobs.  Note that Zuul will still
+  only communicate with the Gerrit server identified by **server**;
+  this option is useful if users customarily use a different hostname
+  to clone or pull git repos so that when Zuul places them in the
+  job's working directory, they appear under this directory name.
   ``canonical_hostname=git.example.com``
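+
+  For example, a fragment of a connection where users clone from a
+  different hostname than the one Zuul uses to reach Gerrit might
+  look like this (the hostnames and connection name are illustrative,
+  and other connection options are omitted)::
+
+    [connection my_gerrit]
+    driver=gerrit
+    server=review.example.com
+    canonical_hostname=git.example.com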
 
 **port**
@@ -73,82 +77,82 @@
 The supported pipeline trigger options are:
 
 **event**
-The event name from gerrit.  Examples: ``patchset-created``,
-``comment-added``, ``ref-updated``.  This field is treated as a
-regular expression.
+  The event name from gerrit.  Examples: ``patchset-created``,
+  ``comment-added``, ``ref-updated``.  This field is treated as a
+  regular expression.
 
 **branch**
-The branch associated with the event.  Example: ``master``.  This
-field is treated as a regular expression, and multiple branches may
-be listed.
+  The branch associated with the event.  Example: ``master``.  This
+  field is treated as a regular expression, and multiple branches may
+  be listed.
 
 **ref**
-On ref-updated events, the branch parameter is not used, instead the
-ref is provided.  Currently Gerrit has the somewhat idiosyncratic
-behavior of specifying bare refs for branch names (e.g., ``master``),
-but full ref names for other kinds of refs (e.g., ``refs/tags/foo``).
-Zuul matches what you put here exactly against what Gerrit
-provides.  This field is treated as a regular expression, and
-multiple refs may be listed.
+  On ref-updated events, the branch parameter is not used; instead the
+  ref is provided.  Currently Gerrit has the somewhat idiosyncratic
+  behavior of specifying bare refs for branch names (e.g.,
+  ``master``), but full ref names for other kinds of refs (e.g.,
+  ``refs/tags/foo``).  Zuul matches what you put here exactly against
+  what Gerrit provides.  This field is treated as a regular
+  expression, and multiple refs may be listed.
 
 **ignore-deletes**
-When a branch is deleted, a ref-updated event is emitted with a newrev
-of all zeros specified. The ``ignore-deletes`` field is a boolean value
-that describes whether or not these newrevs trigger ref-updated events.
-The default is True, which will not trigger ref-updated events.
+  When a branch is deleted, a ref-updated event is emitted with a
+  newrev of all zeros specified. The ``ignore-deletes`` field is a
+  boolean value that describes whether or not these newrevs trigger
+  ref-updated events.  The default is True, which will not trigger
+  ref-updated events.
 
 **approval**
-This is only used for ``comment-added`` events.  It only matches if
-the event has a matching approval associated with it.  Example:
-``code-review: 2`` matches a ``+2`` vote on the code review category.
-Multiple approvals may be listed.
+  This is only used for ``comment-added`` events.  It only matches if
+  the event has a matching approval associated with it.  Example:
+  ``code-review: 2`` matches a ``+2`` vote on the code review
+  category.  Multiple approvals may be listed.
 
 **email**
-This is used for any event.  It takes a regex applied on the performer
-email, i.e. Gerrit account email address.  If you want to specify
-several email filters, you must use a YAML list.  Make sure to use non
-greedy matchers and to escapes dots!
-Example: ``email: ^.*?@example\.org$``.
+  This is used for any event.  It takes a regex applied to the
+  performer's email, i.e. the Gerrit account email address.  If you
+  want to specify several email filters, you must use a YAML list.
+  Make sure to use non-greedy matchers and to escape dots!  Example:
+  ``email: ^.*?@example\.org$``.
 
 **email_filter** (deprecated)
-A deprecated alternate spelling of *email*.  Only one of *email* or
-*email_filter* should be used.
+  A deprecated alternate spelling of *email*.  Only one of *email* or
+  *email_filter* should be used.
 
 **username**
-This is used for any event.  It takes a regex applied on the performer
-username, i.e. Gerrit account name.  If you want to specify several
-username filters, you must use a YAML list.  Make sure to use non greedy
-matchers and to escapes dots!
-Example: ``username: ^jenkins$``.
+  This is used for any event.  It takes a regex applied to the
+  performer's username, i.e. the Gerrit account name.  If you want to
+  specify several username filters, you must use a YAML list.  Make
+  sure to use non-greedy matchers and to escape dots!  Example:
+  ``username: ^jenkins$``.
 
 **username_filter** (deprecated)
-A deprecated alternate spelling of *username*.  Only one of *username* or
-*username_filter* should be used.
+  A deprecated alternate spelling of *username*.  Only one of
+  *username* or *username_filter* should be used.
 
 **comment**
-This is only used for ``comment-added`` events.  It accepts a list of
-regexes that are searched for in the comment string. If any of these
-regexes matches a portion of the comment string the trigger is
-matched. ``comment: retrigger`` will match when comments
-containing 'retrigger' somewhere in the comment text are added to a
-change.
+  This is only used for ``comment-added`` events.  It accepts a list
+  of regexes that are searched for in the comment string. If any of
+  these regexes matches a portion of the comment string, the trigger is
+  matched. ``comment: retrigger`` will match when comments containing
+  'retrigger' somewhere in the comment text are added to a change.
 
 **comment_filter** (deprecated)
-A deprecated alternate spelling of *comment*.  Only one of *comment* or
-*comment_filter* should be used.
+  A deprecated alternate spelling of *comment*.  Only one of *comment*
+  or *comment_filter* should be used.
 
-*require-approval*
-This may be used for any event.  It requires that a certain kind
-of approval be present for the current patchset of the change (the
-approval could be added by the event in question).  It follows the
-same syntax as the :ref:`"approval" pipeline requirement
-<pipeline-require-approval>`. For each specified criteria there must
-exist a matching approval.
+**require-approval**
+  This may be used for any event.  It requires that a certain kind of
+  approval be present for the current patchset of the change (the
+  approval could be added by the event in question).  It follows the
+  same syntax as the :ref:`"approval" pipeline requirement
+  <pipeline-require-approval>`. For each specified criterion there must
+  exist a matching approval.
 
-*reject-approval*
-This takes a list of approvals in the same format as
-*require-approval* but will fail to enter the pipeline if there is
-a matching approval.
+**reject-approval**
+  This takes a list of approvals in the same format as
+  *require-approval* but will fail to enter the pipeline if there is a
+  matching approval.
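+
+Putting several of these options together, a pipeline trigger that
+fires on new patchsets or on "recheck" comments might look like the
+following sketch (the connection name ``my_gerrit`` is illustrative)::
+
+  - pipeline:
+      name: check
+      manager: independent
+      trigger:
+        my_gerrit:
+          - event: patchset-created
+          - event: comment-added
+            comment: '(?i)^\s*recheck\s*$'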
 
 Reporter Configuration
 ----------------------
diff --git a/doc/source/admin/drivers/github.rst b/doc/source/admin/drivers/github.rst
index cc2a5c9..9740292 100644
--- a/doc/source/admin/drivers/github.rst
+++ b/doc/source/admin/drivers/github.rst
@@ -41,10 +41,21 @@
   Path to SSH key to use when cloning github repositories.
   ``sshkey=/home/zuul/.ssh/id_rsa``
 
-**git_host**
+**server**
   Optional: Hostname of the github install (such as a GitHub Enterprise)
   If not specified, defaults to ``github.com``
-  ``git_host=github.myenterprise.com``
+  ``server=github.myenterprise.com``
+
+**canonical_hostname**
+  The canonical hostname associated with the git repos on the GitHub
+  server.  Defaults to the value of **server**.  This is used to
+  identify projects from this connection by name and in preparing
+  repos on the filesystem for use by jobs.  Note that Zuul will still
+  only communicate with the GitHub server identified by **server**;
+  this option is useful if users customarily use a different hostname
+  to clone or pull git repos so that when Zuul places them in the
+  job's working directory, they appear under this directory name.
+  ``canonical_hostname=git.example.com``
 
 Trigger Configuration
 ---------------------
@@ -54,71 +65,75 @@
 following options.
 
 **event**
-The event from github. Supported events are ``pull_request``,
-``pull_request_review``,  and ``push``.
+  The event from github. Supported events are ``pull_request``,
+  ``pull_request_review``, and ``push``.
 
-A ``pull_request`` event will
-have associated action(s) to trigger from. The supported actions are:
+  A ``pull_request`` event will have associated action(s) to trigger
+  from. The supported actions are:
 
-  *opened* - pull request opened
+    *opened* - pull request opened
 
-  *changed* - pull request synchronized
+    *changed* - pull request synchronized
 
-  *closed* - pull request closed
+    *closed* - pull request closed
 
-  *reopened* - pull request reopened
+    *reopened* - pull request reopened
 
-  *comment* - comment added on pull request
+    *comment* - comment added on pull request
 
-  *labeled* - label added on pull request
+    *labeled* - label added on pull request
 
-  *unlabeled* - label removed from pull request
+    *unlabeled* - label removed from pull request
 
-  *status* - status set on commit
+    *status* - status set on commit
 
-A ``pull_request_review`` event will
-have associated action(s) to trigger from. The supported actions are:
+  A ``pull_request_review`` event will have associated action(s) to
+  trigger from. The supported actions are:
 
-  *submitted* - pull request review added
+    *submitted* - pull request review added
 
-  *dismissed* - pull request review removed
+    *dismissed* - pull request review removed
 
 **branch**
-The branch associated with the event. Example: ``master``.  This
-field is treated as a regular expression, and multiple branches may
-be listed. Used for ``pull_request`` and ``pull_request_review`` events.
+  The branch associated with the event. Example: ``master``.  This
+  field is treated as a regular expression, and multiple branches may
+  be listed. Used for ``pull_request`` and ``pull_request_review``
+  events.
 
 **comment**
-This is only used for ``pull_request`` ``comment`` actions.  It accepts a
-list of regexes that are searched for in the comment string. If any of these
-regexes matches a portion of the comment string the trigger is matched.
-``comment: retrigger`` will match when comments containing 'retrigger'
-somewhere in the comment text are added to a pull request.
+  This is only used for ``pull_request`` ``comment`` actions.  It
+  accepts a list of regexes that are searched for in the comment
+  string. If any of these regexes matches a portion of the comment
+  string, the trigger is matched.  ``comment: retrigger`` will match
+  when comments containing 'retrigger' somewhere in the comment text
+  are added to a pull request.
 
 **label**
-This is only used for ``labeled`` and ``unlabeled`` ``pull_request`` actions.
-It accepts a list of strings each of which matches the label name in the
-event literally.  ``label: recheck`` will match a ``labeled`` action when
-pull request is labeled with a ``recheck`` label. ``label: 'do not test'``
-will match a ``unlabeled`` action when a label with name ``do not test`` is
-removed from the pull request.
+  This is only used for ``labeled`` and ``unlabeled`` ``pull_request``
+  actions.  It accepts a list of strings, each of which matches the
+  label name in the event literally.  ``label: recheck`` will match a
+  ``labeled`` action when a pull request is labeled with a ``recheck``
+  label. ``label: 'do not test'`` will match an ``unlabeled`` action
+  when a label named ``do not test`` is removed from the pull
+  request.
 
 **state**
-This is only used for ``pull_request_review`` events.  It accepts a list of
-strings each of which is matched to the review state, which can be one of
-``approved``, ``comment``, or ``request_changes``.
+  This is only used for ``pull_request_review`` events.  It accepts a
+  list of strings, each of which is matched to the review state, which
+  can be one of ``approved``, ``comment``, or ``request_changes``.
 
 **status**
-This is used for ``pull-request`` and ``status`` actions. It accepts a
-list of strings each of which matches the user setting the status, the
-status context, and the status itself in the format of
-``user:context:status``.  For example,
-``zuul_github_ci_bot:check_pipeline:success``.
+  This is used for ``pull-request`` and ``status`` actions. It accepts
+  a list of strings each of which matches the user setting the status,
+  the status context, and the status itself in the format of
+  ``user:context:status``.  For example,
+  ``zuul_github_ci_bot:check_pipeline:success``.
 
 **ref**
-This is only used for ``push`` events. This field is treated as a regular
-expression and multiple refs may be listed. GitHub always sends full ref
-name, eg. ``refs/tags/bar`` and this string is matched against the regexp.
+  This is only used for ``push`` events. This field is treated as a
+  regular expression and multiple refs may be listed. GitHub always
+  sends the full ref name, e.g. ``refs/tags/bar``, and this string is
+  matched against the regexp.
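+
+As a sketch, a pipeline that runs when a pull request is opened or
+receives a "recheck" comment might use a trigger like the following
+(``my_github`` is an illustrative connection name, and the ``action``
+key selects among the actions listed above)::
+
+  - pipeline:
+      name: check
+      manager: independent
+      trigger:
+        my_github:
+          - event: pull_request
+            action: opened
+          - event: pull_request
+            action: comment
+            comment: '(?i)^\s*recheck\s*$'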
 
 Reporter Configuration
 ----------------------
@@ -131,32 +146,31 @@
 supplied to the reporter. It has the following options:
 
 **status**
-String value (``pending``, ``success``, ``failure``) that the reporter should
-set as the commit status on github.
-``status: 'success'``
+  String value (``pending``, ``success``, ``failure``) that the
+  reporter should set as the commit status on github.
+  ``status: 'success'``
 
 **status-url**
-String value for a link url to set in the github status. Defaults to the zuul
-server status_url, or the empty string if that is unset.
+  String value for a link url to set in the github status. Defaults to
+  the zuul server status_url, or the empty string if that is unset.
 
 **comment**
-Boolean value (``true`` or ``false``) that determines if the reporter should
-add a comment to the pipeline status to the github pull request. Defaults
-to ``true``. Only used for Pull Request based events.
-``comment: false``
+  Boolean value (``true`` or ``false``) that determines if the
+  reporter should add a comment with the pipeline status to the github
+  pull request. Defaults to ``true``. Only used for Pull Request based
+  events.  ``comment: false``
 
 **merge**
-Boolean value (``true`` or ``false``) that determines if the reporter should
-merge the pull reqeust. Defaults to ``false``. Only used for Pull Request based
-events.
-``merge=true``
+  Boolean value (``true`` or ``false``) that determines if the
+  reporter should merge the pull request. Defaults to ``false``. Only
+  used for Pull Request based events.  ``merge=true``
 
 **label**
-List of strings each representing an exact label name which should be added
-to the pull request by reporter. Only used for Pull Request based events.
-``label: 'test successful'``
+  List of strings each representing an exact label name which should
+  be added to the pull request by the reporter. Only used for Pull
+  Request based events.  ``label: 'test successful'``
 
 **unlabel**
-List of strings each representing an exact label name which should be removed
-from the pull request by reporter. Only used for Pull Request based events.
-``unlabel: 'test failed'``
+  List of strings each representing an exact label name which should
+  be removed from the pull request by the reporter. Only used for
+  Pull Request based events.  ``unlabel: 'test failed'``
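+
+A reporter sketch using these options (``my_github`` is an
+illustrative connection name; the trigger configuration is omitted
+for brevity)::
+
+  - pipeline:
+      name: check
+      manager: independent
+      success:
+        my_github:
+          status: 'success'
+          comment: true
+      failure:
+        my_github:
+          status: 'failure'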
diff --git a/doc/source/admin/drivers/timer.rst b/doc/source/admin/drivers/timer.rst
index 574ee1e..c70df5c 100644
--- a/doc/source/admin/drivers/timer.rst
+++ b/doc/source/admin/drivers/timer.rst
@@ -19,6 +19,6 @@
 pipeline will run in response to that event.
 
 **time**
-The time specification in cron syntax.  Only the 5 part syntax is
-supported, not the symbolic names.  Example: ``0 0 * * *`` runs
-at midnight.
+  The time specification in cron syntax.  Only the 5 part syntax is
+  supported, not the symbolic names.  Example: ``0 0 * * *`` runs at
+  midnight.
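+
+A sketch of a pipeline using this trigger to run nightly (the
+pipeline name is illustrative)::
+
+  - pipeline:
+      name: periodic
+      manager: independent
+      trigger:
+        timer:
+          - time: '0 0 * * *'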
diff --git a/doc/source/admin/drivers/zuul.rst b/doc/source/admin/drivers/zuul.rst
index a875457..a23c875 100644
--- a/doc/source/admin/drivers/zuul.rst
+++ b/doc/source/admin/drivers/zuul.rst
@@ -13,28 +13,28 @@
 can simply be used by listing **zuul** as the trigger.
 
 **event**
-The event name.  Currently supported:
+  The event name.  Currently supported:
 
-  *project-change-merged* when Zuul merges a change to a project,
-  it generates this event for every open change in the project.
+  *project-change-merged* when Zuul merges a change to a project, it
+  generates this event for every open change in the project.
 
   *parent-change-enqueued* when Zuul enqueues a change into any
   pipeline, it generates this event for every child of that
   change.
 
 **pipeline**
-Only available for ``parent-change-enqueued`` events.  This is the
-name of the pipeline in which the parent change was enqueued.
+  Only available for ``parent-change-enqueued`` events.  This is the
+  name of the pipeline in which the parent change was enqueued.
 
-*require-approval*
-This may be used for any event.  It requires that a certain kind
-of approval be present for the current patchset of the change (the
-approval could be added by the event in question).  It follows the
-same syntax as the :ref:`"approval" pipeline requirement
-<pipeline-require-approval>`. For each specified criteria there must
-exist a matching approval.
+**require-approval**
+  This may be used for any event.  It requires that a certain kind of
+  approval be present for the current patchset of the change (the
+  approval could be added by the event in question).  It follows the
+  same syntax as the :ref:`"approval" pipeline requirement
+  <pipeline-require-approval>`. For each specified criterion there must
+  exist a matching approval.
 
-*reject-approval*
-This takes a list of approvals in the same format as
-*require-approval* but will fail to enter the pipeline if there is
-a matching approval.
+**reject-approval**
+  This takes a list of approvals in the same format as
+  *require-approval* but will fail to enter the pipeline if there is a
+  matching approval.
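+
+A sketch of a pipeline that is triggered for child changes when their
+parent is enqueued into the gate pipeline (pipeline names are
+illustrative)::
+
+  - pipeline:
+      name: check
+      manager: independent
+      trigger:
+        zuul:
+          - event: parent-change-enqueued
+            pipeline: gate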
diff --git a/doc/source/user/config.rst b/doc/source/user/config.rst
index e7226e9..0b2b5d4 100644
--- a/doc/source/user/config.rst
+++ b/doc/source/user/config.rst
@@ -99,7 +99,7 @@
 +1, or if at least one of them fails, a -1::
 
   - pipeline:
-    name: check
+      name: check
       manager: independent
       trigger:
         my_gerrit:
@@ -164,6 +164,17 @@
     For more detail on the theory and operation of Zuul's dependent
     pipeline manager, see: :doc:`gating`.
 
+**allow-secrets**
+  This is a boolean which can be used to prevent jobs which require
+  secrets from running in this pipeline.  Some pipelines run on
+  proposed changes and therefore execute code which has not yet been
+  reviewed.  In such a case, allowing a job to use a secret could
+  result in that secret being exposed.  The default is False, meaning
+  that in order to run jobs with secrets, this must be explicitly
+  enabled on each Pipeline where that is safe.
+
+  For more information, see :ref:`secret`.
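+
+  For example, a sketch of a post-merge pipeline, which only runs
+  code that has already been reviewed and merged and can therefore
+  safely use secrets (the pipeline name is illustrative)::
+
+    - pipeline:
+        name: post
+        manager: independent
+        allow-secrets: true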
+
 **description**
   This field may be used to provide a textual description of the
   pipeline.  It may appear in the status page or in documentation.
diff --git a/doc/source/user/jobs.rst b/doc/source/user/jobs.rst
index ad26327..5637552 100644
--- a/doc/source/user/jobs.rst
+++ b/doc/source/user/jobs.rst
@@ -48,6 +48,19 @@
 Do not use any git remotes; the local repositories are guaranteed to
 be up to date.
 
+The repositories will be placed on the filesystem in directories
+corresponding to the canonical hostname of their source connection.
+For example::
+
+  work/src/git.example.com/project1
+  work/src/github.com/project2
+
+This is the layout that would be present for a job which includes
+project1 from the connection associated with git.example.com and
+project2 from GitHub.  This helps avoid collisions between projects
+with the same name, and some language environments, such as Go,
+expect repositories in this format.
+
 Note that these git repositories are located on the executor; in order
 to be useful to most kinds of jobs, they will need to be present on
 the test nodes.  The ``base`` job in the standard library contains a
diff --git a/etc/zuul.conf-sample b/etc/zuul.conf-sample
index 2909ea6..9b8406c 100644
--- a/etc/zuul.conf-sample
+++ b/etc/zuul.conf-sample
@@ -4,6 +4,9 @@
 ;ssl_cert=/path/to/client.pem
 ;ssl_key=/path/to/client.key
 
+[zookeeper]
+hosts=127.0.0.1:2181
+
 [gearman_server]
 start=true
 ;ssl_ca=/path/to/ca.pem
@@ -11,12 +14,13 @@
 ;ssl_key=/path/to/server.key
 
 [zuul]
-layout_config=/etc/zuul/layout.yaml
+status_url=https://zuul.example.com/status
+
+[scheduler]
+tenant_config=/etc/zuul/main.yaml
 log_config=/etc/zuul/logging.conf
 pidfile=/var/run/zuul/zuul.pid
 state_dir=/var/lib/zuul
-status_url=https://jenkins.example.com/zuul/status
-zookeeper_hosts=127.0.0.1:2181
 
 [merger]
 git_dir=/var/lib/zuul/git
@@ -29,6 +33,10 @@
 trusted_ro_dirs=/opt/zuul-scripts:/var/cache
 trusted_rw_dirs=/opt/zuul-logs
 
+[web]
+listen_address=127.0.0.1
+port=9000
+
 [webapp]
 listen_address=0.0.0.0
 port=8001
diff --git a/requirements.txt b/requirements.txt
index 5caa1b5..69509d0 100644
--- a/requirements.txt
+++ b/requirements.txt
@@ -24,3 +24,5 @@
 cachecontrol
 pyjwt
 iso8601
+aiohttp
+uvloop;python_version>='3.5'
diff --git a/setup.cfg b/setup.cfg
index 0d22cb1..ce7a40e 100644
--- a/setup.cfg
+++ b/setup.cfg
@@ -26,6 +26,7 @@
     zuul-cloner = zuul.cmd.cloner:main
     zuul-executor = zuul.cmd.executor:main
     zuul-bwrap = zuul.driver.bubblewrap:main
+    zuul-web = zuul.cmd.web:main
 
 [build_sphinx]
 source-dir = doc/source
diff --git a/tests/base.py b/tests/base.py
index 696156f..2e4e9a5 100755
--- a/tests/base.py
+++ b/tests/base.py
@@ -554,7 +554,7 @@
 
     def __init__(self, github, number, project, branch,
                  subject, upstream_root, files=[], number_of_commits=1,
-                 writers=[], body=''):
+                 writers=[], body=None):
         """Creates a new PR with several commits.
         Sends an event about opened PR."""
         self.github = github
@@ -880,7 +880,7 @@
         self.reports = []
 
     def openFakePullRequest(self, project, branch, subject, files=[],
-                            body=''):
+                            body=None):
         self.pr_number += 1
         pull_request = FakeGithubPullRequest(
             self, self.pr_number, project, branch, subject, self.upstream_root,
@@ -922,7 +922,7 @@
             'http://localhost:%s/connection/%s/payload'
             % (port, self.connection_name),
             data=payload, headers=headers)
-        urllib.request.urlopen(req)
+        return urllib.request.urlopen(req)
 
     def getPull(self, project, number):
         pr = self.pull_requests[number - 1]
@@ -1048,10 +1048,14 @@
     def _getNeededByFromPR(self, change):
         prs = []
         pattern = re.compile(r"Depends-On.*https://%s/%s/pull/%s" %
-                             (self.git_host, change.project.name,
+                             (self.server, change.project.name,
                               change.number))
         for pr in self.pull_requests:
-            if pattern.search(pr.body):
+            if not pr.body:
+                body = ''
+            else:
+                body = pr.body
+            if pattern.search(body):
                 # Get our version of a pull so that it's a dict
                 pull = self.getPull(pr.project, pr.number)
                 prs.append(pull)
@@ -1262,7 +1266,6 @@
         self.build_history = []
         self.fail_tests = {}
         self.job_builds = {}
-        self.hostname = 'zl.example.com'
 
     def failJob(self, name, change):
         """Instruct the executor to report matching builds as failures.
@@ -1882,12 +1885,16 @@
         self.merger_src_root = os.path.join(self.test_root, "merger-git")
         self.executor_src_root = os.path.join(self.test_root, "executor-git")
         self.state_root = os.path.join(self.test_root, "lib")
+        self.merger_state_root = os.path.join(self.test_root, "merger-lib")
+        self.executor_state_root = os.path.join(self.test_root, "executor-lib")
 
         if os.path.exists(self.test_root):
             shutil.rmtree(self.test_root)
         os.makedirs(self.test_root)
         os.makedirs(self.upstream_root)
         os.makedirs(self.state_root)
+        os.makedirs(self.merger_state_root)
+        os.makedirs(self.executor_state_root)
 
         # Make per test copy of Configuration.
         self.setup_config()
@@ -1902,10 +1909,12 @@
                         os.path.join(
                             FIXTURE_DIR,
                             self.config.get('scheduler', 'tenant_config')))
-        self.config.set('zuul', 'state_dir', self.state_root)
+        self.config.set('scheduler', 'state_dir', self.state_root)
         self.config.set('merger', 'git_dir', self.merger_src_root)
+        self.config.set('merger', 'state_dir', self.merger_state_root)
         self.config.set('executor', 'git_dir', self.executor_src_root)
         self.config.set('executor', 'private_key_file', self.private_key_file)
+        self.config.set('executor', 'state_dir', self.executor_state_root)
 
         self.statsd = FakeStatsd()
         # note, use 127.0.0.1 rather than localhost to avoid getting ipv6
diff --git a/tests/fixtures/config/ansible/git/common-config/playbooks/post-broken.yaml b/tests/fixtures/config/ansible/git/common-config/playbooks/post-broken.yaml
new file mode 100644
index 0000000..cf61187
--- /dev/null
+++ b/tests/fixtures/config/ansible/git/common-config/playbooks/post-broken.yaml
@@ -0,0 +1,5 @@
+- hosts: all
+  tasks:
+    - shell: |+
+        echo "I am broken"
+        exit 1
diff --git a/tests/fixtures/config/ansible/git/common-config/zuul.yaml b/tests/fixtures/config/ansible/git/common-config/zuul.yaml
index fd3fc6d..aa57d08 100644
--- a/tests/fixtures/config/ansible/git/common-config/zuul.yaml
+++ b/tests/fixtures/config/ansible/git/common-config/zuul.yaml
@@ -48,8 +48,13 @@
         Z3QSO1NjbBxWnaHKZYT7nkrJm8AMCgZU0ZArFLpaufKCeiK5ECSsDxic4FIsY1OkWT42qEUfL0Wd
         +150AKGNZpPJnnP3QYY4W/MWcKH/zdO400+zWN52WevbSqZy90tqKDJrBkMl1ydqbuw1E4ZHvIs=
 
+- job:
+    name: base-urls
+    success-url: https://success.example.com/zuul-logs/{build.uuid}/
+    failure-url: https://failure.example.com/zuul-logs/{build.uuid}/
 
 - job:
+    parent: base-urls
     name: python27
     pre-run: playbooks/pre
     post-run: playbooks/post
@@ -74,5 +79,11 @@
         label: ubuntu-xenial
 
 - job:
+    parent: base-urls
     name: hello
     post-run: playbooks/hello-post
+
+- job:
+    parent: python27
+    name: failpost
+    post-run: playbooks/post-broken
diff --git a/tests/fixtures/config/ansible/git/org_project/.zuul.yaml b/tests/fixtures/config/ansible/git/org_project/.zuul.yaml
index ca734c5..e87d988 100644
--- a/tests/fixtures/config/ansible/git/org_project/.zuul.yaml
+++ b/tests/fixtures/config/ansible/git/org_project/.zuul.yaml
@@ -15,3 +15,4 @@
         - check-vars
         - timeout
         - hello-world
+        - failpost
diff --git a/tests/fixtures/config/split-config/git/common-config/playbooks/project-test1.yaml b/tests/fixtures/config/split-config/git/common-config/playbooks/project-test1.yaml
new file mode 100644
index 0000000..f679dce
--- /dev/null
+++ b/tests/fixtures/config/split-config/git/common-config/playbooks/project-test1.yaml
@@ -0,0 +1,2 @@
+- hosts: all
+  tasks: []
diff --git a/tests/fixtures/config/split-config/git/common-config/zuul.d/jobs.yaml b/tests/fixtures/config/split-config/git/common-config/zuul.d/jobs.yaml
new file mode 100644
index 0000000..280342c
--- /dev/null
+++ b/tests/fixtures/config/split-config/git/common-config/zuul.d/jobs.yaml
@@ -0,0 +1,2 @@
+- job:
+    name: project-test1
diff --git a/tests/fixtures/config/split-config/git/common-config/zuul.d/org-project.yaml b/tests/fixtures/config/split-config/git/common-config/zuul.d/org-project.yaml
new file mode 100644
index 0000000..872e126
--- /dev/null
+++ b/tests/fixtures/config/split-config/git/common-config/zuul.d/org-project.yaml
@@ -0,0 +1,5 @@
+- project:
+    name: org/project
+    check:
+      jobs:
+        - project-test1
diff --git a/tests/fixtures/config/split-config/git/common-config/zuul.d/pipelines.yaml b/tests/fixtures/config/split-config/git/common-config/zuul.d/pipelines.yaml
new file mode 100644
index 0000000..ba91fb5
--- /dev/null
+++ b/tests/fixtures/config/split-config/git/common-config/zuul.d/pipelines.yaml
@@ -0,0 +1,12 @@
+- pipeline:
+    name: check
+    manager: independent
+    trigger:
+      gerrit:
+        - event: patchset-created
+    success:
+      gerrit:
+        verified: 1
+    failure:
+      gerrit:
+        verified: -1
diff --git a/tests/fixtures/config/split-config/git/org_project/README b/tests/fixtures/config/split-config/git/org_project/README
new file mode 100644
index 0000000..9daeafb
--- /dev/null
+++ b/tests/fixtures/config/split-config/git/org_project/README
@@ -0,0 +1 @@
+test
diff --git a/tests/fixtures/config/split-config/git/org_project1/.zuul.d/gate.yaml b/tests/fixtures/config/split-config/git/org_project1/.zuul.d/gate.yaml
new file mode 100644
index 0000000..4bc0d81
--- /dev/null
+++ b/tests/fixtures/config/split-config/git/org_project1/.zuul.d/gate.yaml
@@ -0,0 +1,7 @@
+- project:
+    name: org/project1
+    check:
+      jobs:
+        - project-test1
+        - project1-project2-integration:
+            dependencies: project-test1
diff --git a/tests/fixtures/config/split-config/git/org_project1/.zuul.d/jobs.yaml b/tests/fixtures/config/split-config/git/org_project1/.zuul.d/jobs.yaml
new file mode 100644
index 0000000..33d74f3
--- /dev/null
+++ b/tests/fixtures/config/split-config/git/org_project1/.zuul.d/jobs.yaml
@@ -0,0 +1,2 @@
+- job:
+    name: project1-project2-integration
diff --git a/tests/fixtures/config/split-config/git/org_project1/README b/tests/fixtures/config/split-config/git/org_project1/README
new file mode 100644
index 0000000..9daeafb
--- /dev/null
+++ b/tests/fixtures/config/split-config/git/org_project1/README
@@ -0,0 +1 @@
+test
diff --git a/tests/fixtures/config/split-config/git/org_project1/playbooks/project1-project2-integration.yaml b/tests/fixtures/config/split-config/git/org_project1/playbooks/project1-project2-integration.yaml
new file mode 100644
index 0000000..f679dce
--- /dev/null
+++ b/tests/fixtures/config/split-config/git/org_project1/playbooks/project1-project2-integration.yaml
@@ -0,0 +1,2 @@
+- hosts: all
+  tasks: []
diff --git a/tests/fixtures/config/split-config/main.yaml b/tests/fixtures/config/split-config/main.yaml
new file mode 100644
index 0000000..5f57245
--- /dev/null
+++ b/tests/fixtures/config/split-config/main.yaml
@@ -0,0 +1,9 @@
+- tenant:
+    name: tenant-one
+    source:
+      gerrit:
+        config-projects:
+          - common-config
+        untrusted-projects:
+          - org/project
+          - org/project1
diff --git a/tests/fixtures/config/templated-project/git/common-config/zuul.d/jobs.yaml b/tests/fixtures/config/templated-project/git/common-config/zuul.d/jobs.yaml
new file mode 100644
index 0000000..e051871
--- /dev/null
+++ b/tests/fixtures/config/templated-project/git/common-config/zuul.d/jobs.yaml
@@ -0,0 +1,17 @@
+- job:
+    name: project-test1
+
+- job:
+    name: project-test2
+
+- job:
+    name: layered-project-test3
+
+- job:
+    name: layered-project-test4
+
+- job:
+    name: layered-project-foo-test5
+
+- job:
+    name: project-test6
diff --git a/tests/fixtures/config/templated-project/git/common-config/zuul.d/pipelines.yaml b/tests/fixtures/config/templated-project/git/common-config/zuul.d/pipelines.yaml
new file mode 100644
index 0000000..4a19796
--- /dev/null
+++ b/tests/fixtures/config/templated-project/git/common-config/zuul.d/pipelines.yaml
@@ -0,0 +1,41 @@
+- pipeline:
+    name: check
+    manager: independent
+    trigger:
+      gerrit:
+        - event: patchset-created
+    success:
+      gerrit:
+        verified: 1
+    failure:
+      gerrit:
+        verified: -1
+
+- pipeline:
+    name: gate
+    manager: dependent
+    success-message: Build succeeded (gate).
+    trigger:
+      gerrit:
+        - event: comment-added
+          approval:
+            - approved: 1
+    success:
+      gerrit:
+        verified: 2
+        submit: true
+    failure:
+      gerrit:
+        verified: -2
+    start:
+      gerrit:
+        verified: 0
+    precedence: high
+
+- pipeline:
+    name: post
+    manager: independent
+    trigger:
+      gerrit:
+        - event: ref-updated
+          ref: ^(?!refs/).*$
diff --git a/tests/fixtures/config/templated-project/git/common-config/zuul.d/projects.yaml b/tests/fixtures/config/templated-project/git/common-config/zuul.d/projects.yaml
new file mode 100644
index 0000000..891c863
--- /dev/null
+++ b/tests/fixtures/config/templated-project/git/common-config/zuul.d/projects.yaml
@@ -0,0 +1,14 @@
+- project:
+    name: org/templated-project
+    templates:
+      - test-one-and-two
+
+- project:
+    name: org/layered-project
+    templates:
+      - test-one-and-two
+      - test-three-and-four
+      - test-five
+    check:
+      jobs:
+        - project-test6
diff --git a/tests/fixtures/config/templated-project/git/common-config/zuul.d/templates.yaml b/tests/fixtures/config/templated-project/git/common-config/zuul.d/templates.yaml
new file mode 100644
index 0000000..27d2f16
--- /dev/null
+++ b/tests/fixtures/config/templated-project/git/common-config/zuul.d/templates.yaml
@@ -0,0 +1,19 @@
+- project-template:
+    name: test-one-and-two
+    check:
+      jobs:
+        - project-test1
+        - project-test2
+
+- project-template:
+    name: test-three-and-four
+    check:
+      jobs:
+        - layered-project-test3
+        - layered-project-test4
+
+- project-template:
+    name: test-five
+    check:
+      jobs:
+        - layered-project-foo-test5
diff --git a/tests/fixtures/config/templated-project/git/common-config/zuul.yaml b/tests/fixtures/config/templated-project/git/common-config/zuul.yaml
deleted file mode 100644
index 251a3cd..0000000
--- a/tests/fixtures/config/templated-project/git/common-config/zuul.yaml
+++ /dev/null
@@ -1,94 +0,0 @@
-- pipeline:
-    name: check
-    manager: independent
-    trigger:
-      gerrit:
-        - event: patchset-created
-    success:
-      gerrit:
-        verified: 1
-    failure:
-      gerrit:
-        verified: -1
-
-- pipeline:
-    name: gate
-    manager: dependent
-    success-message: Build succeeded (gate).
-    trigger:
-      gerrit:
-        - event: comment-added
-          approval:
-            - approved: 1
-    success:
-      gerrit:
-        verified: 2
-        submit: true
-    failure:
-      gerrit:
-        verified: -2
-    start:
-      gerrit:
-        verified: 0
-    precedence: high
-
-- pipeline:
-    name: post
-    manager: independent
-    trigger:
-      gerrit:
-        - event: ref-updated
-          ref: ^(?!refs/).*$
-
-- project-template:
-    name: test-one-and-two
-    check:
-      jobs:
-        - project-test1
-        - project-test2
-
-- project-template:
-    name: test-three-and-four
-    check:
-      jobs:
-        - layered-project-test3
-        - layered-project-test4
-
-- project-template:
-    name: test-five
-    check:
-      jobs:
-        - layered-project-foo-test5
-
-- job:
-    name: project-test1
-
-- job:
-    name: project-test2
-
-- job:
-    name: layered-project-test3
-
-- job:
-    name: layered-project-test4
-
-- job:
-    name: layered-project-foo-test5
-
-- job:
-    name: project-test6
-
-- project:
-    name: org/templated-project
-    templates:
-      - test-one-and-two
-
-- project:
-    name: org/layered-project
-    templates:
-      - test-one-and-two
-      - test-three-and-four
-      - test-five
-    check:
-      jobs:
-        - project-test6
diff --git a/tests/fixtures/zuul-connections-merger.conf b/tests/fixtures/zuul-connections-merger.conf
index 4499493..df465d5 100644
--- a/tests/fixtures/zuul-connections-merger.conf
+++ b/tests/fixtures/zuul-connections-merger.conf
@@ -1,7 +1,7 @@
 [gearman]
 server=127.0.0.1
 
-[zuul]
+[webapp]
 status_url=http://zuul.example.com/status
 
 [merger]
diff --git a/tests/fixtures/zuul-github-driver.conf b/tests/fixtures/zuul-github-driver.conf
index dc28f98..3d61ab6 100644
--- a/tests/fixtures/zuul-github-driver.conf
+++ b/tests/fixtures/zuul-github-driver.conf
@@ -1,7 +1,7 @@
 [gearman]
 server=127.0.0.1
 
-[zuul]
+[webapp]
 status_url=http://zuul.example.com/status/#{change.number},{change.patchset}
 
 [merger]
@@ -23,4 +23,4 @@
 [connection github_ent]
 driver=github
 sshkey=/home/zuul/.ssh/id_rsa
-git_host=github.enterprise.io
+server=github.enterprise.io
diff --git a/tests/fixtures/zuul-push-reqs.conf b/tests/fixtures/zuul-push-reqs.conf
index c5272aa..4faac13 100644
--- a/tests/fixtures/zuul-push-reqs.conf
+++ b/tests/fixtures/zuul-push-reqs.conf
@@ -1,7 +1,7 @@
 [gearman]
 server=127.0.0.1
 
-[zuul]
+[webapp]
 status_url=http://zuul.example.com/status
 
 [merger]
diff --git a/tests/unit/test_configloader.py b/tests/unit/test_configloader.py
index fe2e371..573ccbf 100644
--- a/tests/unit/test_configloader.py
+++ b/tests/unit/test_configloader.py
@@ -12,6 +12,7 @@
 # License for the specific language governing permissions and limitations
 # under the License.
 
+import textwrap
 
 from tests.base import ZuulTestCase
 
@@ -207,3 +208,40 @@
                         project2_config.pipelines['check'].job_list.jobs)
         self.assertTrue('project2-job' in
                         project2_config.pipelines['check'].job_list.jobs)
+
+
+class TestSplitConfig(ZuulTestCase):
+    tenant_config_file = 'config/split-config/main.yaml'
+
+    def setup_config(self):
+        super(TestSplitConfig, self).setup_config()
+
+    def test_split_config(self):
+        tenant = self.sched.abide.tenants.get('tenant-one')
+        self.assertIn('project-test1', tenant.layout.jobs)
+        project_config = tenant.layout.project_configs.get(
+            'review.example.com/org/project')
+        self.assertIn('project-test1',
+                      project_config.pipelines['check'].job_list.jobs)
+        project1_config = tenant.layout.project_configs.get(
+            'review.example.com/org/project1')
+        self.assertIn('project1-project2-integration',
+                      project1_config.pipelines['check'].job_list.jobs)
+
+    def test_dynamic_split_config(self):
+        in_repo_conf = textwrap.dedent(
+            """
+            - project:
+                name: org/project1
+                check:
+                  jobs:
+                    - project-test1
+            """)
+        file_dict = {'.zuul.d/gate.yaml': in_repo_conf}
+        A = self.fake_gerrit.addFakeChange('org/project1', 'master', 'A',
+                                           files=file_dict)
+        self.fake_gerrit.addEvent(A.getPatchsetCreatedEvent(1))
+        self.waitUntilSettled()
+        # project1-project2-integration test removed, only want project-test1
+        self.assertHistory([
+            dict(name='project-test1', result='SUCCESS', changes='1,1')])
diff --git a/tests/unit/test_connection.py b/tests/unit/test_connection.py
index fcfaf5d..8d9d127 100644
--- a/tests/unit/test_connection.py
+++ b/tests/unit/test_connection.py
@@ -121,7 +121,8 @@
         self.assertEqual('project-merge', buildset0_builds[0]['job_name'])
         self.assertEqual("SUCCESS", buildset0_builds[0]['result'])
         self.assertEqual(
-            'finger://zl.example.com/{uuid}'.format(
+            'finger://{hostname}/{uuid}'.format(
+                hostname=self.executor_server.hostname,
                 uuid=buildset0_builds[0]['uuid']),
             buildset0_builds[0]['log_url'])
         self.assertEqual('check', buildset1['pipeline'])
@@ -144,7 +145,8 @@
         self.assertEqual('project-test1', buildset1_builds[-2]['job_name'])
         self.assertEqual("FAILURE", buildset1_builds[-2]['result'])
         self.assertEqual(
-            'finger://zl.example.com/{uuid}'.format(
+            'finger://{hostname}/{uuid}'.format(
+                hostname=self.executor_server.hostname,
                 uuid=buildset1_builds[-2]['uuid']),
             buildset1_builds[-2]['log_url'])
 
diff --git a/tests/unit/test_executor.py b/tests/unit/test_executor.py
old mode 100644
new mode 100755
index 39b6070..7b76802
--- a/tests/unit/test_executor.py
+++ b/tests/unit/test_executor.py
@@ -18,6 +18,9 @@
 import logging
 import time
 
+import zuul.executor.server
+import zuul.model
+
 from tests.base import ZuulTestCase, simple_layout
 
 
@@ -305,3 +308,27 @@
         ]
 
         self.assertBuildStates(states, projects)
+
+
+class TestAnsibleJob(ZuulTestCase):
+    tenant_config_file = 'config/ansible/main.yaml'
+
+    def setUp(self):
+        super(TestAnsibleJob, self).setUp()
+        job = zuul.model.Job('test')
+        job.unique = 'test'
+        self.test_job = zuul.executor.server.AnsibleJob(self.executor_server,
+                                                        job)
+
+    def test_getHostList_host_keys(self):
+        # Test without ssh_port set
+        node = {'name': 'fake-host',
+                'host_keys': ['fake-host-key'],
+                'interface_ip': 'localhost'}
+        keys = self.test_job.getHostList({'nodes': [node]})[0]['host_keys']
+        self.assertEqual(keys[0], 'localhost fake-host-key')
+
+        # Test with custom ssh_port set
+        node['ssh_port'] = 22022
+        keys = self.test_job.getHostList({'nodes': [node]})[0]['host_keys']
+        self.assertEqual(keys[0], '[localhost]:22022 fake-host-key')
diff --git a/tests/unit/test_github_driver.py b/tests/unit/test_github_driver.py
index a19073c..f360866 100644
--- a/tests/unit/test_github_driver.py
+++ b/tests/unit/test_github_driver.py
@@ -14,6 +14,7 @@
 
 import re
 from testtools.matchers import MatchesRegex, StartsWith
+import urllib
 import time
 
 from tests.base import ZuulTestCase, simple_layout, random_sha1
@@ -584,3 +585,18 @@
         new = self.sched.tenant_last_reconfigured.get('tenant-one', 0)
         # New timestamp should be greater than the old timestamp
         self.assertLess(old, new)
+
+    @simple_layout('layouts/basic-github.yaml', driver='github')
+    def test_ping_event(self):
+        # Test valid ping
+        pevent = {'repository': {'full_name': 'org/project'}}
+        req = self.fake_github.emitEvent(('ping', pevent))
+        self.assertEqual(req.status, 200, "Ping event didn't succeed")
+
+        # Test invalid ping
+        pevent = {'repository': {'full_name': 'unknown-project'}}
+        self.assertRaises(
+            urllib.error.HTTPError,
+            self.fake_github.emitEvent,
+            ('ping', pevent),
+        )
diff --git a/tests/unit/test_github_requirements.py b/tests/unit/test_github_requirements.py
index 135f7ab..f125d1e 100644
--- a/tests/unit/test_github_requirements.py
+++ b/tests/unit/test_github_requirements.py
@@ -240,13 +240,10 @@
 
         # The first negative review from derp should not cause it to be
         # enqueued
-        for i in range(1, 4):
-            submitted_at = time.time() - 72 * 60 * 60
-            A.addReview('derp', 'CHANGES_REQUESTED',
-                        submitted_at)
-            self.fake_github.emitEvent(comment)
-            self.waitUntilSettled()
-            self.assertEqual(len(self.history), 0)
+        A.addReview('derp', 'CHANGES_REQUESTED')
+        self.fake_github.emitEvent(comment)
+        self.waitUntilSettled()
+        self.assertEqual(len(self.history), 0)
 
         # A positive review from derp should cause it to be enqueued
         A.addReview('derp', 'APPROVED')
@@ -256,6 +253,37 @@
         self.assertEqual(self.history[0].name, 'project5-reviewuserstate')
 
     @simple_layout('layouts/requirements-github.yaml', driver='github')
+    def test_pipeline_require_review_comment_masked(self):
+        "Test pipeline requirement: review comments on top of votes"
+
+        A = self.fake_github.openFakePullRequest('org/project5', 'master', 'A')
+        # Add derp to writers
+        A.writers.append('derp')
+        # A comment event that we will keep submitting to trigger
+        comment = A.getCommentAddedEvent('test me')
+        self.fake_github.emitEvent(comment)
+        self.waitUntilSettled()
+        # No positive review from derp so should not be enqueued
+        self.assertEqual(len(self.history), 0)
+
+        # The first negative review from derp should not cause it to be
+        # enqueued
+        A.addReview('derp', 'CHANGES_REQUESTED')
+        self.fake_github.emitEvent(comment)
+        self.waitUntilSettled()
+        self.assertEqual(len(self.history), 0)
+
+        # A positive review is required, so provide it
+        A.addReview('derp', 'APPROVED')
+
+        # Add a comment review on top to make sure we can still enqueue
+        A.addReview('derp', 'COMMENTED')
+        self.fake_github.emitEvent(comment)
+        self.waitUntilSettled()
+        self.assertEqual(len(self.history), 1)
+        self.assertEqual(self.history[0].name, 'project5-reviewuserstate')
+
+    @simple_layout('layouts/requirements-github.yaml', driver='github')
     def test_require_review_newer_than(self):
 
         A = self.fake_github.openFakePullRequest('org/project6', 'master', 'A')
diff --git a/tests/unit/test_log_streamer.py b/tests/unit/test_log_streamer.py
index b0ef2c2..f47a8c8 100644
--- a/tests/unit/test_log_streamer.py
+++ b/tests/unit/test_log_streamer.py
@@ -14,6 +14,10 @@
 # License for the specific language governing permissions and limitations
 # under the License.
 
+import aiohttp
+import asyncio
+import logging
+import json
 import os
 import os.path
 import socket
@@ -21,6 +25,7 @@
 import threading
 import time
 
+import zuul.web
 import zuul.lib.log_streamer
 import tests.base
 
@@ -57,6 +62,7 @@
 class TestStreaming(tests.base.AnsibleZuulTestCase):
 
     tenant_config_file = 'config/streamer/main.yaml'
+    log = logging.getLogger("zuul.test.test_log_streamer.TestStreaming")
 
     def setUp(self):
         super(TestStreaming, self).setUp()
@@ -146,9 +152,116 @@
         # job and deleted. However, we still have a file handle to it, so we
         # can make sure that we read the entire contents at this point.
         # Compact the returned lines into a single string for easy comparison.
-        file_contents = ''.join(logfile.readlines())
+        file_contents = logfile.read()
         logfile.close()
 
         self.log.debug("\n\nFile contents: %s\n\n", file_contents)
         self.log.debug("\n\nStreamed: %s\n\n", self.streaming_data)
         self.assertEqual(file_contents, self.streaming_data)
+
+    def runWSClient(self, build_uuid, event):
+        async def client(loop, build_uuid, event):
+            uri = 'http://127.0.0.1:9000/console-stream'
+            try:
+                session = aiohttp.ClientSession(loop=loop)
+                async with session.ws_connect(uri) as ws:
+                    req = {'uuid': build_uuid, 'logfile': None}
+                    ws.send_str(json.dumps(req))
+                    event.set()  # notify we are connected and req sent
+                    async for msg in ws:
+                        if msg.type == aiohttp.WSMsgType.TEXT:
+                            self.ws_client_results += msg.data
+                        elif msg.type == aiohttp.WSMsgType.CLOSED:
+                            break
+                        elif msg.type == aiohttp.WSMsgType.ERROR:
+                            break
+                session.close()
+            except Exception:
+                self.log.exception("client exception:")
+
+        loop = asyncio.new_event_loop()
+        loop.set_debug(True)
+        loop.run_until_complete(client(loop, build_uuid, event))
+        loop.close()
+
+    def test_websocket_streaming(self):
+        # Need to set the streaming port before submitting the job
+        finger_port = 7902
+        self.executor_server.log_streaming_port = finger_port
+
+        A = self.fake_gerrit.addFakeChange('org/project', 'master', 'A')
+        self.fake_gerrit.addEvent(A.getPatchsetCreatedEvent(1))
+
+        # We don't have any real synchronization for the ansible jobs, so
+        # just wait until we get our running build.
+        while not len(self.builds):
+            time.sleep(0.1)
+        build = self.builds[0]
+        self.assertEqual(build.name, 'python27')
+
+        build_dir = os.path.join(self.executor_server.jobdir_root, build.uuid)
+        while not os.path.exists(build_dir):
+            time.sleep(0.1)
+
+        # Need to wait to make sure that jobdir gets set
+        while build.jobdir is None:
+            time.sleep(0.1)
+            build = self.builds[0]
+
+        # Wait for the job to begin running and create the ansible log file.
+        # The job waits to complete until the flag file exists, so we can
+        # safely access the log here. We only open it (to force a file handle
+        # to be kept open for it after the job finishes) but wait to read the
+        # contents until the job is done.
+        ansible_log = os.path.join(build.jobdir.log_root, 'job-output.txt')
+        while not os.path.exists(ansible_log):
+            time.sleep(0.1)
+        logfile = open(ansible_log, 'r')
+        self.addCleanup(logfile.close)
+
+        # Start the finger streamer daemon
+        streamer = zuul.lib.log_streamer.LogStreamer(
+            None, self.host, finger_port, self.executor_server.jobdir_root)
+        self.addCleanup(streamer.stop)
+
+        # Start the web server
+        web_server = zuul.web.ZuulWeb(
+            listen_address='127.0.0.1', listen_port=9000,
+            gear_server='127.0.0.1', gear_port=self.gearman_server.port)
+        loop = asyncio.new_event_loop()
+        loop.set_debug(True)
+        ws_thread = threading.Thread(target=web_server.run, args=(loop,))
+        ws_thread.start()
+        self.addCleanup(loop.close)
+        self.addCleanup(ws_thread.join)
+        self.addCleanup(web_server.stop)
+
+        # Wait until web server is started
+        with socket.socket(socket.AF_INET, socket.SOCK_STREAM) as s:
+            while s.connect_ex((self.host, 9000)):
+                time.sleep(0.1)
+
+        # Start a thread with the websocket client
+        ws_client_event = threading.Event()
+        self.ws_client_results = ''
+        ws_client_thread = threading.Thread(
+            target=self.runWSClient, args=(build.uuid, ws_client_event)
+        )
+        ws_client_thread.start()
+        ws_client_event.wait()
+
+        # Allow the job to complete
+        flag_file = os.path.join(build_dir, 'test_wait')
+        open(flag_file, 'w').close()
+
+        # Wait for the websocket client to complete, which it should when
+        # it's received the full log.
+        ws_client_thread.join()
+
+        self.waitUntilSettled()
+
+        file_contents = logfile.read()
+        logfile.close()
+        self.log.debug("\n\nFile contents: %s\n\n", file_contents)
+        self.log.debug("\n\nStreamed: %s\n\n", self.ws_client_results)
+        self.assertEqual(file_contents, self.ws_client_results)
diff --git a/tests/unit/test_scheduler.py b/tests/unit/test_scheduler.py
index e9eee54..1d24585 100755
--- a/tests/unit/test_scheduler.py
+++ b/tests/unit/test_scheduler.py
@@ -2289,22 +2289,40 @@
                             status_jobs.append(job)
         self.assertEqual('project-merge', status_jobs[0]['name'])
         # TODO(mordred) pull uuids from self.builds
-        self.assertEqual('finger://zl.example.com/%s' % status_jobs[0]['uuid'],
-                         status_jobs[0]['url'])
+        self.assertEqual(
+            'finger://{hostname}/{uuid}'.format(
+                hostname=self.executor_server.hostname,
+                uuid=status_jobs[0]['uuid']),
+            status_jobs[0]['url'])
         # TOOD(mordred) configure a success-url on the base job
-        self.assertEqual('finger://zl.example.com/%s' % status_jobs[0]['uuid'],
-                         status_jobs[0]['report_url'])
+        self.assertEqual(
+            'finger://{hostname}/{uuid}'.format(
+                hostname=self.executor_server.hostname,
+                uuid=status_jobs[0]['uuid']),
+            status_jobs[0]['report_url'])
         self.assertEqual('project-test1', status_jobs[1]['name'])
-        self.assertEqual('finger://zl.example.com/%s' % status_jobs[1]['uuid'],
-                         status_jobs[1]['url'])
-        self.assertEqual('finger://zl.example.com/%s' % status_jobs[1]['uuid'],
-                         status_jobs[1]['report_url'])
+        self.assertEqual(
+            'finger://{hostname}/{uuid}'.format(
+                hostname=self.executor_server.hostname,
+                uuid=status_jobs[1]['uuid']),
+            status_jobs[1]['url'])
+        self.assertEqual(
+            'finger://{hostname}/{uuid}'.format(
+                hostname=self.executor_server.hostname,
+                uuid=status_jobs[1]['uuid']),
+            status_jobs[1]['report_url'])
 
         self.assertEqual('project-test2', status_jobs[2]['name'])
-        self.assertEqual('finger://zl.example.com/%s' % status_jobs[2]['uuid'],
-                         status_jobs[2]['url'])
-        self.assertEqual('finger://zl.example.com/%s' % status_jobs[2]['uuid'],
-                         status_jobs[2]['report_url'])
+        self.assertEqual(
+            'finger://{hostname}/{uuid}'.format(
+                hostname=self.executor_server.hostname,
+                uuid=status_jobs[2]['uuid']),
+            status_jobs[2]['url'])
+        self.assertEqual(
+            'finger://{hostname}/{uuid}'.format(
+                hostname=self.executor_server.hostname,
+                uuid=status_jobs[2]['uuid']),
+            status_jobs[2]['report_url'])
 
     def test_live_reconfiguration(self):
         "Test that live reconfiguration works"
@@ -3577,8 +3595,11 @@
                 self.assertEqual('project-merge', job['name'])
                 self.assertEqual('gate', job['pipeline'])
                 self.assertEqual(False, job['retry'])
-                self.assertEqual('finger://zl.example.com/%s' % job['uuid'],
-                                 job['url'])
+                self.assertEqual(
+                    'finger://{hostname}/{uuid}'.format(
+                        hostname=self.executor_server.hostname,
+                        uuid=job['uuid']),
+                    job['url'])
                 self.assertEqual(2, len(job['worker']))
                 self.assertEqual(False, job['canceled'])
                 self.assertEqual(True, job['voting'])
@@ -4674,7 +4695,8 @@
 
         # NOTE: This default URL is currently hard-coded in executor/server.py
         self.assertIn(
-            '- docs-draft-test2 finger://zl.example.com/{uuid}'.format(
+            '- docs-draft-test2 finger://{hostname}/{uuid}'.format(
+                hostname=self.executor_server.hostname,
                 uuid=uuid_test2),
             body[3])
 
diff --git a/tests/unit/test_v3.py b/tests/unit/test_v3.py
index f765a53..112f48c 100644
--- a/tests/unit/test_v3.py
+++ b/tests/unit/test_v3.py
@@ -496,39 +496,52 @@
         A = self.fake_gerrit.addFakeChange('org/project', 'master', 'A')
         self.fake_gerrit.addEvent(A.getPatchsetCreatedEvent(1))
         self.waitUntilSettled()
-        build = self.getJobFromHistory('timeout')
-        self.assertEqual(build.result, 'TIMED_OUT')
-        build = self.getJobFromHistory('faillocal')
-        self.assertEqual(build.result, 'FAILURE')
-        build = self.getJobFromHistory('check-vars')
-        self.assertEqual(build.result, 'SUCCESS')
-        build = self.getJobFromHistory('hello-world')
-        self.assertEqual(build.result, 'SUCCESS')
-        build = self.getJobFromHistory('python27')
-        self.assertEqual(build.result, 'SUCCESS')
-        flag_path = os.path.join(self.test_root, build.uuid + '.flag')
+        build_timeout = self.getJobFromHistory('timeout')
+        self.assertEqual(build_timeout.result, 'TIMED_OUT')
+        build_faillocal = self.getJobFromHistory('faillocal')
+        self.assertEqual(build_faillocal.result, 'FAILURE')
+        build_failpost = self.getJobFromHistory('failpost')
+        self.assertEqual(build_failpost.result, 'POST_FAILURE')
+        build_check_vars = self.getJobFromHistory('check-vars')
+        self.assertEqual(build_check_vars.result, 'SUCCESS')
+        build_hello = self.getJobFromHistory('hello-world')
+        self.assertEqual(build_hello.result, 'SUCCESS')
+        build_python27 = self.getJobFromHistory('python27')
+        self.assertEqual(build_python27.result, 'SUCCESS')
+        flag_path = os.path.join(self.test_root, build_python27.uuid + '.flag')
         self.assertTrue(os.path.exists(flag_path))
-        copied_path = os.path.join(self.test_root, build.uuid +
+        copied_path = os.path.join(self.test_root, build_python27.uuid +
                                    '.copied')
         self.assertTrue(os.path.exists(copied_path))
-        failed_path = os.path.join(self.test_root, build.uuid +
+        failed_path = os.path.join(self.test_root, build_python27.uuid +
                                    '.failed')
         self.assertFalse(os.path.exists(failed_path))
-        pre_flag_path = os.path.join(self.test_root, build.uuid +
+        pre_flag_path = os.path.join(self.test_root, build_python27.uuid +
                                      '.pre.flag')
         self.assertTrue(os.path.exists(pre_flag_path))
-        post_flag_path = os.path.join(self.test_root, build.uuid +
+        post_flag_path = os.path.join(self.test_root, build_python27.uuid +
                                       '.post.flag')
         self.assertTrue(os.path.exists(post_flag_path))
         bare_role_flag_path = os.path.join(self.test_root,
-                                           build.uuid + '.bare-role.flag')
+                                           build_python27.uuid +
+                                           '.bare-role.flag')
         self.assertTrue(os.path.exists(bare_role_flag_path))
 
         secrets_path = os.path.join(self.test_root,
-                                    build.uuid + '.secrets')
+                                    build_python27.uuid + '.secrets')
         with open(secrets_path) as f:
             self.assertEqual(f.read(), "test-username test-password")
 
+        msg = A.messages[0]
+        success = "{} https://success.example.com/zuul-logs/{}"
+        fail = "{} https://failure.example.com/zuul-logs/{}"
+        self.assertIn(success.format("python27", build_python27.uuid), msg)
+        self.assertIn(fail.format("faillocal", build_faillocal.uuid), msg)
+        self.assertIn(success.format("check-vars", build_check_vars.uuid), msg)
+        self.assertIn(success.format("hello-world", build_hello.uuid), msg)
+        self.assertIn(fail.format("timeout", build_timeout.uuid), msg)
+        self.assertIn(fail.format("failpost", build_failpost.uuid), msg)
+
 
 class TestPrePlaybooks(AnsibleZuulTestCase):
     # A temporary class to hold new tests while others are disabled
diff --git a/tools/encrypt_secret.py b/tools/encrypt_secret.py
old mode 100644
new mode 100755
index e36b24e..72429e9
--- a/tools/encrypt_secret.py
+++ b/tools/encrypt_secret.py
@@ -13,11 +13,19 @@
 # under the License.
 
 import argparse
+import base64
 import os
 import subprocess
 import sys
 import tempfile
-import urllib
+
+# We need to import Request and urlopen differently for python 2 and 3
+try:
+    from urllib.request import Request
+    from urllib.request import urlopen
+except ImportError:
+    from urllib2 import Request
+    from urllib2 import urlopen
 
 DESCRIPTION = """Encrypt a secret for Zuul.
 
@@ -50,9 +58,9 @@
                         "to standard output.")
     args = parser.parse_args()
 
-    req = urllib.request.Request("%s/keys/%s/%s.pub" % (
+    req = Request("%s/keys/%s/%s.pub" % (
         args.url, args.source, args.project))
-    pubkey = urllib.request.urlopen(req)
+    pubkey = urlopen(req)
 
     if args.infile:
         with open(args.infile) as f:
@@ -70,18 +78,18 @@
                               pubkey_file.name],
                              stdin=subprocess.PIPE,
                              stdout=subprocess.PIPE)
-        (stdout, stderr) = p.communicate(plaintext)
+        (stdout, stderr) = p.communicate(plaintext.encode("utf-8"))
         if p.returncode != 0:
             raise Exception("Return code %s from openssl" % p.returncode)
-        ciphertext = stdout.encode('base64')
+        ciphertext = base64.b64encode(stdout)
     finally:
         os.unlink(pubkey_file.name)
 
     if args.outfile:
-        with open(args.outfile, "w") as f:
+        with open(args.outfile, "wb") as f:
             f.write(ciphertext)
     else:
-        print(ciphertext)
+        print(ciphertext.decode("utf-8"))
 
 
 if __name__ == '__main__':
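
For reference, the byte handling above follows from Python 3 semantics:
subprocess pipes carry bytes, and base64.b64encode both takes and returns
bytes, so the plaintext is encoded before piping and the ciphertext is only
decoded for display. A minimal sketch of that round trip (not part of this
change; ``cat`` stands in for the openssl pipeline):

    import base64
    import subprocess

    plaintext = "s3cret"
    # Pipes carry bytes under Python 3, so encode before writing.
    p = subprocess.Popen(['cat'], stdin=subprocess.PIPE,
                         stdout=subprocess.PIPE)
    stdout, _ = p.communicate(plaintext.encode("utf-8"))
    ciphertext = base64.b64encode(stdout)   # bytes in, bytes out
    print(ciphertext.decode("utf-8"))       # decode only for printing
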
diff --git a/zuul/ansible/action/normal.py b/zuul/ansible/action/normal.py
index ece1411..74e732e 100644
--- a/zuul/ansible/action/normal.py
+++ b/zuul/ansible/action/normal.py
@@ -29,6 +29,12 @@
                     and self._task.delegate_to.startswith('127.'))):
             if self._task.action == 'stat':
                 paths._fail_if_unsafe(self._task.args['path'])
+            elif self._task.action == 'file':
+                dest = self._task.args.get(
+                    'path', self._task.args.get(
+                        'dest', self._task.args.get(
+                            'name')))
+                paths._fail_if_unsafe(dest)
             else:
                 return dict(
                     failed=True,
diff --git a/zuul/ansible/callback/zuul_stream.py b/zuul/ansible/callback/zuul_stream.py
index c6cc7ab..5c2ce8a 100644
--- a/zuul/ansible/callback/zuul_stream.py
+++ b/zuul/ansible/callback/zuul_stream.py
@@ -119,7 +119,7 @@
 
     def _read_log(self, host, ip, log_id, task_name, hosts):
         self._log("[%s] Starting to log %s for task %s"
-                  % (host, log_id, task_name), executor=True)
+                  % (host, log_id, task_name), job=False, executor=True)
         s = socket.socket(socket.AF_INET, socket.SOCK_STREAM)
         while True:
             try:
@@ -204,7 +204,8 @@
         else:
             task_host = result._host.get_name()
             task_hostvars = result._task._variable_manager._hostvars[task_host]
-            if task_hostvars['ansible_host'] in localhost_names:
+            if task_hostvars.get('ansible_host', task_hostvars.get(
+                    'ansible_inventory_host')) in localhost_names:
                 is_localhost = True
 
         if not is_localhost and is_task:
@@ -251,6 +252,13 @@
         result_dict = dict(result._result)
 
         self._clean_results(result_dict, result._task.action)
+        if '_zuul_nolog_return' in result_dict:
+            # We have a custom zuul module that doesn't want the parameters
+            # from its returned data splatted to stdout. This is typically for
+            # modules that are collecting data to be displayed some other way.
+            for key in list(result_dict.keys()):
+                if key != 'changed':
+                    result_dict.pop(key)
 
         if result_dict.get('changed', False):
             status = 'changed'
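
For reference, a hypothetical result dictionary that would trigger the
``_zuul_nolog_return`` handling above; the key names besides ``changed`` are
illustrative, not part of this change:

    # A custom module's result opting out of return-value logging; the
    # callback keeps only 'changed' before writing to the console log.
    result_dict = {
        'changed': False,
        '_zuul_nolog_return': True,
        'collected_data': {'hostname': 'node1'},  # stripped from the log
    }
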
diff --git a/zuul/ansible/library/command.py b/zuul/ansible/library/command.py
index 00020c7..d27c83e 100644
--- a/zuul/ansible/library/command.py
+++ b/zuul/ansible/library/command.py
@@ -356,6 +356,10 @@
     if umask:
         old_umask = os.umask(umask)
 
+    t = None
+    fail_json_kwargs = None
+    ret = None
+
     try:
         if self._debug:
             self.log('Executing: ' + clean_args)
@@ -394,11 +398,27 @@
     except (OSError, IOError):
         e = get_exception()
         self.log("Error Executing CMD:%s Exception:%s" % (clean_args, to_native(e)))
-        self.fail_json(rc=e.errno, msg=to_native(e), cmd=clean_args)
+        fail_json_kwargs = dict(rc=e.errno, msg=str(e), cmd=clean_args)
     except Exception:
         e = get_exception()
         self.log("Error Executing CMD:%s Exception:%s" % (clean_args, to_native(traceback.format_exc())))
-        self.fail_json(rc=257, msg=to_native(e), exception=traceback.format_exc(), cmd=clean_args)
+        fail_json_kwargs = dict(rc=257, msg=str(e), exception=traceback.format_exc(), cmd=clean_args)
+    finally:
+        if t:
+            with Console(zuul_log_id) as console:
+                if t.isAlive():
+                    console.addLine("[Zuul] standard output/error still open "
+                                    "after child exited")
+                if not ret and fail_json_kwargs:
+                    ret = fail_json_kwargs['rc']
+                elif not ret and not fail_json_kwargs:
+                    ret = -1
+                console.addLine("[Zuul] Task exit code: %s\n" % ret)
+                if ret == -1 and not fail_json_kwargs:
+                    self.fail_json(rc=ret, msg="Something went horribly wrong during task execution")
+
+        if fail_json_kwargs:
+            self.fail_json(**fail_json_kwargs)
 
     # Restore env settings
     for key, val in old_env_vals.items():
diff --git a/zuul/cmd/executor.py b/zuul/cmd/executor.py
index 44a7d3f..6a1a214 100755
--- a/zuul/cmd/executor.py
+++ b/zuul/cmd/executor.py
@@ -61,7 +61,7 @@
         self.args = parser.parse_args()
 
     def send_command(self, cmd):
-        state_dir = get_default(self.config, 'zuul', 'state_dir',
+        state_dir = get_default(self.config, 'executor', 'state_dir',
                                 '/var/lib/zuul', expand_user=True)
         path = os.path.join(state_dir, 'executor.socket')
         s = socket.socket(socket.AF_UNIX, socket.SOCK_STREAM)
@@ -111,9 +111,9 @@
 
         self.user = get_default(self.config, 'executor', 'user', 'zuul')
 
-        if self.config.has_option('zuul', 'jobroot_dir'):
+        if self.config.has_option('executor', 'jobroot_dir'):
             self.jobroot_dir = os.path.expanduser(
-                self.config.get('zuul', 'jobroot_dir'))
+                self.config.get('executor', 'jobroot_dir'))
             if not os.path.isdir(self.jobroot_dir):
                 print("Invalid jobroot_dir: {jobroot_dir}".format(
                     jobroot_dir=self.jobroot_dir))
diff --git a/zuul/cmd/merger.py b/zuul/cmd/merger.py
index 97f208c..c5cfd6c 100755
--- a/zuul/cmd/merger.py
+++ b/zuul/cmd/merger.py
@@ -80,7 +80,7 @@
     server.read_config()
     server.configure_connections(source_only=True)
 
-    state_dir = get_default(server.config, 'zuul', 'state_dir',
+    state_dir = get_default(server.config, 'merger', 'state_dir',
                             '/var/lib/zuul', expand_user=True)
     test_fn = os.path.join(state_dir, 'test')
     try:
diff --git a/zuul/cmd/scheduler.py b/zuul/cmd/scheduler.py
index e89480b..b7b12fe 100755
--- a/zuul/cmd/scheduler.py
+++ b/zuul/cmd/scheduler.py
@@ -59,7 +59,7 @@
         signal.signal(signal.SIGHUP, signal.SIG_IGN)
         self.log.debug("Reconfiguration triggered")
         self.read_config()
-        self.setup_logging('zuul', 'log_config')
+        self.setup_logging('scheduler', 'log_config')
         try:
             self.sched.reconfigure(self.config)
         except Exception:
@@ -140,7 +140,7 @@
             self.config.getboolean('gearman_server', 'start')):
             self.start_gear_server()
 
-        self.setup_logging('zuul', 'log_config')
+        self.setup_logging('scheduler', 'log_config')
         self.log = logging.getLogger("zuul.Scheduler")
 
         self.sched = zuul.scheduler.Scheduler(self.config)
@@ -150,8 +150,8 @@
         nodepool = zuul.nodepool.Nodepool(self.sched)
 
         zookeeper = zuul.zk.ZooKeeper()
-        zookeeper_hosts = get_default(self.config, 'zuul', 'zookeeper_hosts',
-                                      '127.0.0.1:2181')
+        zookeeper_hosts = get_default(self.config, 'zookeeper',
+                                      'hosts', '127.0.0.1:2181')
 
         zookeeper.connect(zookeeper_hosts)
 
@@ -207,7 +207,7 @@
     if scheduler.args.validate:
         sys.exit(scheduler.test_config())
 
-    pid_fn = get_default(scheduler.config, 'zuul', 'pidfile',
+    pid_fn = get_default(scheduler.config, 'scheduler', 'pidfile',
                          '/var/run/zuul-scheduler/zuul-scheduler.pid',
                          expand_user=True)
     pid = pid_file_module.TimeoutPIDLockFile(pid_fn, 10)
diff --git a/zuul/cmd/web.py b/zuul/cmd/web.py
new file mode 100755
index 0000000..9869a2c
--- /dev/null
+++ b/zuul/cmd/web.py
@@ -0,0 +1,115 @@
+#!/usr/bin/env python
+# Copyright 2017 Red Hat, Inc.
+#
+# Licensed under the Apache License, Version 2.0 (the "License"); you may
+# not use this file except in compliance with the License. You may obtain
+# a copy of the License at
+#
+#      http://www.apache.org/licenses/LICENSE-2.0
+#
+# Unless required by applicable law or agreed to in writing, software
+# distributed under the License is distributed on an "AS IS" BASIS, WITHOUT
+# WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. See the
+# License for the specific language governing permissions and limitations
+# under the License.
+
+import argparse
+import asyncio
+import daemon
+import extras
+import logging
+import signal
+import sys
+import threading
+
+import zuul.cmd
+import zuul.web
+
+from zuul.lib.config import get_default
+
+# As of python-daemon 1.6 it doesn't bundle pidlockfile anymore;
+# instead it depends on lockfile-0.9.1, which uses pidfile.
+pid_file_module = extras.try_imports(['daemon.pidlockfile', 'daemon.pidfile'])
+
+
+class WebServer(zuul.cmd.ZuulApp):
+
+    def parse_arguments(self):
+        parser = argparse.ArgumentParser(description='Zuul Web Server.')
+        parser.add_argument('-c', dest='config',
+                            help='specify the config file')
+        parser.add_argument('-d', dest='nodaemon', action='store_true',
+                            help='do not run as a daemon')
+        parser.add_argument('--version', dest='version', action='version',
+                            version=self._get_version(),
+                            help='show zuul version')
+        self.args = parser.parse_args()
+
+    def exit_handler(self, signum, frame):
+        self.web.stop()
+
+    def _main(self):
+        params = dict()
+
+        params['listen_address'] = get_default(self.config,
+                                               'web', 'listen_address',
+                                               '127.0.0.1')
+        params['listen_port'] = get_default(self.config, 'web', 'port', 9000)
+        params['gear_server'] = get_default(self.config, 'gearman', 'server')
+        params['gear_port'] = get_default(self.config, 'gearman', 'port', 4730)
+        params['ssl_key'] = get_default(self.config, 'gearman', 'ssl_key')
+        params['ssl_cert'] = get_default(self.config, 'gearman', 'ssl_cert')
+        params['ssl_ca'] = get_default(self.config, 'gearman', 'ssl_ca')
+
+        try:
+            self.web = zuul.web.ZuulWeb(**params)
+        except Exception:
+            self.log.exception("Error creating ZuulWeb:")
+            sys.exit(1)
+
+        loop = asyncio.get_event_loop()
+        signal.signal(signal.SIGUSR1, self.exit_handler)
+        signal.signal(signal.SIGTERM, self.exit_handler)
+
+        self.log.info('Zuul Web Server starting')
+        self.thread = threading.Thread(target=self.web.run,
+                                       args=(loop,),
+                                       name='web')
+        self.thread.start()
+
+        try:
+            signal.pause()
+        except KeyboardInterrupt:
+            print("Ctrl + C: asking web server to exit nicely...\n")
+            self.exit_handler(signal.SIGINT, None)
+
+        self.thread.join()
+        loop.stop()
+        loop.close()
+        self.log.info("Zuul Web Server stopped")
+
+    def main(self):
+        self.setup_logging('web', 'log_config')
+        self.log = logging.getLogger("zuul.WebServer")
+
+        try:
+            self._main()
+        except Exception:
+            self.log.exception("Exception from WebServer:")
+
+
+def main():
+    server = WebServer()
+    server.parse_arguments()
+    server.read_config()
+
+    pid_fn = get_default(server.config, 'web', 'pidfile',
+                         '/var/run/zuul-web/zuul-web.pid', expand_user=True)
+
+    pid = pid_file_module.TimeoutPIDLockFile(pid_fn, 10)
+
+    if server.args.nodaemon:
+        server.main()
+    else:
+        with daemon.DaemonContext(pidfile=pid):
+            server.main()
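
zuul-web serves the log-streaming websocket defined in zuul/web.py later in
this change. A minimal client sketch, assuming the default listen address and
port configured above and a build UUID obtained elsewhere (aiohttp is used
here only because it is already a dependency of zuul-web):

    import asyncio
    import json

    import aiohttp

    async def stream_console(uuid, host='127.0.0.1', port=9000):
        url = 'ws://%s:%s/console-stream' % (host, port)
        async with aiohttp.ClientSession() as session:
            async with session.ws_connect(url) as ws:
                # The server expects one JSON request, then streams the
                # log as text frames until it closes the connection.
                await ws.send_str(json.dumps(
                    {'uuid': uuid, 'logfile': 'console.log'}))
                async for msg in ws:
                    if msg.type != aiohttp.WSMsgType.TEXT:
                        break
                    print(msg.data, end='')

    # asyncio.get_event_loop().run_until_complete(stream_console('<uuid>'))
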
diff --git a/zuul/configloader.py b/zuul/configloader.py
index 256a859..735fe38 100644
--- a/zuul/configloader.py
+++ b/zuul/configloader.py
@@ -1125,7 +1125,8 @@
             job = merger.getFiles(
                 project.source.connection.connection_name,
                 project.name, 'master',
-                files=['zuul.yaml', '.zuul.yaml'])
+                files=['zuul.yaml', '.zuul.yaml'],
+                dirs=['zuul.d', '.zuul.d'])
             job.source_context = model.SourceContext(project, 'master',
                                                      '', True)
             jobs.append(job)
@@ -1153,7 +1154,8 @@
                 job = merger.getFiles(
                     project.source.connection.connection_name,
                     project.name, branch,
-                    files=['.zuul.yaml'])
+                    files=['zuul.yaml', '.zuul.yaml'],
+                    dirs=['zuul.d', '.zuul.d'])
                 job.source_context = model.SourceContext(
                     project, branch, '', False)
                 jobs.append(job)
@@ -1166,15 +1168,19 @@
             TenantParser.log.debug("Waiting for cat job %s" % (job,))
             job.wait()
             loaded = False
-            for fn in ['zuul.yaml', '.zuul.yaml']:
-                if job.files.get(fn):
-                    # Don't load from more than one file in a repo-branch
-                    if loaded:
+            files = sorted(job.files.keys())
+            for conf_root in ['zuul.yaml', '.zuul.yaml', 'zuul.d', '.zuul.d']:
+                for fn in files:
+                    fn_root = fn.split('/')[0]
+                    if fn_root != conf_root or not job.files.get(fn):
+                        continue
+                    # Don't load more than one configuration in a repo-branch
+                    if loaded and loaded != conf_root:
                         TenantParser.log.warning(
                             "Multiple configuration files in %s" %
                             (job.source_context,))
                         continue
-                    loaded = True
+                    loaded = conf_root
                     job.source_context.path = fn
                     TenantParser.log.info(
                         "Loading configuration from %s" %
@@ -1352,31 +1358,54 @@
     def _loadDynamicProjectData(self, config, project, files, trusted):
         if trusted:
             branches = ['master']
-            fn = 'zuul.yaml'
         else:
             branches = project.source.getProjectBranches(project)
-            fn = '.zuul.yaml'
 
         for branch in branches:
+            fns1 = []
+            fns2 = []
+            files_list = files.connections.get(
+                project.source.connection.connection_name, {}).get(
+                    project.name, {}).get(branch, {}).keys()
+            for fn in files_list:
+                if fn.startswith("zuul.d/"):
+                    fns1.append(fn)
+                if fn.startswith(".zuul.d/"):
+                    fns2.append(fn)
+
+            fns = ['zuul.yaml', '.zuul.yaml'] + sorted(fns1) + sorted(fns2)
             incdata = None
-            data = files.getFile(project.source.connection.connection_name,
-                                 project.name, branch, fn)
-            if data:
-                source_context = model.SourceContext(project, branch,
-                                                     fn, trusted)
-                if trusted:
-                    incdata = TenantParser._parseConfigProjectLayout(
-                        data, source_context)
-                else:
-                    incdata = TenantParser._parseUntrustedProjectLayout(
-                        data, source_context)
-            else:
+            loaded = None
+            for fn in fns:
+                data = files.getFile(project.source.connection.connection_name,
+                                     project.name, branch, fn)
+                if data:
+                    source_context = model.SourceContext(project, branch,
+                                                         fn, trusted)
+                    # Prevent mixing configuration sources
+                    conf_root = fn.split('/')[0]
+                    if loaded and loaded != conf_root:
+                        TenantParser.log.warning(
+                            "Multiple configurations in %s" % source_context)
+                        continue
+                    loaded = conf_root
+
+                    if trusted:
+                        incdata = TenantParser._parseConfigProjectLayout(
+                            data, source_context)
+                    else:
+                        incdata = TenantParser._parseUntrustedProjectLayout(
+                            data, source_context)
+
+                    config.extend(incdata)
+
+            if not loaded:
                 if trusted:
                     incdata = project.unparsed_config
                 else:
                     incdata = project.unparsed_branch_config.get(branch)
-            if incdata:
-                config.extend(incdata)
+                if incdata:
+                    config.extend(incdata)
 
     def createDynamicLayout(self, tenant, files,
                             include_config_projects=False):
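
With the loader changes above, a repo-branch may provide its in-repo
configuration either as a single zuul.yaml (or .zuul.yaml) file or split
across a zuul.d (or .zuul.d) directory of .yaml files; directory entries are
read in sorted order, and mixing a file with a directory in the same
repo-branch only logs a warning and skips the extra source. A sketch of the
effective lookup order, with illustrative file names:

    # Files reported for one repo-branch (illustrative).
    files = ['zuul.d/projects.yaml', 'zuul.d/jobs.yaml']
    fns = (['zuul.yaml', '.zuul.yaml'] +
           sorted(f for f in files if f.startswith('zuul.d/')) +
           sorted(f for f in files if f.startswith('.zuul.d/')))
    # -> ['zuul.yaml', '.zuul.yaml', 'zuul.d/jobs.yaml',
    #     'zuul.d/projects.yaml']; names not present are simply skipped.
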
diff --git a/zuul/driver/bubblewrap/__init__.py b/zuul/driver/bubblewrap/__init__.py
index 95b09e0..5ec2448 100644
--- a/zuul/driver/bubblewrap/__init__.py
+++ b/zuul/driver/bubblewrap/__init__.py
@@ -70,34 +70,11 @@
     name = 'bubblewrap'
     log = logging.getLogger("zuul.BubblewrapDriver")
 
-    bwrap_command = [
-        'bwrap',
-        '--dir', '/tmp',
-        '--tmpfs', '/tmp',
-        '--dir', '/var',
-        '--dir', '/var/tmp',
-        '--dir', '/run/user/{uid}',
-        '--ro-bind', '/usr', '/usr',
-        '--ro-bind', '/lib', '/lib',
-        '--ro-bind', '/lib64', '/lib64',
-        '--ro-bind', '/bin', '/bin',
-        '--ro-bind', '/sbin', '/sbin',
-        '--ro-bind', '/etc/resolv.conf', '/etc/resolv.conf',
-        '--ro-bind', '{ssh_auth_sock}', '{ssh_auth_sock}',
-        '--dir', '{work_dir}',
-        '--bind', '{work_dir}', '{work_dir}',
-        '--dev', '/dev',
-        '--chdir', '{work_dir}',
-        '--unshare-all',
-        '--share-net',
-        '--die-with-parent',
-        '--uid', '{uid}',
-        '--gid', '{gid}',
-        '--file', '{uid_fd}', '/etc/passwd',
-        '--file', '{gid_fd}', '/etc/group',
-    ]
     mounts_map = {'rw': [], 'ro': []}
 
+    def __init__(self):
+        self.bwrap_command = self._bwrap_command()
+
     def reconfigure(self, tenant):
         pass
 
@@ -160,6 +137,38 @@
 
         return wrapped_popen
 
+    def _bwrap_command(self):
+        bwrap_command = [
+            'bwrap',
+            '--dir', '/tmp',
+            '--tmpfs', '/tmp',
+            '--dir', '/var',
+            '--dir', '/var/tmp',
+            '--dir', '/run/user/{uid}',
+            '--ro-bind', '/usr', '/usr',
+            '--ro-bind', '/lib', '/lib',
+            '--ro-bind', '/bin', '/bin',
+            '--ro-bind', '/sbin', '/sbin',
+            '--ro-bind', '/etc/resolv.conf', '/etc/resolv.conf',
+            '--ro-bind', '{ssh_auth_sock}', '{ssh_auth_sock}',
+            '--dir', '{work_dir}',
+            '--bind', '{work_dir}', '{work_dir}',
+            '--dev', '/dev',
+            '--chdir', '{work_dir}',
+            '--unshare-all',
+            '--share-net',
+            '--die-with-parent',
+            '--uid', '{uid}',
+            '--gid', '{gid}',
+            '--file', '{uid_fd}', '/etc/passwd',
+            '--file', '{gid_fd}', '/etc/group',
+        ]
+
+        if os.path.isdir('/lib64'):
+            bwrap_command.extend(['--ro-bind', '/lib64', '/lib64'])
+
+        return bwrap_command
+
 
 def main(args=None):
     logging.basicConfig(level=logging.DEBUG)
diff --git a/zuul/driver/github/githubconnection.py b/zuul/driver/github/githubconnection.py
index 838cba5..1a9e37b 100644
--- a/zuul/driver/github/githubconnection.py
+++ b/zuul/driver/github/githubconnection.py
@@ -75,6 +75,8 @@
 
         try:
             self.__dispatch_event(request)
+        except webob.exc.HTTPNotFound:
+            raise
         except:
             self.log.exception("Exception handling Github event:")
 
@@ -92,7 +94,8 @@
         except AttributeError:
             message = "Unhandled X-Github-Event: {0}".format(event)
             self.log.debug(message)
-            raise webob.exc.HTTPBadRequest(message)
+            # Returns empty 200 on unhandled events
+            raise webob.exc.HTTPOk()
 
         try:
             json_body = request.json_body
@@ -117,6 +120,8 @@
 
         try:
             event = method(json_body)
+        except webob.exc.HTTPNotFound:
+            raise
         except:
             self.log.exception('Exception when handling event:')
             event = None
@@ -219,6 +224,14 @@
         event.action = body.get('action')
         return event
 
+    def _event_ping(self, body):
+        project_name = body['repository']['full_name']
+        if not self.connection.getProject(project_name):
+            self.log.warning("Ping received for unknown project %s" %
+                             project_name)
+            raise webob.exc.HTTPNotFound("Sorry, this project is not "
+                                         "registered")
+
     def _event_status(self, body):
         action = body.get('action')
         if action == 'pending':
@@ -340,9 +353,9 @@
         self._change_cache = {}
         self.projects = {}
         self.git_ssh_key = self.connection_config.get('sshkey')
-        self.git_host = self.connection_config.get('git_host', 'github.com')
+        self.server = self.connection_config.get('server', 'github.com')
         self.canonical_hostname = self.connection_config.get(
-            'canonical_hostname', self.git_host)
+            'canonical_hostname', self.server)
         self.source = driver.getSource(self)
 
         self._github = None
@@ -362,7 +375,7 @@
         # The regex is based on the connection host. We do not yet support
         # cross-connection dependency gathering
         self.depends_on_re = re.compile(
-            r"^Depends-On: https://%s/.+/.+/pull/[0-9]+$" % self.git_host,
+            r"^Depends-On: https://%s/.+/.+/pull/[0-9]+$" % self.server,
             re.MULTILINE | re.IGNORECASE)
 
     def onLoad(self):
@@ -375,8 +388,8 @@
         self.unregisterHttpHandler(self.payload_path)
 
     def _createGithubClient(self):
-        if self.git_host != 'github.com':
-            url = 'https://%s/' % self.git_host
+        if self.server != 'github.com':
+            url = 'https://%s/' % self.server
             github = github3.GitHubEnterprise(url)
         else:
             github = github3.GitHub()
@@ -551,7 +564,7 @@
 
         # This leaves off the protocol, but looks for the specific GitHub
         # hostname, the org/project, and the pull request number.
-        pattern = 'Depends-On %s/%s/pull/%s' % (self.git_host,
+        pattern = 'Depends-On %s/%s/pull/%s' % (self.server,
                                                 change.project.name,
                                                 change.number)
         query = '%s type:pr is:open in:body' % pattern
@@ -595,6 +608,9 @@
                                              change.number)
         change.labels = change.pr.get('labels')
         change.body = change.pr.get('body')
+        # ensure body is at least an empty string
+        if not change.body:
+            change.body = ''
 
         if history is None:
             history = []
@@ -639,18 +655,18 @@
 
     def getGitUrl(self, project):
         if self.git_ssh_key:
-            return 'ssh://git@%s/%s.git' % (self.git_host, project)
+            return 'ssh://git@%s/%s.git' % (self.server, project)
 
         if self.app_id:
             installation_key = self._get_installation_key(project)
             return 'https://x-access-token:%s@%s/%s' % (installation_key,
-                                                        self.git_host,
+                                                        self.server,
                                                         project)
 
-        return 'https://%s/%s' % (self.git_host, project)
+        return 'https://%s/%s' % (self.server, project)
 
     def getGitwebUrl(self, project, sha=None):
-        url = 'https://%s/%s' % (self.git_host, project)
+        url = 'https://%s/%s' % (self.server, project)
         if sha is not None:
             url += '/commit/%s' % sha
         return url
@@ -763,8 +779,19 @@
                 # if there are multiple reviews per user, keep the newest
                 # note that this breaks the ability to set the 'older-than'
                 # option on a review requirement.
+                # BUT do not keep the latest if it's a 'commented' type and the
+                # previous review was 'approved' or 'changes_requested', as
+                # the GitHub model does not change the vote if a comment is
+                # added after the fact. THANKS GITHUB!
                 if review['grantedOn'] > reviews[user]['grantedOn']:
-                    reviews[user] = review
+                    if (review['type'] == 'commented' and reviews[user]['type']
+                            in ('approved', 'changes_requested')):
+                        self.log.debug("Discarding comment review %s due to "
+                                       "an existing vote %s" % (review,
+                                                                reviews[user]))
+                        pass
+                    else:
+                        reviews[user] = review
 
         return reviews.values()
 
@@ -785,7 +812,7 @@
         return GithubUser(self.getGithubClient(), login)
 
     def getUserUri(self, login):
-        return 'https://%s/%s' % (self.git_host, login)
+        return 'https://%s/%s' % (self.server, login)
 
     def getRepoPermission(self, project, login):
         github = self.getGithubClient(project)
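
The review handling above keeps a user's newest review, except that a later
comment-only review no longer masks an earlier vote. An illustrative pair of
review dicts from the same user (field values invented) showing which one
survives:

    earlier = {'type': 'approved', 'grantedOn': 1500000000}
    later = {'type': 'commented', 'grantedOn': 1500000600}
    # 'later' is discarded: GitHub keeps the earlier approval as the
    # user's standing vote, so Zuul does the same.
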
diff --git a/zuul/driver/github/githubreporter.py b/zuul/driver/github/githubreporter.py
index 72087bf..ea41ccd 100644
--- a/zuul/driver/github/githubreporter.py
+++ b/zuul/driver/github/githubreporter.py
@@ -85,8 +85,8 @@
         url_pattern = self.config.get('status-url')
         if not url_pattern:
             sched_config = self.connection.sched.config
-            if sched_config.has_option('zuul', 'status_url'):
-                url_pattern = sched_config.get('zuul', 'status_url')
+            if sched_config.has_option('webapp', 'status_url'):
+                url_pattern = sched_config.get('webapp', 'status_url')
         url = item.formatUrlPattern(url_pattern) if url_pattern else ''
 
         description = ''
diff --git a/zuul/executor/client.py b/zuul/executor/client.py
index aaef34e..442d1c5 100644
--- a/zuul/executor/client.py
+++ b/zuul/executor/client.py
@@ -284,6 +284,7 @@
                               host_keys=node.host_keys,
                               provider=node.provider,
                               region=node.region,
+                              ssh_port=node.ssh_port,
                               interface_ip=node.interface_ip,
                               public_ipv6=node.public_ipv6,
                               public_ipv4=node.public_ipv4))
@@ -321,9 +322,9 @@
                     make_project_dict(project,
                                       job_project.override_branch))
                 projects.add(project)
-        for item in all_items:
-            if item.change.project not in projects:
-                project = item.change.project
+        for i in all_items:
+            if i.change.project not in projects:
+                project = i.change.project
                 params['projects'].append(make_project_dict(project))
                 projects.add(project)
 
@@ -340,12 +341,6 @@
         build.__gearman_worker = None
         self.builds[uuid] = build
 
-        # NOTE(pabelanger): Rather then looping forever, check to see if job
-        # has passed attempts limit.
-        if item.current_build_set.getTries(job.name) > job.attempts:
-            self.onBuildCompleted(gearman_job, 'RETRY_LIMIT')
-            return build
-
         if pipeline.precedence == zuul.model.PRECEDENCE_NORMAL:
             precedence = gear.PRECEDENCE_NORMAL
         elif pipeline.precedence == zuul.model.PRECEDENCE_HIGH:
@@ -420,7 +415,11 @@
             if result is None:
                 result = data.get('result')
             if result is None:
-                build.retry = True
+                if (build.build_set.getTries(build.job.name) >=
+                    build.job.attempts):
+                    result = 'RETRY_LIMIT'
+                else:
+                    build.retry = True
             self.log.info("Build %s complete, result %s" %
                           (job, result))
             self.sched.onBuildCompleted(build, result)
diff --git a/zuul/executor/server.py b/zuul/executor/server.py
index fe757b6..9bf4f8d 100644
--- a/zuul/executor/server.py
+++ b/zuul/executor/server.py
@@ -400,7 +400,7 @@
         self.merger = self._getMerger(self.merge_root)
         self.update_queue = DeduplicateQueue()
 
-        state_dir = get_default(self.config, 'zuul', 'state_dir',
+        state_dir = get_default(self.config, 'executor', 'state_dir',
                                 '/var/lib/zuul', expand_user=True)
         path = os.path.join(state_dir, 'executor.socket')
         self.command_socket = commandsocket.CommandSocket(path)
@@ -641,7 +641,8 @@
         task.wait()
         with self.merger_lock:
             files = self.merger.getFiles(args['connection'], args['project'],
-                                         args['branch'], args['files'])
+                                         args['branch'], args['files'],
+                                         args.get('dirs', []))
         result = dict(updated=True,
                       files=files,
                       zuul_url=self.zuul_url)
@@ -651,6 +652,7 @@
         args = json.loads(job.arguments)
         with self.merger_lock:
             ret = self.merger.mergeChanges(args['items'], args.get('files'),
+                                           args.get('dirs', []),
                                            args.get('repo_state'))
         result = dict(merged=(ret is not None),
                       zuul_url=self.zuul_url)
@@ -739,12 +741,11 @@
                     self.log.exception("Error stopping SSH agent:")
 
     def _execute(self):
-        self.log.debug("Job %s: beginning" % (self.job.unique,))
-        self.log.debug("Job %s: args: %s" % (self.job.unique,
-                                             self.job.arguments,))
-        self.log.debug("Job %s: job root at %s" %
-                       (self.job.unique, self.jobdir.root))
         args = json.loads(self.job.arguments)
+        self.log.debug("Beginning job %s for ref %s" %
+                       (self.job.name, args['vars']['zuul']['ref']))
+        self.log.debug("Args: %s" % (self.job.arguments,))
+        self.log.debug("Job root: %s" % (self.jobdir.root,))
         tasks = []
         projects = set()
 
@@ -935,9 +936,11 @@
             # results in the wrong thing being in interface_ip
             # TODO(jeblair): Move this notice to the docs.
             ip = node.get('interface_ip')
+            port = node.get('ssh_port', 22)
             host_vars = dict(
                 ansible_host=ip,
                 ansible_user=self.executor_server.default_username,
+                ansible_port=port,
                 nodepool=dict(
                     az=node.get('az'),
                     provider=node.get('provider'),
@@ -945,7 +948,10 @@
 
             host_keys = []
             for key in node.get('host_keys'):
-                host_keys.append("%s %s" % (ip, key))
+                if port != 22:
+                    host_keys.append("[%s]:%s %s" % (ip, port, key))
+                else:
+                    host_keys.append("%s %s" % (ip, key))
 
             hosts.append(dict(
                 name=node['name'],
@@ -1293,7 +1299,7 @@
                               '%s_ro_dirs' % opt_prefix)
         rw_dirs = get_default(self.executor_server.config, 'executor',
                               '%s_rw_dirs' % opt_prefix)
-        state_dir = get_default(self.executor_server.config, 'zuul',
+        state_dir = get_default(self.executor_server.config, 'executor',
                                 'state_dir', '/var/lib/zuul', expand_user=True)
         ro_dirs = ro_dirs.split(":") if ro_dirs else []
         rw_dirs = rw_dirs.split(":") if rw_dirs else []
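
With the ssh_port plumbing above, a node listening on a non-default port gets
an ansible_port inventory variable and a known_hosts entry in OpenSSH's
bracketed host:port form. A small sketch of the resulting values for a
hypothetical node:

    node = {'interface_ip': '203.0.113.5', 'ssh_port': 2222,
            'host_keys': ['ssh-ed25519 AAAAC3Nza...']}
    ip = node['interface_ip']
    port = node.get('ssh_port', 22)
    # Inventory: ansible_host=203.0.113.5, ansible_port=2222
    key_line = ("[%s]:%s %s" % (ip, port, node['host_keys'][0])
                if port != 22 else "%s %s" % (ip, node['host_keys'][0]))
    # key_line == "[203.0.113.5]:2222 ssh-ed25519 AAAAC3Nza..."
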
diff --git a/zuul/lib/log_streamer.py b/zuul/lib/log_streamer.py
index 67c733e..57afef9 100644
--- a/zuul/lib/log_streamer.py
+++ b/zuul/lib/log_streamer.py
@@ -15,6 +15,7 @@
 # License for the specific language governing permissions and limitations
 # under the License.
 
+import logging
 import os
 import os.path
 import pwd
@@ -212,6 +213,8 @@
     '''
 
     def __init__(self, user, host, port, jobdir_root):
+        self.log = logging.getLogger('zuul.lib.LogStreamer')
+        self.log.debug("LogStreamer starting on port %s", port)
         self.server = CustomForkingTCPServer((host, port),
                                              RequestHandler,
                                              user=user,
@@ -227,3 +230,4 @@
         if self.thd.isAlive():
             self.server.shutdown()
             self.server.server_close()
+            self.log.debug("LogStreamer stopped")
diff --git a/zuul/manager/__init__.py b/zuul/manager/__init__.py
index 01429ce..09b09d7 100644
--- a/zuul/manager/__init__.py
+++ b/zuul/manager/__init__.py
@@ -480,7 +480,7 @@
             self.log.debug("Preparing dynamic layout for: %s" % item.change)
             return self._loadDynamicLayout(item)
 
-    def scheduleMerge(self, item, files=None):
+    def scheduleMerge(self, item, files=None, dirs=None):
         build_set = item.current_build_set
 
         if not hasattr(item.change, 'branch'):
@@ -490,12 +490,12 @@
             build_set.merge_state = build_set.COMPLETE
             return True
 
-        self.log.debug("Scheduling merge for item %s (files: %s)" %
-                       (item, files))
+        self.log.debug("Scheduling merge for item %s (files: %s, dirs: %s)" %
+                       (item, files, dirs))
         build_set = item.current_build_set
         build_set.merge_state = build_set.PENDING
         self.sched.merger.mergeChanges(build_set.merger_items,
-                                       item.current_build_set, files,
+                                       item.current_build_set, files, dirs,
                                        precedence=self.pipeline.precedence)
         return False
 
@@ -506,7 +506,9 @@
         if not build_set.ref:
             build_set.setConfiguration()
         if build_set.merge_state == build_set.NEW:
-            return self.scheduleMerge(item, ['zuul.yaml', '.zuul.yaml'])
+            return self.scheduleMerge(item,
+                                      files=['zuul.yaml', '.zuul.yaml'],
+                                      dirs=['zuul.d', '.zuul.d'])
         if build_set.merge_state == build_set.PENDING:
             return False
         if build_set.unable_to_merge:
diff --git a/zuul/merger/client.py b/zuul/merger/client.py
index e92d9fd..e354d5d 100644
--- a/zuul/merger/client.py
+++ b/zuul/merger/client.py
@@ -108,19 +108,21 @@
                                timeout=300)
         return job
 
-    def mergeChanges(self, items, build_set, files=None, repo_state=None,
-                     precedence=zuul.model.PRECEDENCE_NORMAL):
+    def mergeChanges(self, items, build_set, files=None, dirs=None,
+                     repo_state=None, precedence=zuul.model.PRECEDENCE_NORMAL):
         data = dict(items=items,
                     files=files,
+                    dirs=dirs,
                     repo_state=repo_state)
         self.submitJob('merger:merge', data, build_set, precedence)
 
-    def getFiles(self, connection_name, project_name, branch, files,
+    def getFiles(self, connection_name, project_name, branch, files, dirs=[],
                  precedence=zuul.model.PRECEDENCE_HIGH):
         data = dict(connection=connection_name,
                     project=project_name,
                     branch=branch,
-                    files=files)
+                    files=files,
+                    dirs=dirs)
         job = self.submitJob('merger:cat', data, None, precedence)
         return job
 
diff --git a/zuul/merger/merger.py b/zuul/merger/merger.py
index 2ac0de8..93340fa 100644
--- a/zuul/merger/merger.py
+++ b/zuul/merger/merger.py
@@ -254,7 +254,7 @@
             origin.fetch()
         origin.fetch(tags=True)
 
-    def getFiles(self, files, branch=None, commit=None):
+    def getFiles(self, files, dirs=[], branch=None, commit=None):
         ret = {}
         repo = self.createRepoObject()
         if branch:
@@ -266,6 +266,14 @@
                 ret[fn] = tree[fn].data_stream.read().decode('utf8')
             else:
                 ret[fn] = None
+        if dirs:
+            for dn in dirs:
+                if dn not in tree:
+                    continue
+                for blob in tree[dn].traverse():
+                    if blob.path.endswith(".yaml"):
+                        ret[blob.path] = blob.data_stream.read().decode(
+                            'utf-8')
         return ret
 
     def deleteRemote(self, remote):
@@ -452,7 +460,7 @@
                 return None
         return commit
 
-    def mergeChanges(self, items, files=None, repo_state=None):
+    def mergeChanges(self, items, files=None, dirs=None, repo_state=None):
         # connection+project+branch -> commit
         recent = {}
         commit = None
@@ -470,9 +478,9 @@
             commit = self._mergeItem(item, recent, repo_state)
             if not commit:
                 return None
-            if files:
+            if files or dirs:
                 repo = self.getRepo(item['connection'], item['project'])
-                repo_files = repo.getFiles(files, commit=commit)
+                repo_files = repo.getFiles(files, dirs, commit=commit)
                 read_files.append(dict(
                     connection=item['connection'],
                     project=item['project'],
@@ -483,6 +491,6 @@
             ret_recent[k] = v.hexsha
         return commit.hexsha, read_files, repo_state, ret_recent
 
-    def getFiles(self, connection_name, project_name, branch, files):
+    def getFiles(self, connection_name, project_name, branch, files, dirs=[]):
         repo = self.getRepo(connection_name, project_name)
-        return repo.getFiles(files, branch=branch)
+        return repo.getFiles(files, dirs, branch=branch)
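
A minimal usage sketch for the extended getFiles (connection, project and
branch names are illustrative): any .yaml blobs found under the listed
directories are returned alongside the explicitly named files, and files not
present in the tree map to None.

    # Assuming 'merger' is an existing zuul.merger.merger.Merger instance
    # and the project carries a zuul.d directory on master.
    files = merger.getFiles('gerrit', 'org/project', 'master',
                            files=['zuul.yaml', '.zuul.yaml'],
                            dirs=['zuul.d', '.zuul.d'])
    # e.g. {'zuul.yaml': None, '.zuul.yaml': None,
    #       'zuul.d/jobs.yaml': '- job: ...'}
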
diff --git a/zuul/merger/server.py b/zuul/merger/server.py
index cbc4cb8..555a4bc 100644
--- a/zuul/merger/server.py
+++ b/zuul/merger/server.py
@@ -94,8 +94,9 @@
 
     def merge(self, job):
         args = json.loads(job.arguments)
-        ret = self.merger.mergeChanges(args['items'], args.get('files'),
-                                       args.get('repo_state'))
+        ret = self.merger.mergeChanges(
+            args['items'], args.get('files'),
+            args.get('dirs'), args.get('repo_state'))
         result = dict(merged=(ret is not None),
                       zuul_url=self.zuul_url)
         if ret is None:
@@ -109,7 +110,8 @@
         args = json.loads(job.arguments)
         self.merger.updateRepo(args['connection'], args['project'])
         files = self.merger.getFiles(args['connection'], args['project'],
-                                     args['branch'], args['files'])
+                                     args['branch'], args['files'],
+                                     args.get('dirs'))
         result = dict(updated=True,
                       files=files,
                       zuul_url=self.zuul_url)
diff --git a/zuul/model.py b/zuul/model.py
index 17301b7..4744bbe 100644
--- a/zuul/model.py
+++ b/zuul/model.py
@@ -358,6 +358,7 @@
         self.public_ipv4 = None
         self.private_ipv4 = None
         self.public_ipv6 = None
+        self.ssh_port = 22
         self._keys = []
         self.az = None
         self.provider = None
@@ -1617,7 +1618,7 @@
                 result = job.success_message
             if job.success_url:
                 pattern = job.success_url
-        elif result == 'FAILURE':
+        else:
             if job.failure_message:
                 result = job.failure_message
             if job.failure_url:
@@ -1845,7 +1846,9 @@
         return set()
 
     def updatesConfig(self):
-        if 'zuul.yaml' in self.files or '.zuul.yaml' in self.files:
+        if ('zuul.yaml' in self.files or '.zuul.yaml' in self.files or
+                any(fn.startswith("zuul.d/") or fn.startswith(".zuul.d/")
+                    for fn in self.files)):
             return True
         return False
 
diff --git a/zuul/reporter/__init__.py b/zuul/reporter/__init__.py
index 0ac5766..95b9208 100644
--- a/zuul/reporter/__init__.py
+++ b/zuul/reporter/__init__.py
@@ -14,6 +14,7 @@
 
 import abc
 import logging
+from zuul.lib.config import get_default
 
 
 class BaseReporter(object, metaclass=abc.ABCMeta):
@@ -69,10 +70,8 @@
         return ret
 
     def _formatItemReportStart(self, item, with_jobs=True):
-        status_url = ''
-        if self.connection.sched.config.has_option('zuul', 'status_url'):
-            status_url = self.connection.sched.config.get('zuul',
-                                                          'status_url')
+        status_url = get_default(self.connection.sched.config,
+                                 'webapp', 'status_url', '')
         return item.pipeline.start_message.format(pipeline=item.pipeline,
                                                   status_url=status_url)
 
diff --git a/zuul/rpcclient.py b/zuul/rpcclient.py
index 6f0d34b..fd3517f 100644
--- a/zuul/rpcclient.py
+++ b/zuul/rpcclient.py
@@ -86,3 +86,11 @@
 
     def shutdown(self):
         self.gearman.shutdown()
+
+    def get_job_log_stream_address(self, uuid, logfile='console.log'):
+        data = {'uuid': uuid, 'logfile': logfile}
+        job = self.submitJob('zuul:get_job_log_stream_address', data)
+        if job.failure:
+            return False
+        else:
+            return json.loads(job.data[0])
diff --git a/zuul/rpclistener.py b/zuul/rpclistener.py
index be3b7d1..6543c91 100644
--- a/zuul/rpclistener.py
+++ b/zuul/rpclistener.py
@@ -53,6 +53,7 @@
         self.worker.registerFunction("zuul:enqueue_ref")
         self.worker.registerFunction("zuul:promote")
         self.worker.registerFunction("zuul:get_running_jobs")
+        self.worker.registerFunction("zuul:get_job_log_stream_address")
 
     def stop(self):
         self.log.debug("Stopping")
@@ -173,3 +174,29 @@
                         running_items.append(item.formatJSON())
 
         job.sendWorkComplete(json.dumps(running_items))
+
+    def handle_get_job_log_stream_address(self, job):
+        # TODO: map log files to ports. Currently there is only one
+        #       log stream for a given job. But many jobs produce many
+        #       log files, so this is forwards compatible with a future
+        #       where there are more logs to potentially request than
+        #       "console.log"
+        def find_build(uuid):
+            for tenant in self.sched.abide.tenants.values():
+                for pipeline_name, pipeline in tenant.layout.pipelines.items():
+                    for queue in pipeline.queues:
+                        for item in queue.queue:
+                            for bld in item.current_build_set.getBuilds():
+                                if bld.uuid == uuid:
+                                    return bld
+            return None
+
+        args = json.loads(job.arguments)
+        uuid = args['uuid']
+        # TODO: logfile = args['logfile']
+        job_log_stream_address = {}
+        build = find_build(uuid)
+        if build:
+            job_log_stream_address['server'] = build.worker.hostname
+            job_log_stream_address['port'] = build.worker.log_port
+        job.sendWorkComplete(json.dumps(job_log_stream_address))
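
Together with the client method added to zuul/rpcclient.py above, this new
Gearman function is how zuul-web discovers where a build's console stream
lives. A minimal sketch of the call, with illustrative host names; an empty
dict is returned when the build is not found, and False on a Gearman failure:

    import zuul.rpcclient

    # Connect to the scheduler's Gearman server (host and port illustrative).
    client = zuul.rpcclient.RPCClient('gearman.example.com', 4730)
    addr = client.get_job_log_stream_address('<build uuid>')
    # e.g. {'server': 'ze01.example.com', 'port': 7900} for a running build
    client.shutdown()
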
diff --git a/zuul/scheduler.py b/zuul/scheduler.py
index 1476c38..fe6a673 100644
--- a/zuul/scheduler.py
+++ b/zuul/scheduler.py
@@ -372,12 +372,12 @@
         self.log.debug("Waiting for exit")
 
     def _get_queue_pickle_file(self):
-        state_dir = get_default(self.config, 'zuul', 'state_dir',
+        state_dir = get_default(self.config, 'scheduler', 'state_dir',
                                 '/var/lib/zuul', expand_user=True)
         return os.path.join(state_dir, 'queue.pickle')
 
     def _get_time_database_dir(self):
-        state_dir = get_default(self.config, 'zuul', 'state_dir',
+        state_dir = get_default(self.config, 'scheduler', 'state_dir',
                                 '/var/lib/zuul', expand_user=True)
         d = os.path.join(state_dir, 'times')
         if not os.path.exists(d):
@@ -385,7 +385,7 @@
         return d
 
     def _get_project_key_dir(self):
-        state_dir = get_default(self.config, 'zuul', 'state_dir',
+        state_dir = get_default(self.config, 'scheduler', 'state_dir',
                                 '/var/lib/zuul', expand_user=True)
         key_dir = os.path.join(state_dir, 'keys')
         if not os.path.exists(key_dir):
@@ -547,6 +547,8 @@
                     else:
                         items_to_remove.append(item)
             for item in items_to_remove:
+                self.log.warning(
+                    "Removing item %s during reconfiguration" % (item,))
                 for build in item.current_build_set.getBuilds():
                     builds_to_cancel.append(build)
             for build in builds_to_cancel:
diff --git a/zuul/web.py b/zuul/web.py
new file mode 100644
index 0000000..2ef65fe
--- /dev/null
+++ b/zuul/web.py
@@ -0,0 +1,232 @@
+#!/usr/bin/env python
+# Copyright (c) 2017 Red Hat
+#
+# Licensed under the Apache License, Version 2.0 (the "License");
+# you may not use this file except in compliance with the License.
+# You may obtain a copy of the License at
+#
+#    http://www.apache.org/licenses/LICENSE-2.0
+#
+# Unless required by applicable law or agreed to in writing, software
+# distributed under the License is distributed on an "AS IS" BASIS,
+# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or
+# implied.
+# See the License for the specific language governing permissions and
+# limitations under the License.
+
+
+import asyncio
+import json
+import logging
+import uvloop
+
+import aiohttp
+from aiohttp import web
+
+import zuul.rpcclient
+
+
+class LogStreamingHandler(object):
+    log = logging.getLogger("zuul.web.LogStreamingHandler")
+
+    def __init__(self, loop, gear_server, gear_port,
+                 ssl_key=None, ssl_cert=None, ssl_ca=None):
+        self.event_loop = loop
+        self.gear_server = gear_server
+        self.gear_port = gear_port
+        self.ssl_key = ssl_key
+        self.ssl_cert = ssl_cert
+        self.ssl_ca = ssl_ca
+
+    def _getPortLocation(self, job_uuid):
+        '''
+        Query Gearman for the executor running the given job.
+
+        :param str job_uuid: The job UUID we want to stream.
+        '''
+        # TODO: Fetch the entire list of uuid/file/server/ports once and
+        #       share that, and fetch a new list on cache misses perhaps?
+        # TODO: Avoid recreating a client for each request.
+        rpc = zuul.rpcclient.RPCClient(self.gear_server, self.gear_port,
+                                       self.ssl_key, self.ssl_cert,
+                                       self.ssl_ca)
+        ret = rpc.get_job_log_stream_address(job_uuid)
+        rpc.shutdown()
+        return ret
+
+    async def _fingerClient(self, ws, server, port, job_uuid):
+        '''
+        Create a client to connect to the finger streamer and pull results.
+
+        :param aiohttp.web.WebSocketResponse ws: The websocket response object.
+        :param str server: The executor server running the job.
+        :param str port: The executor server port.
+        :param str job_uuid: The job UUID to stream.
+        '''
+        self.log.debug("Connecting to finger server %s:%s", server, port)
+        reader, writer = await asyncio.open_connection(host=server, port=port,
+                                                       loop=self.event_loop)
+
+        self.log.debug("Sending finger request for %s", job_uuid)
+        msg = "%s\n" % job_uuid    # Must have a trailing newline!
+
+        writer.write(msg.encode('utf8'))
+        await writer.drain()
+
+        while True:
+            data = await reader.read(1024)
+            if data:
+                await ws.send_str(data.decode('utf8'))
+            else:
+                writer.close()
+                return
+
+    async def _streamLog(self, ws, request):
+        '''
+        Stream the log for the requested job back to the client.
+
+        :param aiohttp.web.WebSocketResponse ws: The websocket response object.
+        :param dict request: The client request parameters.
+        '''
+        for key in ('uuid', 'logfile'):
+            if key not in request:
+                return (4000, "'{key}' missing from request payload".format(
+                        key=key))
+
+        # Schedule the blocking gearman work in an Executor
+        gear_task = self.event_loop.run_in_executor(
+            None, self._getPortLocation, request['uuid'])
+
+        try:
+            port_location = await asyncio.wait_for(gear_task, 10)
+        except asyncio.TimeoutError:
+            return (4010, "Gearman timeout")
+
+        if not port_location:
+            return (4011, "Error with Gearman")
+
+        await self._fingerClient(
+            ws, port_location['server'], port_location['port'], request['uuid']
+        )
+
+        return (1000, "No more data")
+
+    async def processRequest(self, request):
+        '''
+        Handle a client websocket request for log streaming.
+
+        :param aiohttp.web.Request request: The client request.
+        '''
+        try:
+            ws = web.WebSocketResponse()
+            await ws.prepare(request)
+            async for msg in ws:
+                if msg.type == aiohttp.WSMsgType.TEXT:
+                    req = json.loads(msg.data)
+                    self.log.debug("Websocket request: %s", req)
+                    code, result = await self._streamLog(ws, req)
+
+                    # We expect to process only a single message. I.e., we
+                    # can stream only a single file at a time.
+                    await ws.close(code=code, message=result)
+                    break
+                elif msg.type == aiohttp.WSMsgType.ERROR:
+                    self.log.error(
+                        "Websocket connection closed with exception %s",
+                        ws.exception()
+                    )
+                    break
+                elif msg.type == aiohttp.WSMsgType.CLOSED:
+                    break
+        except Exception as e:
+            self.log.exception("Websocket exception:")
+            await ws.close(code=4009, message=str(e).encode('utf-8'))
+        return ws
+
+
+class ZuulWeb(object):
+
+    log = logging.getLogger("zuul.web.ZuulWeb")
+
+    def __init__(self, listen_address, listen_port,
+                 gear_server, gear_port,
+                 ssl_key=None, ssl_cert=None, ssl_ca=None):
+        self.listen_address = listen_address
+        self.listen_port = listen_port
+        self.gear_server = gear_server
+        self.gear_port = gear_port
+        self.ssl_key = ssl_key
+        self.ssl_cert = ssl_cert
+        self.ssl_ca = ssl_ca
+
+    async def _handleWebsocket(self, request):
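+        # A new LogStreamingHandler is created for each websocket request.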
+        handler = LogStreamingHandler(self.event_loop,
+                                      self.gear_server, self.gear_port,
+                                      self.ssl_key, self.ssl_cert, self.ssl_ca)
+        return await handler.processRequest(request)
+
+    def run(self, loop=None):
+        '''
+        Run the websocket daemon.
+
+        Because this method can be the target of a new thread, we need to
+        set the thread event loop here, rather than in __init__().
+
+        :param loop: The event loop to use. If not supplied, the default main
+            thread event loop is used. This should be supplied if ZuulWeb
+            is run within a separate (non-main) thread.
+        '''
+        routes = [
+            ('GET', '/console-stream', self._handleWebsocket)
+        ]
+
+        self.log.debug("ZuulWeb starting")
+        asyncio.set_event_loop_policy(uvloop.EventLoopPolicy())
+        user_supplied_loop = loop is not None
+        if not loop:
+            loop = asyncio.get_event_loop()
+        asyncio.set_event_loop(loop)
+
+        self.event_loop = loop
+
+        app = web.Application()
+        for method, path, handler in routes:
+            app.router.add_route(method, path, handler)
+        handler = app.make_handler(loop=self.event_loop)
+
+        # create the server
+        coro = self.event_loop.create_server(handler,
+                                             self.listen_address,
+                                             self.listen_port)
+        self.server = self.event_loop.run_until_complete(coro)
+
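+        # Future resolved by stop() to signal that the daemon should exit.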
+        self.term = asyncio.Future()
+
+        # run the event loop until stop() is called
+        self.event_loop.run_until_complete(self.term)
+
+        # cleanup
+        self.log.debug("ZuulWeb stopping")
+        self.server.close()
+        self.event_loop.run_until_complete(self.server.wait_closed())
+        self.event_loop.run_until_complete(app.shutdown())
+        self.event_loop.run_until_complete(handler.shutdown(60.0))
+        self.event_loop.run_until_complete(app.cleanup())
+        self.log.debug("ZuulWeb stopped")
+
+        # Only run these if we are controlling the loop - they need to be
+        # run from the main thread
+        if not user_supplied_loop:
+            loop.stop()
+            loop.close()
+
+    def stop(self):
+        self.event_loop.call_soon_threadsafe(self.term.set_result, True)
+
+
+if __name__ == "__main__":
+    logging.basicConfig(level=logging.DEBUG)
+    loop = asyncio.get_event_loop()
+    loop.set_debug(True)
+    # Example values for ad-hoc local testing; 4730 is gearman's default
+    # port, while the listen address and port are placeholders.
+    z = ZuulWeb(listen_address="127.0.0.1", listen_port=9000,
+                gear_server="127.0.0.1", gear_port=4730)
+    z.run(loop)