Merge "Allow loading logging config from yaml" into feature/zuulv3
diff --git a/etc/zuul.conf-sample b/etc/zuul.conf-sample
index d7b8eae..3de145a 100644
--- a/etc/zuul.conf-sample
+++ b/etc/zuul.conf-sample
@@ -10,6 +10,7 @@
 pidfile=/var/run/zuul/zuul.pid
 state_dir=/var/lib/zuul
 status_url=https://jenkins.example.com/zuul/status
+zookeeper_hosts=127.0.0.1:2181
 
 [merger]
 git_dir=/var/lib/zuul/git
diff --git a/tests/base.py b/tests/base.py
index 66fd85a..8802cfd 100755
--- a/tests/base.py
+++ b/tests/base.py
@@ -263,6 +263,16 @@
                  "comment": "This is a comment"}
         return event
 
+    def getChangeMergedEvent(self):
+        event = {"submitter": {"name": "Jenkins",
+                               "username": "jenkins"},
+                 "newRev": "29ed3b5f8f750a225c5be70235230e3a6ccb04d9",
+                 "patchSet": self.patchsets[-1],
+                 "change": self.data,
+                 "type": "change-merged",
+                 "eventCreatedOn": 1487613810}
+        return event
+
     def addApproval(self, category, value, username='reviewer_john',
                     granted_on=None, message=''):
         if not granted_on:
@@ -757,16 +767,24 @@
         self.launcher_server.lock.release()
         return result
 
-    def runAnsible(self, cmd, timeout, secure=False):
+    def runAnsible(self, cmd, timeout, trusted=False):
         build = self.launcher_server.job_builds[self.job.unique]
 
         if self.launcher_server._run_ansible:
             result = super(RecordingAnsibleJob, self).runAnsible(
-                cmd, timeout, secure=secure)
+                cmd, timeout, trusted=trusted)
         else:
             result = build.run()
         return result
 
+    def getHostList(self, args):
+        self.log.debug("hostlist")
+        hosts = super(RecordingAnsibleJob, self).getHostList(args)
+        for name, d in hosts:
+            d['ansible_connection'] = 'local'
+        hosts.append(('localhost', dict(ansible_connection='local')))
+        return hosts
+
 
 class FakeGearmanServer(gear.Server):
     """A Gearman server for use in tests.
@@ -1290,11 +1308,12 @@
             self.config, self.sched)
         self.nodepool = zuul.nodepool.Nodepool(self.sched)
         self.zk = zuul.zk.ZooKeeper()
-        self.zk.connect([self.zk_config])
+        self.zk.connect(self.zk_config)
 
-        self.fake_nodepool = FakeNodepool(self.zk_config.host,
-                                          self.zk_config.port,
-                                          self.zk_config.chroot)
+        self.fake_nodepool = FakeNodepool(
+            self.zk_chroot_fixture.zookeeper_host,
+            self.zk_chroot_fixture.zookeeper_port,
+            self.zk_chroot_fixture.zookeeper_chroot)
 
         self.sched.setLauncher(self.launch_client)
         self.sched.setMerger(self.merge_client)
@@ -1370,7 +1389,7 @@
 
     def setupZK(self):
         self.zk_chroot_fixture = self.useFixture(ChrootedKazooFixture())
-        self.zk_config = zuul.zk.ZooKeeperConnectionConfig(
+        self.zk_config = '%s:%s%s' % (
             self.zk_chroot_fixture.zookeeper_host,
             self.zk_chroot_fixture.zookeeper_port,
             self.zk_chroot_fixture.zookeeper_chroot)
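
Both fixtures above now hand zuul.zk.ZooKeeper.connect() a plain kazoo-style connection string instead of a list of ZooKeeperConnectionConfig objects. A minimal sketch of the new shape, with an illustrative port and chroot of the kind ChrootedKazooFixture generates:

import zuul.zk

# host:port followed by the chroot path, e.g. '127.0.0.1:43213/zuul-test-5f2a'
zk_config = '%s:%s%s' % ('127.0.0.1', 43213, '/zuul-test-5f2a')
zk = zuul.zk.ZooKeeper()
zk.connect(zk_config)
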
diff --git a/tests/fixtures/main.yaml b/tests/fixtures/main.yaml
deleted file mode 100644
index f9ec378..0000000
--- a/tests/fixtures/main.yaml
+++ /dev/null
@@ -1,4 +0,0 @@
-tenants:
-  - name: openstack
-    include:
-      - layout.yaml
diff --git a/tests/unit/test_model.py b/tests/unit/test_model.py
index 9dac383..9bd405e 100644
--- a/tests/unit/test_model.py
+++ b/tests/unit/test_model.py
@@ -197,7 +197,7 @@
         })
         layout.addJob(python27essex)
 
-        project_config = configloader.ProjectParser.fromYaml(tenant, layout, {
+        project_config = configloader.ProjectParser.fromYaml(tenant, layout, [{
             '_source_context': context,
             'name': 'project',
             'gate': {
@@ -205,7 +205,7 @@
                     'python27'
                 ]
             }
-        })
+        }])
         layout.addProjectConfig(project_config, update_pipeline=False)
 
         change = model.Change(project)
@@ -406,7 +406,7 @@
         })
         layout.addJob(python27diablo)
 
-        project_config = configloader.ProjectParser.fromYaml(tenant, layout, {
+        project_config = configloader.ProjectParser.fromYaml(tenant, layout, [{
             '_source_context': context,
             'name': 'project',
             'gate': {
@@ -414,7 +414,7 @@
                     {'python27': {'timeout': 70}}
                 ]
             }
-        })
+        }])
         layout.addProjectConfig(project_config, update_pipeline=False)
 
         change = model.Change(project)
@@ -471,7 +471,7 @@
         })
         layout.addJob(python27)
 
-        project_config = configloader.ProjectParser.fromYaml(tenant, layout, {
+        project_config = configloader.ProjectParser.fromYaml(tenant, layout, [{
             '_source_context': context,
             'name': 'project',
             'gate': {
@@ -479,7 +479,7 @@
                     'python27',
                 ]
             }
-        })
+        }])
         layout.addProjectConfig(project_config, update_pipeline=False)
 
         change = model.Change(project)
diff --git a/tests/unit/test_nodepool.py b/tests/unit/test_nodepool.py
index 6462f9a..19c7e05 100644
--- a/tests/unit/test_nodepool.py
+++ b/tests/unit/test_nodepool.py
@@ -30,22 +30,23 @@
         super(BaseTestCase, self).setUp()
 
         self.zk_chroot_fixture = self.useFixture(ChrootedKazooFixture())
-        self.zk_config = zuul.zk.ZooKeeperConnectionConfig(
+        self.zk_config = '%s:%s%s' % (
             self.zk_chroot_fixture.zookeeper_host,
             self.zk_chroot_fixture.zookeeper_port,
             self.zk_chroot_fixture.zookeeper_chroot)
 
         self.zk = zuul.zk.ZooKeeper()
-        self.zk.connect([self.zk_config])
+        self.zk.connect(self.zk_config)
 
         self.provisioned_requests = []
         # This class implements the scheduler methods zuul.nodepool
         # needs, so we pass 'self' as the scheduler.
         self.nodepool = zuul.nodepool.Nodepool(self)
 
-        self.fake_nodepool = FakeNodepool(self.zk_config.host,
-                                          self.zk_config.port,
-                                          self.zk_config.chroot)
+        self.fake_nodepool = FakeNodepool(
+            self.zk_chroot_fixture.zookeeper_host,
+            self.zk_chroot_fixture.zookeeper_port,
+            self.zk_chroot_fixture.zookeeper_chroot)
 
     def waitForRequests(self):
         # Wait until all requests are complete.
diff --git a/tests/unit/test_v3.py b/tests/unit/test_v3.py
index 27e2275..97002b2 100644
--- a/tests/unit/test_v3.py
+++ b/tests/unit/test_v3.py
@@ -17,7 +17,7 @@
 import os
 import textwrap
 
-from tests.base import AnsibleZuulTestCase
+from tests.base import AnsibleZuulTestCase, ZuulTestCase
 
 
 class TestMultipleTenants(AnsibleZuulTestCase):
@@ -63,7 +63,7 @@
                          "not affect tenant one")
 
 
-class TestInRepoConfig(AnsibleZuulTestCase):
+class TestInRepoConfig(ZuulTestCase):
     # A temporary class to hold new tests while others are disabled
 
     tenant_config_file = 'config/in-repo/main.yaml'
@@ -115,6 +115,8 @@
         self.assertHistory([
             dict(name='project-test2', result='SUCCESS', changes='1,1')])
 
+        self.fake_gerrit.addEvent(A.getChangeMergedEvent())
+
         # Now that the config change is landed, it should be live for
         # subsequent changes.
         B = self.fake_gerrit.addFakeChange('org/project', 'master', 'B')
@@ -127,6 +129,62 @@
             dict(name='project-test2', result='SUCCESS', changes='1,1'),
             dict(name='project-test2', result='SUCCESS', changes='2,1')])
 
+    def test_in_repo_branch(self):
+        in_repo_conf = textwrap.dedent(
+            """
+            - job:
+                name: project-test2
+
+            - project:
+                name: org/project
+                tenant-one-gate:
+                  jobs:
+                    - project-test2
+            """)
+
+        in_repo_playbook = textwrap.dedent(
+            """
+            - hosts: all
+              tasks: []
+            """)
+
+        file_dict = {'.zuul.yaml': in_repo_conf,
+                     'playbooks/project-test2.yaml': in_repo_playbook}
+        self.create_branch('org/project', 'stable')
+        A = self.fake_gerrit.addFakeChange('org/project', 'stable', 'A',
+                                           files=file_dict)
+        A.addApproval('code-review', 2)
+        self.fake_gerrit.addEvent(A.addApproval('approved', 1))
+        self.waitUntilSettled()
+        self.assertEqual(A.data['status'], 'MERGED')
+        self.assertEqual(A.reported, 2,
+                         "A should report start and success")
+        self.assertIn('tenant-one-gate', A.messages[1],
+                      "A should transit tenant-one gate")
+        self.assertHistory([
+            dict(name='project-test2', result='SUCCESS', changes='1,1')])
+        self.fake_gerrit.addEvent(A.getChangeMergedEvent())
+
+        # The config change should not affect master.
+        B = self.fake_gerrit.addFakeChange('org/project', 'master', 'B')
+        B.addApproval('code-review', 2)
+        self.fake_gerrit.addEvent(B.addApproval('approved', 1))
+        self.waitUntilSettled()
+        self.assertHistory([
+            dict(name='project-test2', result='SUCCESS', changes='1,1'),
+            dict(name='project-test1', result='SUCCESS', changes='2,1')])
+
+        # The config change should be live for further changes on
+        # stable.
+        C = self.fake_gerrit.addFakeChange('org/project', 'stable', 'C')
+        C.addApproval('code-review', 2)
+        self.fake_gerrit.addEvent(C.addApproval('approved', 1))
+        self.waitUntilSettled()
+        self.assertHistory([
+            dict(name='project-test2', result='SUCCESS', changes='1,1'),
+            dict(name='project-test1', result='SUCCESS', changes='2,1'),
+            dict(name='project-test2', result='SUCCESS', changes='3,1')])
+
 
 class TestAnsible(AnsibleZuulTestCase):
     # A temporary class to hold new tests while others are disabled
diff --git a/zuul/ansible/action/add_host.py b/zuul/ansible/action/add_host.py
index e41e4e1..d4b24aa 100644
--- a/zuul/ansible/action/add_host.py
+++ b/zuul/ansible/action/add_host.py
@@ -13,7 +13,8 @@
 # You should have received a copy of the GNU General Public License
 # along with this software.  If not, see <http://www.gnu.org/licenses/>.
 
-from zuul.ansible.plugins.action import add_host
+from zuul.ansible import paths
+add_host = paths._import_ansible_action_plugin("add_host")
 
 
 class ActionModule(add_host.ActionModule):
diff --git a/zuul/ansible/action/assemble.py b/zuul/ansible/action/assemble.py
index d0bff37..2cc7eb7 100644
--- a/zuul/ansible/action/assemble.py
+++ b/zuul/ansible/action/assemble.py
@@ -15,7 +15,7 @@
 
 
 from zuul.ansible import paths
-from zuul.ansible.plugins.action import assemble
+assemble = paths._import_ansible_action_plugin("assemble")
 
 
 class ActionModule(assemble.ActionModule):
diff --git a/zuul/ansible/action/copy.py b/zuul/ansible/action/copy.py
index 5dc9fa8..bb54430 100644
--- a/zuul/ansible/action/copy.py
+++ b/zuul/ansible/action/copy.py
@@ -15,7 +15,7 @@
 
 
 from zuul.ansible import paths
-from zuul.ansible.plugins.action import copy
+copy = paths._import_ansible_action_plugin("copy")
 
 
 class ActionModule(copy.ActionModule):
diff --git a/zuul/ansible/action/fetch.py b/zuul/ansible/action/fetch.py
index fe06c3b..170b655 100644
--- a/zuul/ansible/action/fetch.py
+++ b/zuul/ansible/action/fetch.py
@@ -15,7 +15,7 @@
 
 
 from zuul.ansible import paths
-from zuul.ansible.plugins.action import fetch
+fetch = paths._import_ansible_action_plugin("fetch")
 
 
 class ActionModule(fetch.ActionModule):
diff --git a/zuul/ansible/action/include_vars.py b/zuul/ansible/action/include_vars.py
index aa0e7d8..5bc1d76 100644
--- a/zuul/ansible/action/include_vars.py
+++ b/zuul/ansible/action/include_vars.py
@@ -15,7 +15,7 @@
 
 
 from zuul.ansible import paths
-from zuul.ansible.plugins.action import include_vars
+include_vars = paths._import_ansible_action_plugin("include_vars")
 
 
 class ActionModule(include_vars.ActionModule):
diff --git a/zuul/ansible/action/network.py b/zuul/ansible/action/network.py
index 31a8739..41fc560 100644
--- a/zuul/ansible/action/network.py
+++ b/zuul/ansible/action/network.py
@@ -14,7 +14,8 @@
 # along with this software.  If not, see <http://www.gnu.org/licenses/>.
 
 
-from zuul.ansible.plugins.action import network
+from zuul.ansible import paths
+network = paths._import_ansible_action_plugin("network")
 
 
 class ActionModule(network.ActionModule):
diff --git a/zuul/ansible/action/normal.py b/zuul/ansible/action/normal.py
index d4b2396..b18cb51 100644
--- a/zuul/ansible/action/normal.py
+++ b/zuul/ansible/action/normal.py
@@ -13,8 +13,8 @@
 # You should have received a copy of the GNU General Public License
 # along with this software.  If not, see <http://www.gnu.org/licenses/>.
 
-
-from zuul.ansible.plugins.action import normal
+from zuul.ansible import paths
+normal = paths._import_ansible_action_plugin('normal')
 
 
 class ActionModule(normal.ActionModule):
diff --git a/zuul/ansible/action/patch.py b/zuul/ansible/action/patch.py
index d630844..0b43c82 100644
--- a/zuul/ansible/action/patch.py
+++ b/zuul/ansible/action/patch.py
@@ -15,7 +15,7 @@
 
 
 from zuul.ansible import paths
-from zuul.ansible.plugins.action import patch
+patch = paths._import_ansible_action_plugin("patch")
 
 
 class ActionModule(patch.ActionModule):
diff --git a/zuul/ansible/action/script.py b/zuul/ansible/action/script.py
index bd3d5d5..c95d357 100644
--- a/zuul/ansible/action/script.py
+++ b/zuul/ansible/action/script.py
@@ -15,10 +15,10 @@
 
 
 from zuul.ansible import paths
-from zuul.ansible.plugins.action import copy
+script = paths._import_ansible_action_plugin("script")
 
 
-class ActionModule(copy.ActionModule):
+class ActionModule(script.ActionModule):
 
     def run(self, tmp=None, task_vars=None):
 
diff --git a/zuul/ansible/action/synchronize.py b/zuul/ansible/action/synchronize.py
index cbb7ea2..0193eca 100644
--- a/zuul/ansible/action/synchronize.py
+++ b/zuul/ansible/action/synchronize.py
@@ -15,7 +15,7 @@
 
 
 from zuul.ansible import paths
-from zuul.ansible.plugins.action import synchronize
+synchronize = paths._import_ansible_action_plugin("synchronize")
 
 
 class ActionModule(synchronize.ActionModule):
diff --git a/zuul/ansible/action/template.py b/zuul/ansible/action/template.py
index 96471ae..c6df3d8 100644
--- a/zuul/ansible/action/template.py
+++ b/zuul/ansible/action/template.py
@@ -15,7 +15,7 @@
 
 
 from zuul.ansible import paths
-from zuul.ansible.plugins.action import template
+template = paths._import_ansible_action_plugin("template")
 
 
 class ActionModule(template.ActionModule):
diff --git a/zuul/ansible/action/unarchive.py b/zuul/ansible/action/unarchive.py
index c3f6e91..c78c331 100644
--- a/zuul/ansible/action/unarchive.py
+++ b/zuul/ansible/action/unarchive.py
@@ -15,7 +15,7 @@
 
 
 from zuul.ansible import paths
-from zuul.ansible.plugins.action import unarchive
+unarchive = paths._import_ansible_action_plugin("unarchive")
 
 
 class ActionModule(unarchive.ActionModule):
diff --git a/zuul/ansible/action/win_copy.py b/zuul/ansible/action/win_copy.py
index eef3a1c..2751585 100644
--- a/zuul/ansible/action/win_copy.py
+++ b/zuul/ansible/action/win_copy.py
@@ -15,7 +15,7 @@
 
 
 from zuul.ansible import paths
-from zuul.ansible.plugins.action import win_copy
+win_copy = paths._import_ansible_action_plugin("win_copy")
 
 
 class ActionModule(win_copy.ActionModule):
diff --git a/zuul/ansible/action/win_template.py b/zuul/ansible/action/win_template.py
index 2a47216..7a357f9 100644
--- a/zuul/ansible/action/win_template.py
+++ b/zuul/ansible/action/win_template.py
@@ -15,7 +15,7 @@
 
 
 from zuul.ansible import paths
-from zuul.ansible.plugins.action import win_template
+win_template = paths._import_ansible_action_plugin("win_template")
 
 
 class ActionModule(win_template.ActionModule):
diff --git a/zuul/ansible/callback/__init__.py b/zuul/ansible/callback/__init__.py
new file mode 100644
index 0000000..e69de29
--- /dev/null
+++ b/zuul/ansible/callback/__init__.py
diff --git a/zuul/ansible/callback/zuul_stream.py b/zuul/ansible/callback/zuul_stream.py
new file mode 100644
index 0000000..9b8bccd
--- /dev/null
+++ b/zuul/ansible/callback/zuul_stream.py
@@ -0,0 +1,96 @@
+# Copyright 2017 Red Hat, Inc.
+#
+# Zuul is free software: you can redistribute it and/or modify
+# it under the terms of the GNU General Public License as published by
+# the Free Software Foundation, either version 3 of the License, or
+# (at your option) any later version.
+#
+# Zuul is distributed in the hope that it will be useful,
+# but WITHOUT ANY WARRANTY; without even the implied warranty of
+# MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE.  See the
+# GNU General Public License for more details.
+#
+# You should have received a copy of the GNU General Public License
+# along with Ansible.  If not, see <http://www.gnu.org/licenses/>.
+
+import os
+import multiprocessing
+import socket
+import time
+
+from ansible.plugins.callback import default
+
+LOG_STREAM_PORT = 19885
+
+
+def linesplit(socket):
+    buff = socket.recv(4096)
+    buffering = True
+    while buffering:
+        if "\n" in buff:
+            (line, buff) = buff.split("\n", 1)
+            yield line + "\n"
+        else:
+            more = socket.recv(4096)
+            if not more:
+                buffering = False
+            else:
+                buff += more
+    if buff:
+        yield buff
+
+
+class CallbackModule(default.CallbackModule):
+
+    '''
+    This is the Zuul streaming callback. It's based on the default
+    callback plugin, but streams results from shell commands.
+    '''
+
+    CALLBACK_VERSION = 2.0
+    CALLBACK_TYPE = 'stdout'
+    CALLBACK_NAME = 'zuul_stream'
+
+    def __init__(self):
+
+        super(CallbackModule, self).__init__()
+        self._task = None
+        self._daemon_running = False
+        self._daemon_stamp = 'daemon-stamp-%s'
+        self._host_dict = {}
+
+    def _read_log(self, host, ip):
+        self._display.display("[%s] starting to log" % host)
+        s = socket.socket(socket.AF_INET, socket.SOCK_STREAM)
+        while True:
+            try:
+                s.connect((ip, LOG_STREAM_PORT))
+            except Exception:
+                self._display.display("[%s] Waiting on logger" % host)
+                time.sleep(0.1)
+                continue
+            for line in linesplit(s):
+                self._display.display("[%s] %s " % (host, line.strip()))
+
+    def v2_playbook_on_play_start(self, play):
+        self._play = play
+        super(CallbackModule, self).v2_playbook_on_play_start(play)
+
+    def v2_playbook_on_task_start(self, task, is_conditional):
+        self._task = task
+
+        if self._play.strategy != 'free':
+            self._print_task_banner(task)
+        if task.action == 'command':
+            play_vars = self._play._variable_manager._hostvars
+            for host in self._play.hosts:
+                ip = play_vars[host]['ansible_host']
+                daemon_stamp = self._daemon_stamp % host
+                if not os.path.exists(daemon_stamp):
+                    self._host_dict[host] = ip
+                    # Touch stamp file
+                    open(daemon_stamp, 'w').close()
+                    p = multiprocessing.Process(
+                        target=self._read_log, args=(host, ip))
+                    p.daemon = True
+                    p.start()
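
This callback is enabled later in this change by writeAnsibleConfig(), which points callback_plugins at the copied directory and sets stdout_callback = zuul_stream. A small self-contained exercise of the linesplit() helper, assuming the Python 2 string semantics this module is written for:

import socket

from zuul.ansible.callback.zuul_stream import linesplit

parent, child = socket.socketpair()
child.sendall("first line\nsecond line\n")
child.close()  # recv() then returns '' and the generator drains its buffer

for line in linesplit(parent):
    print(line.rstrip("\n"))  # -> "first line", then "second line"
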
diff --git a/zuul/ansible/library/command.py b/zuul/ansible/library/command.py
index 6390322..328ae7b 100644
--- a/zuul/ansible/library/command.py
+++ b/zuul/ansible/library/command.py
@@ -121,12 +121,13 @@
 from ast import literal_eval
 
 
+LOG_STREAM_FILE = '/tmp/console.log'
 PASSWD_ARG_RE = re.compile(r'^[-]{0,2}pass[-]?(word|wd)?')
 
 
 class Console(object):
     def __enter__(self):
-        self.logfile = open('/tmp/console.html', 'a', 0)
+        self.logfile = open(LOG_STREAM_FILE, 'a', 0)
         return self
 
     def __exit__(self, etype, value, tb):
diff --git a/zuul/ansible/library/zuul_console.py b/zuul/ansible/library/zuul_console.py
index e70dac8..5d1333f 100644
--- a/zuul/ansible/library/zuul_console.py
+++ b/zuul/ansible/library/zuul_console.py
@@ -20,6 +20,9 @@
 import socket
 import threading
 
+LOG_STREAM_FILE = '/tmp/console.log'
+LOG_STREAM_PORT = 19885
+
 
 def daemonize():
     # A really basic daemonize method that should work well enough for
@@ -155,15 +158,15 @@
 
 
 def test():
-    s = Server('/tmp/console.html', 19885)
+    s = Server(LOG_STREAM_FILE, LOG_STREAM_PORT)
     s.run()
 
 
 def main():
     module = AnsibleModule(
         argument_spec=dict(
-            path=dict(default='/tmp/console.html'),
-            port=dict(default=19885, type='int'),
+            path=dict(default=LOG_STREAM_FILE),
+            port=dict(default=LOG_STREAM_PORT, type='int'),
         )
     )
 
diff --git a/zuul/ansible/library/zuul_log.py b/zuul/ansible/library/zuul_log.py
deleted file mode 100644
index 4b377d9..0000000
--- a/zuul/ansible/library/zuul_log.py
+++ /dev/null
@@ -1,58 +0,0 @@
-#!/usr/bin/python
-
-# Copyright (c) 2016 IBM Corp.
-# Copyright (c) 2016 Red Hat
-#
-# This module is free software: you can redistribute it and/or modify
-# it under the terms of the GNU General Public License as published by
-# the Free Software Foundation, either version 3 of the License, or
-# (at your option) any later version.
-#
-# This software is distributed in the hope that it will be useful,
-# but WITHOUT ANY WARRANTY; without even the implied warranty of
-# MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE.  See the
-# GNU General Public License for more details.
-#
-# You should have received a copy of the GNU General Public License
-# along with this software.  If not, see <http://www.gnu.org/licenses/>.
-
-import datetime
-
-
-class Console(object):
-    def __enter__(self):
-        self.logfile = open('/tmp/console.html', 'a', 0)
-        return self
-
-    def __exit__(self, etype, value, tb):
-        self.logfile.close()
-
-    def addLine(self, ln):
-        ts = datetime.datetime.now()
-        outln = '%s | %s' % (str(ts), ln)
-        self.logfile.write(outln)
-
-
-def log(msg):
-    if not isinstance(msg, list):
-        msg = [msg]
-    with Console() as console:
-        for line in msg:
-            console.addLine("[Zuul] %s\n" % line)
-
-
-def main():
-    module = AnsibleModule(
-        argument_spec=dict(
-            msg=dict(required=True, type='raw'),
-        )
-    )
-
-    p = module.params
-    log(p['msg'])
-    module.exit_json(changed=True)
-
-from ansible.module_utils.basic import *  # noqa
-
-if __name__ == '__main__':
-    main()
diff --git a/zuul/ansible/paths.py b/zuul/ansible/paths.py
index 2bd0181..e387732 100644
--- a/zuul/ansible/paths.py
+++ b/zuul/ansible/paths.py
@@ -13,14 +13,15 @@
 # You should have received a copy of the GNU General Public License
 # along with this software.  If not, see <http://www.gnu.org/licenses/>.
 
+import imp
 import os
 
+import ansible.plugins.action
+
 
 def _is_safe_path(path):
-    if os.path.isabs(path):
-        return False
-    if not os.path.abspath(os.path.expanduser(path)).startswith(
-            os.path.abspath(os.path.curdir)):
+    full_path = os.path.realpath(os.path.abspath(os.path.expanduser(path)))
+    if not full_path.startswith(os.path.abspath(os.path.curdir)):
         return False
     return True
 
@@ -29,5 +30,24 @@
     return dict(
         failed=True,
         path=path,
-        msg="{prefix} outside the working dir is prohibited".format(
-            prefix=prefix))
+        msg="{prefix} outside the working dir {curdir} is prohibited".format(
+            prefix=prefix,
+            curdir=os.path.abspath(os.path.curdir)))
+
+
+def _import_ansible_action_plugin(name):
+    # Ansible forces the import of our action plugins
+    # (zuul.ansible.action.foo) as ansible.plugins.action.foo, which
+    # is the import path of the ansible implementation.  Our
+    # implementations need to subclass that, but if we try to import
+    # it with that name, we will get our own module.  This bypasses
+    # Python's module namespace to load the actual ansible modules.
+    # We need to give it a name, however.  If we load it with its
+    # actual name, we will end up overwriting our module in Python's
+    # namespace, causing infinite recursion.  So we supply an
+    # otherwise unused name for the module:
+    # zuul.ansible.protected.action.foo.
+
+    return imp.load_module(
+        'zuul.ansible.protected.action.' + name,
+        *imp.find_module(name, ansible.plugins.action.__path__))
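
For reference, this is the pattern the action-plugin overrides earlier in this change follow: the real Ansible implementation is loaded under the private zuul.ansible.protected.action.* name, so the wrapper module (which Ansible itself imports as ansible.plugins.action.copy) can subclass it without recursively importing itself. A sketch:

from zuul.ansible import paths
copy = paths._import_ansible_action_plugin("copy")


class ActionModule(copy.ActionModule):
    def run(self, tmp=None, task_vars=None):
        # inspect or restrict the task arguments here before delegating
        return super(ActionModule, self).run(tmp, task_vars)
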
diff --git a/zuul/cmd/scheduler.py b/zuul/cmd/scheduler.py
index e5497dc..9a8b24f 100755
--- a/zuul/cmd/scheduler.py
+++ b/zuul/cmd/scheduler.py
@@ -44,15 +44,11 @@
         parser = argparse.ArgumentParser(description='Project gating system.')
         parser.add_argument('-c', dest='config',
                             help='specify the config file')
-        parser.add_argument('-l', dest='layout',
-                            help='specify the layout file')
         parser.add_argument('-d', dest='nodaemon', action='store_true',
                             help='do not run as a daemon')
-        parser.add_argument('-t', dest='validate', nargs='?', const=True,
-                            metavar='JOB_LIST',
-                            help='validate layout file syntax (optionally '
-                            'providing the path to a file with a list of '
-                            'available job names)')
+        parser.add_argument('-t', dest='validate', action='store_true',
+                            help='validate config file syntax (does not '
+                            'validate config repo validity)')
         parser.add_argument('--version', dest='version', action='version',
                             version=self._get_version(),
                             help='show zuul version')
@@ -79,38 +75,19 @@
         self.stop_gear_server()
         os._exit(0)
 
-    def test_config(self, job_list_path):
+    def test_config(self):
         # See comment at top of file about zuul imports
         import zuul.scheduler
-        import zuul.launcher.gearman
-        import zuul.trigger.gerrit
+        import zuul.launcher.client
 
         logging.basicConfig(level=logging.DEBUG)
-        self.sched = zuul.scheduler.Scheduler(self.config,
-                                              testonly=True)
-        self.configure_connections()
-        self.sched.registerConnections(self.connections, load=False)
-        layout = self.sched.testConfig(self.config.get('zuul',
-                                                       'layout_config'),
-                                       self.connections)
-        if not job_list_path:
-            return False
-
-        failure = False
-        path = os.path.expanduser(job_list_path)
-        if not os.path.exists(path):
-            raise Exception("Unable to find job list: %s" % path)
-        jobs = set()
-        jobs.add('noop')
-        for line in open(path):
-            v = line.strip()
-            if v:
-                jobs.add(v)
-        for job in sorted(layout.jobs):
-            if job not in jobs:
-                print("FAILURE: Job %s not defined" % job)
-                failure = True
-        return failure
+        try:
+            self.sched = zuul.scheduler.Scheduler(self.config,
+                                                  testonly=True)
+        except Exception as e:
+            self.log.error("%s" % e)
+            return -1
+        return 0
 
     def start_gear_server(self):
         pipe_read, pipe_write = os.pipe()
@@ -153,6 +130,7 @@
         import zuul.lib.swift
         import zuul.webapp
         import zuul.rpclistener
+        import zuul.zk
 
         signal.signal(signal.SIGUSR2, zuul.cmd.stack_dump_handler)
         if (self.config.has_option('gearman_server', 'start') and
@@ -171,6 +149,14 @@
         merger = zuul.merger.client.MergeClient(self.config, self.sched)
         nodepool = zuul.nodepool.Nodepool(self.sched)
 
+        zookeeper = zuul.zk.ZooKeeper()
+        if self.config.has_option('zuul', 'zookeeper_hosts'):
+            zookeeper_hosts = self.config.get('zuul', 'zookeeper_hosts')
+        else:
+            zookeeper_hosts = '127.0.0.1:2181'
+
+        zookeeper.connect(zookeeper_hosts)
+
         if self.config.has_option('zuul', 'status_expiry'):
             cache_expiry = self.config.getint('zuul', 'status_expiry')
         else:
@@ -195,12 +181,19 @@
         self.sched.setLauncher(gearman)
         self.sched.setMerger(merger)
         self.sched.setNodepool(nodepool)
+        self.sched.setZooKeeper(zookeeper)
 
         self.log.info('Starting scheduler')
-        self.sched.start()
-        self.sched.registerConnections(self.connections)
-        self.sched.reconfigure(self.config)
-        self.sched.resume()
+        try:
+            self.sched.start()
+            self.sched.registerConnections(self.connections)
+            self.sched.reconfigure(self.config)
+            self.sched.resume()
+        except Exception:
+            self.log.exception("Error starting Zuul:")
+            # TODO(jeblair): If we had all threads marked as daemon,
+            # we might be able to have a nicer way of exiting here.
+            sys.exit(1)
         self.log.info('Starting Webapp')
         webapp.start()
         self.log.info('Starting RPC')
@@ -223,19 +216,13 @@
 
     scheduler.read_config()
 
-    if scheduler.args.layout:
-        scheduler.config.set('zuul', 'layout_config', scheduler.args.layout)
-
     if scheduler.args.validate:
-        path = scheduler.args.validate
-        if path is True:
-            path = None
-        sys.exit(scheduler.test_config(path))
+        sys.exit(scheduler.test_config())
 
     if scheduler.config.has_option('zuul', 'pidfile'):
         pid_fn = os.path.expanduser(scheduler.config.get('zuul', 'pidfile'))
     else:
-        pid_fn = '/var/run/zuul/zuul.pid'
+        pid_fn = '/var/run/zuul-scheduler/zuul-scheduler.pid'
     pid = pid_file_module.TimeoutPIDLockFile(pid_fn, 10)
 
     if scheduler.args.nodaemon:
diff --git a/zuul/configloader.py b/zuul/configloader.py
index 6cea19d..e4fa620 100644
--- a/zuul/configloader.py
+++ b/zuul/configloader.py
@@ -202,7 +202,7 @@
 
         # If the definition for this job came from a project repo,
         # implicitly apply a branch matcher for the branch it was on.
-        if (not job.source_context.secure):
+        if (not job.source_context.trusted):
             branches = [job.source_context.branch]
         elif 'branches' in conf:
             branches = as_list(conf['branches'])
@@ -233,12 +233,12 @@
         # TODOv3(jeblair): this limits roles to the same
         # source; we should remove that limitation.
         source = job.source_context.project.connection_name
-        (secure, project) = tenant.getRepo(source, role['zuul'])
+        (trusted, project) = tenant.getRepo(source, role['zuul'])
         if project is None:
             return None
 
         return model.ZuulRole(role.get('name', name), source,
-                              project.name, secure)
+                              project.name, trusted)
 
 
 class ProjectTemplateParser(object):
@@ -327,25 +327,29 @@
         for p in layout.pipelines.values():
             project[p.name] = {'queue': str,
                                'jobs': [vs.Any(str, dict)]}
-        return vs.Schema(project)
+        return vs.Schema([project])
 
     @staticmethod
-    def fromYaml(tenant, layout, conf):
-        # TODOv3(jeblair): This may need some branch-specific
-        # configuration for in-repo configs.
-        ProjectParser.getSchema(layout)(conf)
-        # Make a copy since we modify this later via pop
-        conf = copy.deepcopy(conf)
-        conf_templates = conf.pop('templates', [])
-        # The way we construct a project definition is by parsing the
-        # definition as a template, then applying all of the
-        # templates, including the newly parsed one, in order.
-        project_template = ProjectTemplateParser.fromYaml(tenant, layout, conf)
-        configs = [layout.project_templates[name] for name in conf_templates]
-        configs.append(project_template)
-        project = model.ProjectConfig(conf['name'])
-        mode = conf.get('merge-mode', 'merge-resolve')
+    def fromYaml(tenant, layout, conf_list):
+        ProjectParser.getSchema(layout)(conf_list)
+        project = model.ProjectConfig(conf_list[0]['name'])
+        mode = conf_list[0].get('merge-mode', 'merge-resolve')
         project.merge_mode = model.MERGER_MAP[mode]
+
+        # TODOv3(jeblair): deal with merge mode setting on multi branches
+        configs = []
+        for conf in conf_list:
+            # Make a copy since we modify this later via pop
+            conf = copy.deepcopy(conf)
+            conf_templates = conf.pop('templates', [])
+            # The way we construct a project definition is by parsing the
+            # definition as a template, then applying all of the
+            # templates, including the newly parsed one, in order.
+            project_template = ProjectTemplateParser.fromYaml(
+                tenant, layout, conf)
+            configs.extend([layout.project_templates[name]
+                            for name in conf_templates])
+            configs.append(project_template)
         for pipeline in layout.pipelines.values():
             project_pipeline = model.ProjectPipelineConfig()
             project_pipeline.job_tree = model.JobTree(None)
@@ -663,6 +667,8 @@
             # branch selector to each job there.  This makes the
             # in-repo configuration apply only to that branch.
             for branch in source.getProjectBranches(project):
+                project.unparsed_branch_config[branch] = \
+                    model.UnparsedTenantConfig()
                 job = merger.getFiles(project.name, url, branch,
                                       files=['.zuul.yaml'])
                 job.source_context = model.SourceContext(project,
@@ -681,7 +687,9 @@
                     TenantParser.log.info(
                         "Loading configuration from %s/%s" %
                         (job.source_context, fn))
-                    if job.source_context.secure:
+                    project = job.source_context.project
+                    branch = job.source_context.branch
+                    if job.source_context.trusted:
                         incdata = TenantParser._parseConfigRepoLayout(
                             job.files[fn], job.source_context)
                         config_repos_config.extend(incdata)
@@ -689,7 +697,9 @@
                         incdata = TenantParser._parseProjectRepoLayout(
                             job.files[fn], job.source_context)
                         project_repos_config.extend(incdata)
-                    job.source_context.project.unparsed_config.extend(incdata)
+                    project.unparsed_config.extend(incdata)
+                    if branch in project.unparsed_branch_config:
+                        project.unparsed_branch_config[branch].extend(incdata)
         return config_repos_config, project_repos_config
 
     @staticmethod
@@ -727,7 +737,7 @@
             layout.addProjectTemplate(ProjectTemplateParser.fromYaml(
                 tenant, layout, config_template))
 
-        for config_project in data.projects:
+        for config_project in data.projects.values():
             layout.addProjectConfig(ProjectParser.fromYaml(
                 tenant, layout, config_project))
 
@@ -784,19 +794,18 @@
     def createDynamicLayout(self, tenant, files):
         config = tenant.config_repos_config.copy()
         for source, project in tenant.project_repos:
-            # TODOv3(jeblair): config should be branch specific
             for branch in source.getProjectBranches(project):
                 data = files.getFile(project.name, branch, '.zuul.yaml')
-                if not data:
-                    data = project.unparsed_config
-                if not data:
+                if data:
+                    source_context = model.SourceContext(project,
+                                                         branch, False)
+                    incdata = TenantParser._parseProjectRepoLayout(
+                        data, source_context)
+                else:
+                    incdata = project.unparsed_branch_config[branch]
+                if not incdata:
                     continue
-                source_context = model.SourceContext(project,
-                                                     branch, False)
-                incdata = TenantParser._parseProjectRepoLayout(
-                    data, source_context)
                 config.extend(incdata)
-
         layout = model.Layout()
         # TODOv3(jeblair): copying the pipelines could be dangerous/confusing.
         layout.pipelines = tenant.layout.pipelines
@@ -808,8 +817,7 @@
             layout.addProjectTemplate(ProjectTemplateParser.fromYaml(
                 tenant, layout, config_template))
 
-        for config_project in config.projects:
+        for config_project in config.projects.values():
             layout.addProjectConfig(ProjectParser.fromYaml(
                 tenant, layout, config_project), update_pipeline=False)
-
         return layout
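
ProjectParser.fromYaml() now takes a list of project stanzas, one per branch that defines the project, and folds them into a single ProjectConfig (merge-mode is still read from the first entry only, per the TODO above). An illustrative call shape, where tenant, layout, master_context and stable_context stand in for the objects a test or the tenant parser would already have:

conf_list = [
    {'_source_context': master_context, 'name': 'org/project',
     'gate': {'jobs': ['python27']}},
    {'_source_context': stable_context, 'name': 'org/project',
     'gate': {'jobs': [{'python27': {'timeout': 70}}]}},
]
project_config = configloader.ProjectParser.fromYaml(tenant, layout, conf_list)
layout.addProjectConfig(project_config, update_pipeline=False)
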
diff --git a/zuul/launcher/client.py b/zuul/launcher/client.py
index 23bec90..d9e7d21 100644
--- a/zuul/launcher/client.py
+++ b/zuul/launcher/client.py
@@ -338,7 +338,9 @@
 
         nodes = []
         for node in item.current_build_set.getJobNodeSet(job.name).getNodes():
-            nodes.append(dict(name=node.name, image=node.image))
+            nodes.append(dict(name=node.name, image=node.image,
+                              public_ipv6=node.public_ipv6,
+                              public_ipv4=node.public_ipv4))
         params['nodes'] = nodes
         params['zuul'] = zuul_params
         projects = set()
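
The two address fields added to the node parameters are consumed by getHostList() in zuul/launcher/server.py further down in this change; roughly, with a hypothetical node:

node = dict(name='centos-7-node', image='centos-7',
            public_ipv4='203.0.113.10', public_ipv6=None)

# getHostList() prefers the v4 address and falls back to v6
ip = node.get('public_ipv4') or node.get('public_ipv6')
host_entry = (node['name'], dict(ansible_host=ip))
# -> ('centos-7-node', {'ansible_host': '203.0.113.10'})
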
diff --git a/zuul/launcher/server.py b/zuul/launcher/server.py
index 1ba42da..2827d83 100644
--- a/zuul/launcher/server.py
+++ b/zuul/launcher/server.py
@@ -27,9 +27,11 @@
 import yaml
 
 import gear
+import git
 
 import zuul.merger.merger
 import zuul.ansible.action
+import zuul.ansible.callback
 import zuul.ansible.library
 from zuul.lib import commandsocket
 
@@ -74,20 +76,31 @@
 class JobDirPlaybook(object):
     def __init__(self, root):
         self.root = root
-        self.secure = None
+        self.trusted = None
         self.path = None
 
 
 class JobDir(object):
     def __init__(self, root=None, keep=False):
+        # root
+        #   ansible
+        #     trusted.cfg
+        #     untrusted.cfg
+        #   work
+        #     git
+        #     logs
         self.keep = keep
         self.root = tempfile.mkdtemp(dir=root)
-        self.git_root = os.path.join(self.root, 'git')
+        # Work
+        self.work_root = os.path.join(self.root, 'work')
+        os.makedirs(self.work_root)
+        self.git_root = os.path.join(self.work_root, 'git')
         os.makedirs(self.git_root)
+        self.log_root = os.path.join(self.work_root, 'logs')
+        os.makedirs(self.log_root)
+        # Ansible
         self.ansible_root = os.path.join(self.root, 'ansible')
         os.makedirs(self.ansible_root)
-        self.secure_ansible_root = os.path.join(self.ansible_root, 'secure')
-        os.makedirs(self.secure_ansible_root)
         self.known_hosts = os.path.join(self.ansible_root, 'known_hosts')
         self.inventory = os.path.join(self.ansible_root, 'inventory')
         self.vars = os.path.join(self.ansible_root, 'vars.yaml')
@@ -97,10 +110,10 @@
         self.post_playbooks = []
         self.roles = []
         self.roles_path = []
-        self.config = os.path.join(self.ansible_root, 'ansible.cfg')
-        self.secure_config = os.path.join(
-            self.secure_ansible_root, 'ansible.cfg')
-        self.ansible_log = os.path.join(self.ansible_root, 'ansible_log.txt')
+        self.untrusted_config = os.path.join(
+            self.ansible_root, 'untrusted.cfg')
+        self.trusted_config = os.path.join(self.ansible_root, 'trusted.cfg')
+        self.ansible_log = os.path.join(self.log_root, 'ansible_log.txt')
 
     def addPrePlaybook(self):
         count = len(self.pre_playbooks)
@@ -238,6 +251,10 @@
             self.merge_name = None
 
         self.connections = connections
+        # This merger and its git repos are used to maintain
+        # up-to-date copies of all the repos that are used by jobs, as
+        # well as to support the merger:cat function to supply
+        # configuration information to Zuul when it starts.
         self.merger = self._getMerger(self.merge_root)
         self.update_queue = DeduplicateQueue()
 
@@ -256,15 +273,25 @@
         if not os.path.exists(self.action_dir):
             os.makedirs(self.action_dir)
 
+        self.callback_dir = os.path.join(ansible_dir, 'callback')
+        if not os.path.exists(self.callback_dir):
+            os.makedirs(self.callback_dir)
+
         library_path = os.path.dirname(os.path.abspath(
             zuul.ansible.library.__file__))
         for fn in os.listdir(library_path):
             shutil.copy(os.path.join(library_path, fn), self.library_dir)
+
         action_path = os.path.dirname(os.path.abspath(
             zuul.ansible.action.__file__))
         for fn in os.listdir(action_path):
             shutil.copy(os.path.join(action_path, fn), self.action_dir)
 
+        callback_path = os.path.dirname(os.path.abspath(
+            zuul.ansible.callback.__file__))
+        for fn in os.listdir(callback_path):
+            shutil.copy(os.path.join(callback_path, fn), self.callback_dir)
+
         self.job_workers = {}
 
     def _getMerger(self, root):
@@ -356,7 +383,7 @@
                 self.log.exception("Exception in update thread:")
 
     def _innerUpdateLoop(self):
-        # Inside of a loop that keeps the main repository up to date
+        # Inside of a loop that keeps the main repositories up to date
         task = self.update_queue.get()
         if task is None:
             # We are asked to stop
@@ -368,6 +395,7 @@
         task.setComplete()
 
     def update(self, project, url):
+        # Update a repository in the main merger
         task = UpdateTask(project, url)
         task = self.update_queue.put(task)
         return task
@@ -484,7 +512,8 @@
 
     def launch(self):
         try:
-            self.jobdir = JobDir(root=self.launcher_server.jobdir_root)
+            self.jobdir = JobDir(root=self.launcher_server.jobdir_root,
+                                 keep=self.launcher_server.keep_jobdir)
             self._launch()
         except Exception:
             self.log.exception("Exception while launching job")
@@ -517,6 +546,16 @@
             task.wait()
 
         self.log.debug("Job %s: git updates complete" % (self.job.unique,))
+        for project in args['projects']:
+            self.log.debug("Cloning %s" % (project['name'],))
+            repo = git.Repo.clone_from(
+                os.path.join(self.launcher_server.merge_root,
+                             project['name']),
+                os.path.join(self.jobdir.git_root,
+                             project['name']))
+            repo.remotes.origin.config_writer.set('url', project['url'])
+
+        # Get a merger in order to update the repos involved in this job.
         merger = self.launcher_server._getMerger(self.jobdir.git_root)
         merge_items = [i for i in args['items'] if i.get('refspec')]
         if merge_items:
@@ -594,13 +633,14 @@
         return result
 
     def getHostList(self, args):
-        # TODOv3: the localhost addition is temporary so we have
-        # something to exercise ansible.
-        hosts = [('localhost', dict(ansible_connection='local'))]
+        # TODO(clarkb): This prefers v4 because we're not sure if we
+        # expect v6 to work; if we can determine how, prefer v6 instead.
+        hosts = []
         for node in args['nodes']:
-            # TODOv3: the connection should almost certainly not be
-            # local.
-            hosts.append((node['name'], dict(ansible_connection='local')))
+            ip = node.get('public_ipv4')
+            if not ip:
+                ip = node.get('public_ipv6')
+            hosts.append((node['name'], dict(ansible_host=ip)))
         return hosts
 
     def _blockPluginDirs(self, path):
@@ -615,13 +655,13 @@
             if os.path.isdir(entry) and entry.endswith('_plugins'):
                 raise Exception(
                     "Ansible plugin dir %s found adjacent to playbook %s in"
-                    " non-secure repo." % (entry, path))
+                    " non-trusted repo." % (entry, path))
 
-    def findPlaybook(self, path, required=False, secure=False):
+    def findPlaybook(self, path, required=False, trusted=False):
         for ext in ['.yaml', '.yml']:
             fn = path + ext
             if os.path.exists(fn):
-                if not secure:
+                if not trusted:
                     playbook_dir = os.path.dirname(os.path.abspath(fn))
                     self._blockPluginDirs(playbook_dir)
                 return fn
@@ -654,13 +694,13 @@
         self.log.debug("Prepare playbook repo for %s" % (playbook,))
         # Check out the playbook repo if needed and set the path to
         # the playbook that should be run.
-        jobdir_playbook.secure = playbook['secure']
+        jobdir_playbook.trusted = playbook['trusted']
         source = self.launcher_server.connections.getSource(
             playbook['connection'])
         project = source.getProject(playbook['project'])
         # TODO(jeblair): construct the url in the merger itself
         url = source.getGitUrl(project)
-        if not playbook['secure']:
+        if not playbook['trusted']:
             # This is a project repo, so it is safe to use the already
             # checked out version (from speculative merging) of the
             # playbook
@@ -674,7 +714,7 @@
                     jobdir_playbook.path = self.findPlaybook(
                         path,
                         required=required,
-                        secure=playbook['secure'])
+                        trusted=playbook['trusted'])
                     return
         # The playbook repo is either a config repo, or it isn't in
         # the stack of changes we are testing, so check out the branch
@@ -689,7 +729,7 @@
         jobdir_playbook.path = self.findPlaybook(
             path,
             required=required,
-            secure=playbook['secure'])
+            trusted=playbook['trusted'])
 
     def prepareRoles(self, args):
         for role in args['roles']:
@@ -697,23 +737,23 @@
                 root = self.jobdir.addRole()
                 self.prepareZuulRole(args, role, root)
 
-    def findRole(self, path, secure=False):
+    def findRole(self, path, trusted=False):
         d = os.path.join(path, 'tasks')
         if os.path.isdir(d):
             # This is a bare role
-            if not secure:
+            if not trusted:
                 self._blockPluginDirs(path)
             # None signifies that the repo is a bare role
             return None
         d = os.path.join(path, 'roles')
         if os.path.isdir(d):
             # This repo has a collection of roles
-            if not secure:
+            if not trusted:
                 for entry in os.listdir(d):
                     self._blockPluginDirs(os.path.join(d, entry))
             return d
         # We assume the repository itself is a collection of roles
-        if not secure:
+        if not trusted:
             for entry in os.listdir(path):
                 self._blockPluginDirs(os.path.join(path, entry))
         return path
@@ -727,7 +767,7 @@
         # TODO(jeblair): construct the url in the merger itself
         url = source.getGitUrl(project)
         role_repo = None
-        if not role['secure']:
+        if not role['trusted']:
             # This is a project repo, so it is safe to use the already
             # checked out version (from speculative merging) of the
             # role
@@ -754,7 +794,7 @@
             merger.checkoutBranch(project.name, url, 'master')
             role_repo = os.path.join(root, project.name)
 
-        role_path = self.findRole(role_repo, secure=role['secure'])
+        role_path = self.findRole(role_repo, trusted=role['trusted'])
         if role_path is None:
             # In the case of a bare role, add the containing directory
             role_path = root
@@ -768,14 +808,21 @@
                 for k, v in host_vars.items():
                     inventory.write('%s=%s' % (k, v))
                 inventory.write('\n')
+                if 'ansible_host' in host_vars:
+                    os.system("ssh-keyscan %s >> %s" % (
+                        host_vars['ansible_host'],
+                        self.jobdir.known_hosts))
+
         with open(self.jobdir.vars, 'w') as vars_yaml:
             zuul_vars = dict(zuul=args['zuul'])
+            zuul_vars['zuul']['launcher'] = dict(git_root=self.jobdir.git_root,
+                                                 log_root=self.jobdir.log_root)
             vars_yaml.write(
                 yaml.safe_dump(zuul_vars, default_flow_style=False))
-        self.writeAnsibleConfig(self.jobdir.config)
-        self.writeAnsibleConfig(self.jobdir.secure_config, secure=True)
+        self.writeAnsibleConfig(self.jobdir.untrusted_config)
+        self.writeAnsibleConfig(self.jobdir.trusted_config, trusted=True)
 
-    def writeAnsibleConfig(self, config_path, secure=False):
+    def writeAnsibleConfig(self, config_path, trusted=False):
         with open(config_path, 'w') as config:
             config.write('[defaults]\n')
             config.write('hostfile = %s\n' % self.jobdir.inventory)
@@ -792,20 +839,23 @@
             if self.jobdir.roles_path:
                 config.write('roles_path = %s\n' %
                              ':'.join(self.jobdir.roles_path))
+            config.write('callback_plugins = %s\n'
+                         % self.launcher_server.callback_dir)
+            config.write('stdout_callback = zuul_stream\n')
             # bump the timeout because busy nodes may take more than
             # 10s to respond
             config.write('timeout = 30\n')
-            if not secure:
+            if not trusted:
                 config.write('action_plugins = %s\n'
                              % self.launcher_server.action_dir)
 
-            # On secure jobs, we want to prevent the printing of args,
-            # since secure jobs might have access to secrets that they may
+            # On trusted jobs, we want to prevent the printing of args,
+            # since trusted jobs might have access to secrets that they may
             # need to pass to a task or a role. On the other hand, there
-            # should be no sensitive data in insecure jobs, and printing
+            # should be no sensitive data in untrusted jobs, and printing
             # the args could be useful for debugging.
             config.write('display_args_to_stdout = %s\n' %
-                         str(not secure))
+                         str(not trusted))
 
             config.write('[ssh_connection]\n')
             # NB: when setting pipelining = True, keep_remote_files
@@ -836,17 +886,16 @@
                 pgid = os.getpgid(self.proc.pid)
                 os.killpg(pgid, signal.SIGKILL)
             except Exception:
-                self.log.exception("Exception while killing "
-                                   "ansible process:")
+                self.log.exception("Exception while killing ansible process:")
 
-    def runAnsible(self, cmd, timeout, secure=False):
+    def runAnsible(self, cmd, timeout, trusted=False):
         env_copy = os.environ.copy()
         env_copy['LOGNAME'] = 'zuul'
 
-        if secure:
-            cwd = self.jobdir.secure_ansible_root
+        if trusted:
+            env_copy['ANSIBLE_CONFIG'] = self.jobdir.trusted_config
         else:
-            cwd = self.jobdir.ansible_root
+            env_copy['ANSIBLE_CONFIG'] = self.jobdir.untrusted_config
 
         with self.proc_lock:
             if self.aborted:
@@ -854,7 +903,7 @@
             self.log.debug("Ansible command: %s" % (cmd,))
             self.proc = subprocess.Popen(
                 cmd,
-                cwd=cwd,
+                cwd=self.jobdir.work_root,
                 stdout=subprocess.PIPE,
                 stderr=subprocess.STDOUT,
                 preexec_fn=os.setsid,
@@ -910,4 +959,4 @@
         timeout = 60
 
         return self.runAnsible(
-            cmd=cmd, timeout=timeout, secure=playbook.secure)
+            cmd=cmd, timeout=timeout, trusted=playbook.trusted)
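
A condensed sketch of the launch path above: instead of running Ansible from per-trust-level working directories, the job always runs from the shared work root and selects the right configuration through the ANSIBLE_CONFIG environment variable. run_playbook below is a hypothetical stand-alone helper, not part of this change:

import os
import subprocess


def run_playbook(jobdir, cmd, trusted=False):
    env = os.environ.copy()
    env['ANSIBLE_CONFIG'] = (jobdir.trusted_config if trusted
                             else jobdir.untrusted_config)
    # logs, including ansible_log.txt, land under jobdir.log_root
    return subprocess.Popen(cmd, cwd=jobdir.work_root, env=env,
                            stdout=subprocess.PIPE,
                            stderr=subprocess.STDOUT)
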
diff --git a/zuul/manager/__init__.py b/zuul/manager/__init__.py
index 71d8c19..18cf11b 100644
--- a/zuul/manager/__init__.py
+++ b/zuul/manager/__init__.py
@@ -689,12 +689,6 @@
                 tenant = self.pipeline.layout.tenant
                 zuul_driver.onChangeMerged(tenant, item.change,
                                            self.pipeline.source)
-                if item.change.updatesConfig():
-                    # The change that just landed updates the config.
-                    # Clear out cached data for this project and
-                    # perform a reconfiguration.
-                    item.change.project.unparsed_config = None
-                    self.sched.reconfigureTenant(tenant)
 
     def _reportItem(self, item):
         self.log.debug("Reporting change %s" % item.change)
diff --git a/zuul/merger/merger.py b/zuul/merger/merger.py
index f970b03..658fd64 100644
--- a/zuul/merger/merger.py
+++ b/zuul/merger/merger.py
@@ -214,24 +214,17 @@
         self.working_root = working_root
         if not os.path.exists(working_root):
             os.makedirs(working_root)
-        self._makeSSHWrappers(working_root, connections)
+        self.connections = connections
         self.email = email
         self.username = username
 
-    def _makeSSHWrappers(self, working_root, connections):
-        for connection_name, connection in connections.connections.items():
-            sshkey = connection.connection_config.get('sshkey')
-            if sshkey:
-                self._makeSSHWrapper(sshkey, working_root, connection_name)
-
-    def _makeSSHWrapper(self, key, merge_root, connection_name='default'):
-        wrapper_name = '.ssh_wrapper_%s' % connection_name
-        name = os.path.join(merge_root, wrapper_name)
-        fd = open(name, 'w')
-        fd.write('#!/bin/bash\n')
-        fd.write('ssh -i %s $@\n' % key)
-        fd.close()
-        os.chmod(name, 0o755)
+    def _get_ssh_cmd(self, connection_name):
+        sshkey = self.connections.connections.get(
+            connection_name).connection_config.get('sshkey')
+        if sshkey:
+            return 'ssh -i %s' % sshkey
+        else:
+            return None
 
     def addProject(self, project, url):
         repo = None
@@ -299,30 +292,26 @@
 
         return commit
 
-    def _setGitSsh(self, connection_name):
-        wrapper_name = '.ssh_wrapper_%s' % connection_name
-        name = os.path.join(self.working_root, wrapper_name)
-        if os.path.isfile(name):
-            os.environ['GIT_SSH'] = name
-        elif 'GIT_SSH' in os.environ:
-            del os.environ['GIT_SSH']
-
     def _mergeItem(self, item, recent):
         self.log.debug("Processing refspec %s for project %s / %s ref %s" %
                        (item['refspec'], item['project'], item['branch'],
                         item['ref']))
-        self._setGitSsh(item['connection_name'])
         repo = self.getRepo(item['project'], item['url'])
         key = (item['project'], item['branch'])
+
         # See if we have a commit for this change already in this repo
         zuul_ref = item['branch'] + '/' + item['ref']
-        commit = repo.getCommitFromRef(zuul_ref)
-        if commit:
-            self.log.debug("Found commit %s for ref %s" % (commit, zuul_ref))
-            # Store this as the most recent commit for this
-            # project-branch
-            recent[key] = commit
-            return commit
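+        # Set GIT_SSH_COMMAND for this connection through GitPython's
+        # custom_environment context manager instead of a GIT_SSH wrapper.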
+        with repo.createRepoObject().git.custom_environment(
+            GIT_SSH_COMMAND=self._get_ssh_cmd(item['connection_name'])):
+            commit = repo.getCommitFromRef(zuul_ref)
+            if commit:
+                self.log.debug(
+                    "Found commit %s for ref %s" % (commit, zuul_ref))
+                # Store this as the most recent commit for this
+                # project-branch
+                recent[key] = commit
+                return commit
+
         self.log.debug("Unable to find commit for ref %s" % (zuul_ref,))
         # We need to merge the change
         # Get the most recent commit for this project-branch
@@ -340,24 +329,26 @@
         else:
             self.log.debug("Found base commit %s for %s" % (base, key,))
         # Merge the change
-        commit = self._mergeChange(item, base)
-        if not commit:
-            return None
-        # Store this commit as the most recent for this project-branch
-        recent[key] = commit
-        # Set the Zuul ref for this item to point to the most recent
-        # commits of each project-branch
-        for key, mrc in recent.items():
-            project, branch = key
-            try:
-                repo = self.getRepo(project, None)
-                zuul_ref = branch + '/' + item['ref']
-                repo.createZuulRef(zuul_ref, mrc)
-            except Exception:
-                self.log.exception("Unable to set zuul ref %s for "
-                                   "item %s" % (zuul_ref, item))
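+        # Perform the merge with the same connection-specific
+        # GIT_SSH_COMMAND in the environment.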
+        with repo.createRepoObject().git.custom_environment(
+            GIT_SSH_COMMAND=self._get_ssh_cmd(item['connection_name'])):
+            commit = self._mergeChange(item, base)
+            if not commit:
                 return None
-        return commit
+            # Store this commit as the most recent for this project-branch
+            recent[key] = commit
+            # Set the Zuul ref for this item to point to the most recent
+            # commits of each project-branch
+            for key, mrc in recent.items():
+                project, branch = key
+                try:
+                    repo = self.getRepo(project, None)
+                    zuul_ref = branch + '/' + item['ref']
+                    repo.createZuulRef(zuul_ref, mrc)
+                except Exception:
+                    self.log.exception("Unable to set zuul ref %s for "
+                                       "item %s" % (zuul_ref, item))
+                    return None
+            return commit
 
     def mergeChanges(self, items, files=None):
         recent = {}
diff --git a/zuul/model.py b/zuul/model.py
index 86546a2..10d0446 100644
--- a/zuul/model.py
+++ b/zuul/model.py
@@ -365,6 +365,7 @@
         # TODOv3 (jeblair): re-add support for foreign projects if needed
         self.foreign = foreign
         self.unparsed_config = None
+        self.unparsed_branch_config = {}  # branch -> UnparsedTenantConfig
 
     def __str__(self):
         return self.name
@@ -534,21 +535,21 @@
     Jobs and playbooks reference this to keep track of where they
     originate."""
 
-    def __init__(self, project, branch, secure):
+    def __init__(self, project, branch, trusted):
         self.project = project
         self.branch = branch
-        self.secure = secure
+        self.trusted = trusted
 
     def __repr__(self):
-        return '<SourceContext %s:%s secure:%s>' % (self.project,
-                                                    self.branch,
-                                                    self.secure)
+        return '<SourceContext %s:%s trusted:%s>' % (self.project,
+                                                     self.branch,
+                                                     self.trusted)
 
     def __deepcopy__(self, memo):
         return self.copy()
 
     def copy(self):
-        return self.__class__(self.project, self.branch, self.secure)
+        return self.__class__(self.project, self.branch, self.trusted)
 
     def __ne__(self, other):
         return not self.__eq__(other)
@@ -558,7 +559,7 @@
             return False
         return (self.project == other.project and
                 self.branch == other.branch and
-                self.secure == other.secure)
+                self.trusted == other.trusted)
 
 
 class PlaybookContext(object):
@@ -592,7 +593,7 @@
             connection=self.source_context.project.connection_name,
             project=self.source_context.project.name,
             branch=self.source_context.branch,
-            secure=self.source_context.secure,
+            trusted=self.source_context.trusted,
             path=self.path)
 
 
@@ -625,11 +626,11 @@
 class ZuulRole(Role):
     """A reference to an ansible role in a Zuul project."""
 
-    def __init__(self, target_name, connection_name, project_name, secure):
+    def __init__(self, target_name, connection_name, project_name, trusted):
         super(ZuulRole, self).__init__(target_name)
         self.connection_name = connection_name
         self.project_name = project_name
-        self.secure = secure
+        self.trusted = trusted
 
     def __repr__(self):
         return '<ZuulRole %s %s>' % (self.project_name, self.target_name)
@@ -640,7 +641,7 @@
         return (super(ZuulRole, self).__eq__(other) and
                 self.connection_name == other.connection_name and
                 self.project_name == other.project_name and
-                self.secure == other.secure)
+                self.trusted == other.trusted)
 
     def toDict(self):
         # Render to a dict to use in passing json to the launcher
@@ -648,7 +649,7 @@
         d['type'] = 'zuul'
         d['connection'] = self.connection_name
         d['project'] = self.project_name
-        d['secure'] = self.secure
+        d['trusted'] = self.trusted
         return d
 
 
@@ -2022,7 +2023,7 @@
         self.pipelines = []
         self.jobs = []
         self.project_templates = []
-        self.projects = []
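+        # Mapping of project name -> list of project config stanzas,
+        # so repeated definitions of a project can be accumulated.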
+        self.projects = {}
         self.nodesets = []
 
     def copy(self):
@@ -2039,7 +2040,8 @@
             self.pipelines.extend(conf.pipelines)
             self.jobs.extend(conf.jobs)
             self.project_templates.extend(conf.project_templates)
-            self.projects.extend(conf.projects)
+            for k, v in conf.projects.items():
+                self.projects.setdefault(k, []).extend(v)
             self.nodesets.extend(conf.nodesets)
             return
 
@@ -2065,7 +2067,8 @@
             if key in ['project', 'project-template', 'job']:
                 value['_source_context'] = source_context
             if key == 'project':
-                self.projects.append(value)
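+                # Accumulate all stanzas for this project under its name.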
+                name = value['name']
+                self.projects.setdefault(name, []).append(value)
             elif key == 'job':
                 self.jobs.append(value)
             elif key == 'project-template':
@@ -2226,10 +2229,10 @@
     def getRepo(self, source, project_name):
         """Get a project given a source and project name
 
-        Returns a tuple (secure, project) or (None, None) if the
+        Returns a tuple (trusted, project) or (None, None) if the
         project is not found.
 
-        Secure indicates the project is a config repo.
+        Trusted indicates the project is a config repo.
 
         """
 
diff --git a/zuul/scheduler.py b/zuul/scheduler.py
index 6fbac9b..2679522 100644
--- a/zuul/scheduler.py
+++ b/zuul/scheduler.py
@@ -293,11 +293,6 @@
         self.stopConnections()
         self.wake_event.set()
 
-    def testConfig(self, config_path, connections):
-        # Take the list of set up connections directly here rather than with
-        # registerConnections as we don't want to do the onLoad event yet.
-        return self._parseConfig(config_path, connections)
-
     def registerConnections(self, connections, load=True):
         # load: whether or not to trigger the onLoad for the connection. This
         # is useful for not doing a full load during layout validation.
@@ -744,6 +739,7 @@
         self.log.debug("Processing trigger event %s" % event)
         try:
             for tenant in self.abide.tenants.values():
+                reconfigured_tenant = False
                 for pipeline in tenant.layout.pipelines.values():
                     # Get the change even if the project is unknown to
                     # us for the use of updating the cache if there is
@@ -757,6 +753,16 @@
                                        "connection trigger)",
                                        e.change, pipeline.source)
                         continue
+                    if (event.type == 'change-merged' and
+                        hasattr(change, 'files') and
+                        not reconfigured_tenant and
+                        change.updatesConfig()):
+                        # The change that just landed updates the config.
+                        # Clear out cached data for this project and
+                        # perform a reconfiguration.
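+                        # Do this at most once per tenant for this event.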
+                        change.project.unparsed_config = None
+                        self.reconfigureTenant(tenant)
+                        reconfigured_tenant = True
                     if event.type == 'patchset-created':
                         pipeline.manager.removeOldVersionsOfChange(change)
                     elif event.type == 'change-abandoned':
@@ -773,7 +779,7 @@
         try:
             if isinstance(event, ReconfigureEvent):
                 self._doReconfigureEvent(event)
-            if isinstance(event, TenantReconfigureEvent):
+            elif isinstance(event, TenantReconfigureEvent):
                 self._doTenantReconfigureEvent(event)
             elif isinstance(event, PromoteEvent):
                 self._doPromoteEvent(event)
diff --git a/zuul/zk.py b/zuul/zk.py
index a456873..2009945 100644
--- a/zuul/zk.py
+++ b/zuul/zk.py
@@ -64,23 +64,6 @@
         self.chroot = chroot or ''
 
 
-def buildZooKeeperHosts(host_list):
-    '''
-    Build the ZK cluster host list for client connections.
-
-    :param list host_list: A list of
-        :py:class:`~nodepool.zk.ZooKeeperConnectionConfig` objects (one
-        per server) defining the ZooKeeper cluster servers.
-    '''
-    if not isinstance(host_list, list):
-        raise Exception("'host_list' must be a list")
-    hosts = []
-    for host_def in host_list:
-        host = '%s:%s%s' % (host_def.host, host_def.port, host_def.chroot)
-        hosts.append(host)
-    return ",".join(hosts)
-
-
 class ZooKeeper(object):
     '''
     Class implementing the ZooKeeper interface.
@@ -158,8 +141,7 @@
 
         '''
         if self.client is None:
-            hosts = buildZooKeeperHosts(host_list)
-            self.client = KazooClient(hosts=hosts, read_only=read_only)
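+            # host_list is now a ZooKeeper connection string such as
+            # "127.0.0.1:2181/chroot", which kazoo accepts directly.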
+            self.client = KazooClient(hosts=host_list, read_only=read_only)
             self.client.add_listener(self._connection_listener)
             self.client.start()
 
@@ -184,8 +166,7 @@
             (one per server) defining the ZooKeeper cluster servers.
         '''
         if self.client is not None:
-            hosts = buildZooKeeperHosts(host_list)
-            self.client.set_hosts(hosts=hosts)
+            self.client.set_hosts(hosts=host_list)
 
     def submitNodeRequest(self, node_request, watcher):
         '''