Merge "Re-enable test_timer" into feature/zuulv3
diff --git a/.zuul.yaml b/.zuul.yaml
new file mode 100644
index 0000000..bb9a96d
--- /dev/null
+++ b/.zuul.yaml
@@ -0,0 +1,15 @@
+- job:
+    name: python-linters
+    pre-run: pre
+    post-run: post
+    success-url: http://zuulv3-dev.openstack.org/logs/{build.uuid}/
+    failure-url: http://zuulv3-dev.openstack.org/logs/{build.uuid}/
+    nodes:
+      - name: worker
+        image: ubuntu-xenial
+
+- project:
+    name: openstack-infra/zuul
+    check:
+      jobs:
+        - python-linters
diff --git a/TESTING.rst b/TESTING.rst
index 56f2fbb..f6d948e 100644
--- a/TESTING.rst
+++ b/TESTING.rst
@@ -17,6 +17,16 @@
 
   pip install tox
 
+As of zuul v3, a running zookeeper is required to execute tests.
+
+*Install zookeeper*::
+
+  [apt-get | yum] install zookeeperd
+
+*Start zookeeper*::
+
+  service zookeeper start
+
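+To check that zookeeper is reachable (assuming the default client port of
+2181), its ``ruok`` command can be used; a healthy server replies ``imok``::
+
+  echo ruok | nc 127.0.0.1 2181
+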
 Run The Tests
 -------------
 
diff --git a/etc/zuul.conf-sample b/etc/zuul.conf-sample
index d7b8eae..3de145a 100644
--- a/etc/zuul.conf-sample
+++ b/etc/zuul.conf-sample
@@ -10,6 +10,7 @@
 pidfile=/var/run/zuul/zuul.pid
 state_dir=/var/lib/zuul
 status_url=https://jenkins.example.com/zuul/status
+zookeeper_hosts=127.0.0.1:2181
 
 [merger]
 git_dir=/var/lib/zuul/git
diff --git a/playbooks/post.yaml b/playbooks/post.yaml
new file mode 100644
index 0000000..a11e50a
--- /dev/null
+++ b/playbooks/post.yaml
@@ -0,0 +1,19 @@
+- hosts: all
+  tasks:
+    - name: Collect console log.
+      synchronize:
+        dest: "{{ zuul.launcher.log_root }}"
+        mode: pull
+        src: "/tmp/console.log"
+
+    - name: Collect tox logs.
+      synchronize:
+        dest: "{{ zuul.launcher.log_root }}/tox"
+        mode: pull
+        src: "/home/zuul/workspace/src/{{ zuul.project }}/.tox/pep8/log/"
+
+    - name: Publish tox logs.
+      copy:
+        dest: "/opt/zuul-logs/{{ zuul.uuid }}"
+        src: "{{ zuul.launcher.log_root }}/"
+      delegate_to: 127.0.0.1
diff --git a/playbooks/pre.yaml b/playbooks/pre.yaml
new file mode 100644
index 0000000..1a2e699
--- /dev/null
+++ b/playbooks/pre.yaml
@@ -0,0 +1,3 @@
+- hosts: all
+  roles:
+    - prepare-workspace
diff --git a/playbooks/python-linters.yaml b/playbooks/python-linters.yaml
new file mode 100644
index 0000000..bc7effe
--- /dev/null
+++ b/playbooks/python-linters.yaml
@@ -0,0 +1,7 @@
+- hosts: all
+  tasks:
+    - name: Run tox -e pep8.
+      include_role:
+        name: run-tox
+      vars:
+        run_tox_eventlist: pep8
diff --git a/playbooks/roles/prepare-workspace/defaults/main.yaml b/playbooks/roles/prepare-workspace/defaults/main.yaml
new file mode 100644
index 0000000..9127ad8
--- /dev/null
+++ b/playbooks/roles/prepare-workspace/defaults/main.yaml
@@ -0,0 +1,3 @@
+---
+# defaults/main.yaml
+prepare_workspace_root: /home/zuul/workspace
diff --git a/playbooks/roles/prepare-workspace/tasks/main.yaml b/playbooks/roles/prepare-workspace/tasks/main.yaml
new file mode 100644
index 0000000..76f9d95
--- /dev/null
+++ b/playbooks/roles/prepare-workspace/tasks/main.yaml
@@ -0,0 +1,21 @@
+- name: Ensure console.log does not exist.
+  file:
+    path: /tmp/console.log
+    state: absent
+
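+# The zuul_console daemon streams the contents of /tmp/console.log back to
+# the launcher over port 19885 (read by the zuul_stream callback plugin).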
+- name: Start zuul_console daemon.
+  zuul_console:
+    path: /tmp/console.log
+    port: 19885
+
+- name: Create workspace directory.
+  file:
+    path: "{{ prepare_workspace_root }}"
+    owner: zuul
+    group: zuul
+    state: directory
+
+- name: Synchronize src repos to workspace directory.
+  synchronize:
+    dest: "{{ prepare_workspace_root }}"
+    src: "{{ zuul.launcher.src_root }}"
diff --git a/playbooks/roles/run-tox/defaults/main.yaml b/playbooks/roles/run-tox/defaults/main.yaml
new file mode 100644
index 0000000..7f0310c
--- /dev/null
+++ b/playbooks/roles/run-tox/defaults/main.yaml
@@ -0,0 +1,3 @@
+---
+# defaults/main.yaml
+run_tox_eventlist:
diff --git a/playbooks/roles/run-tox/tasks/main.yaml b/playbooks/roles/run-tox/tasks/main.yaml
new file mode 100644
index 0000000..ca8d079
--- /dev/null
+++ b/playbooks/roles/run-tox/tasks/main.yaml
@@ -0,0 +1,4 @@
+- name: Run tox
+  shell: "/usr/local/jenkins/slave_scripts/run-tox.sh {{ run_tox_eventlist }}"
+  args:
+    chdir: "/home/zuul/workspace/src/{{ zuul.project }}"
diff --git a/test-requirements.txt b/test-requirements.txt
index aed9998..150fd2e 100644
--- a/test-requirements.txt
+++ b/test-requirements.txt
@@ -1,4 +1,4 @@
-hacking>=0.9.2,<0.10
+hacking>=0.12.0,!=0.13.0,<0.14  # Apache-2.0
 
 coverage>=3.6
 sphinx>=1.1.2,!=1.2.0,!=1.3b1,<1.3
diff --git a/tests/base.py b/tests/base.py
index 2e8012f..7ee9b9c 100755
--- a/tests/base.py
+++ b/tests/base.py
@@ -263,6 +263,16 @@
                  "comment": "This is a comment"}
         return event
 
+    def getChangeMergedEvent(self):
+        event = {"submitter": {"name": "Jenkins",
+                               "username": "jenkins"},
+                 "newRev": "29ed3b5f8f750a225c5be70235230e3a6ccb04d9",
+                 "patchSet": self.patchsets[-1],
+                 "change": self.data,
+                 "type": "change-merged",
+                 "eventCreatedOn": 1487613810}
+        return event
+
     def addApproval(self, category, value, username='reviewer_john',
                     granted_on=None, message=''):
         if not granted_on:
@@ -640,7 +650,7 @@
 
         """
         for change in changes:
-            path = os.path.join(self.jobdir.git_root, change.project)
+            path = os.path.join(self.jobdir.src_root, change.project)
             try:
                 repo = git.Repo(path)
             except NoSuchPathError as e:
@@ -722,7 +732,7 @@
         self.running_builds.append(build)
         self.job_builds[job.unique] = build
         args = json.loads(job.arguments)
-        args['zuul']['_test'] = dict(test_root=self._test_root)
+        args['vars']['zuul']['_test'] = dict(test_root=self._test_root)
         job.arguments = json.dumps(args)
         self.job_workers[job.unique] = RecordingAnsibleJob(self, job)
         self.job_workers[job.unique].run()
@@ -739,17 +749,17 @@
 
 
 class RecordingAnsibleJob(zuul.launcher.server.AnsibleJob):
-    def runPlaybooks(self):
+    def runPlaybooks(self, args):
         build = self.launcher_server.job_builds[self.job.unique]
         build.jobdir = self.jobdir
 
-        result = super(RecordingAnsibleJob, self).runPlaybooks()
+        result = super(RecordingAnsibleJob, self).runPlaybooks(args)
 
         self.launcher_server.lock.acquire()
         self.launcher_server.build_history.append(
             BuildHistory(name=build.name, result=result, changes=build.changes,
                          node=build.node, uuid=build.unique,
-                         parameters=build.parameters,
+                         parameters=build.parameters, jobdir=build.jobdir,
                          pipeline=build.parameters['ZUUL_PIPELINE'])
         )
         self.launcher_server.running_builds.remove(build)
@@ -757,16 +767,24 @@
         self.launcher_server.lock.release()
         return result
 
-    def runAnsible(self, cmd, timeout, secure=False):
+    def runAnsible(self, cmd, timeout, trusted=False):
         build = self.launcher_server.job_builds[self.job.unique]
 
         if self.launcher_server._run_ansible:
             result = super(RecordingAnsibleJob, self).runAnsible(
-                cmd, timeout, secure=secure)
+                cmd, timeout, trusted=trusted)
         else:
             result = build.run()
         return result
 
+    def getHostList(self, args):
+        self.log.debug("hostlist")
+        hosts = super(RecordingAnsibleJob, self).getHostList(args)
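+        # Force the local connection plugin so test playbooks run on the
+        # test machine instead of on real remote nodes.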
+        for name, d in hosts:
+            d['ansible_connection'] = 'local'
+        hosts.append(('localhost', dict(ansible_connection='local')))
+        return hosts
+
 
 class FakeGearmanServer(gear.Server):
     """A Gearman server for use in tests.
@@ -1193,8 +1211,8 @@
             tmp_root = os.environ.get("ZUUL_TEST_ROOT")
         self.test_root = os.path.join(tmp_root, "zuul-test")
         self.upstream_root = os.path.join(self.test_root, "upstream")
-        self.merger_git_root = os.path.join(self.test_root, "merger-git")
-        self.launcher_git_root = os.path.join(self.test_root, "launcher-git")
+        self.merger_src_root = os.path.join(self.test_root, "merger-git")
+        self.launcher_src_root = os.path.join(self.test_root, "launcher-git")
         self.state_root = os.path.join(self.test_root, "lib")
 
         if os.path.exists(self.test_root):
@@ -1208,8 +1226,8 @@
         self.config.set('zuul', 'tenant_config',
                         os.path.join(FIXTURE_DIR,
                                      self.config.get('zuul', 'tenant_config')))
-        self.config.set('merger', 'git_dir', self.merger_git_root)
-        self.config.set('launcher', 'git_dir', self.launcher_git_root)
+        self.config.set('merger', 'git_dir', self.merger_src_root)
+        self.config.set('launcher', 'git_dir', self.launcher_src_root)
         self.config.set('zuul', 'state_dir', self.state_root)
 
         # For each project in config:
@@ -1290,11 +1308,12 @@
             self.config, self.sched)
         self.nodepool = zuul.nodepool.Nodepool(self.sched)
         self.zk = zuul.zk.ZooKeeper()
-        self.zk.connect([self.zk_config])
+        self.zk.connect(self.zk_config)
 
-        self.fake_nodepool = FakeNodepool(self.zk_config.host,
-                                          self.zk_config.port,
-                                          self.zk_config.chroot)
+        self.fake_nodepool = FakeNodepool(
+            self.zk_chroot_fixture.zookeeper_host,
+            self.zk_chroot_fixture.zookeeper_port,
+            self.zk_chroot_fixture.zookeeper_chroot)
 
         self.sched.setLauncher(self.launch_client)
         self.sched.setMerger(self.merge_client)
@@ -1370,7 +1389,7 @@
 
     def setupZK(self):
         self.zk_chroot_fixture = self.useFixture(ChrootedKazooFixture())
-        self.zk_config = zuul.zk.ZooKeeperConnectionConfig(
+        self.zk_config = '%s:%s%s' % (
             self.zk_chroot_fixture.zookeeper_host,
             self.zk_chroot_fixture.zookeeper_port,
             self.zk_chroot_fixture.zookeeper_chroot)
diff --git a/tests/fixtures/config/ansible/git/common-config/playbooks/python27.yaml b/tests/fixtures/config/ansible/git/common-config/playbooks/python27.yaml
index 6b79a78..45acb87 100644
--- a/tests/fixtures/config/ansible/git/common-config/playbooks/python27.yaml
+++ b/tests/fixtures/config/ansible/git/common-config/playbooks/python27.yaml
@@ -1,7 +1,7 @@
 - hosts: all
   tasks:
     - file:
-        path: "{{zuul._test.test_root}}/{{zuul.uuid}}.flag"
+        path: "{{flagpath}}"
         state: touch
     - copy:
         src: "{{zuul._test.test_root}}/{{zuul.uuid}}.flag"
diff --git a/tests/fixtures/config/ansible/git/common-config/playbooks/timeout.yaml b/tests/fixtures/config/ansible/git/common-config/playbooks/timeout.yaml
new file mode 100644
index 0000000..4af20eb
--- /dev/null
+++ b/tests/fixtures/config/ansible/git/common-config/playbooks/timeout.yaml
@@ -0,0 +1,4 @@
+- hosts: all
+  tasks:
+    - name: Pause for 60 seconds, so zuul aborts our job.
+      shell: sleep 60
diff --git a/tests/fixtures/config/ansible/git/common-config/zuul.yaml b/tests/fixtures/config/ansible/git/common-config/zuul.yaml
index 7373eff..30148f0 100644
--- a/tests/fixtures/config/ansible/git/common-config/zuul.yaml
+++ b/tests/fixtures/config/ansible/git/common-config/zuul.yaml
@@ -40,5 +40,12 @@
     name: python27
     pre-run: pre
     post-run: post
+    vars:
+      flagpath: "{{zuul._test.test_root}}/{{zuul.uuid}}.flag"
     roles:
       - zuul: bare-role
+
+- job:
+    parent: python27
+    name: timeout
+    timeout: 1
diff --git a/tests/fixtures/config/ansible/git/org_project/.zuul.yaml b/tests/fixtures/config/ansible/git/org_project/.zuul.yaml
index 6abfc47..c76ba70 100644
--- a/tests/fixtures/config/ansible/git/org_project/.zuul.yaml
+++ b/tests/fixtures/config/ansible/git/org_project/.zuul.yaml
@@ -9,3 +9,4 @@
       jobs:
         - python27
         - faillocal
+        - timeout
diff --git a/tests/fixtures/config/openstack/git/project-config/playbooks/dsvm.yaml b/tests/fixtures/config/openstack/git/project-config/playbooks/dsvm.yaml
new file mode 100644
index 0000000..f679dce
--- /dev/null
+++ b/tests/fixtures/config/openstack/git/project-config/playbooks/dsvm.yaml
@@ -0,0 +1,2 @@
+- hosts: all
+  tasks: []
diff --git a/tests/fixtures/config/openstack/git/project-config/zuul.yaml b/tests/fixtures/config/openstack/git/project-config/zuul.yaml
index 9c2231a..420d979 100644
--- a/tests/fixtures/config/openstack/git/project-config/zuul.yaml
+++ b/tests/fixtures/config/openstack/git/project-config/zuul.yaml
@@ -71,12 +71,22 @@
         - python27
         - python35
 
+- job:
+    name: dsvm
+    parent: base
+    repos:
+      - openstack/keystone
+      - openstack/nova
+
 # Project definitions
 
 - project:
     name: openstack/nova
     templates:
       - python-jobs
+    check:
+      jobs:
+        - dsvm
     gate:
       queue: integrated
 
@@ -84,5 +94,8 @@
     name: openstack/keystone
     templates:
       - python-jobs
+    check:
+      jobs:
+        - dsvm
     gate:
       queue: integrated
diff --git a/tests/fixtures/main.yaml b/tests/fixtures/main.yaml
deleted file mode 100644
index f9ec378..0000000
--- a/tests/fixtures/main.yaml
+++ /dev/null
@@ -1,4 +0,0 @@
-tenants:
-  - name: openstack
-    include:
-      - layout.yaml
diff --git a/tests/unit/test_cloner.py b/tests/unit/test_cloner.py
index 2cdc826..02ae910 100644
--- a/tests/unit/test_cloner.py
+++ b/tests/unit/test_cloner.py
@@ -91,7 +91,7 @@
                 workspace=self.workspace_root,
                 zuul_branch=build.parameters['ZUUL_BRANCH'],
                 zuul_ref=build.parameters['ZUUL_REF'],
-                zuul_url=self.git_root,
+                zuul_url=self.src_root,
                 cache_dir=cache_root,
             )
             cloner.execute()
@@ -149,7 +149,7 @@
                 workspace=self.workspace_root,
                 zuul_branch=build.parameters['ZUUL_BRANCH'],
                 zuul_ref=build.parameters['ZUUL_REF'],
-                zuul_url=self.git_root,
+                zuul_url=self.src_root,
             )
             cloner.execute()
             work = self.getWorkspaceRepos(projects)
@@ -219,7 +219,7 @@
                 workspace=self.workspace_root,
                 zuul_branch=build.parameters['ZUUL_BRANCH'],
                 zuul_ref=build.parameters['ZUUL_REF'],
-                zuul_url=self.git_root,
+                zuul_url=self.src_root,
             )
             cloner.execute()
             work = self.getWorkspaceRepos(projects)
@@ -333,7 +333,7 @@
                 workspace=self.workspace_root,
                 zuul_branch=build.parameters['ZUUL_BRANCH'],
                 zuul_ref=build.parameters['ZUUL_REF'],
-                zuul_url=self.git_root,
+                zuul_url=self.src_root,
                 branch='stable/havana',  # Old branch for upgrade
             )
             cloner.execute()
@@ -395,7 +395,7 @@
                 workspace=self.workspace_root,
                 zuul_branch=build.parameters['ZUUL_BRANCH'],
                 zuul_ref=build.parameters['ZUUL_REF'],
-                zuul_url=self.git_root,
+                zuul_url=self.src_root,
                 branch='master',  # New branch for upgrade
             )
             cloner.execute()
@@ -481,7 +481,7 @@
                 workspace=self.workspace_root,
                 zuul_branch=build.parameters['ZUUL_BRANCH'],
                 zuul_ref=build.parameters['ZUUL_REF'],
-                zuul_url=self.git_root,
+                zuul_url=self.src_root,
                 project_branches={'org/project4': 'master'},
             )
             cloner.execute()
@@ -546,7 +546,7 @@
                 workspace=self.workspace_root,
                 zuul_branch=build.parameters.get('ZUUL_BRANCH', None),
                 zuul_ref=build.parameters.get('ZUUL_REF', None),
-                zuul_url=self.git_root,
+                zuul_url=self.src_root,
                 branch='stable/havana',
             )
             cloner.execute()
@@ -581,7 +581,7 @@
             workspace=self.workspace_root,
             zuul_branch=None,
             zuul_ref='master',
-            zuul_url=self.git_root,
+            zuul_url=self.src_root,
             zuul_project=project,
             zuul_newrev=newRev,
         )
@@ -607,7 +607,7 @@
             workspace=self.workspace_root,
             zuul_branch=None,
             zuul_ref='master',
-            zuul_url=self.git_root,
+            zuul_url=self.src_root,
             zuul_project=project,
             zuul_newrev=newRev
         )
diff --git a/tests/unit/test_model.py b/tests/unit/test_model.py
index 9dac383..9bd405e 100644
--- a/tests/unit/test_model.py
+++ b/tests/unit/test_model.py
@@ -197,7 +197,7 @@
         })
         layout.addJob(python27essex)
 
-        project_config = configloader.ProjectParser.fromYaml(tenant, layout, {
+        project_config = configloader.ProjectParser.fromYaml(tenant, layout, [{
             '_source_context': context,
             'name': 'project',
             'gate': {
@@ -205,7 +205,7 @@
                     'python27'
                 ]
             }
-        })
+        }])
         layout.addProjectConfig(project_config, update_pipeline=False)
 
         change = model.Change(project)
@@ -406,7 +406,7 @@
         })
         layout.addJob(python27diablo)
 
-        project_config = configloader.ProjectParser.fromYaml(tenant, layout, {
+        project_config = configloader.ProjectParser.fromYaml(tenant, layout, [{
             '_source_context': context,
             'name': 'project',
             'gate': {
@@ -414,7 +414,7 @@
                     {'python27': {'timeout': 70}}
                 ]
             }
-        })
+        }])
         layout.addProjectConfig(project_config, update_pipeline=False)
 
         change = model.Change(project)
@@ -471,7 +471,7 @@
         })
         layout.addJob(python27)
 
-        project_config = configloader.ProjectParser.fromYaml(tenant, layout, {
+        project_config = configloader.ProjectParser.fromYaml(tenant, layout, [{
             '_source_context': context,
             'name': 'project',
             'gate': {
@@ -479,7 +479,7 @@
                     'python27',
                 ]
             }
-        })
+        }])
         layout.addProjectConfig(project_config, update_pipeline=False)
 
         change = model.Change(project)
diff --git a/tests/unit/test_nodepool.py b/tests/unit/test_nodepool.py
index 6462f9a..19c7e05 100644
--- a/tests/unit/test_nodepool.py
+++ b/tests/unit/test_nodepool.py
@@ -30,22 +30,23 @@
         super(BaseTestCase, self).setUp()
 
         self.zk_chroot_fixture = self.useFixture(ChrootedKazooFixture())
-        self.zk_config = zuul.zk.ZooKeeperConnectionConfig(
+        self.zk_config = '%s:%s%s' % (
             self.zk_chroot_fixture.zookeeper_host,
             self.zk_chroot_fixture.zookeeper_port,
             self.zk_chroot_fixture.zookeeper_chroot)
 
         self.zk = zuul.zk.ZooKeeper()
-        self.zk.connect([self.zk_config])
+        self.zk.connect(self.zk_config)
 
         self.provisioned_requests = []
         # This class implements the scheduler methods zuul.nodepool
         # needs, so we pass 'self' as the scheduler.
         self.nodepool = zuul.nodepool.Nodepool(self)
 
-        self.fake_nodepool = FakeNodepool(self.zk_config.host,
-                                          self.zk_config.port,
-                                          self.zk_config.chroot)
+        self.fake_nodepool = FakeNodepool(
+            self.zk_chroot_fixture.zookeeper_host,
+            self.zk_chroot_fixture.zookeeper_port,
+            self.zk_chroot_fixture.zookeeper_chroot)
 
     def waitForRequests(self):
         # Wait until all requests are complete.
diff --git a/tests/unit/test_openstack.py b/tests/unit/test_openstack.py
index d0c7ab2..670e578 100644
--- a/tests/unit/test_openstack.py
+++ b/tests/unit/test_openstack.py
@@ -14,6 +14,8 @@
 # License for the specific language governing permissions and limitations
 # under the License.
 
+import os
+
 from tests.base import AnsibleZuulTestCase
 
 
@@ -54,3 +56,45 @@
                          "A should report start and success")
         self.assertEqual(self.getJobFromHistory('python27').node,
                          'ubuntu-trusty')
+
+    def test_dsvm_keystone_repo(self):
+        self.launch_server.keep_jobdir = True
+        A = self.fake_gerrit.addFakeChange('openstack/nova', 'master', 'A')
+        self.fake_gerrit.addEvent(A.getPatchsetCreatedEvent(1))
+        self.waitUntilSettled()
+
+        self.assertHistory([
+            dict(name='dsvm', result='SUCCESS', changes='1,1')])
+        build = self.getJobFromHistory('dsvm')
+
+        # Check that a change to nova triggered a keystone clone
+        launcher_git_dir = os.path.join(self.launcher_src_root,
+                                        'openstack', 'keystone', '.git')
+        self.assertTrue(os.path.exists(launcher_git_dir),
+                        msg='openstack/keystone should be cloned.')
+
+        jobdir_git_dir = os.path.join(build.jobdir.src_root,
+                                      'openstack', 'keystone', '.git')
+        self.assertTrue(os.path.exists(jobdir_git_dir),
+                        msg='openstack/keystone should be cloned.')
+
+    def test_dsvm_nova_repo(self):
+        self.launch_server.keep_jobdir = True
+        A = self.fake_gerrit.addFakeChange('openstack/keystone', 'master', 'A')
+        self.fake_gerrit.addEvent(A.getPatchsetCreatedEvent(1))
+        self.waitUntilSettled()
+
+        self.assertHistory([
+            dict(name='dsvm', result='SUCCESS', changes='1,1')])
+        build = self.getJobFromHistory('dsvm')
+
+        # Check that a change to keystone triggered a nova clone
+        launcher_git_dir = os.path.join(self.launcher_src_root,
+                                        'openstack', 'nova', '.git')
+        self.assertTrue(os.path.exists(launcher_git_dir),
+                        msg='openstack/nova should be cloned.')
+
+        jobdir_git_dir = os.path.join(build.jobdir.src_root,
+                                      'openstack', 'nova', '.git')
+        self.assertTrue(os.path.exists(jobdir_git_dir),
+                        msg='openstack/nova should be cloned.')
diff --git a/tests/unit/test_scheduler.py b/tests/unit/test_scheduler.py
index 7d48e4d..07d832f 100755
--- a/tests/unit/test_scheduler.py
+++ b/tests/unit/test_scheduler.py
@@ -1378,10 +1378,10 @@
         self.assertEmptyQueues()
         self.build_history = []
 
-        path = os.path.join(self.merger_git_root, "org/project")
+        path = os.path.join(self.merger_src_root, "org/project")
         if os.path.exists(path):
             repack_repo(path)
-        path = os.path.join(self.launcher_git_root, "org/project")
+        path = os.path.join(self.launcher_src_root, "org/project")
         if os.path.exists(path):
             repack_repo(path)
 
@@ -1410,10 +1410,10 @@
         A.addPatchset(large=True)
         path = os.path.join(self.upstream_root, "org/project1")
         repack_repo(path)
-        path = os.path.join(self.merger_git_root, "org/project1")
+        path = os.path.join(self.merger_src_root, "org/project1")
         if os.path.exists(path):
             repack_repo(path)
-        path = os.path.join(self.launcher_git_root, "org/project1")
+        path = os.path.join(self.launcher_src_root, "org/project1")
         if os.path.exists(path):
             repack_repo(path)
 
@@ -2729,10 +2729,10 @@
 
         # Delete org/new-project zuul repo. Should be recloned.
         p = 'org/delete-project'
-        if os.path.exists(os.path.join(self.merger_git_root, p)):
-            shutil.rmtree(os.path.join(self.merger_git_root, p))
-        if os.path.exists(os.path.join(self.launcher_git_root, p)):
-            shutil.rmtree(os.path.join(self.launcher_git_root, p))
+        if os.path.exists(os.path.join(self.merger_src_root, p)):
+            shutil.rmtree(os.path.join(self.merger_src_root, p))
+        if os.path.exists(os.path.join(self.launcher_src_root, p)):
+            shutil.rmtree(os.path.join(self.launcher_src_root, p))
 
         B = self.fake_gerrit.addFakeChange('org/delete-project', 'master', 'B')
 
@@ -3973,7 +3973,7 @@
         self.launch_server.release('.*-merge')
         self.waitUntilSettled()
 
-        path = os.path.join(self.builds[0].jobdir.git_root, "org/project1")
+        path = os.path.join(self.builds[0].jobdir.src_root, "org/project1")
         repo = git.Repo(path)
         repo_messages = [c.message.strip() for c in repo.iter_commits(ref)]
         repo_messages.reverse()
@@ -3981,7 +3981,7 @@
             'initial commit', 'add content from fixture', 'A-1']
         self.assertEqual(repo_messages, correct_messages)
 
-        path = os.path.join(self.builds[0].jobdir.git_root, "org/project2")
+        path = os.path.join(self.builds[0].jobdir.src_root, "org/project2")
         repo = git.Repo(path)
         repo_messages = [c.message.strip() for c in repo.iter_commits(ref)]
         repo_messages.reverse()
@@ -4630,7 +4630,7 @@
         build = self.builds[-1]
         ref = self.getParameter(build, 'ZUUL_REF')
 
-        path = os.path.join(build.jobdir.git_root, project)
+        path = os.path.join(build.jobdir.src_root, project)
         repo = git.Repo(path)
         repo_messages = [c.message.strip() for c in repo.iter_commits(ref)]
         repo_messages.reverse()
@@ -4701,7 +4701,7 @@
         self.assertEqual(self.getParameter(build, 'ZUUL_BRANCH'), 'mp')
         ref = self.getParameter(build, 'ZUUL_REF')
         path = os.path.join(
-            build.jobdir.git_root, 'org/project-merge-branches')
+            build.jobdir.src_root, 'org/project-merge-branches')
         repo = git.Repo(path)
 
         repo_messages = [c.message.strip() for c in repo.iter_commits(ref)]
@@ -4746,7 +4746,7 @@
         self.log.debug("Got Zuul commit for change A: %s" % commit_A)
 
         path = os.path.join(
-            job_A.jobdir.git_root, "org/project-merge-branches")
+            job_A.jobdir.src_root, "org/project-merge-branches")
         repo = git.Repo(path)
         repo_messages = [c.message.strip()
                          for c in repo.iter_commits(ref_A)]
@@ -4768,7 +4768,7 @@
         self.log.debug("Got Zuul commit for change B: %s" % commit_B)
 
         path = os.path.join(
-            job_B.jobdir.git_root, "org/project-merge-branches")
+            job_B.jobdir.src_root, "org/project-merge-branches")
         repo = git.Repo(path)
         repo_messages = [c.message.strip()
                          for c in repo.iter_commits(ref_B)]
@@ -4789,7 +4789,7 @@
         self.log.debug("Got Zuul ref for change C: %s" % ref_C)
         self.log.debug("Got Zuul commit for change C: %s" % commit_C)
         path = os.path.join(
-            job_C.jobdir.git_root, "org/project-merge-branches")
+            job_C.jobdir.src_root, "org/project-merge-branches")
         repo = git.Repo(path)
         repo_messages = [c.message.strip()
                          for c in repo.iter_commits(ref_C)]
diff --git a/tests/unit/test_v3.py b/tests/unit/test_v3.py
index 27e2275..f69ffe6 100644
--- a/tests/unit/test_v3.py
+++ b/tests/unit/test_v3.py
@@ -17,7 +17,7 @@
 import os
 import textwrap
 
-from tests.base import AnsibleZuulTestCase
+from tests.base import AnsibleZuulTestCase, ZuulTestCase
 
 
 class TestMultipleTenants(AnsibleZuulTestCase):
@@ -63,7 +63,7 @@
                          "not affect tenant one")
 
 
-class TestInRepoConfig(AnsibleZuulTestCase):
+class TestInRepoConfig(ZuulTestCase):
     # A temporary class to hold new tests while others are disabled
 
     tenant_config_file = 'config/in-repo/main.yaml'
@@ -115,6 +115,8 @@
         self.assertHistory([
             dict(name='project-test2', result='SUCCESS', changes='1,1')])
 
+        self.fake_gerrit.addEvent(A.getChangeMergedEvent())
+
         # Now that the config change is landed, it should be live for
         # subsequent changes.
         B = self.fake_gerrit.addFakeChange('org/project', 'master', 'B')
@@ -127,6 +129,62 @@
             dict(name='project-test2', result='SUCCESS', changes='1,1'),
             dict(name='project-test2', result='SUCCESS', changes='2,1')])
 
+    def test_in_repo_branch(self):
+        in_repo_conf = textwrap.dedent(
+            """
+            - job:
+                name: project-test2
+
+            - project:
+                name: org/project
+                tenant-one-gate:
+                  jobs:
+                    - project-test2
+            """)
+
+        in_repo_playbook = textwrap.dedent(
+            """
+            - hosts: all
+              tasks: []
+            """)
+
+        file_dict = {'.zuul.yaml': in_repo_conf,
+                     'playbooks/project-test2.yaml': in_repo_playbook}
+        self.create_branch('org/project', 'stable')
+        A = self.fake_gerrit.addFakeChange('org/project', 'stable', 'A',
+                                           files=file_dict)
+        A.addApproval('code-review', 2)
+        self.fake_gerrit.addEvent(A.addApproval('approved', 1))
+        self.waitUntilSettled()
+        self.assertEqual(A.data['status'], 'MERGED')
+        self.assertEqual(A.reported, 2,
+                         "A should report start and success")
+        self.assertIn('tenant-one-gate', A.messages[1],
+                      "A should transit tenant-one gate")
+        self.assertHistory([
+            dict(name='project-test2', result='SUCCESS', changes='1,1')])
+        self.fake_gerrit.addEvent(A.getChangeMergedEvent())
+
+        # The config change should not affect master.
+        B = self.fake_gerrit.addFakeChange('org/project', 'master', 'B')
+        B.addApproval('code-review', 2)
+        self.fake_gerrit.addEvent(B.addApproval('approved', 1))
+        self.waitUntilSettled()
+        self.assertHistory([
+            dict(name='project-test2', result='SUCCESS', changes='1,1'),
+            dict(name='project-test1', result='SUCCESS', changes='2,1')])
+
+        # The config change should be live for further changes on
+        # stable.
+        C = self.fake_gerrit.addFakeChange('org/project', 'stable', 'C')
+        C.addApproval('code-review', 2)
+        self.fake_gerrit.addEvent(C.addApproval('approved', 1))
+        self.waitUntilSettled()
+        self.assertHistory([
+            dict(name='project-test2', result='SUCCESS', changes='1,1'),
+            dict(name='project-test1', result='SUCCESS', changes='2,1'),
+            dict(name='project-test2', result='SUCCESS', changes='3,1')])
+
 
 class TestAnsible(AnsibleZuulTestCase):
     # A temporary class to hold new tests while others are disabled
@@ -137,6 +195,8 @@
         A = self.fake_gerrit.addFakeChange('org/project', 'master', 'A')
         self.fake_gerrit.addEvent(A.getPatchsetCreatedEvent(1))
         self.waitUntilSettled()
+        build = self.getJobFromHistory('timeout')
+        self.assertEqual(build.result, 'ABORTED')
         build = self.getJobFromHistory('faillocal')
         self.assertEqual(build.result, 'FAILURE')
         build = self.getJobFromHistory('python27')
diff --git a/tools/update-storyboard.py b/tools/update-storyboard.py
index 6800a35..12e6916 100644
--- a/tools/update-storyboard.py
+++ b/tools/update-storyboard.py
@@ -67,6 +67,7 @@
     'inprogress': ['In Progress', 'Blocked'],
     'review': ['In Progress', 'Blocked'],
     'merged': None,
+    'invalid': None,
 }
 
 
diff --git a/zuul/ansible/action/add_host.py b/zuul/ansible/action/add_host.py
index e41e4e1..d4b24aa 100644
--- a/zuul/ansible/action/add_host.py
+++ b/zuul/ansible/action/add_host.py
@@ -13,7 +13,8 @@
 # You should have received a copy of the GNU General Public License
 # along with this software.  If not, see <http://www.gnu.org/licenses/>.
 
-from zuul.ansible.plugins.action import add_host
+from zuul.ansible import paths
+add_host = paths._import_ansible_action_plugin("add_host")
 
 
 class ActionModule(add_host.ActionModule):
diff --git a/zuul/ansible/action/assemble.py b/zuul/ansible/action/assemble.py
index d0bff37..2cc7eb7 100644
--- a/zuul/ansible/action/assemble.py
+++ b/zuul/ansible/action/assemble.py
@@ -15,7 +15,7 @@
 
 
 from zuul.ansible import paths
-from zuul.ansible.plugins.action import assemble
+assemble = paths._import_ansible_action_plugin("assemble")
 
 
 class ActionModule(assemble.ActionModule):
diff --git a/zuul/ansible/action/copy.py b/zuul/ansible/action/copy.py
index 5dc9fa8..bb54430 100644
--- a/zuul/ansible/action/copy.py
+++ b/zuul/ansible/action/copy.py
@@ -15,7 +15,7 @@
 
 
 from zuul.ansible import paths
-from zuul.ansible.plugins.action import copy
+copy = paths._import_ansible_action_plugin("copy")
 
 
 class ActionModule(copy.ActionModule):
diff --git a/zuul/ansible/action/fetch.py b/zuul/ansible/action/fetch.py
index fe06c3b..170b655 100644
--- a/zuul/ansible/action/fetch.py
+++ b/zuul/ansible/action/fetch.py
@@ -15,7 +15,7 @@
 
 
 from zuul.ansible import paths
-from zuul.ansible.plugins.action import fetch
+fetch = paths._import_ansible_action_plugin("fetch")
 
 
 class ActionModule(fetch.ActionModule):
diff --git a/zuul/ansible/action/include_vars.py b/zuul/ansible/action/include_vars.py
index aa0e7d8..5bc1d76 100644
--- a/zuul/ansible/action/include_vars.py
+++ b/zuul/ansible/action/include_vars.py
@@ -15,7 +15,7 @@
 
 
 from zuul.ansible import paths
-from zuul.ansible.plugins.action import include_vars
+include_vars = paths._import_ansible_action_plugin("include_vars")
 
 
 class ActionModule(include_vars.ActionModule):
diff --git a/zuul/ansible/action/network.py b/zuul/ansible/action/network.py
index 31a8739..41fc560 100644
--- a/zuul/ansible/action/network.py
+++ b/zuul/ansible/action/network.py
@@ -14,7 +14,8 @@
 # along with this software.  If not, see <http://www.gnu.org/licenses/>.
 
 
-from zuul.ansible.plugins.action import network
+from zuul.ansible import paths
+network = paths._import_ansible_action_plugin("network")
 
 
 class ActionModule(network.ActionModule):
diff --git a/zuul/ansible/action/normal.py b/zuul/ansible/action/normal.py
index d4b2396..b18cb51 100644
--- a/zuul/ansible/action/normal.py
+++ b/zuul/ansible/action/normal.py
@@ -13,8 +13,8 @@
 # You should have received a copy of the GNU General Public License
 # along with this software.  If not, see <http://www.gnu.org/licenses/>.
 
-
-from zuul.ansible.plugins.action import normal
+from zuul.ansible import paths
+normal = paths._import_ansible_action_plugin('normal')
 
 
 class ActionModule(normal.ActionModule):
diff --git a/zuul/ansible/action/patch.py b/zuul/ansible/action/patch.py
index d630844..0b43c82 100644
--- a/zuul/ansible/action/patch.py
+++ b/zuul/ansible/action/patch.py
@@ -15,7 +15,7 @@
 
 
 from zuul.ansible import paths
-from zuul.ansible.plugins.action import patch
+patch = paths._import_ansible_action_plugin("patch")
 
 
 class ActionModule(patch.ActionModule):
diff --git a/zuul/ansible/action/script.py b/zuul/ansible/action/script.py
index bd3d5d5..c95d357 100644
--- a/zuul/ansible/action/script.py
+++ b/zuul/ansible/action/script.py
@@ -15,10 +15,10 @@
 
 
 from zuul.ansible import paths
-from zuul.ansible.plugins.action import copy
+script = paths._import_ansible_action_plugin("script")
 
 
-class ActionModule(copy.ActionModule):
+class ActionModule(script.ActionModule):
 
     def run(self, tmp=None, task_vars=None):
 
diff --git a/zuul/ansible/action/synchronize.py b/zuul/ansible/action/synchronize.py
index cbb7ea2..75fd45f 100644
--- a/zuul/ansible/action/synchronize.py
+++ b/zuul/ansible/action/synchronize.py
@@ -15,7 +15,7 @@
 
 
 from zuul.ansible import paths
-from zuul.ansible.plugins.action import synchronize
+synchronize = paths._import_ansible_action_plugin("synchronize")
 
 
 class ActionModule(synchronize.ActionModule):
@@ -24,10 +24,15 @@
 
         source = self._task.args.get('src', None)
         dest = self._task.args.get('dest', None)
-        pull = self._task.args.get('pull', False)
+        mode = self._task.args.get('mode', 'push')
 
-        if not pull and not paths._is_safe_path(source):
+        if 'rsync_opts' not in self._task.args:
+            self._task.args['rsync_opts'] = []
+        if '--safe-links' not in self._task.args['rsync_opts']:
+            self._task.args['rsync_opts'].append('--safe-links')
+
+        if mode == 'push' and not paths._is_safe_path(source):
             return paths._fail_dict(source, prefix='Syncing files from')
-        if pull and not paths._is_safe_path(dest):
+        if mode == 'pull' and not paths._is_safe_path(dest):
             return paths._fail_dict(dest, prefix='Syncing files to')
         return super(ActionModule, self).run(tmp, task_vars)
diff --git a/zuul/ansible/action/template.py b/zuul/ansible/action/template.py
index 96471ae..c6df3d8 100644
--- a/zuul/ansible/action/template.py
+++ b/zuul/ansible/action/template.py
@@ -15,7 +15,7 @@
 
 
 from zuul.ansible import paths
-from zuul.ansible.plugins.action import template
+template = paths._import_ansible_action_plugin("template")
 
 
 class ActionModule(template.ActionModule):
diff --git a/zuul/ansible/action/unarchive.py b/zuul/ansible/action/unarchive.py
index c3f6e91..c78c331 100644
--- a/zuul/ansible/action/unarchive.py
+++ b/zuul/ansible/action/unarchive.py
@@ -15,7 +15,7 @@
 
 
 from zuul.ansible import paths
-from zuul.ansible.plugins.action import unarchive
+unarchive = paths._import_ansible_action_plugin("unarchive")
 
 
 class ActionModule(unarchive.ActionModule):
diff --git a/zuul/ansible/action/win_copy.py b/zuul/ansible/action/win_copy.py
index eef3a1c..2751585 100644
--- a/zuul/ansible/action/win_copy.py
+++ b/zuul/ansible/action/win_copy.py
@@ -15,7 +15,7 @@
 
 
 from zuul.ansible import paths
-from zuul.ansible.plugins.action import win_copy
+win_copy = paths._import_ansible_action_plugin("win_copy")
 
 
 class ActionModule(win_copy.ActionModule):
diff --git a/zuul/ansible/action/win_template.py b/zuul/ansible/action/win_template.py
index 2a47216..7a357f9 100644
--- a/zuul/ansible/action/win_template.py
+++ b/zuul/ansible/action/win_template.py
@@ -15,7 +15,7 @@
 
 
 from zuul.ansible import paths
-from zuul.ansible.plugins.action import win_template
+win_template = paths._import_ansible_action_plugin("win_template")
 
 
 class ActionModule(win_template.ActionModule):
diff --git a/zuul/ansible/callback/__init__.py b/zuul/ansible/callback/__init__.py
new file mode 100644
index 0000000..e69de29
--- /dev/null
+++ b/zuul/ansible/callback/__init__.py
diff --git a/zuul/ansible/callback/zuul_stream.py b/zuul/ansible/callback/zuul_stream.py
new file mode 100644
index 0000000..9b8bccd
--- /dev/null
+++ b/zuul/ansible/callback/zuul_stream.py
@@ -0,0 +1,96 @@
+# Copyright 2017 Red Hat, Inc.
+#
+# Zuul is free software: you can redistribute it and/or modify
+# it under the terms of the GNU General Public License as published by
+# the Free Software Foundation, either version 3 of the License, or
+# (at your option) any later version.
+#
+# Zuul is distributed in the hope that it will be useful,
+# but WITHOUT ANY WARRANTY; without even the implied warranty of
+# MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE.  See the
+# GNU General Public License for more details.
+#
+# You should have received a copy of the GNU General Public License
+# along with Ansible.  If not, see <http://www.gnu.org/licenses/>.
+
+import os
+import multiprocessing
+import socket
+import time
+
+from ansible.plugins.callback import default
+
+LOG_STREAM_PORT = 19885
+
+
+def linesplit(socket):
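+    # Buffer socket reads and yield complete newline-terminated lines;
+    # any trailing partial line is yielded once the peer stops sending.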
+    buff = socket.recv(4096)
+    buffering = True
+    while buffering:
+        if "\n" in buff:
+            (line, buff) = buff.split("\n", 1)
+            yield line + "\n"
+        else:
+            more = socket.recv(4096)
+            if not more:
+                buffering = False
+            else:
+                buff += more
+    if buff:
+        yield buff
+
+
+class CallbackModule(default.CallbackModule):
+
+    '''
+    This is the Zuul streaming callback. It's based on the default
+    callback plugin, but streams results from shell commands.
+    '''
+
+    CALLBACK_VERSION = 2.0
+    CALLBACK_TYPE = 'stdout'
+    CALLBACK_NAME = 'zuul_stream'
+
+    def __init__(self):
+
+        super(CallbackModule, self).__init__()
+        self._task = None
+        self._daemon_running = False
+        self._daemon_stamp = 'daemon-stamp-%s'
+        self._host_dict = {}
+
+    def _read_log(self, host, ip):
+        self._display.display("[%s] starting to log" % host)
+        s = socket.socket(socket.AF_INET, socket.SOCK_STREAM)
+        while True:
+            try:
+                s.connect((ip, LOG_STREAM_PORT))
+            except Exception:
+                self._display.display("[%s] Waiting on logger" % host)
+                time.sleep(0.1)
+                continue
+            for line in linesplit(s):
+                self._display.display("[%s] %s " % (host, line.strip()))
+
+    def v2_playbook_on_play_start(self, play):
+        self._play = play
+        super(CallbackModule, self).v2_playbook_on_play_start(play)
+
+    def v2_playbook_on_task_start(self, task, is_conditional):
+        self._task = task
+
+        if self._play.strategy != 'free':
+            self._print_task_banner(task)
+        if task.action == 'command':
+            play_vars = self._play._variable_manager._hostvars
+            for host in self._play.hosts:
+                ip = play_vars[host]['ansible_host']
+                daemon_stamp = self._daemon_stamp % host
+                if not os.path.exists(daemon_stamp):
+                    self._host_dict[host] = ip
+                    # Touch stamp file
+                    open(daemon_stamp, 'w').close()
+                    p = multiprocessing.Process(
+                        target=self._read_log, args=(host, ip))
+                    p.daemon = True
+                    p.start()
diff --git a/zuul/ansible/library/command.py b/zuul/ansible/library/command.py
index 6390322..328ae7b 100644
--- a/zuul/ansible/library/command.py
+++ b/zuul/ansible/library/command.py
@@ -121,12 +121,13 @@
 from ast import literal_eval
 
 
+LOG_STREAM_FILE = '/tmp/console.log'
 PASSWD_ARG_RE = re.compile(r'^[-]{0,2}pass[-]?(word|wd)?')
 
 
 class Console(object):
     def __enter__(self):
-        self.logfile = open('/tmp/console.html', 'a', 0)
+        self.logfile = open(LOG_STREAM_FILE, 'a', 0)
         return self
 
     def __exit__(self, etype, value, tb):
diff --git a/zuul/ansible/library/zuul_console.py b/zuul/ansible/library/zuul_console.py
index e70dac8..1932cf9 100644
--- a/zuul/ansible/library/zuul_console.py
+++ b/zuul/ansible/library/zuul_console.py
@@ -20,6 +20,9 @@
 import socket
 import threading
 
+LOG_STREAM_FILE = '/tmp/console.log'
+LOG_STREAM_PORT = 19885
+
 
 def daemonize():
     # A really basic daemonize method that should work well enough for
@@ -155,15 +158,15 @@
 
 
 def test():
-    s = Server('/tmp/console.html', 19885)
+    s = Server(LOG_STREAM_FILE, LOG_STREAM_PORT)
     s.run()
 
 
 def main():
     module = AnsibleModule(
         argument_spec=dict(
-            path=dict(default='/tmp/console.html'),
-            port=dict(default=19885, type='int'),
+            path=dict(default=LOG_STREAM_FILE),
+            port=dict(default=LOG_STREAM_PORT, type='int'),
         )
     )
 
diff --git a/zuul/ansible/library/zuul_log.py b/zuul/ansible/library/zuul_log.py
deleted file mode 100644
index 4b377d9..0000000
--- a/zuul/ansible/library/zuul_log.py
+++ /dev/null
@@ -1,58 +0,0 @@
-#!/usr/bin/python
-
-# Copyright (c) 2016 IBM Corp.
-# Copyright (c) 2016 Red Hat
-#
-# This module is free software: you can redistribute it and/or modify
-# it under the terms of the GNU General Public License as published by
-# the Free Software Foundation, either version 3 of the License, or
-# (at your option) any later version.
-#
-# This software is distributed in the hope that it will be useful,
-# but WITHOUT ANY WARRANTY; without even the implied warranty of
-# MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE.  See the
-# GNU General Public License for more details.
-#
-# You should have received a copy of the GNU General Public License
-# along with this software.  If not, see <http://www.gnu.org/licenses/>.
-
-import datetime
-
-
-class Console(object):
-    def __enter__(self):
-        self.logfile = open('/tmp/console.html', 'a', 0)
-        return self
-
-    def __exit__(self, etype, value, tb):
-        self.logfile.close()
-
-    def addLine(self, ln):
-        ts = datetime.datetime.now()
-        outln = '%s | %s' % (str(ts), ln)
-        self.logfile.write(outln)
-
-
-def log(msg):
-    if not isinstance(msg, list):
-        msg = [msg]
-    with Console() as console:
-        for line in msg:
-            console.addLine("[Zuul] %s\n" % line)
-
-
-def main():
-    module = AnsibleModule(
-        argument_spec=dict(
-            msg=dict(required=True, type='raw'),
-        )
-    )
-
-    p = module.params
-    log(p['msg'])
-    module.exit_json(changed=True)
-
-from ansible.module_utils.basic import *  # noqa
-
-if __name__ == '__main__':
-    main()
diff --git a/zuul/ansible/paths.py b/zuul/ansible/paths.py
index 2bd0181..e387732 100644
--- a/zuul/ansible/paths.py
+++ b/zuul/ansible/paths.py
@@ -13,14 +13,15 @@
 # You should have received a copy of the GNU General Public License
 # along with this software.  If not, see <http://www.gnu.org/licenses/>.
 
+import imp
 import os
 
+import ansible.plugins.action
+
 
 def _is_safe_path(path):
-    if os.path.isabs(path):
-        return False
-    if not os.path.abspath(os.path.expanduser(path)).startswith(
-            os.path.abspath(os.path.curdir)):
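+    # Resolve symlinks so a path cannot escape the working directory
+    # through a symlink.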
+    full_path = os.path.realpath(os.path.abspath(os.path.expanduser(path)))
+    if not full_path.startswith(os.path.abspath(os.path.curdir)):
         return False
     return True
 
@@ -29,5 +30,24 @@
     return dict(
         failed=True,
         path=path,
-        msg="{prefix} outside the working dir is prohibited".format(
-            prefix=prefix))
+        msg="{prefix} outside the working dir {curdir} is prohibited".format(
+            prefix=prefix,
+            curdir=os.path.abspath(os.path.curdir)))
+
+
+def _import_ansible_action_plugin(name):
+    # Ansible forces the import of our action plugins
+    # (zuul.ansible.action.foo) as ansible.plugins.action.foo, which
+    # is the import path of the ansible implementation.  Our
+    # implementations need to subclass that, but if we try to import
+    # it with that name, we will get our own module.  This bypasses
+    # Python's module namespace to load the actual ansible modules.
+    # We need to give it a name, however.  If we load it with its
+    # actual name, we will end up overwriting our module in Python's
+    # namespace, causing infinite recursion.  So we supply an
+    # otherwise unused name for the module:
+    # zuul.ansible.protected.action.foo.
+
+    return imp.load_module(
+        'zuul.ansible.protected.action.' + name,
+        *imp.find_module(name, ansible.plugins.action.__path__))
diff --git a/zuul/cmd/__init__.py b/zuul/cmd/__init__.py
index 3102f3b..9fa4c03 100644
--- a/zuul/cmd/__init__.py
+++ b/zuul/cmd/__init__.py
@@ -24,6 +24,7 @@
 import sys
 import traceback
 
+import yaml
 yappi = extras.try_import('yappi')
 
 import zuul.lib.connections
@@ -86,7 +87,14 @@
             if not os.path.exists(fp):
                 raise Exception("Unable to read logging config file at %s" %
                                 fp)
-            logging.config.fileConfig(fp)
+
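+            # A .yml/.yaml logging config is loaded via dictConfig; any
+            # other file is treated as an ini-style fileConfig.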
+            if os.path.splitext(fp)[1] in ('.yml', '.yaml'):
+                with open(fp, 'r') as f:
+                    logging.config.dictConfig(yaml.safe_load(f))
+
+            else:
+                logging.config.fileConfig(fp)
+
         else:
             logging.basicConfig(level=logging.DEBUG)
 
diff --git a/zuul/cmd/scheduler.py b/zuul/cmd/scheduler.py
index e5497dc..9a8b24f 100755
--- a/zuul/cmd/scheduler.py
+++ b/zuul/cmd/scheduler.py
@@ -44,15 +44,11 @@
         parser = argparse.ArgumentParser(description='Project gating system.')
         parser.add_argument('-c', dest='config',
                             help='specify the config file')
-        parser.add_argument('-l', dest='layout',
-                            help='specify the layout file')
         parser.add_argument('-d', dest='nodaemon', action='store_true',
                             help='do not run as a daemon')
-        parser.add_argument('-t', dest='validate', nargs='?', const=True,
-                            metavar='JOB_LIST',
-                            help='validate layout file syntax (optionally '
-                            'providing the path to a file with a list of '
-                            'available job names)')
+        parser.add_argument('-t', dest='validate', action='store_true',
+                            help='validate config file syntax (does not '
+                            'validate config repo validity)')
         parser.add_argument('--version', dest='version', action='version',
                             version=self._get_version(),
                             help='show zuul version')
@@ -79,38 +75,19 @@
         self.stop_gear_server()
         os._exit(0)
 
-    def test_config(self, job_list_path):
+    def test_config(self):
         # See comment at top of file about zuul imports
         import zuul.scheduler
-        import zuul.launcher.gearman
-        import zuul.trigger.gerrit
+        import zuul.launcher.client
 
         logging.basicConfig(level=logging.DEBUG)
-        self.sched = zuul.scheduler.Scheduler(self.config,
-                                              testonly=True)
-        self.configure_connections()
-        self.sched.registerConnections(self.connections, load=False)
-        layout = self.sched.testConfig(self.config.get('zuul',
-                                                       'layout_config'),
-                                       self.connections)
-        if not job_list_path:
-            return False
-
-        failure = False
-        path = os.path.expanduser(job_list_path)
-        if not os.path.exists(path):
-            raise Exception("Unable to find job list: %s" % path)
-        jobs = set()
-        jobs.add('noop')
-        for line in open(path):
-            v = line.strip()
-            if v:
-                jobs.add(v)
-        for job in sorted(layout.jobs):
-            if job not in jobs:
-                print("FAILURE: Job %s not defined" % job)
-                failure = True
-        return failure
+        try:
+            self.sched = zuul.scheduler.Scheduler(self.config,
+                                                  testonly=True)
+        except Exception as e:
+            self.log.error("%s" % e)
+            return -1
+        return 0
 
     def start_gear_server(self):
         pipe_read, pipe_write = os.pipe()
@@ -153,6 +130,7 @@
         import zuul.lib.swift
         import zuul.webapp
         import zuul.rpclistener
+        import zuul.zk
 
         signal.signal(signal.SIGUSR2, zuul.cmd.stack_dump_handler)
         if (self.config.has_option('gearman_server', 'start') and
@@ -171,6 +149,14 @@
         merger = zuul.merger.client.MergeClient(self.config, self.sched)
         nodepool = zuul.nodepool.Nodepool(self.sched)
 
+        zookeeper = zuul.zk.ZooKeeper()
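+        # Default to a local zookeeper when no hosts are configured.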
+        if self.config.has_option('zuul', 'zookeeper_hosts'):
+            zookeeper_hosts = self.config.get('zuul', 'zookeeper_hosts')
+        else:
+            zookeeper_hosts = '127.0.0.1:2181'
+
+        zookeeper.connect(zookeeper_hosts)
+
         if self.config.has_option('zuul', 'status_expiry'):
             cache_expiry = self.config.getint('zuul', 'status_expiry')
         else:
@@ -195,12 +181,19 @@
         self.sched.setLauncher(gearman)
         self.sched.setMerger(merger)
         self.sched.setNodepool(nodepool)
+        self.sched.setZooKeeper(zookeeper)
 
         self.log.info('Starting scheduler')
-        self.sched.start()
-        self.sched.registerConnections(self.connections)
-        self.sched.reconfigure(self.config)
-        self.sched.resume()
+        try:
+            self.sched.start()
+            self.sched.registerConnections(self.connections)
+            self.sched.reconfigure(self.config)
+            self.sched.resume()
+        except Exception:
+            self.log.exception("Error starting Zuul:")
+            # TODO(jeblair): If we had all threads marked as daemon,
+            # we might be able to have a nicer way of exiting here.
+            sys.exit(1)
         self.log.info('Starting Webapp')
         webapp.start()
         self.log.info('Starting RPC')
@@ -223,19 +216,13 @@
 
     scheduler.read_config()
 
-    if scheduler.args.layout:
-        scheduler.config.set('zuul', 'layout_config', scheduler.args.layout)
-
     if scheduler.args.validate:
-        path = scheduler.args.validate
-        if path is True:
-            path = None
-        sys.exit(scheduler.test_config(path))
+        sys.exit(scheduler.test_config())
 
     if scheduler.config.has_option('zuul', 'pidfile'):
         pid_fn = os.path.expanduser(scheduler.config.get('zuul', 'pidfile'))
     else:
-        pid_fn = '/var/run/zuul/zuul.pid'
+        pid_fn = '/var/run/zuul-scheduler/zuul-scheduler.pid'
     pid = pid_file_module.TimeoutPIDLockFile(pid_fn, 10)
 
     if scheduler.args.nodaemon:
diff --git a/zuul/configloader.py b/zuul/configloader.py
index 6cea19d..5a132fe 100644
--- a/zuul/configloader.py
+++ b/zuul/configloader.py
@@ -115,6 +115,8 @@
                'run': str,
                '_source_context': model.SourceContext,
                'roles': to_list(role),
+               'repos': to_list(str),
+               'vars': dict,
                }
 
         return vs.Schema(job)
@@ -185,6 +187,11 @@
                     ns.addNode(node)
             job.nodeset = ns
 
+        if 'repos' in conf:
+            # Accumulate repos in a set so that job inheritance
+            # is additive.
+            job.repos = job.repos.union(set(conf.get('repos', [])))
+
         tags = conf.get('tags')
         if tags:
             # Tags are merged via a union rather than a
@@ -200,9 +207,13 @@
                     roles.append(r)
         job.roles = job.roles.union(set(roles))
 
+        variables = conf.get('vars', None)
+        if variables:
+            job.updateVariables(variables)
+
         # If the definition for this job came from a project repo,
         # implicitly apply a branch matcher for the branch it was on.
-        if (not job.source_context.secure):
+        if (not job.source_context.trusted):
             branches = [job.source_context.branch]
         elif 'branches' in conf:
             branches = as_list(conf['branches'])
@@ -233,12 +244,12 @@
         # TODOv3(jeblair): this limits roles to the same
         # source; we should remove that limitation.
         source = job.source_context.project.connection_name
-        (secure, project) = tenant.getRepo(source, role['zuul'])
+        (trusted, project) = tenant.getRepo(source, role['zuul'])
         if project is None:
             return None
 
         return model.ZuulRole(role.get('name', name), source,
-                              project.name, secure)
+                              project.name, trusted)
 
 
 class ProjectTemplateParser(object):
@@ -327,25 +338,29 @@
         for p in layout.pipelines.values():
             project[p.name] = {'queue': str,
                                'jobs': [vs.Any(str, dict)]}
-        return vs.Schema(project)
+        return vs.Schema([project])
 
     @staticmethod
-    def fromYaml(tenant, layout, conf):
-        # TODOv3(jeblair): This may need some branch-specific
-        # configuration for in-repo configs.
-        ProjectParser.getSchema(layout)(conf)
-        # Make a copy since we modify this later via pop
-        conf = copy.deepcopy(conf)
-        conf_templates = conf.pop('templates', [])
-        # The way we construct a project definition is by parsing the
-        # definition as a template, then applying all of the
-        # templates, including the newly parsed one, in order.
-        project_template = ProjectTemplateParser.fromYaml(tenant, layout, conf)
-        configs = [layout.project_templates[name] for name in conf_templates]
-        configs.append(project_template)
-        project = model.ProjectConfig(conf['name'])
-        mode = conf.get('merge-mode', 'merge-resolve')
+    def fromYaml(tenant, layout, conf_list):
+        ProjectParser.getSchema(layout)(conf_list)
+        project = model.ProjectConfig(conf_list[0]['name'])
+        mode = conf_list[0].get('merge-mode', 'merge-resolve')
         project.merge_mode = model.MERGER_MAP[mode]
+
+        # TODOv3(jeblair): deal with merge-mode settings across multiple branches
+        configs = []
+        for conf in conf_list:
+            # Make a copy since we modify this later via pop
+            conf = copy.deepcopy(conf)
+            conf_templates = conf.pop('templates', [])
+            # The way we construct a project definition is by parsing the
+            # definition as a template, then applying all of the
+            # templates, including the newly parsed one, in order.
+            project_template = ProjectTemplateParser.fromYaml(
+                tenant, layout, conf)
+            configs.extend([layout.project_templates[name]
+                            for name in conf_templates])
+            configs.append(project_template)
         for pipeline in layout.pipelines.values():
             project_pipeline = model.ProjectPipelineConfig()
             project_pipeline.job_tree = model.JobTree(None)
@@ -663,6 +678,8 @@
             # branch selector to each job there.  This makes the
             # in-repo configuration apply only to that branch.
             for branch in source.getProjectBranches(project):
+                project.unparsed_branch_config[branch] = \
+                    model.UnparsedTenantConfig()
                 job = merger.getFiles(project.name, url, branch,
                                       files=['.zuul.yaml'])
                 job.source_context = model.SourceContext(project,
@@ -681,7 +698,9 @@
                     TenantParser.log.info(
                         "Loading configuration from %s/%s" %
                         (job.source_context, fn))
-                    if job.source_context.secure:
+                    project = job.source_context.project
+                    branch = job.source_context.branch
+                    if job.source_context.trusted:
                         incdata = TenantParser._parseConfigRepoLayout(
                             job.files[fn], job.source_context)
                         config_repos_config.extend(incdata)
@@ -689,7 +708,9 @@
                         incdata = TenantParser._parseProjectRepoLayout(
                             job.files[fn], job.source_context)
                         project_repos_config.extend(incdata)
-                    job.source_context.project.unparsed_config.extend(incdata)
+                    project.unparsed_config.extend(incdata)
+                    if branch in project.unparsed_branch_config:
+                        project.unparsed_branch_config[branch].extend(incdata)
         return config_repos_config, project_repos_config
 
     @staticmethod
@@ -727,7 +748,7 @@
             layout.addProjectTemplate(ProjectTemplateParser.fromYaml(
                 tenant, layout, config_template))
 
-        for config_project in data.projects:
+        for config_project in data.projects.values():
             layout.addProjectConfig(ProjectParser.fromYaml(
                 tenant, layout, config_project))
 
@@ -784,19 +805,18 @@
     def createDynamicLayout(self, tenant, files):
         config = tenant.config_repos_config.copy()
         for source, project in tenant.project_repos:
-            # TODOv3(jeblair): config should be branch specific
             for branch in source.getProjectBranches(project):
                 data = files.getFile(project.name, branch, '.zuul.yaml')
-                if not data:
-                    data = project.unparsed_config
-                if not data:
+                if data:
+                    source_context = model.SourceContext(project,
+                                                         branch, False)
+                    incdata = TenantParser._parseProjectRepoLayout(
+                        data, source_context)
+                else:
+                    incdata = project.unparsed_branch_config[branch]
+                if not incdata:
                     continue
-                source_context = model.SourceContext(project,
-                                                     branch, False)
-                incdata = TenantParser._parseProjectRepoLayout(
-                    data, source_context)
                 config.extend(incdata)
-
         layout = model.Layout()
         # TODOv3(jeblair): copying the pipelines could be dangerous/confusing.
         layout.pipelines = tenant.layout.pipelines
@@ -808,8 +828,7 @@
             layout.addProjectTemplate(ProjectTemplateParser.fromYaml(
                 tenant, layout, config_template))
 
-        for config_project in config.projects:
+        for config_project in config.projects.values():
             layout.addProjectConfig(ProjectParser.fromYaml(
                 tenant, layout, config_project), update_pipeline=False)
-
         return layout
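
Note on the configloader changes above: project configuration is now collected per branch, so ProjectParser.fromYaml receives a list of stanzas for the same project instead of a single dict. A minimal sketch of such an input, with every project and template name invented for illustration:

    # Hypothetical conf_list handed to ProjectParser.fromYaml after this
    # change; each stanza names the same project, typically one per branch
    # that carries a .zuul.yaml.
    conf_list = [
        {'name': 'org/example',
         'templates': ['shared-checks'],
         'check': {'jobs': ['python-linters']}},
        {'name': 'org/example',
         'check': {'jobs': ['python-linters']}},
    ]
    # fromYaml takes the name and merge-mode from conf_list[0], then parses
    # each stanza as a project template and applies them all in order.
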
diff --git a/zuul/driver/gerrit/gerritconnection.py b/zuul/driver/gerrit/gerritconnection.py
index 9c54b4c..d65e6a8 100644
--- a/zuul/driver/gerrit/gerritconnection.py
+++ b/zuul/driver/gerrit/gerritconnection.py
@@ -513,10 +513,10 @@
             time.sleep(self.replication_retry_interval)
         return False
 
-    def getRefSha(self, project, ref):
+    def getRefSha(self, project_name, ref):
         refs = {}
         try:
-            refs = self.getInfoRefs(project)
+            refs = self.getInfoRefs(project_name)
         except:
             self.log.exception("Exception looking for ref %s" %
                                ref)
@@ -578,7 +578,7 @@
         return changes
 
     def getProjectBranches(self, project):
-        refs = self.getInfoRefs(project)
+        refs = self.getInfoRefs(project.name)
         heads = [str(k[len('refs/heads/'):]) for k in refs.keys()
                  if k.startswith('refs/heads/')]
         return heads
@@ -710,9 +710,9 @@
             raise Exception("Gerrit error executing %s" % command)
         return (out, err)
 
-    def getInfoRefs(self, project):
+    def getInfoRefs(self, project_name):
         url = "%s/p/%s/info/refs?service=git-upload-pack" % (
-            self.baseurl, project)
+            self.baseurl, project_name)
         try:
             data = urllib.request.urlopen(url).read()
         except:
diff --git a/zuul/launcher/client.py b/zuul/launcher/client.py
index 23bec90..6abd6f4 100644
--- a/zuul/launcher/client.py
+++ b/zuul/launcher/client.py
@@ -12,6 +12,7 @@
 # License for the specific language governing permissions and limitations
 # under the License.
 
+import copy
 import gear
 import json
 import logging
@@ -259,7 +260,16 @@
         dependent_items.reverse()
         # TODOv3(jeblair): This ansible vars data structure will
         # replace the environment variables below.
-        zuul_params = dict(uuid=uuid)
+        zuul_params = dict(uuid=uuid,
+                           pipeline=pipeline.name,
+                           job=job.name,
+                           project=item.change.project.name)
+        if hasattr(item.change, 'branch'):
+            zuul_params['branch'] = item.change.branch
+        if hasattr(item.change, 'number'):
+            zuul_params['change'] = item.change.number
+        if hasattr(item.change, 'patchset'):
+            zuul_params['patchset'] = item.change.patchset
         # Legacy environment variables
         params = dict(ZUUL_UUID=uuid,
                       ZUUL_PROJECT=item.change.project.name)
@@ -327,6 +337,7 @@
         merger_items = map(make_merger_item, all_items)
 
         params['job'] = job.name
+        params['timeout'] = job.timeout
         params['items'] = merger_items
         params['projects'] = []
 
@@ -338,10 +349,20 @@
 
         nodes = []
         for node in item.current_build_set.getJobNodeSet(job.name).getNodes():
-            nodes.append(dict(name=node.name, image=node.image))
+            nodes.append(dict(name=node.name, image=node.image,
+                              public_ipv6=node.public_ipv6,
+                              public_ipv4=node.public_ipv4))
         params['nodes'] = nodes
-        params['zuul'] = zuul_params
+        params['vars'] = copy.deepcopy(job.variables)
+        params['vars']['zuul'] = zuul_params
         projects = set()
+        if job.repos:
+            for repo in job.repos:
+                project = item.pipeline.source.getProject(repo)
+                params['projects'].append(
+                    dict(name=repo,
+                         url=item.pipeline.source.getGitUrl(project)))
+                projects.add(project)
         for item in all_items:
             if item.change.project not in projects:
                 params['projects'].append(
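
Note on the launcher client changes above: job variables are now shipped to Ansible together with a structured 'zuul' parameter dictionary, and node entries carry their public addresses. A rough, hand-written illustration of the resulting structure (all concrete values are invented):

    params = {
        'vars': {
            'tox_environment': 'pep8',        # example entry from job.variables
            'zuul': {
                'uuid': '0123456789abcdef',
                'pipeline': 'check',
                'job': 'python-linters',
                'project': 'org/example',
                'branch': 'master',           # present only when the change has one
                'change': '12345',
                'patchset': '2',
            },
        },
        'nodes': [
            {'name': 'worker', 'image': 'ubuntu-xenial',
             'public_ipv4': '203.0.113.10', 'public_ipv6': None},
        ],
    }
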
diff --git a/zuul/launcher/server.py b/zuul/launcher/server.py
index 1ba42da..1b8d2c6 100644
--- a/zuul/launcher/server.py
+++ b/zuul/launcher/server.py
@@ -27,15 +27,14 @@
 import yaml
 
 import gear
+import git
 
 import zuul.merger.merger
 import zuul.ansible.action
+import zuul.ansible.callback
 import zuul.ansible.library
 from zuul.lib import commandsocket
 
-ANSIBLE_WATCHDOG_GRACE = 5 * 60
-
-
 COMMANDS = ['stop', 'pause', 'unpause', 'graceful', 'verbose',
             'unverbose']
 
@@ -74,20 +73,31 @@
 class JobDirPlaybook(object):
     def __init__(self, root):
         self.root = root
-        self.secure = None
+        self.trusted = None
         self.path = None
 
 
 class JobDir(object):
     def __init__(self, root=None, keep=False):
+        # root
+        #   ansible
+        #     trusted.cfg
+        #     untrusted.cfg
+        #   work
+        #     src
+        #     logs
         self.keep = keep
         self.root = tempfile.mkdtemp(dir=root)
-        self.git_root = os.path.join(self.root, 'git')
-        os.makedirs(self.git_root)
+        # Work
+        self.work_root = os.path.join(self.root, 'work')
+        os.makedirs(self.work_root)
+        self.src_root = os.path.join(self.work_root, 'src')
+        os.makedirs(self.src_root)
+        self.log_root = os.path.join(self.work_root, 'logs')
+        os.makedirs(self.log_root)
+        # Ansible
         self.ansible_root = os.path.join(self.root, 'ansible')
         os.makedirs(self.ansible_root)
-        self.secure_ansible_root = os.path.join(self.ansible_root, 'secure')
-        os.makedirs(self.secure_ansible_root)
         self.known_hosts = os.path.join(self.ansible_root, 'known_hosts')
         self.inventory = os.path.join(self.ansible_root, 'inventory')
         self.vars = os.path.join(self.ansible_root, 'vars.yaml')
@@ -97,10 +107,10 @@
         self.post_playbooks = []
         self.roles = []
         self.roles_path = []
-        self.config = os.path.join(self.ansible_root, 'ansible.cfg')
-        self.secure_config = os.path.join(
-            self.secure_ansible_root, 'ansible.cfg')
-        self.ansible_log = os.path.join(self.ansible_root, 'ansible_log.txt')
+        self.untrusted_config = os.path.join(
+            self.ansible_root, 'untrusted.cfg')
+        self.trusted_config = os.path.join(self.ansible_root, 'trusted.cfg')
+        self.ansible_log = os.path.join(self.log_root, 'ansible_log.txt')
 
     def addPrePlaybook(self):
         count = len(self.pre_playbooks)
@@ -238,6 +248,10 @@
             self.merge_name = None
 
         self.connections = connections
+        # This merger and its git repos are used to maintain
+        # up-to-date copies of all the repos that are used by jobs, as
+        # well as to support the merger:cat function to supply
+        # configuration information to Zuul when it starts.
         self.merger = self._getMerger(self.merge_root)
         self.update_queue = DeduplicateQueue()
 
@@ -256,15 +270,25 @@
         if not os.path.exists(self.action_dir):
             os.makedirs(self.action_dir)
 
+        self.callback_dir = os.path.join(ansible_dir, 'callback')
+        if not os.path.exists(self.callback_dir):
+            os.makedirs(self.callback_dir)
+
         library_path = os.path.dirname(os.path.abspath(
             zuul.ansible.library.__file__))
         for fn in os.listdir(library_path):
             shutil.copy(os.path.join(library_path, fn), self.library_dir)
+
         action_path = os.path.dirname(os.path.abspath(
             zuul.ansible.action.__file__))
         for fn in os.listdir(action_path):
             shutil.copy(os.path.join(action_path, fn), self.action_dir)
 
+        callback_path = os.path.dirname(os.path.abspath(
+            zuul.ansible.callback.__file__))
+        for fn in os.listdir(callback_path):
+            shutil.copy(os.path.join(callback_path, fn), self.callback_dir)
+
         self.job_workers = {}
 
     def _getMerger(self, root):
@@ -356,7 +380,7 @@
                 self.log.exception("Exception in update thread:")
 
     def _innerUpdateLoop(self):
-        # Inside of a loop that keeps the main repository up to date
+        # One iteration of the loop that keeps the main repositories up to date
         task = self.update_queue.get()
         if task is None:
             # We are asked to stop
@@ -368,6 +392,7 @@
         task.setComplete()
 
     def update(self, project, url):
+        # Update a repository in the main merger
         task = UpdateTask(project, url)
         task = self.update_queue.put(task)
         return task
@@ -484,7 +509,8 @@
 
     def launch(self):
         try:
-            self.jobdir = JobDir(root=self.launcher_server.jobdir_root)
+            self.jobdir = JobDir(root=self.launcher_server.jobdir_root,
+                                 keep=self.launcher_server.keep_jobdir)
             self._launch()
         except Exception:
             self.log.exception("Exception while launching job")
@@ -517,7 +543,17 @@
             task.wait()
 
         self.log.debug("Job %s: git updates complete" % (self.job.unique,))
-        merger = self.launcher_server._getMerger(self.jobdir.git_root)
+        for project in args['projects']:
+            self.log.debug("Cloning %s" % (project['name'],))
+            repo = git.Repo.clone_from(
+                os.path.join(self.launcher_server.merge_root,
+                             project['name']),
+                os.path.join(self.jobdir.src_root,
+                             project['name']))
+            repo.remotes.origin.config_writer.set('url', project['url'])
+
+        # Get a merger in order to update the repos involved in this job.
+        merger = self.launcher_server._getMerger(self.jobdir.src_root)
         merge_items = [i for i in args['items'] if i.get('refspec')]
         if merge_items:
             commit = merger.mergeChanges(merge_items)  # noqa
@@ -552,7 +588,7 @@
         self.job.sendWorkData(json.dumps(data))
         self.job.sendWorkStatus(0, 100)
 
-        result = self.runPlaybooks()
+        result = self.runPlaybooks(args)
 
         if result is None:
             self.job.sendWorkFail()
@@ -560,17 +596,20 @@
         result = dict(result=result)
         self.job.sendWorkComplete(json.dumps(result))
 
-    def runPlaybooks(self):
+    def runPlaybooks(self, args):
         result = None
 
         for playbook in self.jobdir.pre_playbooks:
-            pre_status, pre_code = self.runAnsiblePlaybook(playbook)
+            # TODOv3(pabelanger): Implement pre-run timeout setting.
+            pre_status, pre_code = self.runAnsiblePlaybook(
+                playbook, args['timeout'])
             if pre_status != self.RESULT_NORMAL or pre_code != 0:
                 # These should really never fail, so return None and have
                 # zuul try again
                 return result
 
-        job_status, job_code = self.runAnsiblePlaybook(self.jobdir.playbook)
+        job_status, job_code = self.runAnsiblePlaybook(
+            self.jobdir.playbook, args['timeout'])
         if job_status == self.RESULT_TIMED_OUT:
             return 'TIMED_OUT'
         if job_status == self.RESULT_ABORTED:
@@ -587,20 +626,22 @@
             result = 'FAILURE'
 
         for playbook in self.jobdir.post_playbooks:
+            # TODOv3(pabelanger): Implement post-run timeout setting.
             post_status, post_code = self.runAnsiblePlaybook(
-                playbook, success)
+                playbook, args['timeout'], success)
             if post_status != self.RESULT_NORMAL or post_code != 0:
                 result = 'POST_FAILURE'
         return result
 
     def getHostList(self, args):
-        # TODOv3: the localhost addition is temporary so we have
-        # something to exercise ansible.
-        hosts = [('localhost', dict(ansible_connection='local'))]
+        # TODO(clarkb): This prefers v4 because we're not sure whether we
+        # can expect v6 to work; if we can determine that v6 works, prefer it.
+        hosts = []
         for node in args['nodes']:
-            # TODOv3: the connection should almost certainly not be
-            # local.
-            hosts.append((node['name'], dict(ansible_connection='local')))
+            ip = node.get('public_ipv4')
+            if not ip:
+                ip = node.get('public_ipv6')
+            hosts.append((node['name'], dict(ansible_host=ip)))
         return hosts
 
     def _blockPluginDirs(self, path):
@@ -615,13 +656,13 @@
             if os.path.isdir(entry) and entry.endswith('_plugins'):
                 raise Exception(
                     "Ansible plugin dir %s found adjacent to playbook %s in"
-                    " non-secure repo." % (entry, path))
+                    " untrusted repo." % (entry, path))
 
-    def findPlaybook(self, path, required=False, secure=False):
+    def findPlaybook(self, path, required=False, trusted=False):
         for ext in ['.yaml', '.yml']:
             fn = path + ext
             if os.path.exists(fn):
-                if not secure:
+                if not trusted:
                     playbook_dir = os.path.dirname(os.path.abspath(fn))
                     self._blockPluginDirs(playbook_dir)
                 return fn
@@ -654,13 +695,13 @@
         self.log.debug("Prepare playbook repo for %s" % (playbook,))
         # Check out the playbook repo if needed and set the path to
         # the playbook that should be run.
-        jobdir_playbook.secure = playbook['secure']
+        jobdir_playbook.trusted = playbook['trusted']
         source = self.launcher_server.connections.getSource(
             playbook['connection'])
         project = source.getProject(playbook['project'])
         # TODO(jeblair): construct the url in the merger itself
         url = source.getGitUrl(project)
-        if not playbook['secure']:
+        if not playbook['trusted']:
             # This is a project repo, so it is safe to use the already
             # checked out version (from speculative merging) of the
             # playbook
@@ -668,13 +709,13 @@
                 if (i['connection_name'] == playbook['connection'] and
                     i['project'] == playbook['project']):
                     # We already have this repo prepared
-                    path = os.path.join(self.jobdir.git_root,
+                    path = os.path.join(self.jobdir.src_root,
                                         project.name,
                                         playbook['path'])
                     jobdir_playbook.path = self.findPlaybook(
                         path,
                         required=required,
-                        secure=playbook['secure'])
+                        trusted=playbook['trusted'])
                     return
         # The playbook repo is either a config repo, or it isn't in
         # the stack of changes we are testing, so check out the branch
@@ -689,7 +730,7 @@
         jobdir_playbook.path = self.findPlaybook(
             path,
             required=required,
-            secure=playbook['secure'])
+            trusted=playbook['trusted'])
 
     def prepareRoles(self, args):
         for role in args['roles']:
@@ -697,23 +738,23 @@
                 root = self.jobdir.addRole()
                 self.prepareZuulRole(args, role, root)
 
-    def findRole(self, path, secure=False):
+    def findRole(self, path, trusted=False):
         d = os.path.join(path, 'tasks')
         if os.path.isdir(d):
             # This is a bare role
-            if not secure:
+            if not trusted:
                 self._blockPluginDirs(path)
             # None signifies that the repo is a bare role
             return None
         d = os.path.join(path, 'roles')
         if os.path.isdir(d):
             # This repo has a collection of roles
-            if not secure:
+            if not trusted:
                 for entry in os.listdir(d):
                     self._blockPluginDirs(os.path.join(d, entry))
             return d
         # We assume the repository itself is a collection of roles
-        if not secure:
+        if not trusted:
             for entry in os.listdir(path):
                 self._blockPluginDirs(os.path.join(path, entry))
         return path
@@ -727,7 +768,7 @@
         # TODO(jeblair): construct the url in the merger itself
         url = source.getGitUrl(project)
         role_repo = None
-        if not role['secure']:
+        if not role['trusted']:
             # This is a project repo, so it is safe to use the already
             # checked out version (from speculative merging) of the
             # role
@@ -738,7 +779,7 @@
                     # We already have this repo prepared;
                     # copy it into location.
 
-                    path = os.path.join(self.jobdir.git_root,
+                    path = os.path.join(self.jobdir.src_root,
                                         project.name)
                     link = os.path.join(root, role['name'])
                     os.symlink(path, link)
@@ -754,7 +795,7 @@
             merger.checkoutBranch(project.name, url, 'master')
             role_repo = os.path.join(root, project.name)
 
-        role_path = self.findRole(role_repo, secure=role['secure'])
+        role_path = self.findRole(role_repo, trusted=role['trusted'])
         if role_path is None:
             # In the case of a bare role, add the containing directory
             role_path = root
@@ -768,14 +809,21 @@
                 for k, v in host_vars.items():
                     inventory.write('%s=%s' % (k, v))
                 inventory.write('\n')
+                if 'ansible_host' in host_vars:
+                    os.system("ssh-keyscan %s >> %s" % (
+                        host_vars['ansible_host'],
+                        self.jobdir.known_hosts))
+
         with open(self.jobdir.vars, 'w') as vars_yaml:
-            zuul_vars = dict(zuul=args['zuul'])
+            zuul_vars = dict(args['vars'])
+            zuul_vars['zuul']['launcher'] = dict(src_root=self.jobdir.src_root,
+                                                 log_root=self.jobdir.log_root)
             vars_yaml.write(
                 yaml.safe_dump(zuul_vars, default_flow_style=False))
-        self.writeAnsibleConfig(self.jobdir.config)
-        self.writeAnsibleConfig(self.jobdir.secure_config, secure=True)
+        self.writeAnsibleConfig(self.jobdir.untrusted_config)
+        self.writeAnsibleConfig(self.jobdir.trusted_config, trusted=True)
 
-    def writeAnsibleConfig(self, config_path, secure=False):
+    def writeAnsibleConfig(self, config_path, trusted=False):
         with open(config_path, 'w') as config:
             config.write('[defaults]\n')
             config.write('hostfile = %s\n' % self.jobdir.inventory)
@@ -792,20 +840,23 @@
             if self.jobdir.roles_path:
                 config.write('roles_path = %s\n' %
                              ':'.join(self.jobdir.roles_path))
+            config.write('callback_plugins = %s\n'
+                         % self.launcher_server.callback_dir)
+            config.write('stdout_callback = zuul_stream\n')
             # bump the timeout because busy nodes may take more than
             # 10s to respond
             config.write('timeout = 30\n')
-            if not secure:
+            if not trusted:
                 config.write('action_plugins = %s\n'
                              % self.launcher_server.action_dir)
 
-            # On secure jobs, we want to prevent the printing of args,
-            # since secure jobs might have access to secrets that they may
+            # On trusted jobs, we want to prevent the printing of args,
+            # since trusted jobs might have access to secrets that they may
             # need to pass to a task or a role. On the other hand, there
-            # should be no sensitive data in insecure jobs, and printing
+            # should be no sensitive data in untrusted jobs, and printing
             # the args could be useful for debugging.
             config.write('display_args_to_stdout = %s\n' %
-                         str(not secure))
+                         str(not trusted))
 
             config.write('[ssh_connection]\n')
             # NB: when setting pipelining = True, keep_remote_files
@@ -836,17 +887,16 @@
                 pgid = os.getpgid(self.proc.pid)
                 os.killpg(pgid, signal.SIGKILL)
             except Exception:
-                self.log.exception("Exception while killing "
-                                   "ansible process:")
+                self.log.exception("Exception while killing ansible process:")
 
-    def runAnsible(self, cmd, timeout, secure=False):
+    def runAnsible(self, cmd, timeout, trusted=False):
         env_copy = os.environ.copy()
         env_copy['LOGNAME'] = 'zuul'
 
-        if secure:
-            cwd = self.jobdir.secure_ansible_root
+        if trusted:
+            env_copy['ANSIBLE_CONFIG'] = self.jobdir.trusted_config
         else:
-            cwd = self.jobdir.ansible_root
+            env_copy['ANSIBLE_CONFIG'] = self.jobdir.untrusted_config
 
         with self.proc_lock:
             if self.aborted:
@@ -854,7 +904,7 @@
             self.log.debug("Ansible command: %s" % (cmd,))
             self.proc = subprocess.Popen(
                 cmd,
-                cwd=cwd,
+                cwd=self.jobdir.work_root,
                 stdout=subprocess.PIPE,
                 stderr=subprocess.STDOUT,
                 preexec_fn=os.setsid,
@@ -862,23 +912,24 @@
             )
 
         ret = None
-        watchdog = Watchdog(timeout + ANSIBLE_WATCHDOG_GRACE,
-                            self._ansibleTimeout,
-                            ("Ansible timeout exceeded",))
-        watchdog.start()
+        if timeout:
+            watchdog = Watchdog(timeout, self._ansibleTimeout,
+                                ("Ansible timeout exceeded",))
+            watchdog.start()
         try:
             for line in iter(self.proc.stdout.readline, b''):
                 line = line[:1024].rstrip()
                 self.log.debug("Ansible output: %s" % (line,))
             ret = self.proc.wait()
         finally:
-            watchdog.stop()
+            if timeout:
+                watchdog.stop()
         self.log.debug("Ansible exit code: %s" % (ret,))
 
         with self.proc_lock:
             self.proc = None
 
-        if watchdog.timed_out:
+        if timeout and watchdog.timed_out:
             return (self.RESULT_TIMED_OUT, None)
         if ret == 3:
             # AnsibleHostUnreachable: We had a network issue connecting to
@@ -890,7 +941,7 @@
 
         return (self.RESULT_NORMAL, ret)
 
-    def runAnsiblePlaybook(self, playbook, success=None):
+    def runAnsiblePlaybook(self, playbook, timeout, success=None):
         env_copy = os.environ.copy()
         env_copy['LOGNAME'] = 'zuul'
 
@@ -906,8 +957,5 @@
 
         cmd.extend(['-e@%s' % self.jobdir.vars, verbose])
 
-        # TODOv3: get this from the job
-        timeout = 60
-
         return self.runAnsible(
-            cmd=cmd, timeout=timeout, secure=playbook.secure)
+            cmd=cmd, timeout=timeout, trusted=playbook.trusted)
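
Note on the launcher server changes above: instead of switching the working directory between a trusted and an untrusted ansible root, the launcher now selects the configuration through the ANSIBLE_CONFIG environment variable and always runs from the job's work root. A minimal sketch of that selection, not the launcher's exact code; the jobdir attributes mirror the JobDir fields defined above:

    import os
    import subprocess

    def run_ansible(cmd, jobdir, trusted=False):
        env = os.environ.copy()
        # Ansible reads its configuration from the file named here, so one
        # trusted and one untrusted config can live side by side.
        env['ANSIBLE_CONFIG'] = (jobdir.trusted_config if trusted
                                 else jobdir.untrusted_config)
        return subprocess.Popen(cmd, cwd=jobdir.work_root, env=env,
                                stdout=subprocess.PIPE,
                                stderr=subprocess.STDOUT)
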
diff --git a/zuul/manager/__init__.py b/zuul/manager/__init__.py
index 71d8c19..18cf11b 100644
--- a/zuul/manager/__init__.py
+++ b/zuul/manager/__init__.py
@@ -689,12 +689,6 @@
                 tenant = self.pipeline.layout.tenant
                 zuul_driver.onChangeMerged(tenant, item.change,
                                            self.pipeline.source)
-                if item.change.updatesConfig():
-                    # The change that just landed updates the config.
-                    # Clear out cached data for this project and
-                    # perform a reconfiguration.
-                    item.change.project.unparsed_config = None
-                    self.sched.reconfigureTenant(tenant)
 
     def _reportItem(self, item):
         self.log.debug("Reporting change %s" % item.change)
diff --git a/zuul/merger/merger.py b/zuul/merger/merger.py
index f970b03..658fd64 100644
--- a/zuul/merger/merger.py
+++ b/zuul/merger/merger.py
@@ -214,24 +214,17 @@
         self.working_root = working_root
         if not os.path.exists(working_root):
             os.makedirs(working_root)
-        self._makeSSHWrappers(working_root, connections)
+        self.connections = connections
         self.email = email
         self.username = username
 
-    def _makeSSHWrappers(self, working_root, connections):
-        for connection_name, connection in connections.connections.items():
-            sshkey = connection.connection_config.get('sshkey')
-            if sshkey:
-                self._makeSSHWrapper(sshkey, working_root, connection_name)
-
-    def _makeSSHWrapper(self, key, merge_root, connection_name='default'):
-        wrapper_name = '.ssh_wrapper_%s' % connection_name
-        name = os.path.join(merge_root, wrapper_name)
-        fd = open(name, 'w')
-        fd.write('#!/bin/bash\n')
-        fd.write('ssh -i %s $@\n' % key)
-        fd.close()
-        os.chmod(name, 0o755)
+    def _get_ssh_cmd(self, connection_name):
+        sshkey = self.connections.connections.get(connection_name).\
+            connection_config.get('sshkey')
+        if sshkey:
+            return 'ssh -i %s' % sshkey
+        else:
+            return None
 
     def addProject(self, project, url):
         repo = None
@@ -299,30 +292,26 @@
 
         return commit
 
-    def _setGitSsh(self, connection_name):
-        wrapper_name = '.ssh_wrapper_%s' % connection_name
-        name = os.path.join(self.working_root, wrapper_name)
-        if os.path.isfile(name):
-            os.environ['GIT_SSH'] = name
-        elif 'GIT_SSH' in os.environ:
-            del os.environ['GIT_SSH']
-
     def _mergeItem(self, item, recent):
         self.log.debug("Processing refspec %s for project %s / %s ref %s" %
                        (item['refspec'], item['project'], item['branch'],
                         item['ref']))
-        self._setGitSsh(item['connection_name'])
         repo = self.getRepo(item['project'], item['url'])
         key = (item['project'], item['branch'])
+
         # See if we have a commit for this change already in this repo
         zuul_ref = item['branch'] + '/' + item['ref']
-        commit = repo.getCommitFromRef(zuul_ref)
-        if commit:
-            self.log.debug("Found commit %s for ref %s" % (commit, zuul_ref))
-            # Store this as the most recent commit for this
-            # project-branch
-            recent[key] = commit
-            return commit
+        with repo.createRepoObject().git.custom_environment(
+            GIT_SSH_COMMAND=self._get_ssh_cmd(item['connection_name'])):
+            commit = repo.getCommitFromRef(zuul_ref)
+            if commit:
+                self.log.debug(
+                    "Found commit %s for ref %s" % (commit, zuul_ref))
+                # Store this as the most recent commit for this
+                # project-branch
+                recent[key] = commit
+                return commit
+
         self.log.debug("Unable to find commit for ref %s" % (zuul_ref,))
         # We need to merge the change
         # Get the most recent commit for this project-branch
@@ -340,24 +329,26 @@
         else:
             self.log.debug("Found base commit %s for %s" % (base, key,))
         # Merge the change
-        commit = self._mergeChange(item, base)
-        if not commit:
-            return None
-        # Store this commit as the most recent for this project-branch
-        recent[key] = commit
-        # Set the Zuul ref for this item to point to the most recent
-        # commits of each project-branch
-        for key, mrc in recent.items():
-            project, branch = key
-            try:
-                repo = self.getRepo(project, None)
-                zuul_ref = branch + '/' + item['ref']
-                repo.createZuulRef(zuul_ref, mrc)
-            except Exception:
-                self.log.exception("Unable to set zuul ref %s for "
-                                   "item %s" % (zuul_ref, item))
+        with repo.createRepoObject().git.custom_environment(
+            GIT_SSH_COMMAND=self._get_ssh_cmd(item['connection_name'])):
+            commit = self._mergeChange(item, base)
+            if not commit:
                 return None
-        return commit
+            # Store this commit as the most recent for this project-branch
+            recent[key] = commit
+            # Set the Zuul ref for this item to point to the most recent
+            # commits of each project-branch
+            for key, mrc in recent.items():
+                project, branch = key
+                try:
+                    repo = self.getRepo(project, None)
+                    zuul_ref = branch + '/' + item['ref']
+                    repo.createZuulRef(zuul_ref, mrc)
+                except Exception:
+                    self.log.exception("Unable to set zuul ref %s for "
+                                       "item %s" % (zuul_ref, item))
+                    return None
+            return commit
 
     def mergeChanges(self, items, files=None):
         recent = {}
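
Note on the merger changes above: the per-connection SSH wrapper scripts and the GIT_SSH environment variable are replaced by git's GIT_SSH_COMMAND, set through GitPython's custom_environment context manager. A standalone sketch of the same pattern; the repository path and key path are examples only:

    import git

    repo = git.Repo('/var/lib/zuul/git/org/example')
    # GIT_SSH_COMMAND applies only for operations run inside this block.
    with repo.git.custom_environment(
            GIT_SSH_COMMAND='ssh -i /var/lib/zuul/ssh/id_rsa'):
        repo.remotes.origin.fetch()
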
diff --git a/zuul/model.py b/zuul/model.py
index 86546a2..96414be 100644
--- a/zuul/model.py
+++ b/zuul/model.py
@@ -365,6 +365,7 @@
         # TODOv3 (jeblair): re-add support for foreign projects if needed
         self.foreign = foreign
         self.unparsed_config = None
+        self.unparsed_branch_config = {}  # branch -> UnparsedTenantConfig
 
     def __str__(self):
         return self.name
@@ -534,21 +535,21 @@
     Jobs and playbooks reference this to keep track of where they
     originate."""
 
-    def __init__(self, project, branch, secure):
+    def __init__(self, project, branch, trusted):
         self.project = project
         self.branch = branch
-        self.secure = secure
+        self.trusted = trusted
 
     def __repr__(self):
-        return '<SourceContext %s:%s secure:%s>' % (self.project,
-                                                    self.branch,
-                                                    self.secure)
+        return '<SourceContext %s:%s trusted:%s>' % (self.project,
+                                                     self.branch,
+                                                     self.trusted)
 
     def __deepcopy__(self, memo):
         return self.copy()
 
     def copy(self):
-        return self.__class__(self.project, self.branch, self.secure)
+        return self.__class__(self.project, self.branch, self.trusted)
 
     def __ne__(self, other):
         return not self.__eq__(other)
@@ -558,7 +559,7 @@
             return False
         return (self.project == other.project and
                 self.branch == other.branch and
-                self.secure == other.secure)
+                self.trusted == other.trusted)
 
 
 class PlaybookContext(object):
@@ -592,7 +593,7 @@
             connection=self.source_context.project.connection_name,
             project=self.source_context.project.name,
             branch=self.source_context.branch,
-            secure=self.source_context.secure,
+            trusted=self.source_context.trusted,
             path=self.path)
 
 
@@ -625,11 +626,11 @@
 class ZuulRole(Role):
     """A reference to an ansible role in a Zuul project."""
 
-    def __init__(self, target_name, connection_name, project_name, secure):
+    def __init__(self, target_name, connection_name, project_name, trusted):
         super(ZuulRole, self).__init__(target_name)
         self.connection_name = connection_name
         self.project_name = project_name
-        self.secure = secure
+        self.trusted = trusted
 
     def __repr__(self):
         return '<ZuulRole %s %s>' % (self.project_name, self.target_name)
@@ -640,7 +641,7 @@
         return (super(ZuulRole, self).__eq__(other) and
                 self.connection_name == other.connection_name,
                 self.project_name == other.project_name,
-                self.secure == other.secure)
+                self.trusted == other.trusted)
 
     def toDict(self):
         # Render to a dict to use in passing json to the launcher
@@ -648,7 +649,7 @@
         d['type'] = 'zuul'
         d['connection'] = self.connection_name
         d['project'] = self.project_name
-        d['secure'] = self.secure
+        d['trusted'] = self.trusted
         return d
 
 
@@ -686,7 +687,7 @@
         # project-pipeline.
         self.execution_attributes = dict(
             timeout=None,
-            # variables={},
+            variables={},
             nodeset=NodeSet(),
             auth={},
             workspace=None,
@@ -698,6 +699,7 @@
             attempts=3,
             final=False,
             roles=frozenset(),
+            repos=frozenset(),
         )
 
         # These are generally internal attributes which are not
@@ -754,6 +756,22 @@
         if not self.run:
             self.run = self.implied_run
 
+    def updateVariables(self, other_vars):
+        v = self.variables
+        Job._deepUpdate(v, other_vars)
+        self.variables = v
+
+    @staticmethod
+    def _deepUpdate(a, b):
+        # Merge nested dictionaries if possible; otherwise, overwrite
+        # the value in 'a' with the value in 'b'.
+        for k, bv in b.items():
+            av = a.get(k)
+            if isinstance(av, dict) and isinstance(bv, dict):
+                Job._deepUpdate(av, bv)
+            else:
+                a[k] = bv
+
     def inheritFrom(self, other):
         """Copy the inheritable attributes which have been set on the other
         job to this job."""
@@ -794,7 +812,7 @@
                                     "%s=%s with variant %s" % (
                                         repr(self), k, other._get(k),
                                         repr(other)))
-                if k not in set(['pre_run', 'post_run', 'roles']):
+                if k not in set(['pre_run', 'post_run', 'roles', 'variables']):
                     setattr(self, k, copy.deepcopy(other._get(k)))
 
         # Don't set final above so that we don't trip an error halfway
@@ -808,6 +826,8 @@
             self.post_run = other.post_run + self.post_run
         if other._get('roles') is not None:
             self.roles = self.roles.union(other.roles)
+        if other._get('variables') is not None:
+            self.updateVariables(other.variables)
 
         for k in self.context_attributes:
             if (other._get(k) is not None and
@@ -2022,7 +2042,7 @@
         self.pipelines = []
         self.jobs = []
         self.project_templates = []
-        self.projects = []
+        self.projects = {}
         self.nodesets = []
 
     def copy(self):
@@ -2039,7 +2059,8 @@
             self.pipelines.extend(conf.pipelines)
             self.jobs.extend(conf.jobs)
             self.project_templates.extend(conf.project_templates)
-            self.projects.extend(conf.projects)
+            for k, v in conf.projects.items():
+                self.projects.setdefault(k, []).extend(v)
             self.nodesets.extend(conf.nodesets)
             return
 
@@ -2065,7 +2086,8 @@
             if key in ['project', 'project-template', 'job']:
                 value['_source_context'] = source_context
             if key == 'project':
-                self.projects.append(value)
+                name = value['name']
+                self.projects.setdefault(name, []).append(value)
             elif key == 'job':
                 self.jobs.append(value)
             elif key == 'project-template':
@@ -2226,10 +2248,10 @@
     def getRepo(self, source, project_name):
         """Get a project given a source and project name
 
-        Returns a tuple (secure, project) or (None, None) if the
+        Returns a tuple (trusted, project) or (None, None) if the
         project is not found.
 
-        Secure indicates the project is a config repo.
+        Trusted indicates the project is a config repo.
 
         """
 
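
Note on the model changes above: a small worked example, with invented values, of the merge semantics that Job.updateVariables and Job._deepUpdate introduce. Nested dictionaries are merged key by key, while any other value is overwritten by the later (variant) definition:

    import copy

    parent_vars = {'tox': {'envlist': 'py35', 'verbose': True}, 'retries': 1}
    variant_vars = {'tox': {'envlist': 'pep8'}, 'retries': 2}

    merged = copy.deepcopy(parent_vars)
    for k, bv in variant_vars.items():
        av = merged.get(k)
        if isinstance(av, dict) and isinstance(bv, dict):
            av.update(bv)  # one level shown here; the real helper recurses
        else:
            merged[k] = bv

    assert merged == {'tox': {'envlist': 'pep8', 'verbose': True}, 'retries': 2}
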
diff --git a/zuul/scheduler.py b/zuul/scheduler.py
index 6fbac9b..2679522 100644
--- a/zuul/scheduler.py
+++ b/zuul/scheduler.py
@@ -293,11 +293,6 @@
         self.stopConnections()
         self.wake_event.set()
 
-    def testConfig(self, config_path, connections):
-        # Take the list of set up connections directly here rather than with
-        # registerConnections as we don't want to do the onLoad event yet.
-        return self._parseConfig(config_path, connections)
-
     def registerConnections(self, connections, load=True):
         # load: whether or not to trigger the onLoad for the connection. This
         # is useful for not doing a full load during layout validation.
@@ -744,6 +739,7 @@
         self.log.debug("Processing trigger event %s" % event)
         try:
             for tenant in self.abide.tenants.values():
+                reconfigured_tenant = False
                 for pipeline in tenant.layout.pipelines.values():
                     # Get the change even if the project is unknown to
                     # us for the use of updating the cache if there is
@@ -757,6 +753,16 @@
                                        "connection trigger)",
                                        e.change, pipeline.source)
                         continue
+                    if (event.type == 'change-merged' and
+                        hasattr(change, 'files') and
+                        not reconfigured_tenant and
+                        change.updatesConfig()):
+                        # The change that just landed updates the config.
+                        # Clear out cached data for this project and
+                        # perform a reconfiguration.
+                        change.project.unparsed_config = None
+                        self.reconfigureTenant(tenant)
+                        reconfigured_tenant = True
                     if event.type == 'patchset-created':
                         pipeline.manager.removeOldVersionsOfChange(change)
                     elif event.type == 'change-abandoned':
@@ -773,7 +779,7 @@
         try:
             if isinstance(event, ReconfigureEvent):
                 self._doReconfigureEvent(event)
-            if isinstance(event, TenantReconfigureEvent):
+            elif isinstance(event, TenantReconfigureEvent):
                 self._doTenantReconfigureEvent(event)
             elif isinstance(event, PromoteEvent):
                 self._doPromoteEvent(event)
diff --git a/zuul/zk.py b/zuul/zk.py
index a456873..2009945 100644
--- a/zuul/zk.py
+++ b/zuul/zk.py
@@ -64,23 +64,6 @@
         self.chroot = chroot or ''
 
 
-def buildZooKeeperHosts(host_list):
-    '''
-    Build the ZK cluster host list for client connections.
-
-    :param list host_list: A list of
-        :py:class:`~nodepool.zk.ZooKeeperConnectionConfig` objects (one
-        per server) defining the ZooKeeper cluster servers.
-    '''
-    if not isinstance(host_list, list):
-        raise Exception("'host_list' must be a list")
-    hosts = []
-    for host_def in host_list:
-        host = '%s:%s%s' % (host_def.host, host_def.port, host_def.chroot)
-        hosts.append(host)
-    return ",".join(hosts)
-
-
 class ZooKeeper(object):
     '''
     Class implementing the ZooKeeper interface.
@@ -158,8 +141,7 @@
 
         '''
         if self.client is None:
-            hosts = buildZooKeeperHosts(host_list)
-            self.client = KazooClient(hosts=hosts, read_only=read_only)
+            self.client = KazooClient(hosts=host_list, read_only=read_only)
             self.client.add_listener(self._connection_listener)
             self.client.start()
 
@@ -184,8 +166,7 @@
             (one per server) defining the ZooKeeper cluster servers.
         '''
         if self.client is not None:
-            hosts = buildZooKeeperHosts(host_list)
-            self.client.set_hosts(hosts=hosts)
+            self.client.set_hosts(hosts=host_list)
 
     def submitNodeRequest(self, node_request, watcher):
         '''
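
Note on the zk changes above: with buildZooKeeperHosts removed, the zookeeper_hosts string from zuul.conf is handed to Kazoo unchanged, since KazooClient already accepts a comma-separated 'host:port[/chroot]' list. A minimal connection sketch; the host string is an example only:

    from kazoo.client import KazooClient

    # Several servers and a chroot would look like 'zk1:2181,zk2:2181/zuul'.
    client = KazooClient(hosts='127.0.0.1:2181', read_only=False)
    client.start()
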