Merge "github: retry pull_request()" into feature/zuulv3
diff --git a/doc/source/zuul.rst b/doc/source/zuul.rst
index a7dfb44..a2a6918 100644
--- a/doc/source/zuul.rst
+++ b/doc/source/zuul.rst
@@ -52,6 +52,16 @@
   Port on which the Gearman server is listening.
   ``port=4730`` (optional)
 
+**ssl_ca**
+  Optional: An openssl file containing a set of concatenated “certification authority” certificates
+  in PEM format.
+
+**ssl_cert**
+  Optional: An openssl file containing the client public certificate in PEM format.
+
+**ssl_key**
+  Optional: An openssl file containing the client private key in PEM format.
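+
+For example, a ``[gearman]`` client section with SSL enabled might look like
+the following (paths are illustrative, mirroring ``etc/zuul.conf-sample``)::
+
+  [gearman]
+  server=127.0.0.1
+  ssl_ca=/path/to/ca.pem
+  ssl_cert=/path/to/client.pem
+  ssl_key=/path/to/client.key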
+
 gearman_server
 """"""""""""""
 
@@ -70,6 +80,16 @@
   Path to log config file for internal Gearman server.
   ``log_config=/etc/zuul/gearman-logging.yaml``
 
+**ssl_ca**
+  Optional: An openssl file containing a set of concatenated “certification authority” certificates
+  in PEM format.
+
+**ssl_cert**
+  Optional: An openssl file containing the server public certificate in PEM format.
+
+**ssl_key**
+  Optional: An openssl file containing the server private key in PEM format.
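+
+The server-side counterpart in ``[gearman_server]`` might look like the
+following (again, paths are illustrative)::
+
+  [gearman_server]
+  start=true
+  ssl_ca=/path/to/ca.pem
+  ssl_cert=/path/to/server.pem
+  ssl_key=/path/to/server.key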
+
 webapp
 """"""
 
diff --git a/etc/zuul.conf-sample b/etc/zuul.conf-sample
index 1065cec..1aee1fa 100644
--- a/etc/zuul.conf-sample
+++ b/etc/zuul.conf-sample
@@ -1,8 +1,14 @@
 [gearman]
 server=127.0.0.1
+;ssl_ca=/path/to/ca.pem
+;ssl_cert=/path/to/client.pem
+;ssl_key=/path/to/client.key
 
 [gearman_server]
 start=true
+;ssl_ca=/path/to/ca.pem
+;ssl_cert=/path/to/server.pem
+;ssl_key=/path/to/server.key
 
 [zuul]
 layout_config=/etc/zuul/layout.yaml
diff --git a/tests/base.py b/tests/base.py
index ecbe5c8..31d6f0d 100755
--- a/tests/base.py
+++ b/tests/base.py
@@ -776,21 +776,6 @@
         repo = self._getRepo()
         return repo.references[self._getPRReference()].commit.hexsha
 
-    def setStatus(self, sha, state, url, description, context, user='zuul'):
-        # Since we're bypassing github API, which would require a user, we
-        # hard set the user as 'zuul' here.
-        # insert the status at the top of the list, to simulate that it
-        # is the most recent set status
-        self.statuses[sha].insert(0, ({
-            'state': state,
-            'url': url,
-            'description': description,
-            'context': context,
-            'creator': {
-                'login': user
-            }
-        }))
-
     def addReview(self, user, state, granted_on=None):
         gh_time_format = '%Y-%m-%dT%H:%M:%SZ'
         # convert the timestamp to a str format that would be returned
@@ -882,6 +867,7 @@
         self.connection_name = connection_name
         self.pr_number = 0
         self.pull_requests = []
+        self.statuses = {}
         self.upstream_root = upstream_root
         self.merge_failure = False
         self.merge_not_allowed_count = 0
@@ -1011,25 +997,24 @@
         pull_request.merge_message = commit_message
 
     def getCommitStatuses(self, project, sha):
-        owner, proj = project.split('/')
-        for pr in self.pull_requests:
-            pr_owner, pr_project = pr.project.split('/')
-            # This is somewhat risky, if the same commit exists in multiple
-            # PRs, we might grab the wrong one that doesn't have a status
-            # that is expected to be there. Maybe re-work this so that there
-            # is a global registry of commit statuses like with github.
-            if (pr_owner == owner and pr_project == proj and
-                sha in pr.statuses):
-                return pr.statuses[sha]
+        return self.statuses.get(project, {}).get(sha, [])
 
-    def setCommitStatus(self, project, sha, state,
-                        url='', description='', context=''):
-        owner, proj = project.split('/')
-        for pr in self.pull_requests:
-            pr_owner, pr_project = pr.project.split('/')
-            if (pr_owner == owner and pr_project == proj and
-                pr.head_sha == sha):
-                pr.setStatus(sha, state, url, description, context)
+    def setCommitStatus(self, project, sha, state, url='', description='',
+                        context='default', user='zuul'):
+        # Always insert the status at the front of the list, so that the
+        # most recent status for a commit comes first.
+        # Since we're bypassing the github API, which would require a user,
+        # we default the user to 'zuul' here.
+        self.statuses.setdefault(project, {}).setdefault(sha, [])
+        self.statuses[project][sha].insert(0, {
+            'state': state,
+            'url': url,
+            'description': description,
+            'context': context,
+            'creator': {
+                'login': user
+            }
+        })
 
     def labelPull(self, project, pr_number, label):
         pull_request = self.pull_requests[pr_number - 1]
@@ -1088,14 +1073,14 @@
         self.jobdir = None
         self.uuid = job.unique
         self.parameters = json.loads(job.arguments)
-        # TODOv3(jeblair): self.node is really "the image of the node
-        # assigned".  We should rename it (self.node_image?) if we
+        # TODOv3(jeblair): self.node is really "the label of the node
+        # assigned".  We should rename it (self.node_label?) if we
         # keep using it like this, or we may end up exposing more of
         # the complexity around multi-node jobs here
-        # (self.nodes[0].image?)
+        # (self.nodes[0].label?)
         self.node = None
         if len(self.parameters.get('nodes')) == 1:
-            self.node = self.parameters['nodes'][0]['image']
+            self.node = self.parameters['nodes'][0]['label']
         self.unique = self.parameters['ZUUL_UUID']
         self.pipeline = self.parameters['ZUUL_PIPELINE']
         self.project = self.parameters['ZUUL_PROJECT']
@@ -1371,9 +1356,20 @@
 
     """
 
-    def __init__(self):
+    def __init__(self, use_ssl=False):
         self.hold_jobs_in_queue = False
-        super(FakeGearmanServer, self).__init__(0)
+        if use_ssl:
+            ssl_ca = os.path.join(FIXTURE_DIR, 'gearman/root-ca.pem')
+            ssl_cert = os.path.join(FIXTURE_DIR, 'gearman/server.pem')
+            ssl_key = os.path.join(FIXTURE_DIR, 'gearman/server.key')
+        else:
+            ssl_ca = None
+            ssl_cert = None
+            ssl_key = None
+
+        super(FakeGearmanServer, self).__init__(0, ssl_key=ssl_key,
+                                                ssl_cert=ssl_cert,
+                                                ssl_ca=ssl_ca)
 
     def getJobForConnection(self, connection, peek=False):
         for queue in [self.high_queue, self.normal_queue, self.low_queue]:
@@ -1827,6 +1823,7 @@
     config_file = 'zuul.conf'
     run_ansible = False
     create_project_keys = False
+    use_ssl = False
 
     def _startMerger(self):
         self.merge_server = zuul.merger.server.MergeServer(self.config,
@@ -1884,11 +1881,22 @@
         reload_module(statsd)
         reload_module(zuul.scheduler)
 
-        self.gearman_server = FakeGearmanServer()
+        self.gearman_server = FakeGearmanServer(self.use_ssl)
 
         self.config.set('gearman', 'port', str(self.gearman_server.port))
         self.log.info("Gearman server on port %s" %
                       (self.gearman_server.port,))
+        if self.use_ssl:
+            self.log.info('SSL enabled for gearman')
+            self.config.set(
+                'gearman', 'ssl_ca',
+                os.path.join(FIXTURE_DIR, 'gearman/root-ca.pem'))
+            self.config.set(
+                'gearman', 'ssl_cert',
+                os.path.join(FIXTURE_DIR, 'gearman/client.pem'))
+            self.config.set(
+                'gearman', 'ssl_key',
+                os.path.join(FIXTURE_DIR, 'gearman/client.key'))
 
         gerritsource.GerritSource.replication_timeout = 1.5
         gerritsource.GerritSource.replication_retry_interval = 0.5
@@ -2698,6 +2706,11 @@
     run_ansible = True
 
 
+class SSLZuulTestCase(ZuulTestCase):
+    """ZuulTestCase but using SSL when possible"""
+    use_ssl = True
+
+
 class ZuulDBTestCase(ZuulTestCase):
     def setup_config(self):
         super(ZuulDBTestCase, self).setup_config()
diff --git a/tests/fixtures/config/ansible/git/common-config/playbooks/check-vars.yaml b/tests/fixtures/config/ansible/git/common-config/playbooks/check-vars.yaml
index 1f8fdf3..ce392a4 100644
--- a/tests/fixtures/config/ansible/git/common-config/playbooks/check-vars.yaml
+++ b/tests/fixtures/config/ansible/git/common-config/playbooks/check-vars.yaml
@@ -3,9 +3,9 @@
     - name: Assert nodepool variables are valid.
       assert:
         that:
-          - nodepool_az == 'test-az'
-          - nodepool_region == 'test-region'
-          - nodepool_provider == 'test-provider'
+          - nodepool.az == 'test-az'
+          - nodepool.region == 'test-region'
+          - nodepool.provider == 'test-provider'
 
     - name: Assert zuul-executor variables are valid.
       assert:
diff --git a/tests/fixtures/config/ansible/git/common-config/zuul.yaml b/tests/fixtures/config/ansible/git/common-config/zuul.yaml
index 02b87bd..b31c148 100644
--- a/tests/fixtures/config/ansible/git/common-config/zuul.yaml
+++ b/tests/fixtures/config/ansible/git/common-config/zuul.yaml
@@ -71,7 +71,7 @@
     name: check-vars
     nodes:
       - name: ubuntu-xenial
-        image: ubuntu-xenial
+        label: ubuntu-xenial
 
 - job:
     name: hello
diff --git a/tests/fixtures/config/inventory/git/common-config/zuul.yaml b/tests/fixtures/config/inventory/git/common-config/zuul.yaml
index 184bd80..e147b98 100644
--- a/tests/fixtures/config/inventory/git/common-config/zuul.yaml
+++ b/tests/fixtures/config/inventory/git/common-config/zuul.yaml
@@ -16,11 +16,11 @@
     name: nodeset1
     nodes:
       - name: controller
-        image: controller-image
+        label: controller-label
       - name: compute1
-        image: compute-image
+        label: compute-label
       - name: compute2
-        image: compute-image
+        label: compute-label
     groups:
       - name: ceph-osd
         nodes:
@@ -35,7 +35,7 @@
     name: single-inventory
     nodes:
       - name: ubuntu-xenial
-        image: ubuntu-xenial
+        label: ubuntu-xenial
 
 - job:
     name: group-inventory
diff --git a/tests/fixtures/config/multi-tenant/git/common-config/zuul.yaml b/tests/fixtures/config/multi-tenant/git/common-config/zuul.yaml
index ec9c6dd..362434e 100644
--- a/tests/fixtures/config/multi-tenant/git/common-config/zuul.yaml
+++ b/tests/fixtures/config/multi-tenant/git/common-config/zuul.yaml
@@ -15,4 +15,4 @@
     name: python27
     nodes:
       - name: controller
-        image: ubuntu-trusty
+        label: ubuntu-trusty
diff --git a/tests/fixtures/config/multi-tenant/git/tenant-one-config/zuul.yaml b/tests/fixtures/config/multi-tenant/git/tenant-one-config/zuul.yaml
index 63a19e2..347bc53 100644
--- a/tests/fixtures/config/multi-tenant/git/tenant-one-config/zuul.yaml
+++ b/tests/fixtures/config/multi-tenant/git/tenant-one-config/zuul.yaml
@@ -23,7 +23,7 @@
     name: nodeset1
     nodes:
       - name: controller
-        image: controller-image
+        label: controller-label
 
 - job:
     name: project1-test1
diff --git a/tests/fixtures/config/multi-tenant/git/tenant-two-config/zuul.yaml b/tests/fixtures/config/multi-tenant/git/tenant-two-config/zuul.yaml
index 4feb9f5..5ea803e 100644
--- a/tests/fixtures/config/multi-tenant/git/tenant-two-config/zuul.yaml
+++ b/tests/fixtures/config/multi-tenant/git/tenant-two-config/zuul.yaml
@@ -23,7 +23,7 @@
     name: nodeset1
     nodes:
       - name: controller
-        image: controller-image
+        label: controller-label
 
 - job:
     name: project2-test1
diff --git a/tests/fixtures/config/openstack/git/project-config/zuul.yaml b/tests/fixtures/config/openstack/git/project-config/zuul.yaml
index aff2046..2ad600c 100644
--- a/tests/fixtures/config/openstack/git/project-config/zuul.yaml
+++ b/tests/fixtures/config/openstack/git/project-config/zuul.yaml
@@ -38,7 +38,7 @@
     timeout: 30
     nodes:
       - name: controller
-        image: ubuntu-xenial
+        label: ubuntu-xenial
 
 - job:
     name: python27
@@ -50,7 +50,7 @@
     branches: stable/mitaka
     nodes:
       - name: controller
-        image: ubuntu-trusty
+        label: ubuntu-trusty
 
 - job:
     name: python35
diff --git a/tests/fixtures/config/single-tenant/git/common-config/zuul.yaml b/tests/fixtures/config/single-tenant/git/common-config/zuul.yaml
index 2bb61ee..27454d3 100644
--- a/tests/fixtures/config/single-tenant/git/common-config/zuul.yaml
+++ b/tests/fixtures/config/single-tenant/git/common-config/zuul.yaml
@@ -49,20 +49,20 @@
     attempts: 4
     nodes:
       - name: controller
-        image: image1
+        label: label1
 
 - job:
     name: project-test1
     branches: stable
     nodes:
       - name: controller
-        image: image2
+        label: label2
 
 - job:
     name: project-post
     nodes:
       - name: static
-        image: ubuntu-xenial
+        label: ubuntu-xenial
 
 - job:
     name: project-test2
diff --git a/tests/fixtures/config/tenant-parser/git/common-config/zuul.yaml b/tests/fixtures/config/tenant-parser/git/common-config/zuul.yaml
new file mode 100644
index 0000000..9e52187
--- /dev/null
+++ b/tests/fixtures/config/tenant-parser/git/common-config/zuul.yaml
@@ -0,0 +1,27 @@
+- pipeline:
+    name: check
+    manager: independent
+    trigger:
+      gerrit:
+        - event: patchset-created
+    success:
+      gerrit:
+        verified: 1
+    failure:
+      gerrit:
+        verified: -1
+
+- job:
+    name: common-config-job
+
+- project:
+    name: org/project1
+    check:
+      jobs:
+        - common-config-job
+
+- project:
+    name: org/project2
+    check:
+      jobs:
+        - common-config-job
diff --git a/tests/fixtures/config/tenant-parser/git/org_project1/.zuul.yaml b/tests/fixtures/config/tenant-parser/git/org_project1/.zuul.yaml
new file mode 100644
index 0000000..cd5dba7
--- /dev/null
+++ b/tests/fixtures/config/tenant-parser/git/org_project1/.zuul.yaml
@@ -0,0 +1,8 @@
+- job:
+    name: project1-job
+
+- project:
+    name: org/project1
+    check:
+      jobs:
+        - project1-job
diff --git a/tests/fixtures/config/tenant-parser/git/org_project2/.zuul.yaml b/tests/fixtures/config/tenant-parser/git/org_project2/.zuul.yaml
new file mode 100644
index 0000000..4292c89
--- /dev/null
+++ b/tests/fixtures/config/tenant-parser/git/org_project2/.zuul.yaml
@@ -0,0 +1,8 @@
+- job:
+    name: project2-job
+
+- project:
+    name: org/project2
+    check:
+      jobs:
+        - project2-job
diff --git a/tests/fixtures/config/tenant-parser/groups.yaml b/tests/fixtures/config/tenant-parser/groups.yaml
new file mode 100644
index 0000000..f2a0d99
--- /dev/null
+++ b/tests/fixtures/config/tenant-parser/groups.yaml
@@ -0,0 +1,11 @@
+- tenant:
+    name: tenant-one
+    source:
+      gerrit:
+        config-projects:
+          - common-config
+        untrusted-projects:
+          - exclude: project
+            projects:
+              - org/project1
+              - org/project2
diff --git a/tests/fixtures/config/tenant-parser/groups2.yaml b/tests/fixtures/config/tenant-parser/groups2.yaml
new file mode 100644
index 0000000..dc8d339
--- /dev/null
+++ b/tests/fixtures/config/tenant-parser/groups2.yaml
@@ -0,0 +1,12 @@
+- tenant:
+    name: tenant-one
+    source:
+      gerrit:
+        config-projects:
+          - common-config
+        untrusted-projects:
+          - exclude: project
+            projects:
+              - org/project1
+              - org/project2:
+                  exclude: job
diff --git a/tests/fixtures/config/tenant-parser/groups3.yaml b/tests/fixtures/config/tenant-parser/groups3.yaml
new file mode 100644
index 0000000..196f03a
--- /dev/null
+++ b/tests/fixtures/config/tenant-parser/groups3.yaml
@@ -0,0 +1,14 @@
+- tenant:
+    name: tenant-one
+    source:
+      gerrit:
+        config-projects:
+          - common-config
+        untrusted-projects:
+          - include: job
+            projects:
+              - org/project1
+              - org/project2:
+                  include:
+                    - project
+                    - job
diff --git a/tests/fixtures/config/tenant-parser/override.yaml b/tests/fixtures/config/tenant-parser/override.yaml
new file mode 100644
index 0000000..87674f1
--- /dev/null
+++ b/tests/fixtures/config/tenant-parser/override.yaml
@@ -0,0 +1,11 @@
+- tenant:
+    name: tenant-one
+    source:
+      gerrit:
+        config-projects:
+          - common-config
+        untrusted-projects:
+          - org/project1:
+              exclude: project
+          - org/project2:
+              include: job
diff --git a/tests/fixtures/config/tenant-parser/simple.yaml b/tests/fixtures/config/tenant-parser/simple.yaml
new file mode 100644
index 0000000..950b117
--- /dev/null
+++ b/tests/fixtures/config/tenant-parser/simple.yaml
@@ -0,0 +1,9 @@
+- tenant:
+    name: tenant-one
+    source:
+      gerrit:
+        config-projects:
+          - common-config
+        untrusted-projects:
+          - org/project1
+          - org/project2
diff --git a/tests/fixtures/gearman/README.rst b/tests/fixtures/gearman/README.rst
new file mode 100644
index 0000000..a3921ea
--- /dev/null
+++ b/tests/fixtures/gearman/README.rst
@@ -0,0 +1,21 @@
+Steps used to create our certs
+
+# Generate CA cert
+$ openssl req -new -newkey rsa:2048 -nodes -keyout root-ca.key -x509 -days 3650 -out root-ca.pem -subj "/C=US/ST=Texas/L=Austin/O=OpenStack Foundation/CN=gearman-ca"
+
+# Generate server keys
+$ CLIENT='server'
+$ openssl req -new -newkey rsa:2048 -nodes -keyout $CLIENT.key -out $CLIENT.csr -subj "/C=US/ST=Texas/L=Austin/O=OpenStack Foundation/CN=nodepool-$CLIENT"
+$ openssl x509 -req -days 3650 -in $CLIENT.csr -out $CLIENT.pem -CA root-ca.pem -CAkey root-ca.key -CAcreateserial
+
+
+# Generate client keys
+$ CLIENT='client'
+$ openssl req -new -newkey rsa:2048 -nodes -keyout $CLIENT.key -out $CLIENT.csr -subj "/C=US/ST=Texas/L=Austin/O=OpenStack Foundation/CN=gearman-$CLIENT"
+$ openssl x509 -req -days 3650 -in $CLIENT.csr -out $CLIENT.pem -CA root-ca.pem -CAkey root-ca.key -CAcreateserial
+
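+
+# Optional: sanity-check that the generated certs chain back to the CA
+$ openssl verify -CAfile root-ca.pem server.pem client.pem
+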
+
+# Test with geard
+# You'll need 2 terminal windows
+$ geard --ssl-ca root-ca.pem --ssl-cert server.pem --ssl-key server.key -d
+$ openssl s_client -connect localhost:4730 -key client.key -cert client.pem -CAfile root-ca.pem
diff --git a/tests/fixtures/gearman/client.csr b/tests/fixtures/gearman/client.csr
new file mode 100644
index 0000000..fadb857
--- /dev/null
+++ b/tests/fixtures/gearman/client.csr
@@ -0,0 +1,17 @@
+-----BEGIN CERTIFICATE REQUEST-----
+MIICqzCCAZMCAQAwZjELMAkGA1UEBhMCVVMxDjAMBgNVBAgMBVRleGFzMQ8wDQYD
+VQQHDAZBdXN0aW4xHTAbBgNVBAoMFE9wZW5TdGFjayBGb3VuZGF0aW9uMRcwFQYD
+VQQDDA5nZWFybWFuLWNsaWVudDCCASIwDQYJKoZIhvcNAQEBBQADggEPADCCAQoC
+ggEBALe+ByAkac9cYjeV8lcWXhDxdFqb7Om+6cWSJ/hpM4Z5QyGJ9XHDWyhrmt5W
+X2jvE/bAxEWXxWj3v8xR5HbjS6XHBHouQxz+FSDcG1GZjOLK5fwnO5tKG5eLdrAN
+WgOqJynJAsA0IuxURI4LiBUnzdi/10VeygwSIHOBLVWfrTZNKiE8siiQIaUAerLT
+T8BEUEAUI38UhS4OT83QGUbcCPOkioE5/Q8VVpvlu3eIIEkkacs5293EfUvQRVSG
++GYjSHfFBV7ECX7gu/4nosa/bLfQw7F9O1C2E6QEoUqVNEtURXT0ALlGkUylq6H9
+ctVjoJS9iW8ToMtajW2PZVI/d6MCAwEAAaAAMA0GCSqGSIb3DQEBCwUAA4IBAQBc
+v3/Z9Exc7pnbwyL31ZGv+gF0Z1l9CaSobdI3JAMzKxYGK9SxYOAwcwuUL0+zAJEE
+VPAaWM0p6eVF6j0d97Q79XsHvIKvyVYFxZ9rYSI+cvAIxhws1b4YtRoPBlY3AajV
+u2CQDVos/8JB28X3DpM4MJRua2tnTfAGLCkEp1psAoND+rr5eL7j+naUcPvNMv7Z
+WnTbIJYmP/6N+8gGGtAiiibXP3/Z92kFUZZxKNt3YSHfhkGY57/p+d8i3/8B+qeA
+/YfohA4hNLPydcw32kzo7865+h3SMdbX7VF8xB9grbZXvkT26rtrFJxWLOf5Vmzi
+aGPrVyPIeyVJvW3EeJQ9
+-----END CERTIFICATE REQUEST-----
diff --git a/tests/fixtures/gearman/client.key b/tests/fixtures/gearman/client.key
new file mode 100644
index 0000000..656cfc7
--- /dev/null
+++ b/tests/fixtures/gearman/client.key
@@ -0,0 +1,28 @@
+-----BEGIN PRIVATE KEY-----
+MIIEvQIBADANBgkqhkiG9w0BAQEFAASCBKcwggSjAgEAAoIBAQC3vgcgJGnPXGI3
+lfJXFl4Q8XRam+zpvunFkif4aTOGeUMhifVxw1soa5reVl9o7xP2wMRFl8Vo97/M
+UeR240ulxwR6LkMc/hUg3BtRmYziyuX8JzubShuXi3awDVoDqicpyQLANCLsVESO
+C4gVJ83Yv9dFXsoMEiBzgS1Vn602TSohPLIokCGlAHqy00/ARFBAFCN/FIUuDk/N
+0BlG3AjzpIqBOf0PFVab5bt3iCBJJGnLOdvdxH1L0EVUhvhmI0h3xQVexAl+4Lv+
+J6LGv2y30MOxfTtQthOkBKFKlTRLVEV09AC5RpFMpauh/XLVY6CUvYlvE6DLWo1t
+j2VSP3ejAgMBAAECggEAF5cAFzJVm1fDDFvl9yRaA1bcl115dzEZllIDa7Ml+FfN
+NJsftfFc3L2j7nOsYC6Bo6ZwDHdF0worx7Gj4VehOLFqc71IxIoicEuR/lH2co+W
+I19uGavUCwrOvB+atOm9iXHTNpX6/dh7zLjSSdUIapGGs9NNoWsaW3n0NhAADv51
+SQYemDgG9/vLGPoouUGTBkMNCuI+uHP1C+nRSs/kikebjVqYoDNPm1/ADpccde9p
+mntezm9v/xDXzVFD2qQTTve1mDpoy6YLZdY0mT6qUNElwZ+yZHXkBox1tpJ69Uw+
+pGanSMOy/Gj3W5RlX4qTSLLRcSePV8x65MzRwFoxgQKBgQDstP1/sfnS3JBWSW6V
+YAN3alXeyb0bH0uA+nfXRzy9GnwlFTAszzwnbKL9xK+hPjJRkDBf8XDyXKQ3FMyi
+OVf+H2IkhwErQL01qG4k8E3Hk9wQMvjdO00SaEiLD2uMxX9lRCs9vVlvtmSbGvTH
+/RXBFnqYDHeMJxnWZ8Y34chtoQKBgQDGt+cYtoXH79imuyOQ1SORtIQtSGEKcIcg
+20o5tCGJfCxLtrKs7n4Yph9IPvMtiA8idPACWU2Q8XV580RABzC7Am8ICGGJSwN8
+PLoWOadEpYYeFOV8Mzfxs/YhdQat6zvGy8sF0O+DER0b1ELfbA1I+FNOuz0y53AJ
+MXxOUvQ2wwKBgAFWHEBGTvTDzgTOsVMikaJw9T8mwGyQxqpZv6d1fYBLz/udnQID
+wYEvedQY8izk3v/a4osIH+0eXMb61RTtYfPLVZCDOpx15xuQcd6/hJDl4s4sm38U
+QKEj+ZTfZ2oKC2gU9HGKyiB5VSQTCOLAKQlICTUmjN47skelmlbibXFBAoGBAIHn
+UoELQGU1W3GTQGq7imcDlKxtdlJ2wT8vW1RhdtMDg4lzQ1ZdCb1fS2/VBu8q1In3
+27YNXvFzhxJTfrhEewylSKP9ppUznnGm2RcSVVBAzG35xxLsAJRWyn2QnO8wqYEJ
+VAzXSttpYpgAqD6Zyg17mCoNqLIQLWM1IEerXs41AoGAGdswRmzQ2oHF0f01yZaq
+rxGtLOuTyHzmwi8vA4qZj/9Baht9ihVJiqxTAg/CuA3sTM7DxAJ6P5h6mHsVM6bh
+tPVruBdPIOg4XchcXory1Pa8wSHsPkEnj2NnrZRhvcv86vFxDkhu601nv+AGHj1D
+szjDKeH4IP8fjbf/utRxo3w=
+-----END PRIVATE KEY-----
diff --git a/tests/fixtures/gearman/client.pem b/tests/fixtures/gearman/client.pem
new file mode 100644
index 0000000..aac9d8d
--- /dev/null
+++ b/tests/fixtures/gearman/client.pem
@@ -0,0 +1,20 @@
+-----BEGIN CERTIFICATE-----
+MIIDRDCCAiwCCQDnKP1tRJr+2DANBgkqhkiG9w0BAQsFADBiMQswCQYDVQQGEwJV
+UzEOMAwGA1UECAwFVGV4YXMxDzANBgNVBAcMBkF1c3RpbjEdMBsGA1UECgwUT3Bl
+blN0YWNrIEZvdW5kYXRpb24xEzARBgNVBAMMCmdlYXJtYW4tY2EwHhcNMTcwNjE0
+MTQwNzAwWhcNMjcwNjEyMTQwNzAwWjBmMQswCQYDVQQGEwJVUzEOMAwGA1UECAwF
+VGV4YXMxDzANBgNVBAcMBkF1c3RpbjEdMBsGA1UECgwUT3BlblN0YWNrIEZvdW5k
+YXRpb24xFzAVBgNVBAMMDmdlYXJtYW4tY2xpZW50MIIBIjANBgkqhkiG9w0BAQEF
+AAOCAQ8AMIIBCgKCAQEAt74HICRpz1xiN5XyVxZeEPF0Wpvs6b7pxZIn+GkzhnlD
+IYn1ccNbKGua3lZfaO8T9sDERZfFaPe/zFHkduNLpccEei5DHP4VINwbUZmM4srl
+/Cc7m0obl4t2sA1aA6onKckCwDQi7FREjguIFSfN2L/XRV7KDBIgc4EtVZ+tNk0q
+ITyyKJAhpQB6stNPwERQQBQjfxSFLg5PzdAZRtwI86SKgTn9DxVWm+W7d4ggSSRp
+yznb3cR9S9BFVIb4ZiNId8UFXsQJfuC7/ieixr9st9DDsX07ULYTpAShSpU0S1RF
+dPQAuUaRTKWrof1y1WOglL2JbxOgy1qNbY9lUj93owIDAQABMA0GCSqGSIb3DQEB
+CwUAA4IBAQBSYRP7DDGRBs1wwudH2HzaDRNZrhECUq6n45FY3YHkDU5xxi6CA3wD
+EA+fvvB95BvqNNCS4UxQMW3k7cgJQrUVBKXj9m5HqE/GVZuI15+bR9i7vc5USoen
+nfbVhDAvZcrzPhmj/pfnXKwgeE7PhG55mrJvJgSmxmK2wTcRRIQ6dfoj3OIJJHEY
+kW3oK8I+9r5Tufxbg+CIpZVIuENbRDNGhTPCtzDu3q6DHAEOBKHmwc64W/2c+2QV
+CpfPdutVF2reb6CJuGikM8WMh47mksNIyCW832bUvUCDgW4/tqanPqww4lTdU44b
+W8gkkWcUmOa6MVCXIzCy7tEbkEDJC2NE
+-----END CERTIFICATE-----
diff --git a/tests/fixtures/gearman/root-ca.key b/tests/fixtures/gearman/root-ca.key
new file mode 100644
index 0000000..3db94c3
--- /dev/null
+++ b/tests/fixtures/gearman/root-ca.key
@@ -0,0 +1,28 @@
+-----BEGIN PRIVATE KEY-----
+MIIEvQIBADANBgkqhkiG9w0BAQEFAASCBKcwggSjAgEAAoIBAQDAglCCv7aQxXZg
+8wuLq0QuIQbZbK1Y0aFwMaEpOeVyZR/C42nws3hH0BivJZnr5w57fdT2OXFqkAyl
+Pw+sF8PcDlSi2wF33rnswz8qYszX5WUgvGnOtcJx8YJhqBqNCLb0wnneJqNQpXPs
+CmcsEeBMsCVN9Q1cRMgdjyMBpRfcq7WH5NN+o/n4zClHYZwa3wOyH2ipekl4XTEf
+Kz9aq88L3YE/N4dyUWH0UpS+lBem+D0GAarV2IXWqXeMrWce930mBONMhBrgw0X5
+QFrDa0KQn2QRcg9tqlEE9SlAbub/yHUsq7/7q7l6SWl7JBigj4jGw15w98WzSDkJ
+a0we1jexAgMBAAECggEAX/HS3IdeHyM7D7CyZWbzcSYmusBuWOEJ29fwYZKoZ248
++S3MhBl+bhQp6UkNQMSEtEmPlTQl8Z1foBAg6H1jsU43In+SaMLJ2VWqKp7ZRxTe
+ZQVimpJ+GbnraG6W5Qmd3bj7chvBs5TyhIbeytkR+EamIQdsJDtnnUvUf6JflSvl
+gUZxOvfB7UZQZ2PkMQFleZxlEAvgyk8e4k7AnY2IoTyvw1DIUdP7+7hPInBpWaUn
+jJPZzyWyrMDLB+BB7JcdqmO2N5bHudE4iEJwphmdIcHvOFhm/LHfJdZg6+X8lUCP
+lIfzp6Uk25nF5/dsoZQcrdBznhW4NfJsIviui+SSAQKBgQDrVI4pW/1fU4AYoOki
+jB+RaUaBXkRRV6MYJ/ZUPjAONDtFhDdBDEwunVUpTD8sgVKFnaMzNa/+Jcmy4jsE
+Ggj9ZupH05g9Y8w7dYFcfT6VeiPahyml8k/DWWe0qQk0+scH8vuiWcrqIjWRg4WD
+8CXJqSkgrCHFCjuJOvxM24d1UQKBgQDRaupcR/c9AGpUHmhFGexwOyXtIR2ObaVf
+lEZ9rhrpCRAl5RW0tUmd1vHMNDTdRidfYLFe29h6YQ1afgNcV8JdB51VfurJ+cOF
+jbc6FijDag31snIdBnDuV29mazejRm7PSfJjoBnBDNzh3kMed22DsQDlHQmudknH
+wUqUWnWEYQKBgG3bYSoJmXRgxJG6vFq2Ux5MqO9HlFjssmRac3HMPh7DX1AKcsjY
+9s9j/xdyUqNyE5Xwivki/O+FsGzjk21MwhmZa5DwREeUSQkQx7zncsnQ5N/k7Rpc
+zcOB/xmlN3kWAMfDNJkLleBK6/rsDO4Us286msp30KPtLPHZKWKvsMKhAoGAaiER
+5nR+Qsb8G+dRFnv9zB7dqKAYt36vyZF+a+EZODJkoZ/IcU1SopA0+DUY+W69M2Pw
+X89wlQysVMj58Ql0serS/GoWmQdf5EYermxeejI8IuEtXbJO9ysOhMwfZTqjm5+x
+HHYdty1Kn5khUMZblNrWRkaCCo1d9MLrheWWGuECgYEAy5kdeVE8lLliFL39Xrzl
+OCJ1rEIAhXrqr6E3PrMlUiQ75dAOiLEl3GGG7UkSHL/0dZv50RRee+4hxI63P2z0
+xPeH2nvrFzknmabOWxtOpw+H0qGOYto9VcvseFPNKTV2O5wxdfaYgLEOXt8ipaLD
+OVvm6yN1bP1Gxi6vdVppKwk=
+-----END PRIVATE KEY-----
diff --git a/tests/fixtures/gearman/root-ca.pem b/tests/fixtures/gearman/root-ca.pem
new file mode 100644
index 0000000..defedd6
--- /dev/null
+++ b/tests/fixtures/gearman/root-ca.pem
@@ -0,0 +1,22 @@
+-----BEGIN CERTIFICATE-----
+MIIDlzCCAn+gAwIBAgIJAPmWfgTknq1hMA0GCSqGSIb3DQEBCwUAMGIxCzAJBgNV
+BAYTAlVTMQ4wDAYDVQQIDAVUZXhhczEPMA0GA1UEBwwGQXVzdGluMR0wGwYDVQQK
+DBRPcGVuU3RhY2sgRm91bmRhdGlvbjETMBEGA1UEAwwKZ2Vhcm1hbi1jYTAeFw0x
+NzA2MTQxNDA1NDNaFw0yNzA2MTIxNDA1NDNaMGIxCzAJBgNVBAYTAlVTMQ4wDAYD
+VQQIDAVUZXhhczEPMA0GA1UEBwwGQXVzdGluMR0wGwYDVQQKDBRPcGVuU3RhY2sg
+Rm91bmRhdGlvbjETMBEGA1UEAwwKZ2Vhcm1hbi1jYTCCASIwDQYJKoZIhvcNAQEB
+BQADggEPADCCAQoCggEBAMCCUIK/tpDFdmDzC4urRC4hBtlsrVjRoXAxoSk55XJl
+H8LjafCzeEfQGK8lmevnDnt91PY5cWqQDKU/D6wXw9wOVKLbAXfeuezDPypizNfl
+ZSC8ac61wnHxgmGoGo0ItvTCed4mo1Clc+wKZywR4EywJU31DVxEyB2PIwGlF9yr
+tYfk036j+fjMKUdhnBrfA7IfaKl6SXhdMR8rP1qrzwvdgT83h3JRYfRSlL6UF6b4
+PQYBqtXYhdapd4ytZx73fSYE40yEGuDDRflAWsNrQpCfZBFyD22qUQT1KUBu5v/I
+dSyrv/uruXpJaXskGKCPiMbDXnD3xbNIOQlrTB7WN7ECAwEAAaNQME4wHQYDVR0O
+BBYEFDIaceZ/LY42aNSV0hisgSEcnjlMMB8GA1UdIwQYMBaAFDIaceZ/LY42aNSV
+0hisgSEcnjlMMAwGA1UdEwQFMAMBAf8wDQYJKoZIhvcNAQELBQADggEBAKN60Jnx
+NPSkDlqrKtcojX3+oVPC5MQctysZXmjjkGzHSAVKeonQ+gN/glfRc0qq/PuzvHej
+a2Mk9CirL2VzBgp1d/sGtOijqI0Otn706SBuQl1PEAzcmTyQt7TuhUnVcV22xBwy
+ONIuXVT5eh8MhUdrlqZKXX9U49sjmHCheJFFVqFmy0twlqf9YikC0CNxiWa/jDhj
+bxi73kxgZTb2RPjwYUWbESfyNCq0H+N2BmSz7Fgc2Ah/wvhXGdx1udaDVgzDqFIR
+lMGswkzmd76JpJdN0Rce7lmRoE8E6BqDShvoEGiGo3IbuOUwn5JRKFMUPhN6mv7N
+c49ykHzcCgc1wdY=
+-----END CERTIFICATE-----
diff --git a/tests/fixtures/gearman/root-ca.srl b/tests/fixtures/gearman/root-ca.srl
new file mode 100644
index 0000000..0ce584a
--- /dev/null
+++ b/tests/fixtures/gearman/root-ca.srl
@@ -0,0 +1 @@
+E728FD6D449AFED8
diff --git a/tests/fixtures/gearman/server.csr b/tests/fixtures/gearman/server.csr
new file mode 100644
index 0000000..bbb03d2
--- /dev/null
+++ b/tests/fixtures/gearman/server.csr
@@ -0,0 +1,17 @@
+-----BEGIN CERTIFICATE REQUEST-----
+MIICrDCCAZQCAQAwZzELMAkGA1UEBhMCVVMxDjAMBgNVBAgMBVRleGFzMQ8wDQYD
+VQQHDAZBdXN0aW4xHTAbBgNVBAoMFE9wZW5TdGFjayBGb3VuZGF0aW9uMRgwFgYD
+VQQDDA9ub2RlcG9vbC1zZXJ2ZXIwggEiMA0GCSqGSIb3DQEBAQUAA4IBDwAwggEK
+AoIBAQCzoKkaauTNBRry1Y5YCNG38IrxW0AH5TP5XdTF/q+Qu1p9onRsACiSZX8Y
+YAo/y6jVbZ3WKihVfVIQw9xrPTCoA0AwMtI8fiK70YwSuGg6gqBBCr8NXOaYsYFJ
+k2Vk+8utlNSmLYlcSTKZR0HbhWNmjH9lj5WngL0XPSbcoogtvet92111qGfBZrg+
+86B3XJh2/6PCru9YmufqlooFog7Q4Qo6Bnz7Dh+h2QjtDmGSFz0dQ9PqP8Jgh3LS
+fWRk5TrjGsthKszRTZCQDSXc1XcwAqfO21eufP9oTpfc0zTdAOC1tspdP/632q6B
+0Gf8sSEnMpKmwuGUH3z2ZCY6DSE1AgMBAAGgADANBgkqhkiG9w0BAQsFAAOCAQEA
+NPZ0BNt9vjNM9cNHCgL8rYdB9UnsnkcQ5R/XRV1W+tQlj9QjpvcGH5c3PJ6Ol1Qd
+x8o19aomLb/IMz8bnRmzLxWggKQHxLwU3UKjHBiV1aqI/ieka22IqKYkjeYUAyxC
+ZLytynIZRVt0MB/lo7Z2bjctGHSiZ9tkTsgjawE3hotnZ3BOEOkV42099bLLGdcz
+Jq433DsbwThKC0WijeHR4FZEj3H7Gj07PNAlfyM0KeyrZodtcIwvgA4NyBB8mPoV
+dARn5C8hOtDCWzRPba46h9mTzF8D87pdvmZce6k/bBGJfY+YvOpwBXsO3xhCDxqP
+p9gAs6m+qbxsrwvRRrtn6Q==
+-----END CERTIFICATE REQUEST-----
diff --git a/tests/fixtures/gearman/server.key b/tests/fixtures/gearman/server.key
new file mode 100644
index 0000000..c1707b0
--- /dev/null
+++ b/tests/fixtures/gearman/server.key
@@ -0,0 +1,28 @@
+-----BEGIN PRIVATE KEY-----
+MIIEvAIBADANBgkqhkiG9w0BAQEFAASCBKYwggSiAgEAAoIBAQCzoKkaauTNBRry
+1Y5YCNG38IrxW0AH5TP5XdTF/q+Qu1p9onRsACiSZX8YYAo/y6jVbZ3WKihVfVIQ
+w9xrPTCoA0AwMtI8fiK70YwSuGg6gqBBCr8NXOaYsYFJk2Vk+8utlNSmLYlcSTKZ
+R0HbhWNmjH9lj5WngL0XPSbcoogtvet92111qGfBZrg+86B3XJh2/6PCru9Ymufq
+looFog7Q4Qo6Bnz7Dh+h2QjtDmGSFz0dQ9PqP8Jgh3LSfWRk5TrjGsthKszRTZCQ
+DSXc1XcwAqfO21eufP9oTpfc0zTdAOC1tspdP/632q6B0Gf8sSEnMpKmwuGUH3z2
+ZCY6DSE1AgMBAAECggEAaG06YhVKtrYFGK92dU+LPHgnDnGSJATn1kzqacDKqEWD
+Mg7DyBW/gHxpCu6qhrQLjyiO3fbcQ/b7Qqva9K06IDLjmiGxf2GFJ9OGr0ttrLZM
+HAP3VflwRczL8M4z4CVSH7OqfIF0naYgOGPosYo2Y2PCnHSA+EQrqdrvQM1shcot
+8lW368VqlAm8ONgh8z4ZLSDswECgJzWleOSsTBIT0qJ6fXIwnN7akM8Bdyy/dPDD
+PnPvAu1N9KgwrzxKY9WthJ1alKiFQm4Po/TZZApALOtR8zCN4EmDG9izKdfU5FIL
+ZWpVDp0US7a8rbj2e0kf0loRg2bsR2eoJPL7JjJycQKBgQDiHjKnwximtfjqLTtg
+ZOHIL4tTlmeTLNq7ZW69BuJSfI7FTa20piHjny+g3mTvxnCQa/BTSpb6VRHPFcYV
+dVQzdAX6ZMvBZ3YMp9FkY+S9RrjEyimNU9kvJJQBnC1ujen3YuXj6ENFzcmGkvzR
+LZFx3dmFEzfDxOOqzdFTHscGuwKBgQDLXaVBH54yq1fDrXDLG/eEtQsNNyCujIV4
+gp1Z54L34htoDS98dx0L0qZGBEys8I0dGJd9kUBVNu53zDeiJSGW4tHYXQaUpxJH
+0wZDHo59mw3aGvVZ5YP+4uukuNHcX6cUYi2HAv0vwet46L3Kb/utDyyStp1QZw9s
+eucOLGkQzwKBgG3j0yZo0FAk28WjGdos7PWG9aU30TpbcCnmj7zZ3Z/M3O3SZHsI
+yit/L3x02IUW4Zmue2tfMqSSN0d3A39mN/eRiV45IjTp/RsFa+PoEEBUYHNy9GK0
+vzYEBtIJfqLd4TjTHXp3ZEpGSoxWXvuhs6+s64ua3V0NEL/vqq1EpeEFAoGAHa/i
+8tnJvz3SBwenoo7HmEDRhzFX/QMYbNosXDZ2oPcJ5yudlf7RZ6ttiGUSSGCpSOkR
+HEx65rWpJCXUrT/cYmlkFsCluEeXXJLKpDuus1lSMVekH2Zo2WmI2rf8Mr5n5ora
+eI4QJcuaM0FOi2HDjKTdbeFon5cb4ksitaf4AnMCgYB24KyMuOHBAuVlnuf3PSfr
+u3ZxqmcUX0D2BoK+1lw3lgzfQO26Qw5VtkjDBnIPL67IUYRZX2YvXsJPWaRRrF72
+yEqFXDWKbcE+Tl0LxLj6mLW5RKJP8LTybaIBgkyUaLtzTRr+TfK29CC8/FzWGiTf
+oJQozL3TAlvjoadEPrLnjg==
+-----END PRIVATE KEY-----
diff --git a/tests/fixtures/gearman/server.pem b/tests/fixtures/gearman/server.pem
new file mode 100644
index 0000000..1c85fad
--- /dev/null
+++ b/tests/fixtures/gearman/server.pem
@@ -0,0 +1,20 @@
+-----BEGIN CERTIFICATE-----
+MIIDRTCCAi0CCQDnKP1tRJr+1zANBgkqhkiG9w0BAQsFADBiMQswCQYDVQQGEwJV
+UzEOMAwGA1UECAwFVGV4YXMxDzANBgNVBAcMBkF1c3RpbjEdMBsGA1UECgwUT3Bl
+blN0YWNrIEZvdW5kYXRpb24xEzARBgNVBAMMCmdlYXJtYW4tY2EwHhcNMTcwNjE0
+MTQwNjM1WhcNMjcwNjEyMTQwNjM1WjBnMQswCQYDVQQGEwJVUzEOMAwGA1UECAwF
+VGV4YXMxDzANBgNVBAcMBkF1c3RpbjEdMBsGA1UECgwUT3BlblN0YWNrIEZvdW5k
+YXRpb24xGDAWBgNVBAMMD25vZGVwb29sLXNlcnZlcjCCASIwDQYJKoZIhvcNAQEB
+BQADggEPADCCAQoCggEBALOgqRpq5M0FGvLVjlgI0bfwivFbQAflM/ld1MX+r5C7
+Wn2idGwAKJJlfxhgCj/LqNVtndYqKFV9UhDD3Gs9MKgDQDAy0jx+IrvRjBK4aDqC
+oEEKvw1c5pixgUmTZWT7y62U1KYtiVxJMplHQduFY2aMf2WPlaeAvRc9JtyiiC29
+633bXXWoZ8FmuD7zoHdcmHb/o8Ku71ia5+qWigWiDtDhCjoGfPsOH6HZCO0OYZIX
+PR1D0+o/wmCHctJ9ZGTlOuMay2EqzNFNkJANJdzVdzACp87bV658/2hOl9zTNN0A
+4LW2yl0//rfaroHQZ/yxIScykqbC4ZQffPZkJjoNITUCAwEAATANBgkqhkiG9w0B
+AQsFAAOCAQEAlqcjSBG96JnKcSlw4ntxJiSGja5iuMi3yVpQS8G3ak6i8eGYlqMH
+SCWC96ZfXr/KjVyF3AsD554e54pEAywcFLH4QzZoceWc5L2etfTCa9cInQsiNpvV
+CfvVADRX4Ib7ozb4MJFJFy5OWnhPO6CcknA2KdTergKIichBmR0LvuUZEblwHOcg
+HAwxpZirNofs/i+aXnIgKAIC97WY1S+8SL5cEfdR0Sd9SpbCLVgSdyGhxm0NE2ls
+38jQhwYIVkpYYJd/jsyGtiHCDT4rkSEJlRWYfLXfSkyjtiERASqs/NEgrnbkgp/l
+Sa2wc5cjntNzls2ey7bkpZbgwOvGQVjS7w==
+-----END CERTIFICATE-----
diff --git a/tests/fixtures/layouts/disable_at.yaml b/tests/fixtures/layouts/disable_at.yaml
index 2956ebf..8e352d8 100644
--- a/tests/fixtures/layouts/disable_at.yaml
+++ b/tests/fixtures/layouts/disable_at.yaml
@@ -19,7 +19,7 @@
     name: project-test1
     nodes:
       - name: controller
-        image: image1
+        label: label1
 
 - project:
     name: org/project
diff --git a/tests/fixtures/layouts/dont-ignore-ref-deletes.yaml b/tests/fixtures/layouts/dont-ignore-ref-deletes.yaml
index aee5ac6..6a05fe6 100644
--- a/tests/fixtures/layouts/dont-ignore-ref-deletes.yaml
+++ b/tests/fixtures/layouts/dont-ignore-ref-deletes.yaml
@@ -11,7 +11,7 @@
     name: project-post
     nodes:
       - name: static
-        image: ubuntu-xenial
+        label: ubuntu-xenial
 
 - project:
     name: org/project
diff --git a/tests/fixtures/layouts/idle.yaml b/tests/fixtures/layouts/idle.yaml
index ff33842..60f8ed1 100644
--- a/tests/fixtures/layouts/idle.yaml
+++ b/tests/fixtures/layouts/idle.yaml
@@ -9,13 +9,13 @@
     name: project-bitrot-stable-old
     nodes:
       - name: static
-        image: ubuntu-xenial
+        label: ubuntu-xenial
 
 - job:
     name: project-bitrot-stable-older
     nodes:
       - name: static
-        image: ubuntu-trusty
+        label: ubuntu-trusty
 
 - project:
     name: org/project
diff --git a/tests/fixtures/layouts/no-timer.yaml b/tests/fixtures/layouts/no-timer.yaml
index c8ced62..12eaa35 100644
--- a/tests/fixtures/layouts/no-timer.yaml
+++ b/tests/fixtures/layouts/no-timer.yaml
@@ -27,13 +27,13 @@
     name: project-bitrot-stable-old
     nodes:
       - name: static
-        image: ubuntu-xenial
+        label: ubuntu-xenial
 
 - job:
     name: project-bitrot-stable-older
     nodes:
       - name: static
-        image: ubuntu-trusty
+        label: ubuntu-trusty
 
 - project:
     name: org/project
diff --git a/tests/fixtures/layouts/repo-deleted.yaml b/tests/fixtures/layouts/repo-deleted.yaml
index a33da77..95d11bb 100644
--- a/tests/fixtures/layouts/repo-deleted.yaml
+++ b/tests/fixtures/layouts/repo-deleted.yaml
@@ -40,14 +40,14 @@
     name: project-test1
     nodes:
       - name: controller
-        image: image1
+        label: label1
 
 - job:
     name: project-test1
     branches: stable
     nodes:
       - name: controller
-        image: image2
+        label: label2
 
 - job:
     name: project-test2
diff --git a/tests/fixtures/layouts/smtp.yaml b/tests/fixtures/layouts/smtp.yaml
index 8f53d02..fd91d36 100644
--- a/tests/fixtures/layouts/smtp.yaml
+++ b/tests/fixtures/layouts/smtp.yaml
@@ -46,14 +46,14 @@
     name: project-test1
     nodes:
       - name: controller
-        image: image1
+        label: label1
 
 - job:
     name: project-test1
     branches: stable
     nodes:
       - name: controller
-        image: image2
+        label: label2
 
 - job:
     name: project-test2
diff --git a/tests/fixtures/layouts/timer.yaml b/tests/fixtures/layouts/timer.yaml
index 95199e7..883c32e 100644
--- a/tests/fixtures/layouts/timer.yaml
+++ b/tests/fixtures/layouts/timer.yaml
@@ -28,13 +28,13 @@
     name: project-bitrot-stable-old
     nodes:
       - name: static
-        image: ubuntu-xenial
+        label: ubuntu-xenial
 
 - job:
     name: project-bitrot-stable-older
     nodes:
       - name: static
-        image: ubuntu-trusty
+        label: ubuntu-trusty
 
 - project:
     name: org/project
diff --git a/tests/unit/test_configloader.py b/tests/unit/test_configloader.py
new file mode 100644
index 0000000..faa2f61
--- /dev/null
+++ b/tests/unit/test_configloader.py
@@ -0,0 +1,188 @@
+# Copyright 2017 Red Hat, Inc.
+#
+# Licensed under the Apache License, Version 2.0 (the "License"); you may
+# not use this file except in compliance with the License. You may obtain
+# a copy of the License at
+#
+#      http://www.apache.org/licenses/LICENSE-2.0
+#
+# Unless required by applicable law or agreed to in writing, software
+# distributed under the License is distributed on an "AS IS" BASIS, WITHOUT
+# WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. See the
+# License for the specific language governing permissions and limitations
+# under the License.
+
+
+from tests.base import ZuulTestCase
+
+
+class TenantParserTestCase(ZuulTestCase):
+    create_project_keys = True
+
+    CONFIG_SET = set(['pipeline', 'job', 'semaphore', 'project',
+                      'project-template', 'nodeset', 'secret'])
+    UNTRUSTED_SET = CONFIG_SET - set(['pipeline'])
+
+    def setupAllProjectKeys(self):
+        for project in ['common-config', 'org/project1', 'org/project2']:
+            self.setupProjectKeys('gerrit', project)
+
+
+class TestTenantSimple(TenantParserTestCase):
+    tenant_config_file = 'config/tenant-parser/simple.yaml'
+
+    def test_tenant_simple(self):
+        tenant = self.sched.abide.tenants.get('tenant-one')
+        self.assertEqual(['common-config'],
+                         [x.name for x in tenant.config_projects])
+        self.assertEqual(['org/project1', 'org/project2'],
+                         [x.name for x in tenant.untrusted_projects])
+        self.assertEqual(self.CONFIG_SET,
+                         tenant.config_projects[0].load_classes)
+        self.assertEqual(self.UNTRUSTED_SET,
+                         tenant.untrusted_projects[0].load_classes)
+        self.assertEqual(self.UNTRUSTED_SET,
+                         tenant.untrusted_projects[1].load_classes)
+        self.assertTrue('common-config-job' in tenant.layout.jobs)
+        self.assertTrue('project1-job' in tenant.layout.jobs)
+        self.assertTrue('project2-job' in tenant.layout.jobs)
+        project1_config = tenant.layout.project_configs.get(
+            'review.example.com/org/project1')
+        self.assertTrue('common-config-job' in
+                        project1_config.pipelines['check'].job_list.jobs)
+        self.assertTrue('project1-job' in
+                        project1_config.pipelines['check'].job_list.jobs)
+        project2_config = tenant.layout.project_configs.get(
+            'review.example.com/org/project2')
+        self.assertTrue('common-config-job' in
+                        project2_config.pipelines['check'].job_list.jobs)
+        self.assertTrue('project2-job' in
+                        project2_config.pipelines['check'].job_list.jobs)
+
+
+class TestTenantOverride(TenantParserTestCase):
+    tenant_config_file = 'config/tenant-parser/override.yaml'
+
+    def test_tenant_override(self):
+        tenant = self.sched.abide.tenants.get('tenant-one')
+        self.assertEqual(['common-config'],
+                         [x.name for x in tenant.config_projects])
+        self.assertEqual(['org/project1', 'org/project2'],
+                         [x.name for x in tenant.untrusted_projects])
+        self.assertEqual(self.CONFIG_SET,
+                         tenant.config_projects[0].load_classes)
+        self.assertEqual(self.UNTRUSTED_SET - set(['project']),
+                         tenant.untrusted_projects[0].load_classes)
+        self.assertEqual(set(['job']),
+                         tenant.untrusted_projects[1].load_classes)
+        self.assertTrue('common-config-job' in tenant.layout.jobs)
+        self.assertTrue('project1-job' in tenant.layout.jobs)
+        self.assertTrue('project2-job' in tenant.layout.jobs)
+        project1_config = tenant.layout.project_configs.get(
+            'review.example.com/org/project1')
+        self.assertTrue('common-config-job' in
+                        project1_config.pipelines['check'].job_list.jobs)
+        self.assertFalse('project1-job' in
+                         project1_config.pipelines['check'].job_list.jobs)
+        project2_config = tenant.layout.project_configs.get(
+            'review.example.com/org/project2')
+        self.assertTrue('common-config-job' in
+                        project2_config.pipelines['check'].job_list.jobs)
+        self.assertFalse('project2-job' in
+                         project2_config.pipelines['check'].job_list.jobs)
+
+
+class TestTenantGroups(TenantParserTestCase):
+    tenant_config_file = 'config/tenant-parser/groups.yaml'
+
+    def test_tenant_groups(self):
+        tenant = self.sched.abide.tenants.get('tenant-one')
+        self.assertEqual(['common-config'],
+                         [x.name for x in tenant.config_projects])
+        self.assertEqual(['org/project1', 'org/project2'],
+                         [x.name for x in tenant.untrusted_projects])
+        self.assertEqual(self.CONFIG_SET,
+                         tenant.config_projects[0].load_classes)
+        self.assertEqual(self.UNTRUSTED_SET - set(['project']),
+                         tenant.untrusted_projects[0].load_classes)
+        self.assertEqual(self.UNTRUSTED_SET - set(['project']),
+                         tenant.untrusted_projects[1].load_classes)
+        self.assertTrue('common-config-job' in tenant.layout.jobs)
+        self.assertTrue('project1-job' in tenant.layout.jobs)
+        self.assertTrue('project2-job' in tenant.layout.jobs)
+        project1_config = tenant.layout.project_configs.get(
+            'review.example.com/org/project1')
+        self.assertTrue('common-config-job' in
+                        project1_config.pipelines['check'].job_list.jobs)
+        self.assertFalse('project1-job' in
+                         project1_config.pipelines['check'].job_list.jobs)
+        project2_config = tenant.layout.project_configs.get(
+            'review.example.com/org/project2')
+        self.assertTrue('common-config-job' in
+                        project2_config.pipelines['check'].job_list.jobs)
+        self.assertFalse('project2-job' in
+                         project2_config.pipelines['check'].job_list.jobs)
+
+
+class TestTenantGroups2(TenantParserTestCase):
+    tenant_config_file = 'config/tenant-parser/groups2.yaml'
+
+    def test_tenant_groups2(self):
+        tenant = self.sched.abide.tenants.get('tenant-one')
+        self.assertEqual(['common-config'],
+                         [x.name for x in tenant.config_projects])
+        self.assertEqual(['org/project1', 'org/project2'],
+                         [x.name for x in tenant.untrusted_projects])
+        self.assertEqual(self.CONFIG_SET,
+                         tenant.config_projects[0].load_classes)
+        self.assertEqual(self.UNTRUSTED_SET - set(['project']),
+                         tenant.untrusted_projects[0].load_classes)
+        self.assertEqual(self.UNTRUSTED_SET - set(['project', 'job']),
+                         tenant.untrusted_projects[1].load_classes)
+        self.assertTrue('common-config-job' in tenant.layout.jobs)
+        self.assertTrue('project1-job' in tenant.layout.jobs)
+        self.assertFalse('project2-job' in tenant.layout.jobs)
+        project1_config = tenant.layout.project_configs.get(
+            'review.example.com/org/project1')
+        self.assertTrue('common-config-job' in
+                        project1_config.pipelines['check'].job_list.jobs)
+        self.assertFalse('project1-job' in
+                         project1_config.pipelines['check'].job_list.jobs)
+        project2_config = tenant.layout.project_configs.get(
+            'review.example.com/org/project2')
+        self.assertTrue('common-config-job' in
+                        project2_config.pipelines['check'].job_list.jobs)
+        self.assertFalse('project2-job' in
+                         project2_config.pipelines['check'].job_list.jobs)
+
+
+class TestTenantGroups3(TenantParserTestCase):
+    tenant_config_file = 'config/tenant-parser/groups3.yaml'
+
+    def test_tenant_groups3(self):
+        tenant = self.sched.abide.tenants.get('tenant-one')
+        self.assertEqual(['common-config'],
+                         [x.name for x in tenant.config_projects])
+        self.assertEqual(['org/project1', 'org/project2'],
+                         [x.name for x in tenant.untrusted_projects])
+        self.assertEqual(self.CONFIG_SET,
+                         tenant.config_projects[0].load_classes)
+        self.assertEqual(set(['job']),
+                         tenant.untrusted_projects[0].load_classes)
+        self.assertEqual(set(['project', 'job']),
+                         tenant.untrusted_projects[1].load_classes)
+        self.assertTrue('common-config-job' in tenant.layout.jobs)
+        self.assertTrue('project1-job' in tenant.layout.jobs)
+        self.assertTrue('project2-job' in tenant.layout.jobs)
+        project1_config = tenant.layout.project_configs.get(
+            'review.example.com/org/project1')
+        self.assertTrue('common-config-job' in
+                        project1_config.pipelines['check'].job_list.jobs)
+        self.assertFalse('project1-job' in
+                         project1_config.pipelines['check'].job_list.jobs)
+        project2_config = tenant.layout.project_configs.get(
+            'review.example.com/org/project2')
+        self.assertTrue('common-config-job' in
+                        project2_config.pipelines['check'].job_list.jobs)
+        self.assertTrue('project2-job' in
+                        project2_config.pipelines['check'].job_list.jobs)
diff --git a/tests/unit/test_github_driver.py b/tests/unit/test_github_driver.py
index 4979087..ba8e497 100644
--- a/tests/unit/test_github_driver.py
+++ b/tests/unit/test_github_driver.py
@@ -248,16 +248,18 @@
 
     @simple_layout('layouts/reporting-github.yaml', driver='github')
     def test_reporting(self):
+        project = 'org/project'
         # pipeline reports pull status both on start and success
         self.executor_server.hold_jobs_in_build = True
-        A = self.fake_github.openFakePullRequest('org/project', 'master', 'A')
+        A = self.fake_github.openFakePullRequest(project, 'master', 'A')
         self.fake_github.emitEvent(A.getPullRequestOpenedEvent())
         self.waitUntilSettled()
         # We should have a status container for the head sha
-        self.assertIn(A.head_sha, A.statuses.keys())
+        statuses = self.fake_github.statuses[project][A.head_sha]
+        self.assertIn(A.head_sha, self.fake_github.statuses[project].keys())
         # We should only have one status for the head sha
-        self.assertEqual(1, len(A.statuses[A.head_sha]))
-        check_status = A.statuses[A.head_sha][0]
+        self.assertEqual(1, len(statuses))
+        check_status = statuses[0]
         check_url = ('http://zuul.example.com/status/#%s,%s' %
                      (A.number, A.head_sha))
         self.assertEqual('tenant-one/check', check_status['context'])
@@ -270,8 +272,9 @@
         self.executor_server.release()
         self.waitUntilSettled()
         # We should only have two statuses for the head sha
-        self.assertEqual(2, len(A.statuses[A.head_sha]))
-        check_status = A.statuses[A.head_sha][0]
+        statuses = self.fake_github.statuses[project][A.head_sha]
+        self.assertEqual(2, len(statuses))
+        check_status = statuses[0]
         check_url = ('http://zuul.example.com/status/#%s,%s' %
                      (A.number, A.head_sha))
         self.assertEqual('tenant-one/check', check_status['context'])
@@ -286,7 +289,8 @@
         self.fake_github.emitEvent(
             A.getCommentAddedEvent('reporting check'))
         self.waitUntilSettled()
-        self.assertEqual(2, len(A.statuses[A.head_sha]))
+        statuses = self.fake_github.statuses[project][A.head_sha]
+        self.assertEqual(2, len(statuses))
         # comments increased by one for the start message
         self.assertEqual(2, len(A.comments))
         self.assertThat(A.comments[1],
@@ -295,8 +299,9 @@
         self.executor_server.release()
         self.waitUntilSettled()
         # pipeline reports success status
-        self.assertEqual(3, len(A.statuses[A.head_sha]))
-        report_status = A.statuses[A.head_sha][0]
+        statuses = self.fake_github.statuses[project][A.head_sha]
+        self.assertEqual(3, len(statuses))
+        report_status = statuses[0]
         self.assertEqual('tenant-one/reporting', report_status['context'])
         self.assertEqual('success', report_status['state'])
         self.assertEqual(2, len(A.comments))
diff --git a/tests/unit/test_github_requirements.py b/tests/unit/test_github_requirements.py
index 5dd6e80..301ea2f 100644
--- a/tests/unit/test_github_requirements.py
+++ b/tests/unit/test_github_requirements.py
@@ -25,7 +25,8 @@
     @simple_layout('layouts/requirements-github.yaml', driver='github')
     def test_pipeline_require_status(self):
         "Test pipeline requirement: status"
-        A = self.fake_github.openFakePullRequest('org/project1', 'master', 'A')
+        project = 'org/project1'
+        A = self.fake_github.openFakePullRequest(project, 'master', 'A')
         # A comment event that we will keep submitting to trigger
         comment = A.getCommentAddedEvent('test me')
         self.fake_github.emitEvent(comment)
@@ -34,13 +35,15 @@
         self.assertEqual(len(self.history), 0)
 
         # An error status should not cause it to be enqueued
-        A.setStatus(A.head_sha, 'error', 'null', 'null', 'check')
+        self.fake_github.setCommitStatus(project, A.head_sha, 'error',
+                                         context='check')
         self.fake_github.emitEvent(comment)
         self.waitUntilSettled()
         self.assertEqual(len(self.history), 0)
 
         # A success status goes in
-        A.setStatus(A.head_sha, 'success', 'null', 'null', 'check')
+        self.fake_github.setCommitStatus(project, A.head_sha, 'success',
+                                         context='check')
         self.fake_github.emitEvent(comment)
         self.waitUntilSettled()
         self.assertEqual(len(self.history), 1)
@@ -49,7 +52,8 @@
     @simple_layout('layouts/requirements-github.yaml', driver='github')
     def test_trigger_require_status(self):
         "Test trigger requirement: status"
-        A = self.fake_github.openFakePullRequest('org/project1', 'master', 'A')
+        project = 'org/project1'
+        A = self.fake_github.openFakePullRequest(project, 'master', 'A')
         # A comment event that we will keep submitting to trigger
         comment = A.getCommentAddedEvent('trigger me')
         self.fake_github.emitEvent(comment)
@@ -58,13 +62,15 @@
         self.assertEqual(len(self.history), 0)
 
         # An error status should not cause it to be enqueued
-        A.setStatus(A.head_sha, 'error', 'null', 'null', 'check')
+        self.fake_github.setCommitStatus(project, A.head_sha, 'error',
+                                         context='check')
         self.fake_github.emitEvent(comment)
         self.waitUntilSettled()
         self.assertEqual(len(self.history), 0)
 
         # A success status goes in
-        A.setStatus(A.head_sha, 'success', 'null', 'null', 'check')
+        self.fake_github.setCommitStatus(project, A.head_sha, 'success',
+                                         context='check')
         self.fake_github.emitEvent(comment)
         self.waitUntilSettled()
         self.assertEqual(len(self.history), 1)
@@ -73,10 +79,12 @@
     @simple_layout('layouts/requirements-github.yaml', driver='github')
     def test_trigger_on_status(self):
         "Test trigger on: status"
-        A = self.fake_github.openFakePullRequest('org/project2', 'master', 'A')
+        project = 'org/project2'
+        A = self.fake_github.openFakePullRequest(project, 'master', 'A')
 
         # An error status should not cause it to be enqueued
-        A.setStatus(A.head_sha, 'error', 'null', 'null', 'check')
+        self.fake_github.setCommitStatus(project, A.head_sha, 'error',
+                                         context='check')
         self.fake_github.emitEvent(A.getCommitStatusEvent('check',
                                                           state='error'))
         self.waitUntilSettled()
@@ -84,7 +92,8 @@
 
         # A success status from unknown user should not cause it to be
         # enqueued
-        A.setStatus(A.head_sha, 'success', 'null', 'null', 'check', user='foo')
+        self.fake_github.setCommitStatus(project, A.head_sha, 'success',
+                                         context='check', user='foo')
         self.fake_github.emitEvent(A.getCommitStatusEvent('check',
                                                           state='success',
                                                           user='foo'))
@@ -92,7 +101,8 @@
         self.assertEqual(len(self.history), 0)
 
         # A success status from zuul goes in
-        A.setStatus(A.head_sha, 'success', 'null', 'null', 'check')
+        self.fake_github.setCommitStatus(project, A.head_sha, 'success',
+                                         context='check')
         self.fake_github.emitEvent(A.getCommitStatusEvent('check'))
         self.waitUntilSettled()
         self.assertEqual(len(self.history), 1)
@@ -100,7 +110,8 @@
 
         # An error status for a different context should not cause it to be
         # enqueued
-        A.setStatus(A.head_sha, 'error', 'null', 'null', 'gate')
+        self.fake_github.setCommitStatus(project, A.head_sha, 'error',
+                                         context='gate')
         self.fake_github.emitEvent(A.getCommitStatusEvent('gate',
                                                           state='error'))
         self.waitUntilSettled()
diff --git a/tests/unit/test_model.py b/tests/unit/test_model.py
index e7e53c4..7a4d53e 100644
--- a/tests/unit/test_model.py
+++ b/tests/unit/test_model.py
@@ -186,7 +186,7 @@
             'post-run': 'base-post',
             'nodes': [{
                 'name': 'controller',
-                'image': 'base',
+                'label': 'base',
             }],
         })
         layout.addJob(base)
@@ -199,7 +199,7 @@
             'post-run': 'py27-post',
             'nodes': [{
                 'name': 'controller',
-                'image': 'new',
+                'label': 'new',
             }],
             'timeout': 40,
         })
@@ -216,7 +216,7 @@
             'post-run': 'py27-diablo-post',
             'nodes': [{
                 'name': 'controller',
-                'image': 'old',
+                'label': 'old',
             }],
             'timeout': 50,
         })
@@ -264,7 +264,7 @@
         self.assertEqual(job.timeout, 40)
         nodes = job.nodeset.getNodes()
         self.assertEqual(len(nodes), 1)
-        self.assertEqual(nodes[0].image, 'new')
+        self.assertEqual(nodes[0].label, 'new')
         self.assertEqual([x.path for x in job.pre_run],
                          ['playbooks/base-pre',
                           'playbooks/py27-pre'])
@@ -292,7 +292,7 @@
         self.assertEqual(job.timeout, 50)
         nodes = job.nodeset.getNodes()
         self.assertEqual(len(nodes), 1)
-        self.assertEqual(nodes[0].image, 'old')
+        self.assertEqual(nodes[0].label, 'old')
         self.assertEqual([x.path for x in job.pre_run],
                          ['playbooks/base-pre',
                           'playbooks/py27-pre',
diff --git a/tests/unit/test_scheduler.py b/tests/unit/test_scheduler.py
index de8246c..1124beb 100755
--- a/tests/unit/test_scheduler.py
+++ b/tests/unit/test_scheduler.py
@@ -35,12 +35,36 @@
 import zuul.model
 
 from tests.base import (
+    SSLZuulTestCase,
     ZuulTestCase,
     repack_repo,
     simple_layout,
 )
 
 
+class TestSchedulerSSL(SSLZuulTestCase):
+    tenant_config_file = 'config/single-tenant/main.yaml'
+
+    def test_jobs_executed(self):
+        "Test that jobs are executed and a change is merged"
+
+        A = self.fake_gerrit.addFakeChange('org/project', 'master', 'A')
+        A.addApproval('code-review', 2)
+        self.fake_gerrit.addEvent(A.addApproval('approved', 1))
+        self.waitUntilSettled()
+        self.assertEqual(self.getJobFromHistory('project-merge').result,
+                         'SUCCESS')
+        self.assertEqual(self.getJobFromHistory('project-test1').result,
+                         'SUCCESS')
+        self.assertEqual(self.getJobFromHistory('project-test2').result,
+                         'SUCCESS')
+        self.assertEqual(A.data['status'], 'MERGED')
+        self.assertEqual(A.reported, 2)
+        self.assertEqual(self.getJobFromHistory('project-test1').node,
+                         'label1')
+        self.assertIsNone(self.getJobFromHistory('project-test2').node)
+
+
 class TestScheduler(ZuulTestCase):
     tenant_config_file = 'config/single-tenant/main.yaml'
 
@@ -60,7 +84,7 @@
         self.assertEqual(A.data['status'], 'MERGED')
         self.assertEqual(A.reported, 2)
         self.assertEqual(self.getJobFromHistory('project-test1').node,
-                         'image1')
+                         'label1')
         self.assertIsNone(self.getJobFromHistory('project-test2').node)
 
         # TODOv3(jeblair): we may want to report stats by tenant (also?).
@@ -110,7 +134,7 @@
         self.assertIn('gate', A.messages[1],
                       "A should transit gate")
         self.assertEqual(self.getJobFromHistory('project-test1').node,
-                         'image2')
+                         'label2')
 
     def test_parallel_changes(self):
         "Test that changes are tested in parallel and merged in series"
@@ -1516,7 +1540,7 @@
         trusted, project = tenant.getProject('org/project')
         url = self.fake_gerrit.getGitUrl(project)
         self.executor_server.merger._addProject('review.example.com',
-                                                'org/project', url)
+                                                'org/project', url, None)
         A = self.fake_gerrit.addFakeChange('org/project', 'master', 'A')
         A.addPatchset(large=True)
         # TODOv3(jeblair): add hostname to upstream root
diff --git a/tests/unit/test_v3.py b/tests/unit/test_v3.py
index 3854804..7d84b1f 100644
--- a/tests/unit/test_v3.py
+++ b/tests/unit/test_v3.py
@@ -368,6 +368,59 @@
         self.assertIn('the only project definition permitted', A.messages[0],
                       "A should have a syntax error reported")
 
+    def test_duplicate_node_error(self):
+        in_repo_conf = textwrap.dedent(
+            """
+            - nodeset:
+                name: duplicate
+                nodes:
+                  - name: compute
+                    label: foo
+                  - name: compute
+                    label: foo
+            """)
+
+        file_dict = {'.zuul.yaml': in_repo_conf}
+        A = self.fake_gerrit.addFakeChange('org/project', 'master', 'A',
+                                           files=file_dict)
+        A.addApproval('code-review', 2)
+        self.fake_gerrit.addEvent(A.addApproval('approved', 1))
+        self.waitUntilSettled()
+
+        self.assertEqual(A.data['status'], 'NEW')
+        self.assertEqual(A.reported, 1,
+                         "A should report failure")
+        self.assertIn('appears multiple times', A.messages[0],
+                      "A should have a syntax error reported")
+
+    def test_duplicate_group_error(self):
+        in_repo_conf = textwrap.dedent(
+            """
+            - nodeset:
+                name: duplicate
+                nodes:
+                  - name: compute
+                    label: foo
+                groups:
+                  - name: group
+                    nodes: compute
+                  - name: group
+                    nodes: compute
+            """)
+
+        file_dict = {'.zuul.yaml': in_repo_conf}
+        A = self.fake_gerrit.addFakeChange('org/project', 'master', 'A',
+                                           files=file_dict)
+        A.addApproval('code-review', 2)
+        self.fake_gerrit.addEvent(A.addApproval('approved', 1))
+        self.waitUntilSettled()
+
+        self.assertEqual(A.data['status'], 'NEW')
+        self.assertEqual(A.reported, 1,
+                         "A should report failure")
+        self.assertIn('appears multiple times', A.messages[0],
+                      "A should have a syntax error reported")
+
 
 class TestAnsible(AnsibleZuulTestCase):
     # A temporary class to hold new tests while others are disabled
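
For reference, a minimal standalone sketch of the nodeset shape these tests exercise (nodes carry a label rather than an image) and of the duplicate check they expect; the YAML values and the check_nodeset helper are illustrative, not part of Zuul:

    import textwrap
    import yaml

    nodeset_yaml = textwrap.dedent(
        """
        - nodeset:
            name: example
            nodes:
              - name: controller
                label: centos-7
              - name: compute
                label: centos-7
            groups:
              - name: ceph
                nodes: compute
        """)

    def check_nodeset(conf):
        # Reject duplicate node or group names, as the tests above expect.
        seen_nodes = set()
        seen_groups = set()
        for node in conf['nodes']:
            if node['name'] in seen_nodes:
                raise ValueError('node "%s" appears multiple times'
                                 % node['name'])
            seen_nodes.add(node['name'])
        for group in conf.get('groups', []):
            if group['name'] in seen_groups:
                raise ValueError('group "%s" appears multiple times'
                                 % group['name'])
            seen_groups.add(group['name'])

    for item in yaml.safe_load(nodeset_yaml):
        check_nodeset(item['nodeset'])
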
diff --git a/zuul/ansible/callback/zuul_stream.py b/zuul/ansible/callback/zuul_stream.py
index 8882c33..c064f3e 100644
--- a/zuul/ansible/callback/zuul_stream.py
+++ b/zuul/ansible/callback/zuul_stream.py
@@ -13,13 +13,11 @@
 # You should have received a copy of the GNU General Public License
 # along with Ansible.  If not, see <http://www.gnu.org/licenses/>.
 
-from __future__ import absolute_import
-
 import datetime
-import multiprocessing
 import logging
 import os
 import socket
+import threading
 import time
 import uuid
 
@@ -104,22 +102,25 @@
         logging.basicConfig(filename=path, level=level, format='%(message)s')
         self._log = logging.getLogger('zuul.executor.ansible')
 
-    def _read_log(self, host, ip, log_id):
-        self._log.debug("[%s] Starting to log" % host)
+    def _read_log(self, host, ip, log_id, task_name):
+        self._display.display("[%s] Starting to log %s for task %s"
+                              % (host, log_id, task_name))
         s = socket.socket(socket.AF_INET, socket.SOCK_STREAM)
         while True:
             try:
                 s.connect((ip, LOG_STREAM_PORT))
             except Exception:
-                self._log.debug("[%s] Waiting on logger" % host)
+                self._display.vvv("[%s] Waiting on logger" % host)
                 time.sleep(0.1)
                 continue
-            s.send(log_id + '\n')
+            msg = "%s\n" % log_id
+            s.send(msg.encode("utf-8"))
             for line in linesplit(s):
                 if "[Zuul] Task exit code" in line:
                     return
                 else:
-                    ts, ln = line.strip().split(' | ', 1)
+                    ts, ln = line.split(' | ', 1)
+                    ln = ln.strip()
 
                     self._log.info("%s | %s | %s " % (ts, host, ln))
 
@@ -142,7 +143,7 @@
         self._task = task
 
         if self._play.strategy != 'free':
-            self._print_task_banner(task)
+            task_name = self._print_task_banner(task)
         if task.action == 'command':
             log_id = uuid.uuid4().hex
             task.args['zuul_log_id'] = log_id
@@ -161,14 +162,22 @@
                     'ansible_host', play_vars[host].get(
                         'ansible_inventory_host'))
                 self._host_dict[host] = ip
-                self._streamer = multiprocessing.Process(
-                    target=self._read_log, args=(host, ip, log_id))
+                self._streamer = threading.Thread(
+                    target=self._read_log, args=(host, ip, log_id, task_name))
                 self._streamer.daemon = True
                 self._streamer.start()
 
-    def v2_runner_on_failed(self, result, ignore_errors=False):
+    def _stop_streamer(self):
         if self._streamer:
-            self._streamer.join()
+            self._streamer.join(30)
+            if self._streamer.is_alive():
+                msg = "{now} | [Zuul] Log Stream did not terminate".format(
+                    now=datetime.datetime.now())
+                self._log.info(msg)
+                self._display.display("WARNING: Streamer could not join")
+
+    def v2_runner_on_failed(self, result, ignore_errors=False):
+        self._stop_streamer()
         if result._task.action in ('command', 'shell'):
             zuul_filter_result(result._result)
         self._handle_exception(result._result)
@@ -194,8 +203,7 @@
         if result._task.action in ('include', 'include_role'):
             return
 
-        if self._streamer:
-            self._streamer.join()
+        self._stop_streamer()
 
         if result._result.get('changed', False):
             status = 'changed'
@@ -248,6 +256,7 @@
             task=task_name,
             args=args)
         self._log.info(msg)
+        return task
 
     def _log_message(self, result, msg, status="ok"):
         now = datetime.datetime.now()
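
A toy sketch of the streaming pattern above: the log reader runs as a daemon thread and the callback uses a bounded join() instead of blocking forever. stream_logs, the Event-based stop and the 30-second timeout are illustrative only:

    import threading
    import time

    def stream_logs(host, stop_event):
        # Stand-in for the socket-reading loop in _read_log().
        while not stop_event.is_set():
            time.sleep(0.1)

    stop = threading.Event()
    streamer = threading.Thread(target=stream_logs, args=('node1', stop))
    streamer.daemon = True
    streamer.start()

    # ... the task runs here ...

    stop.set()
    streamer.join(30)
    if streamer.is_alive():
        print("WARNING: Streamer could not join")
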
diff --git a/zuul/ansible/library/command.py b/zuul/ansible/library/command.py
index 4b3a30f..39a961e 100644
--- a/zuul/ansible/library/command.py
+++ b/zuul/ansible/library/command.py
@@ -19,6 +19,10 @@
 # You should have received a copy of the GNU General Public License
 # along with this software.  If not, see <http://www.gnu.org/licenses/>.
 
+ANSIBLE_METADATA = {'metadata_version': '1.0',
+                    'status': ['stableinterface'],
+                    'supported_by': 'core'}
+
 # flake8: noqa
 # This file shares a significant chunk of code with an upstream ansible
 # function, run_command. The goal is to not have to fork quite so much
@@ -34,7 +38,7 @@
 short_description: Executes a command on a remote node
 version_added: historical
 description:
-     - The M(command) module takes the command name followed by a list of space-delimited arguments.
+     - The C(command) module takes the command name followed by a list of space-delimited arguments.
      - The given command will be executed on all selected nodes. It will not be
        processed through the shell, so variables like C($HOME) and operations
        like C("<"), C(">"), C("|"), C(";") and C("&") will not work (use the M(shell)
@@ -76,30 +80,33 @@
       - if command warnings are on in ansible.cfg, do not warn about this particular line if set to no/false.
     required: false
 notes:
-    -  If you want to run a command through the shell (say you are using C(<),
-       C(>), C(|), etc), you actually want the M(shell) module instead. The
-       M(command) module is much more secure as it's not affected by the user's
-       environment.
-    -  " C(creates), C(removes), and C(chdir) can be specified after the command. For instance, if you only want to run a command if a certain file does not exist, use this."
+    -  If you want to run a command through the shell (say you are using C(<), C(>), C(|), etc), you actually want the M(shell) module instead.
+       The C(command) module is much more secure as it's not affected by the user's environment.
+    -  " C(creates), C(removes), and C(chdir) can be specified after the command.
+       For instance, if you only want to run a command if a certain file does not exist, use this."
 author:
     - Ansible Core Team
     - Michael DeHaan
 '''
 
 EXAMPLES = '''
-# Example from Ansible Playbooks.
-- command: /sbin/shutdown -t now
+- name: return motd to registered var
+  command: cat /etc/motd
+  register: mymotd
 
-# Run the command if the specified file does not exist.
-- command: /usr/bin/make_database.sh arg1 arg2 creates=/path/to/database
+- name: Run the command if the specified file does not exist.
+  command: /usr/bin/make_database.sh arg1 arg2 creates=/path/to/database
 
-# You can also use the 'args' form to provide the options. This command
-# will change the working directory to somedir/ and will only run when
-# /path/to/database doesn't exist.
-- command: /usr/bin/make_database.sh arg1 arg2
+# You can also use the 'args' form to provide the options.
+- name: This command will change the working directory to somedir/ and will only run when /path/to/database doesn't exist.
+  command: /usr/bin/make_database.sh arg1 arg2
   args:
     chdir: somedir/
     creates: /path/to/database
+
+- name: safely use templated variable to run command. Always use the quote filter to avoid injection issues.
+  command: cat {{ myfile|quote }}
+  register: myoutput
 '''
 
 import datetime
@@ -116,10 +123,19 @@
 import threading
 
 from ansible.module_utils.basic import AnsibleModule, heuristic_log_sanitize
-from ansible.module_utils.basic import get_exception
-# ZUUL: Hardcode python2 until we're on ansible 2.2
-from ast import literal_eval
-
+from ansible.module_utils.pycompat24 import get_exception, literal_eval
+from ansible.module_utils.six import (
+    PY2,
+    PY3,
+    b,
+    binary_type,
+    integer_types,
+    iteritems,
+    string_types,
+    text_type,
+)
+from ansible.module_utils.six.moves import map, reduce
+from ansible.module_utils._text import to_native, to_bytes, to_text
 
 LOG_STREAM_FILE = '/tmp/console-{log_uuid}.log'
 PASSWD_ARG_RE = re.compile(r'^[-]{0,2}pass[-]?(word|wd)?')
@@ -166,7 +182,7 @@
 
 # Taken from ansible/module_utils/basic.py ... forking the method for now
 # so that we can dive in and figure out how to make appropriate hook points
-def zuul_run_command(self, args, zuul_log_id, check_rc=False, close_fds=True, executable=None, data=None, binary_data=False, path_prefix=None, cwd=None, use_unsafe_shell=False, prompt_regex=None, environ_update=None):
+def zuul_run_command(self, args, zuul_log_id, check_rc=False, close_fds=True, executable=None, data=None, binary_data=False, path_prefix=None, cwd=None, use_unsafe_shell=False, prompt_regex=None, environ_update=None, umask=None, encoding='utf-8', errors='surrogate_or_strict'):
     '''
     Execute a command, returns rc, stdout, and stderr.
 
@@ -188,7 +204,27 @@
     :kw prompt_regex: Regex string (not a compiled regex) which can be
         used to detect prompts in the stdout which would otherwise cause
         the execution to hang (especially if no input data is specified)
-    :kwarg environ_update: dictionary to *update* os.environ with
+    :kw environ_update: dictionary to *update* os.environ with
+    :kw umask: Umask to be used when running the command. Default None
+    :kw encoding: Since we return native strings, on python3 we need to
+        know the encoding to use to transform from bytes to text.  If you
+        want to always get bytes back, use encoding=None.  The default is
+        "utf-8".  This does not affect transformation of strings given as
+        args.
+    :kw errors: Since we return native strings, on python3 we need to
+        transform stdout and stderr from bytes to text.  If the bytes are
+        undecodable in the ``encoding`` specified, then use this error
+        handler to deal with them.  The default is ``surrogate_or_strict``
+        which means that the bytes will be decoded using the
+        surrogateescape error handler if available (available on all
+        python3 versions we support) otherwise a UnicodeError traceback
+        will be raised.  This does not affect transformations of strings
+        given as args.
+    :returns: A 3-tuple of return code (integer), stdout (native string),
+        and stderr (native string).  On python2, stdout and stderr are both
+        byte strings.  On python3, stdout and stderr are text strings converted
+        according to the encoding and errors parameters.  If you want byte
+        strings on python3, use encoding=None to turn decoding to text off.
     '''
 
     shell = False
@@ -196,13 +232,15 @@
         if use_unsafe_shell:
             args = " ".join([pipes.quote(x) for x in args])
             shell = True
-    elif isinstance(args, (str, unicode)) and use_unsafe_shell:
+    elif isinstance(args, (binary_type, text_type)) and use_unsafe_shell:
         shell = True
-    elif isinstance(args, (str, unicode)):
+    elif isinstance(args, (binary_type, text_type)):
         # On python2.6 and below, shlex has problems with text type
-        # ZUUL: Hardcode python2 until we're on ansible 2.2
-        if isinstance(args, unicode):
-            args = args.encode('utf-8')
+        # On python3, shlex needs a text type.
+        if PY2:
+            args = to_bytes(args, errors='surrogate_or_strict')
+        elif PY3:
+            args = to_text(args, errors='surrogateescape')
         args = shlex.split(args)
     else:
         msg = "Argument 'args' to run_command must be list or string"
@@ -210,6 +248,11 @@
 
     prompt_re = None
     if prompt_regex:
+        if isinstance(prompt_regex, text_type):
+            if PY3:
+                prompt_regex = to_bytes(prompt_regex, errors='surrogateescape')
+            elif PY2:
+                prompt_regex = to_bytes(prompt_regex, errors='surrogate_or_strict')
         try:
             prompt_re = re.compile(prompt_regex, re.MULTILINE)
         except re.error:
@@ -217,7 +260,7 @@
 
     # expand things like $HOME and ~
     if not shell:
-        args = [ os.path.expanduser(os.path.expandvars(x)) for x in args if x is not None ]
+        args = [os.path.expanduser(os.path.expandvars(x)) for x in args if x is not None]
 
     rc = 0
     msg = None
@@ -245,9 +288,9 @@
     # Clean out python paths set by ansiballz
     if 'PYTHONPATH' in os.environ:
         pypaths = os.environ['PYTHONPATH'].split(':')
-        pypaths = [x for x in pypaths \
-                    if not x.endswith('/ansible_modlib.zip') \
-                    and not x.endswith('/debug_dir')]
+        pypaths = [x for x in pypaths
+                   if not x.endswith('/ansible_modlib.zip') and
+                   not x.endswith('/debug_dir')]
         os.environ['PYTHONPATH'] = ':'.join(pypaths)
         if not os.environ['PYTHONPATH']:
             del os.environ['PYTHONPATH']
@@ -256,8 +299,13 @@
     # in reporting later, which strips out things like
     # passwords from the args list
     to_clean_args = args
-    # ZUUL: Hardcode python2 until we're on ansible 2.2
-    if isinstance(args, (unicode, str)):
+    if PY2:
+        if isinstance(args, text_type):
+            to_clean_args = to_bytes(args)
+    else:
+        if isinstance(args, binary_type):
+            to_clean_args = to_text(args)
+    if isinstance(args, (text_type, binary_type)):
         to_clean_args = shlex.split(to_clean_args)
 
     clean_args = []
@@ -291,34 +339,36 @@
         stderr=subprocess.STDOUT,
     )
 
-    if cwd and os.path.isdir(cwd):
-        kwargs['cwd'] = cwd
-
     # store the pwd
     prev_dir = os.getcwd()
 
     # make sure we're in the right working directory
     if cwd and os.path.isdir(cwd):
+        cwd = os.path.abspath(os.path.expanduser(cwd))
+        kwargs['cwd'] = cwd
         try:
             os.chdir(cwd)
         except (OSError, IOError):
             e = get_exception()
             self.fail_json(rc=e.errno, msg="Could not open %s, %s" % (cwd, str(e)))
 
-    try:
+    old_umask = None
+    if umask:
+        old_umask = os.umask(umask)
 
+    try:
         if self._debug:
-            if isinstance(args, list):
-                running = ' '.join(args)
-            else:
-                running = args
-            self.log('Executing: ' + running)
+            self.log('Executing: ' + clean_args)
+
         # ZUUL: Replaced the execution loop with the zuul_runner run function
         cmd = subprocess.Popen(args, **kwargs)
         t = threading.Thread(target=follow, args=(cmd.stdout, zuul_log_id))
         t.daemon = True
         t.start()
+
         ret = cmd.wait()
+
         # Give the thread that is writing the console log up to 10 seconds
         # to catch up and exit.  If it hasn't done so by then, it is very
         # likely stuck in readline() because it spawned a child that is
@@ -334,19 +384,21 @@
         # we can't close stdout (attempting to do so raises an
         # exception) , so this is disabled.
         # cmd.stdout.close()
+        # cmd.stderr.close()
 
         # ZUUL: stdout and stderr are in the console log file
         # ZUUL: return the saved log lines so we can ship them back
-        stdout = ''.join(_log_lines)
-        stderr = ''
+        stdout = b('').join(_log_lines)
+        stderr = b('')
 
         rc = cmd.returncode
     except (OSError, IOError):
         e = get_exception()
-        self.fail_json(rc=e.errno, msg=str(e), cmd=clean_args)
+        self.log("Error Executing CMD:%s Exception:%s" % (clean_args, to_native(e)))
+        self.fail_json(rc=e.errno, msg=to_native(e), cmd=clean_args)
     except Exception:
-        e = get_exception()
-        self.fail_json(rc=257, msg=str(e), exception=traceback.format_exc(), cmd=clean_args)
+        e = get_exception()
+        self.log("Error Executing CMD:%s Exception:%s" % (clean_args, to_native(traceback.format_exc())))
+        self.fail_json(rc=257, msg=to_native(e), exception=traceback.format_exc(), cmd=clean_args)
 
     # Restore env settings
     for key, val in old_env_vals.items():
@@ -355,6 +407,9 @@
         else:
             os.environ[key] = val
 
+    if old_umask:
+        os.umask(old_umask)
+
     if rc != 0 and check_rc:
         msg = heuristic_log_sanitize(stderr.rstrip(), self.no_log_values)
         self.fail_json(cmd=clean_args, rc=rc, stdout=stdout, stderr=stderr, msg=msg)
@@ -362,6 +417,9 @@
     # reset the pwd
     os.chdir(prev_dir)
 
+    if encoding is not None:
+        return (rc, to_native(stdout, encoding=encoding, errors=errors),
+                to_native(stderr, encoding=encoding, errors=errors))
     return (rc, stdout, stderr)
 
 
@@ -392,24 +450,24 @@
     # hence don't copy this one if you are looking to build others!
     module = AnsibleModule(
         argument_spec=dict(
-          _raw_params = dict(),
-          _uses_shell = dict(type='bool', default=False),
-          chdir = dict(type='path'),
-          executable = dict(),
-          creates = dict(type='path'),
-          removes = dict(type='path'),
-          warn = dict(type='bool', default=True),
-          environ = dict(type='dict', default=None),
-          zuul_log_id = dict(type='str'),
+            _raw_params = dict(),
+            _uses_shell = dict(type='bool', default=False),
+            chdir = dict(type='path'),
+            executable = dict(),
+            creates = dict(type='path'),
+            removes = dict(type='path'),
+            warn = dict(type='bool', default=True),
+            environ = dict(type='dict', default=None),
+            zuul_log_id = dict(type='str'),
         )
     )
 
     shell = module.params['_uses_shell']
     chdir = module.params['chdir']
     executable = module.params['executable']
-    args  = module.params['_raw_params']
-    creates  = module.params['creates']
-    removes  = module.params['removes']
+    args = module.params['_raw_params']
+    creates = module.params['creates']
+    removes = module.params['removes']
     warn = module.params['warn']
     environ = module.params['environ']
     zuul_log_id = module.params['zuul_log_id']
@@ -434,9 +492,9 @@
             )
 
     if removes:
-    # do not run the command if the line contains removes=filename
-    # and the filename does not exist.  This allows idempotence
-    # of command executions.
+        # do not run the command if the line contains removes=filename
+        # and the filename does not exist.  This allows idempotence
+        # of command executions.
         if not glob.glob(removes):
             module.exit_json(
                 cmd=args,
@@ -453,20 +511,20 @@
         args = shlex.split(args)
     startd = datetime.datetime.now()
 
-    rc, out, err = zuul_run_command(module, args, zuul_log_id, executable=executable, use_unsafe_shell=shell, environ_update=environ)
+    rc, out, err = zuul_run_command(module, args, zuul_log_id, executable=executable, use_unsafe_shell=shell, encoding=None, environ_update=environ)
 
     endd = datetime.datetime.now()
     delta = endd - startd
 
     if out is None:
-        out = ''
+        out = b('')
     if err is None:
-        err = ''
+        err = b('')
 
     module.exit_json(
         cmd      = args,
-        stdout   = out.rstrip("\r\n"),
-        stderr   = err.rstrip("\r\n"),
+        stdout   = out.rstrip(b("\r\n")),
+        stderr   = err.rstrip(b("\r\n")),
         rc       = rc,
         start    = str(startd),
         end      = str(endd),
diff --git a/zuul/ansible/library/zuul_console.py b/zuul/ansible/library/zuul_console.py
index 7f8a1b6..42f41f0 100644
--- a/zuul/ansible/library/zuul_console.py
+++ b/zuul/ansible/library/zuul_console.py
@@ -15,10 +15,12 @@
 # You should have received a copy of the GNU General Public License
 # along with this software.  If not, see <http://www.gnu.org/licenses/>.
 
+import glob
 import os
 import sys
 import select
 import socket
+import subprocess
 import threading
 import time
 
@@ -196,6 +198,53 @@
                 pass
 
 
+def get_inode(port_number=19885):
+    for netfile in ('/proc/net/tcp6', '/proc/net/tcp'):
+        if not os.path.exists(netfile):
+            continue
+        with open(netfile) as f:
+            # discard header line
+            f.readline()
+            for line in f:
+                # sl local_address rem_address st tx_queue:rx_queue tr:tm->when
+                # retrnsmt   uid  timeout inode
+                fields = line.split()
+                # Format is localaddr:localport in hex
+                port = int(fields[1].split(':')[1], base=16)
+                if port == port_number:
+                    return fields[9]
+
+
+def get_pid_from_inode(inode):
+    my_euid = os.geteuid()
+    exceptions = []
+    for d in os.listdir('/proc'):
+        try:
+            try:
+                int(d)
+            except Exception as e:
+                continue
+            d_abs_path = os.path.join('/proc', d)
+            if os.stat(d_abs_path).st_uid != my_euid:
+                continue
+            fd_dir = os.path.join(d_abs_path, 'fd')
+            if os.path.exists(fd_dir):
+                if os.stat(fd_dir).st_uid != my_euid:
+                    continue
+                for fd in os.listdir(fd_dir):
+                    try:
+                        fd_path = os.path.join(fd_dir, fd)
+                        if os.path.islink(fd_path):
+                            target = os.readlink(fd_path)
+                            if '[' + inode + ']' in target:
+                                return d, exceptions
+                    except Exception as e:
+                        exceptions.append(e)
+        except Exception as e:
+            exceptions.append(e)
+    return None, exceptions
+
+
 def test():
     s = Server(LOG_STREAM_FILE, LOG_STREAM_PORT)
     s.run()
@@ -206,19 +255,54 @@
         argument_spec=dict(
             path=dict(default=LOG_STREAM_FILE),
             port=dict(default=LOG_STREAM_PORT, type='int'),
+            state=dict(default='present', choices=['absent', 'present']),
         )
     )
 
     p = module.params
     path = p['path']
     port = p['port']
+    state = p['state']
 
-    if daemonize():
+    if state == 'present':
+        if daemonize():
+            module.exit_json()
+
+        s = Server(path, port)
+        s.run()
+    else:
+        pid = None
+        exceptions = []
+        inode = get_inode()
+        if not inode:
+            module.fail_json(
+                msg="Could not find inode for port",
+                exceptions=[])
+
+        pid, exceptions = get_pid_from_inode(inode)
+        if not pid:
+            except_strings = [str(e) for e in exceptions]
+            module.fail_json(
+                msg="Could not find zuul_console process for inode",
+                exceptions=except_strings)
+
+        try:
+            subprocess.check_output(['kill', pid])
+        except subprocess.CalledProcessError as e:
+            module.fail_json(
+                msg="Could not kill zuul_console pid",
+                exceptions=[str(e)])
+
+        for fn in glob.glob(LOG_STREAM_FILE.format(log_uuid='*')):
+            try:
+                os.unlink(fn)
+            except Exception as e:
+                module.fail_json(
+                    msg="Could not remove logfile {fn}".format(fn=fn),
+                    exceptions=[str(e)])
+
         module.exit_json()
 
-    s = Server(path, port)
-    s.run()
-
 from ansible.module_utils.basic import *  # noqa
 from ansible.module_utils.basic import AnsibleModule
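
A Linux-only sketch of the /proc/net/tcp format that get_inode() walks: field 1 is localaddr:localport in hex and field 9 is the socket inode. listening_inodes is an illustrative helper, and 19885 is the console port referenced above:

    import os

    def listening_inodes():
        # Map local TCP port -> socket inode, as read from /proc.
        results = {}
        for netfile in ('/proc/net/tcp6', '/proc/net/tcp'):
            if not os.path.exists(netfile):
                continue
            with open(netfile) as f:
                f.readline()  # skip the header line
                for line in f:
                    fields = line.split()
                    port = int(fields[1].split(':')[1], 16)
                    results.setdefault(port, fields[9])
        return results

    print(listening_inodes().get(19885))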
 
diff --git a/zuul/cmd/client.py b/zuul/cmd/client.py
index 3f67a38..d8250c5 100644
--- a/zuul/cmd/client.py
+++ b/zuul/cmd/client.py
@@ -125,6 +125,18 @@
             self.port = self.config.get('gearman', 'port')
         else:
             self.port = 4730
+        if self.config.has_option('gearman', 'ssl_key'):
+            self.ssl_key = self.config.get('gearman', 'ssl_key')
+        else:
+            self.ssl_key = None
+        if self.config.has_option('gearman', 'ssl_cert'):
+            self.ssl_cert = self.config.get('gearman', 'ssl_cert')
+        else:
+            self.ssl_cert = None
+        if self.config.has_option('gearman', 'ssl_ca'):
+            self.ssl_ca = self.config.get('gearman', 'ssl_ca')
+        else:
+            self.ssl_ca = None
 
         if self.args.func():
             sys.exit(0)
@@ -132,7 +144,8 @@
             sys.exit(1)
 
     def enqueue(self):
-        client = zuul.rpcclient.RPCClient(self.server, self.port)
+        client = zuul.rpcclient.RPCClient(
+            self.server, self.port, self.ssl_key, self.ssl_cert, self.ssl_ca)
         r = client.enqueue(tenant=self.args.tenant,
                            pipeline=self.args.pipeline,
                            project=self.args.project,
@@ -141,7 +154,8 @@
         return r
 
     def enqueue_ref(self):
-        client = zuul.rpcclient.RPCClient(self.server, self.port)
+        client = zuul.rpcclient.RPCClient(
+            self.server, self.port, self.ssl_key, self.ssl_cert, self.ssl_ca)
         r = client.enqueue_ref(tenant=self.args.tenant,
                                pipeline=self.args.pipeline,
                                project=self.args.project,
@@ -152,14 +166,16 @@
         return r
 
     def promote(self):
-        client = zuul.rpcclient.RPCClient(self.server, self.port)
+        client = zuul.rpcclient.RPCClient(
+            self.server, self.port, self.ssl_key, self.ssl_cert, self.ssl_ca)
         r = client.promote(tenant=self.args.tenant,
                            pipeline=self.args.pipeline,
                            change_ids=self.args.changes)
         return r
 
     def show_running_jobs(self):
-        client = zuul.rpcclient.RPCClient(self.server, self.port)
+        client = zuul.rpcclient.RPCClient(
+            self.server, self.port, self.ssl_key, self.ssl_cert, self.ssl_ca)
         running_items = client.get_running_jobs()
 
         if len(running_items) == 0:
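
The same three ssl_* lookups recur in the scheduler, executor and merger below; a hypothetical helper, not part of this change, could centralise them:

    import configparser  # ConfigParser on Python 2

    def get_gearman_ssl_options(config, section='gearman'):
        """Return (ssl_key, ssl_cert, ssl_ca), with None for unset options."""
        def get(option):
            if config.has_option(section, option):
                return config.get(section, option)
            return None
        return get('ssl_key'), get('ssl_cert'), get('ssl_ca')

    config = configparser.ConfigParser()
    config.read('/etc/zuul/zuul.conf')
    ssl_key, ssl_cert, ssl_ca = get_gearman_ssl_options(config)
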
diff --git a/zuul/cmd/executor.py b/zuul/cmd/executor.py
index bd14b74..7cc8dd8 100755
--- a/zuul/cmd/executor.py
+++ b/zuul/cmd/executor.py
@@ -71,7 +71,8 @@
         path = os.path.join(state_dir, 'executor.socket')
         s = socket.socket(socket.AF_UNIX, socket.SOCK_STREAM)
         s.connect(path)
-        s.sendall('%s\n' % cmd)
+        cmd = '%s\n' % cmd
+        s.sendall(cmd.encode('utf8'))
 
     def exit_handler(self):
         self.executor.stop()
diff --git a/zuul/cmd/scheduler.py b/zuul/cmd/scheduler.py
index 5328bba..d16eb17 100755
--- a/zuul/cmd/scheduler.py
+++ b/zuul/cmd/scheduler.py
@@ -102,7 +102,22 @@
                 host = self.config.get('gearman_server', 'listen_address')
             else:
                 host = None
+            if self.config.has_option('gearman_server', 'ssl_key'):
+                ssl_key = self.config.get('gearman_server', 'ssl_key')
+            else:
+                ssl_key = None
+            if self.config.has_option('gearman_server', 'ssl_cert'):
+                ssl_cert = self.config.get('gearman_server', 'ssl_cert')
+            else:
+                ssl_cert = None
+            if self.config.has_option('gearman_server', 'ssl_ca'):
+                ssl_ca = self.config.get('gearman_server', 'ssl_ca')
+            else:
+                ssl_ca = None
             zuul.lib.gearserver.GearServer(4730,
+                                           ssl_key=ssl_key,
+                                           ssl_cert=ssl_cert,
+                                           ssl_ca=ssl_ca,
                                            host=host,
                                            statsd_host=statsd_host,
                                            statsd_port=statsd_port,
diff --git a/zuul/configloader.py b/zuul/configloader.py
index f78e8a4..5e0fe65 100644
--- a/zuul/configloader.py
+++ b/zuul/configloader.py
@@ -50,17 +50,37 @@
 class NodeFromGroupNotFoundError(Exception):
     def __init__(self, nodeset, node, group):
         message = textwrap.dedent("""\
-        In nodeset {nodeset} the group {group} contains a
-        node named {node} which is not defined in the nodeset.""")
+        In nodeset "{nodeset}" the group "{group}" contains a
+        node named "{node}" which is not defined in the nodeset.""")
         message = textwrap.fill(message.format(nodeset=nodeset,
                                                node=node, group=group))
         super(NodeFromGroupNotFoundError, self).__init__(message)
 
 
+class DuplicateNodeError(Exception):
+    def __init__(self, nodeset, node):
+        message = textwrap.dedent("""\
+        In nodeset "{nodeset}" the node "{node}" appears multiple times.
+        Node names must be unique within a nodeset.""")
+        message = textwrap.fill(message.format(nodeset=nodeset,
+                                               node=node))
+        super(DuplicateNodeError, self).__init__(message)
+
+
+class DuplicateGroupError(Exception):
+    def __init__(self, nodeset, group):
+        message = textwrap.dedent("""\
+        In nodeset "{nodeset}" the group "{group}" appears multiple times.
+        Group names must be unique within a nodeset.""")
+        message = textwrap.fill(message.format(nodeset=nodeset,
+                                               group=group))
+        super(DuplicateGroupError, self).__init__(message)
+
+
 class ProjectNotFoundError(Exception):
     def __init__(self, project):
         message = textwrap.dedent("""\
-        The project {project} was not found.  All projects
+        The project "{project}" was not found.  All projects
         referenced within a Zuul configuration must first be
         added to the main configuration file by the Zuul
         administrator.""")
@@ -194,16 +214,16 @@
     @staticmethod
     def getSchema():
         node = {vs.Required('name'): str,
-                vs.Required('image'): str,
+                vs.Required('label'): str,
                 }
 
         group = {vs.Required('name'): str,
-                 vs.Required('nodes'): [str]
+                 vs.Required('nodes'): to_list(str),
                  }
 
         nodeset = {vs.Required('name'): str,
-                   vs.Required('nodes'): [node],
-                   'groups': [group],
+                   vs.Required('nodes'): to_list(node),
+                   'groups': to_list(group),
                    '_source_context': model.SourceContext,
                    '_start_mark': yaml.Mark,
                    }
@@ -212,21 +232,26 @@
 
     @staticmethod
     def fromYaml(layout, conf):
-        with configuration_exceptions('nodeset', conf):
-            NodeSetParser.getSchema()(conf)
+        NodeSetParser.getSchema()(conf)
         ns = model.NodeSet(conf['name'])
-        node_names = []
+        node_names = set()
+        group_names = set()
         for conf_node in as_list(conf['nodes']):
-            node = model.Node(conf_node['name'], conf_node['image'])
+            if conf_node['name'] in node_names:
+                raise DuplicateNodeError(conf['name'], conf_node['name'])
+            node = model.Node(conf_node['name'], conf_node['label'])
             ns.addNode(node)
-            node_names.append(conf_node['name'])
+            node_names.add(conf_node['name'])
         for conf_group in as_list(conf.get('groups', [])):
-            for node_name in conf_group['nodes']:
+            for node_name in as_list(conf_group['nodes']):
                 if node_name not in node_names:
                     raise NodeFromGroupNotFoundError(conf['name'], node_name,
                                                      conf_group['name'])
+            if conf_group['name'] in group_names:
+                raise DuplicateGroupError(conf['name'], conf_group['name'])
             group = model.Group(conf_group['name'], conf_group['nodes'])
             ns.addGroup(group)
+            group_names.add(conf_group['name'])
         return ns
 
 
@@ -260,7 +285,7 @@
                 }
 
         node = {vs.Required('name'): str,
-                vs.Required('image'): str,
+                vs.Required('label'): str,
                 }
 
         zuul_role = {vs.Required('zuul'): str,
@@ -302,6 +327,7 @@
                'dependencies': to_list(str),
                'allowed-projects': to_list(str),
                'override-branch': str,
+               'description': str,
                }
 
         return vs.Schema(job)
@@ -407,7 +433,7 @@
             else:
                 ns = model.NodeSet()
                 for conf_node in conf_nodes:
-                    node = model.Node(conf_node['name'], conf_node['image'])
+                    node = model.Node(conf_node['name'], conf_node['label'])
                     ns.addNode(node)
             job.nodeset = ns
 
@@ -859,8 +885,28 @@
 class TenantParser(object):
     log = logging.getLogger("zuul.TenantParser")
 
-    tenant_source = vs.Schema({'config-projects': [str],
-                               'untrusted-projects': [str]})
+    classes = vs.Any('pipeline', 'job', 'semaphore', 'project',
+                     'project-template', 'nodeset', 'secret')
+
+    project_dict = {str: {
+        'include': to_list(classes),
+        'exclude': to_list(classes),
+    }}
+
+    project = vs.Any(str, project_dict)
+
+    group = {
+        'include': to_list(classes),
+        'exclude': to_list(classes),
+        vs.Required('projects'): to_list(project),
+    }
+
+    project_or_group = vs.Any(project, group)
+
+    tenant_source = vs.Schema({
+        'config-projects': to_list(project_or_group),
+        'untrusted-projects': to_list(project_or_group),
+    })
 
     @staticmethod
     def validateTenantSources(connections):
@@ -960,24 +1006,84 @@
                 encryption.deserialize_rsa_keypair(f.read())
 
     @staticmethod
+    def _getProject(source, conf, current_include):
+        if isinstance(conf, six.string_types):
+            # Return a project object whether conf is a dict or a str
+            project = source.getProject(conf)
+            project_include = current_include
+        else:
+            project_name = list(conf.keys())[0]
+            project = source.getProject(project_name)
+
+            project_include = frozenset(
+                as_list(conf[project_name].get('include', [])))
+            if not project_include:
+                project_include = current_include
+            project_exclude = frozenset(
+                as_list(conf[project_name].get('exclude', [])))
+            if project_exclude:
+                project_include = frozenset(project_include - project_exclude)
+
+        project.load_classes = frozenset(project_include)
+        return project
+
+    @staticmethod
+    def _getProjects(source, conf, current_include):
+        # Return a project object whether conf is a dict or a str
+        projects = []
+        if isinstance(conf, six.string_types):
+            # A simple project name string
+            projects.append(TenantParser._getProject(
+                source, conf, current_include))
+        elif len(conf.keys()) > 1 and 'projects' in conf:
+            # This is a project group
+            if 'include' in conf:
+                current_include = set(as_list(conf['include']))
+            else:
+                current_include = current_include.copy()
+            if 'exclude' in conf:
+                exclude = set(as_list(conf['exclude']))
+                current_include = current_include - exclude
+            for project in conf['projects']:
+                sub_projects = TenantParser._getProjects(source, project,
+                                                         current_include)
+                projects.extend(sub_projects)
+        elif len(conf.keys()) == 1:
+            # A project with overrides
+            projects.append(TenantParser._getProject(
+                source, conf, current_include))
+        else:
+            raise Exception("Unable to parse project %s" % (conf,))
+        return projects
+
+    @staticmethod
     def _loadTenantProjects(project_key_dir, connections, conf_tenant):
         config_projects = []
         untrusted_projects = []
 
+        default_include = frozenset(['pipeline', 'job', 'semaphore', 'project',
+                                     'secret', 'project-template', 'nodeset'])
+
         for source_name, conf_source in conf_tenant.get('source', {}).items():
             source = connections.getSource(source_name)
 
+            current_include = default_include
             for conf_repo in conf_source.get('config-projects', []):
-                project = source.getProject(conf_repo)
-                TenantParser._loadProjectKeys(
-                    project_key_dir, source_name, project)
-                config_projects.append(project)
+                projects = TenantParser._getProjects(source, conf_repo,
+                                                     current_include)
+                for project in projects:
+                    TenantParser._loadProjectKeys(
+                        project_key_dir, source_name, project)
+                    config_projects.append(project)
 
+            current_include = frozenset(default_include - set(['pipeline']))
             for conf_repo in conf_source.get('untrusted-projects', []):
-                project = source.getProject(conf_repo)
-                TenantParser._loadProjectKeys(
-                    project_key_dir, source_name, project)
-                untrusted_projects.append(project)
+                projects = TenantParser._getProjects(source, conf_repo,
+                                                     current_include)
+                for project in projects:
+                    TenantParser._loadProjectKeys(
+                        project_key_dir, source_name, project)
+                    untrusted_projects.append(project)
 
         return config_projects, untrusted_projects
 
@@ -1090,34 +1196,80 @@
         return config
 
     @staticmethod
-    def _parseLayout(base, tenant, data, scheduler, connections):
-        layout = model.Layout()
-
-        for config_pipeline in data.pipelines:
-            layout.addPipeline(PipelineParser.fromYaml(layout, connections,
-                                                       scheduler,
-                                                       config_pipeline))
+    def _parseLayoutItems(layout, tenant, data, scheduler, connections,
+                          skip_pipelines=False, skip_semaphores=False):
+        if not skip_pipelines:
+            for config_pipeline in data.pipelines:
+                classes = config_pipeline['_source_context'].\
+                    project.load_classes
+                if 'pipeline' not in classes:
+                    continue
+                layout.addPipeline(PipelineParser.fromYaml(
+                    layout, connections,
+                    scheduler, config_pipeline))
 
         for config_nodeset in data.nodesets:
-            layout.addNodeSet(NodeSetParser.fromYaml(layout, config_nodeset))
+            classes = config_nodeset['_source_context'].project.load_classes
+            if 'nodeset' not in classes:
+                continue
+            with configuration_exceptions('nodeset', config_nodeset):
+                layout.addNodeSet(NodeSetParser.fromYaml(
+                    layout, config_nodeset))
 
         for config_secret in data.secrets:
+            classes = config_secret['_source_context'].project.load_classes
+            if 'secret' not in classes:
+                continue
             layout.addSecret(SecretParser.fromYaml(layout, config_secret))
 
         for config_job in data.jobs:
+            classes = config_job['_source_context'].project.load_classes
+            if 'job' not in classes:
+                continue
             with configuration_exceptions('job', config_job):
-                layout.addJob(JobParser.fromYaml(tenant, layout, config_job))
+                job = JobParser.fromYaml(tenant, layout, config_job)
+                layout.addJob(job)
 
-        for config_semaphore in data.semaphores:
-            layout.addSemaphore(SemaphoreParser.fromYaml(config_semaphore))
+        if not skip_semaphores:
+            for config_semaphore in data.semaphores:
+                classes = config_semaphore['_source_context'].\
+                    project.load_classes
+                if 'semaphore' not in classes:
+                    continue
+                layout.addSemaphore(SemaphoreParser.fromYaml(config_semaphore))
 
         for config_template in data.project_templates:
+            classes = config_template['_source_context'].project.load_classes
+            if 'project-template' not in classes:
+                continue
             layout.addProjectTemplate(ProjectTemplateParser.fromYaml(
                 tenant, layout, config_template))
 
-        for config_project in data.projects.values():
+        for config_projects in data.projects.values():
+            # Unlike other config classes, we expect multiple project
+            # stanzas with the same name, so that a config repo can
+            # define a project-pipeline and the project itself can
+            # augment it.  To that end, config_project is a list of
+            # each of the project stanzas.  Each one may be (should
+            # be!) from a different repo, so filter them according to
+            # the include/exclude rules before parsing them.
+            filtered_projects = [
+                p for p in config_projects if
+                'project' in p['_source_context'].project.load_classes
+            ]
+
+            if not filtered_projects:
+                continue
+
             layout.addProjectConfig(ProjectParser.fromYaml(
-                tenant, layout, config_project))
+                tenant, layout, filtered_projects))
+
+    @staticmethod
+    def _parseLayout(base, tenant, data, scheduler, connections):
+        layout = model.Layout()
+
+        TenantParser._parseLayoutItems(layout, tenant, data,
+                                       scheduler, connections)
 
         layout.tenant = tenant
 
@@ -1228,21 +1380,8 @@
         # configuration changes.
         layout.semaphores = tenant.layout.semaphores
 
-        for config_nodeset in config.nodesets:
-            layout.addNodeSet(NodeSetParser.fromYaml(layout, config_nodeset))
+        TenantParser._parseLayoutItems(layout, tenant, config, None, None,
+                                       skip_pipelines=True,
+                                       skip_semaphores=True)
 
-        for config_secret in config.secrets:
-            layout.addSecret(SecretParser.fromYaml(layout, config_secret))
-
-        for config_job in config.jobs:
-            with configuration_exceptions('job', config_job):
-                layout.addJob(JobParser.fromYaml(tenant, layout, config_job))
-
-        for config_template in config.project_templates:
-            layout.addProjectTemplate(ProjectTemplateParser.fromYaml(
-                tenant, layout, config_template))
-
-        for config_project in config.projects.values():
-            layout.addProjectConfig(ProjectParser.fromYaml(
-                tenant, layout, config_project))
         return layout
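
A small sketch of the include/exclude arithmetic behind a project's load_classes; effective_classes is an illustrative name, and the class list matches the default_include set above:

    DEFAULT_INCLUDE = frozenset(['pipeline', 'job', 'semaphore', 'project',
                                 'secret', 'project-template', 'nodeset'])

    def effective_classes(include=None, exclude=None, current=DEFAULT_INCLUDE):
        classes = frozenset(include) if include else frozenset(current)
        if exclude:
            classes = classes - frozenset(exclude)
        return classes

    # Untrusted projects start without 'pipeline'; an explicit exclude of
    # 'job' then removes job definitions as well.
    untrusted = DEFAULT_INCLUDE - frozenset(['pipeline'])
    print(sorted(effective_classes(exclude=['job'], current=untrusted)))
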
diff --git a/zuul/driver/bubblewrap/__init__.py b/zuul/driver/bubblewrap/__init__.py
index c93e912..7c9d48d 100644
--- a/zuul/driver/bubblewrap/__init__.py
+++ b/zuul/driver/bubblewrap/__init__.py
@@ -152,6 +152,8 @@
 
 
 def main(args=None):
+    logging.basicConfig(level=logging.DEBUG)
+
     driver = BubblewrapDriver()
 
     parser = argparse.ArgumentParser()
diff --git a/zuul/driver/github/githubconnection.py b/zuul/driver/github/githubconnection.py
index e294b15..f9f1c27 100644
--- a/zuul/driver/github/githubconnection.py
+++ b/zuul/driver/github/githubconnection.py
@@ -253,7 +253,7 @@
             raise webob.exc.HTTPUnauthorized(
                 'Please specify a X-Hub-Signature header with secret.')
 
-        payload_signature = 'sha1=' + hmac.new(secret,
+        payload_signature = 'sha1=' + hmac.new(secret.encode('utf-8'),
                                                body,
                                                hashlib.sha1).hexdigest()
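
A sketch of the signature check with the secret encoded to bytes; signature_valid and the sample payload are illustrative, and hmac.compare_digest is shown here only as a constant-time way to compare digests:

    import hashlib
    import hmac

    def signature_valid(secret, body, header_signature):
        computed = 'sha1=' + hmac.new(secret.encode('utf-8'),
                                      body,
                                      hashlib.sha1).hexdigest()
        return hmac.compare_digest(computed, header_signature)

    body = b'{"action": "opened"}'
    sig = 'sha1=' + hmac.new(b'webhook-secret', body, hashlib.sha1).hexdigest()
    print(signature_valid('webhook-secret', body, sig))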
 
diff --git a/zuul/executor/client.py b/zuul/executor/client.py
index 52074a1..6ecb27c 100644
--- a/zuul/executor/client.py
+++ b/zuul/executor/client.py
@@ -119,9 +119,21 @@
             port = config.get('gearman', 'port')
         else:
             port = 4730
+        if self.config.has_option('gearman', 'ssl_key'):
+            ssl_key = self.config.get('gearman', 'ssl_key')
+        else:
+            ssl_key = None
+        if self.config.has_option('gearman', 'ssl_cert'):
+            ssl_cert = self.config.get('gearman', 'ssl_cert')
+        else:
+            ssl_cert = None
+        if self.config.has_option('gearman', 'ssl_ca'):
+            ssl_ca = self.config.get('gearman', 'ssl_ca')
+        else:
+            ssl_ca = None
 
         self.gearman = ZuulGearmanClient(self)
-        self.gearman.addServer(server, port)
+        self.gearman.addServer(server, port, ssl_key, ssl_cert, ssl_ca)
 
         self.cleanup_thread = GearmanCleanup(self)
         self.cleanup_thread.start()
@@ -279,7 +291,7 @@
         nodeset = item.current_build_set.getJobNodeSet(job.name)
         nodes = []
         for node in nodeset.getNodes():
-            nodes.append(dict(name=node.name, image=node.image,
+            nodes.append(dict(name=node.name, label=node.label,
                               az=node.az,
                               host_keys=node.host_keys,
                               provider=node.provider,
diff --git a/zuul/executor/server.py b/zuul/executor/server.py
index f44fd50..bf7c0fd 100644
--- a/zuul/executor/server.py
+++ b/zuul/executor/server.py
@@ -363,6 +363,7 @@
         self.hostname = socket.gethostname()
         self.zuul_url = config.get('merger', 'zuul_url')
         self.merger_lock = threading.Lock()
+        self.verbose = False
         self.command_map = dict(
             stop=self.stop,
             pause=self.pause,
@@ -458,10 +459,22 @@
             port = self.config.get('gearman', 'port')
         else:
             port = 4730
+        if self.config.has_option('gearman', 'ssl_key'):
+            ssl_key = self.config.get('gearman', 'ssl_key')
+        else:
+            ssl_key = None
+        if self.config.has_option('gearman', 'ssl_cert'):
+            ssl_cert = self.config.get('gearman', 'ssl_cert')
+        else:
+            ssl_cert = None
+        if self.config.has_option('gearman', 'ssl_ca'):
+            ssl_ca = self.config.get('gearman', 'ssl_ca')
+        else:
+            ssl_ca = None
         self.merger_worker = ExecutorMergeWorker(self, 'Zuul Executor Merger')
-        self.merger_worker.addServer(server, port)
+        self.merger_worker.addServer(server, port, ssl_key, ssl_cert, ssl_ca)
         self.executor_worker = gear.TextWorker('Zuul Executor Server')
-        self.executor_worker.addServer(server, port)
+        self.executor_worker.addServer(server, port, ssl_key, ssl_cert, ssl_ca)
         self.log.debug("Waiting for server")
         self.merger_worker.waitForServer()
         self.executor_worker.waitForServer()
@@ -522,12 +535,10 @@
         pass
 
     def verboseOn(self):
-        # TODOv3: implement
-        pass
+        self.verbose = True
 
     def verboseOff(self):
-        # TODOv3: implement
-        pass
+        self.verbose = False
 
     def join(self):
         self.update_thread.join()
@@ -904,9 +915,10 @@
             host_vars = dict(
                 ansible_host=ip,
                 ansible_user=self.executor_server.default_username,
-                nodepool_az=node.get('az'),
-                nodepool_provider=node.get('provider'),
-                nodepool_region=node.get('region'))
+                nodepool=dict(
+                    az=node.get('az'),
+                    provider=node.get('provider'),
+                    region=node.get('region')))
 
             host_keys = []
             for key in node.get('host_keys'):
@@ -1308,7 +1320,7 @@
         env_copy = os.environ.copy()
         env_copy['LOGNAME'] = 'zuul'
 
-        if False:  # TODOv3: self.options['verbose']:
+        if self.executor_server.verbose:
             verbose = '-vvv'
         else:
             verbose = '-v'
diff --git a/zuul/lib/log_streamer.py b/zuul/lib/log_streamer.py
index e00a89b..6695723 100644
--- a/zuul/lib/log_streamer.py
+++ b/zuul/lib/log_streamer.py
@@ -95,7 +95,7 @@
             return
 
         # check if log file exists
-        log_file = os.path.join(job_dir, 'ansible', 'job-output.txt')
+        log_file = os.path.join(job_dir, 'work', 'logs', 'job-output.txt')
         if not os.path.exists(log_file):
             msg = 'Log not found for build ID %s' % build_uuid
             self.request.sendall(msg.encode("utf-8"))
@@ -144,6 +144,11 @@
                 else:
                     break
 
+            # See if the file has been removed, meaning we should stop
+            # streaming it.
+            if not os.path.exists(log.path):
+                return False
+
             # At this point, we are waiting for more data to be written
             time.sleep(0.5)
 
@@ -159,16 +164,6 @@
                 if not ret:
                     return False
 
-            # See if the file has been truncated
-            try:
-                st = os.stat(log.path)
-                if (st.st_ino != log.stat.st_ino or
-                    st.st_size < log.size):
-                    return True
-            except Exception:
-                return True
-            log.size = st.st_size
-
 
 class CustomForkingTCPServer(ss.ForkingTCPServer):
     '''
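
A toy follow loop illustrating the new stop condition: streaming ends when the log file is removed rather than when it is truncated. follow is an illustrative helper, not the streamer's actual reader:

    import os
    import time

    def follow(path):
        # Yield lines as they are appended; stop once the file is removed.
        with open(path) as f:
            while True:
                line = f.readline()
                if line:
                    yield line
                    continue
                if not os.path.exists(path):
                    return
                time.sleep(0.5)
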
diff --git a/zuul/merger/client.py b/zuul/merger/client.py
index c98f20e..4054df6 100644
--- a/zuul/merger/client.py
+++ b/zuul/merger/client.py
@@ -79,9 +79,21 @@
             port = self.config.get('gearman', 'port')
         else:
             port = 4730
+        if self.config.has_option('gearman', 'ssl_key'):
+            ssl_key = self.config.get('gearman', 'ssl_key')
+        else:
+            ssl_key = None
+        if self.config.has_option('gearman', 'ssl_cert'):
+            ssl_cert = self.config.get('gearman', 'ssl_cert')
+        else:
+            ssl_cert = None
+        if self.config.has_option('gearman', 'ssl_ca'):
+            ssl_ca = self.config.get('gearman', 'ssl_ca')
+        else:
+            ssl_ca = None
         self.log.debug("Connecting to gearman at %s:%s" % (server, port))
         self.gearman = MergeGearmanClient(self)
-        self.gearman.addServer(server, port)
+        self.gearman.addServer(server, port, ssl_key, ssl_cert, ssl_ca)
         self.log.debug("Waiting for gearman")
         self.gearman.waitForServer()
         self.jobs = set()
diff --git a/zuul/merger/merger.py b/zuul/merger/merger.py
index 6cfd904..2ac0de8 100644
--- a/zuul/merger/merger.py
+++ b/zuul/merger/merger.py
@@ -42,12 +42,16 @@
 
 
 class Repo(object):
-    def __init__(self, remote, local, email, username,
+    def __init__(self, remote, local, email, username, sshkey=None,
                  cache_path=None, logger=None):
         if logger is None:
             self.log = logging.getLogger("zuul.Repo")
         else:
             self.log = logger
+        if sshkey:
+            self.env = {'GIT_SSH_COMMAND': 'ssh -i %s' % (sshkey,)}
+        else:
+            self.env = {}
         self.remote_url = remote
         self.local_path = local
         self.email = email
@@ -69,11 +73,14 @@
             self.log.debug("Cloning from %s to %s" % (self.remote_url,
                                                       self.local_path))
             if self.cache_path:
-                git.Repo.clone_from(self.cache_path, self.local_path)
+                git.Repo.clone_from(self.cache_path, self.local_path,
+                                    env=self.env)
                 rewrite_url = True
             else:
-                git.Repo.clone_from(self.remote_url, self.local_path)
+                git.Repo.clone_from(self.remote_url, self.local_path,
+                                    env=self.env)
         repo = git.Repo(self.local_path)
+        repo.git.update_environment(**self.env)
         # Create local branches corresponding to all the remote branches
         if not repo_is_cloned:
             origin = repo.remotes.origin
@@ -98,6 +105,7 @@
     def createRepoObject(self):
         self._ensure_cloned()
         repo = git.Repo(self.local_path)
+        repo.git.update_environment(**self.env)
         return repo
 
     def reset(self):
@@ -282,23 +290,7 @@
         self.username = username
         self.cache_root = cache_root
 
-    def _get_ssh_cmd(self, connection_name):
-        sshkey = self.connections.connections.get(connection_name).\
-            connection_config.get('sshkey')
-        if sshkey:
-            return 'ssh -i %s' % sshkey
-        else:
-            return None
-
-    def _setGitSsh(self, connection_name):
-        wrapper_name = '.ssh_wrapper_%s' % connection_name
-        name = os.path.join(self.working_root, wrapper_name)
-        if os.path.isfile(name):
-            os.environ['GIT_SSH'] = name
-        elif 'GIT_SSH' in os.environ:
-            del os.environ['GIT_SSH']
-
-    def _addProject(self, hostname, project_name, url):
+    def _addProject(self, hostname, project_name, url, sshkey):
         repo = None
         key = '/'.join([hostname, project_name])
         try:
@@ -308,8 +300,8 @@
                                           project_name)
             else:
                 cache_path = None
-            repo = Repo(url, path, self.email, self.username, cache_path,
-                        self.logger)
+            repo = Repo(url, path, self.email, self.username,
+                        sshkey, cache_path, self.logger)
 
             self.repos[key] = repo
         except Exception:
@@ -325,11 +317,13 @@
         key = '/'.join([hostname, project_name])
         if key in self.repos:
             return self.repos[key]
+        sshkey = self.connections.connections.get(connection_name).\
+            connection_config.get('sshkey')
         if not url:
             raise Exception("Unable to set up repo for project %s/%s"
                             " without a url" %
                             (connection_name, project_name,))
-        return self._addProject(hostname, project_name, url)
+        return self._addProject(hostname, project_name, url, sshkey)
 
     def updateRepo(self, connection_name, project_name):
         # TODOv3(jhesketh): Reimplement
@@ -437,28 +431,26 @@
         else:
             self.log.debug("Found base commit %s for %s" % (base, key,))
         # Merge the change
-        with repo.createRepoObject().git.custom_environment(
-            GIT_SSH_COMMAND=self._get_ssh_cmd(item['connection'])):
-            commit = self._mergeChange(item, base)
-            if not commit:
+        commit = self._mergeChange(item, base)
+        if not commit:
+            return None
+        # Store this commit as the most recent for this project-branch
+        recent[key] = commit
+        # Set the Zuul ref for this item to point to the most recent
+        # commits of each project-branch
+        for key, mrc in recent.items():
+            connection, project, branch = key
+            zuul_ref = None
+            try:
+                repo = self.getRepo(connection, project)
+                zuul_ref = branch + '/' + item['ref']
+                if not repo.getCommitFromRef(zuul_ref):
+                    repo.createZuulRef(zuul_ref, mrc)
+            except Exception:
+                self.log.exception("Unable to set zuul ref %s for "
+                                   "item %s" % (zuul_ref, item))
                 return None
-            # Store this commit as the most recent for this project-branch
-            recent[key] = commit
-            # Set the Zuul ref for this item to point to the most recent
-            # commits of each project-branch
-            for key, mrc in recent.items():
-                connection, project, branch = key
-                zuul_ref = None
-                try:
-                    repo = self.getRepo(connection, project)
-                    zuul_ref = branch + '/' + item['ref']
-                    if not repo.getCommitFromRef(zuul_ref):
-                        repo.createZuulRef(zuul_ref, mrc)
-                except Exception:
-                    self.log.exception("Unable to set zuul ref %s for "
-                                       "item %s" % (zuul_ref, item))
-                    return None
-            return commit
+        return commit
 
     def mergeChanges(self, items, files=None, repo_state=None):
         # connection+project+branch -> commit
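
Note: the refactor above replaces the GIT_SSH wrapper file and the custom_environment()/os.environ handling with a per-Repo GIT_SSH_COMMAND environment that GitPython applies to each git invocation. A minimal sketch of the same mechanism outside Zuul (the URL and key path are illustrative):

    import git

    env = {'GIT_SSH_COMMAND': 'ssh -i /var/lib/zuul/ssh/id_rsa'}

    # clone_from() accepts an env mapping for the underlying git process
    repo = git.Repo.clone_from('ssh://review.example.org/org/project',
                               '/var/lib/zuul/git/org/project', env=env)

    # later operations on an existing checkout reuse the same environment
    repo.git.update_environment(**env)
    repo.remotes.origin.fetch()

Keeping the environment on the Repo object means repos from connections with different ssh keys can coexist, instead of relying on a single process-wide GIT_SSH setting.
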
diff --git a/zuul/merger/server.py b/zuul/merger/server.py
index 1a32f96..7d7e771 100644
--- a/zuul/merger/server.py
+++ b/zuul/merger/server.py
@@ -54,8 +54,20 @@
             port = self.config.get('gearman', 'port')
         else:
             port = 4730
+        if self.config.has_option('gearman', 'ssl_key'):
+            ssl_key = self.config.get('gearman', 'ssl_key')
+        else:
+            ssl_key = None
+        if self.config.has_option('gearman', 'ssl_cert'):
+            ssl_cert = self.config.get('gearman', 'ssl_cert')
+        else:
+            ssl_cert = None
+        if self.config.has_option('gearman', 'ssl_ca'):
+            ssl_ca = self.config.get('gearman', 'ssl_ca')
+        else:
+            ssl_ca = None
         self.worker = gear.TextWorker('Zuul Merger')
-        self.worker.addServer(server, port)
+        self.worker.addServer(server, port, ssl_key, ssl_cert, ssl_ca)
         self.log.debug("Waiting for server")
         self.worker.waitForServer()
         self.log.debug("Registering")
diff --git a/zuul/model.py b/zuul/model.py
index e504dca..5eedc75 100644
--- a/zuul/model.py
+++ b/zuul/model.py
@@ -336,6 +336,9 @@
         self.foreign = foreign
         self.unparsed_config = None
         self.unparsed_branch_config = {}  # branch -> UnparsedTenantConfig
+        # Configuration object classes to include or exclude when
+        # loading zuul config files.
+        self.load_classes = frozenset()
 
     def __str__(self):
         return self.name
@@ -351,9 +354,9 @@
     provided by Nodepool.
     """
 
-    def __init__(self, name, image):
+    def __init__(self, name, label):
         self.name = name
-        self.image = image
+        self.label = label
         self.id = None
         self.lock = None
         # Attributes from Nodepool
@@ -380,7 +383,7 @@
         self.state_time = time.time()
 
     def __repr__(self):
-        return '<Node %s %s:%s>' % (self.id, self.name, self.image)
+        return '<Node %s %s:%s>' % (self.id, self.name, self.label)
 
     def __ne__(self, other):
         return not self.__eq__(other)
@@ -389,7 +392,7 @@
         if not isinstance(other, Node):
             return False
         return (self.name == other.name and
-                self.image == other.image and
+                self.label == other.label and
                 self.id == other.id)
 
     def toDict(self):
@@ -468,7 +471,7 @@
     def copy(self):
         n = NodeSet(self.name)
         for name, node in self.nodes.items():
-            n.addNode(Node(node.name, node.image))
+            n.addNode(Node(node.name, node.label))
         for name, group in self.groups.items():
             n.addGroup(Group(group.name, group.nodes[:]))
         return n
@@ -535,7 +538,7 @@
 
     def toDict(self):
         d = {}
-        nodes = [n.image for n in self.nodeset.getNodes()]
+        nodes = [n.label for n in self.nodeset.getNodes()]
         d['node_types'] = nodes
         d['requestor'] = self.requestor
         d['state'] = self.state
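
Note: the model.py hunks are essentially a rename, a Node is now described by the Nodepool label it requests rather than an image name, plus a new load_classes attribute that, per its comment, records which configuration object classes to include or exclude when loading zuul config files. A toy illustration of the renamed attribute (the label value is made up):

    nodeset = NodeSet('single-node')
    nodeset.addNode(Node('controller', 'ubuntu-xenial'))

    for node in nodeset.getNodes():
        print(node.label)   # was node.image before this change
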
diff --git a/zuul/rpcclient.py b/zuul/rpcclient.py
index d980992..6f0d34b 100644
--- a/zuul/rpcclient.py
+++ b/zuul/rpcclient.py
@@ -26,10 +26,10 @@
 class RPCClient(object):
     log = logging.getLogger("zuul.RPCClient")
 
-    def __init__(self, server, port):
+    def __init__(self, server, port, ssl_key=None, ssl_cert=None, ssl_ca=None):
         self.log.debug("Connecting to gearman at %s:%s" % (server, port))
         self.gearman = gear.Client()
-        self.gearman.addServer(server, port)
+        self.gearman.addServer(server, port, ssl_key, ssl_cert, ssl_ca)
         self.log.debug("Waiting for gearman")
         self.gearman.waitForServer()
 
diff --git a/zuul/rpclistener.py b/zuul/rpclistener.py
index 6508e84..0079ab8 100644
--- a/zuul/rpclistener.py
+++ b/zuul/rpclistener.py
@@ -38,9 +38,23 @@
             port = self.config.get('gearman', 'port')
         else:
             port = 4730
+        if self.config.has_option('gearman', 'ssl_key'):
+            ssl_key = self.config.get('gearman', 'ssl_key')
+        else:
+            ssl_key = None
+        if self.config.has_option('gearman', 'ssl_cert'):
+            ssl_cert = self.config.get('gearman', 'ssl_cert')
+        else:
+            ssl_cert = None
+        if self.config.has_option('gearman', 'ssl_ca'):
+            ssl_ca = self.config.get('gearman', 'ssl_ca')
+        else:
+            ssl_ca = None
         self.worker = gear.TextWorker('Zuul RPC Listener')
-        self.worker.addServer(server, port)
+        self.worker.addServer(server, port, ssl_key, ssl_cert, ssl_ca)
+        self.log.debug("Waiting for server")
         self.worker.waitForServer()
+        self.log.debug("Registering")
         self.register()
         self.thread = threading.Thread(target=self.run)
         self.thread.daemon = True
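
Note: RPCClient above and the RPC listener here keep working without TLS because the new arguments default to None; deployments that set the [gearman] ssl options get them threaded through to every addServer() call. Illustrative usage of the updated RPCClient (paths made up):

    # plaintext, as before
    client = RPCClient('127.0.0.1', 4730)

    # with the TLS material configured in zuul.conf
    client = RPCClient('gearman.example.org', 4730,
                       ssl_key='/etc/zuul/ssl/client.key',
                       ssl_cert='/etc/zuul/ssl/client.pem',
                       ssl_ca='/etc/zuul/ssl/ca.pem')
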