Merge "Fail early if people attempt to add zuul vars or secrets" into feature/zuulv3
diff --git a/doc/source/admin/components.rst b/doc/source/admin/components.rst
index 2c70d47..fbb8cbc 100644
--- a/doc/source/admin/components.rst
+++ b/doc/source/admin/components.rst
@@ -149,6 +149,11 @@
 
       IP address or domain name on which to listen.
 
+   .. attr:: port
+      :default: 4730
+
+      TCP port on which to listen.
+
    .. attr:: log_config
 
       Path to log config file for internal Gearman server.
diff --git a/doc/source/user/config.rst b/doc/source/user/config.rst
index cad2167..973470d 100644
--- a/doc/source/user/config.rst
+++ b/doc/source/user/config.rst
@@ -755,7 +755,7 @@
 
    .. attr:: timeout
 
-      The time in minutes that the job should be allowed to run before
+      The time in seconds that the job should be allowed to run before
       it is automatically aborted and failure is reported.  If no
       timeout is supplied, the job may run indefinitely.  Supplying a
       timeout is highly recommended.
diff --git a/doc/source/user/jobs.rst b/doc/source/user/jobs.rst
index 4e1880a..4c8b784 100644
--- a/doc/source/user/jobs.rst
+++ b/doc/source/user/jobs.rst
@@ -207,10 +207,42 @@
 
       .. var:: src_dir
 
-         The path to the source code on the remote host, relative
-         to the home dir of the remote user.
-         E.g., `src/git.example.com/org/project`.
+         The path to the source code relative to the work dir.  E.g.,
+         `src/git.example.com/org/project`.
 
+   .. var:: projects
+      :type: list
+
+      A list of all projects prepared by Zuul for the item.  It
+      includes, at least, the item's own project.  It also includes
+      the projects of any items this item depends on, as well as the
+      projects that appear in :attr:`job.required-projects`.
+
+      This is a list of dictionaries, with each element consisting of:
+
+      .. var:: name
+
+         The name of the project, excluding hostname.  E.g., `org/project`.
+
+      .. var:: short_name
+
+         The name of the project, excluding directories or
+         organizations.  E.g., `project`.
+
+      .. var:: canonical_hostname
+
+         The canonical hostname where the project lives.  E.g.,
+         `git.example.com`.
+
+      .. var:: canonical_name
+
+         The full canonical name of the project including hostname.
+         E.g., `git.example.com/org/project`.
+
+      .. var:: src_dir
+
+         The path to the source code, relative to the work dir.  E.g.,
+         `src/git.example.com/org/project`.
 
    .. var:: tenant
 
diff --git a/etc/zuul.conf-sample b/etc/zuul.conf-sample
index 6e79f9b..ba7aace 100644
--- a/etc/zuul.conf-sample
+++ b/etc/zuul.conf-sample
@@ -1,5 +1,6 @@
 [gearman]
 server=127.0.0.1
+;port=4730
 ;ssl_ca=/path/to/ca.pem
 ;ssl_cert=/path/to/client.pem
 ;ssl_key=/path/to/client.key
@@ -12,6 +13,7 @@
 ;ssl_ca=/path/to/ca.pem
 ;ssl_cert=/path/to/server.pem
 ;ssl_key=/path/to/server.key
+;port=4730
 
 [scheduler]
 tenant_config=/etc/zuul/main.yaml
diff --git a/requirements.txt b/requirements.txt
index 69509d0..eadb508 100644
--- a/requirements.txt
+++ b/requirements.txt
@@ -16,7 +16,7 @@
 apscheduler>=3.0
 PrettyTable>=0.6,<0.8
 babel>=1.0
-ansible>=2.3.0.0
+ansible>=2.3.0.0,<2.4
 kazoo
 sqlalchemy
 alembic
diff --git a/tests/base.py b/tests/base.py
index 357dd7a..c49e1ce 100755
--- a/tests/base.py
+++ b/tests/base.py
@@ -967,6 +967,7 @@
         data = {
             'state': state,
             'sha': self.head_sha,
+            'name': self.project,
             'description': 'Test results for %s: %s' % (self.head_sha, state),
             'target_url': 'http://zuul/%s' % self.head_sha,
             'branches': [],
@@ -1088,8 +1089,9 @@
         }
         return data
 
-    def getPullBySha(self, sha):
-        prs = list(set([p for p in self.pull_requests if sha == p.head_sha]))
+    def getPullBySha(self, sha, project):
+        prs = list(set([p for p in self.pull_requests if
+                        sha == p.head_sha and project == p.project]))
         if len(prs) > 1:
             raise Exception('Multiple pulls found with head sha: %s' % sha)
         pr = prs[0]
diff --git a/zuul/ansible/callback/zuul_json.py b/zuul/ansible/callback/zuul_json.py
index 612a720..222302c 100644
--- a/zuul/ansible/callback/zuul_json.py
+++ b/zuul/ansible/callback/zuul_json.py
@@ -33,6 +33,8 @@
     # It's here in 2.3
     from ansible.vars.manager import strip_internal_keys
 
+from zuul.ansible import logconfig
+
 
 class CallbackModule(CallbackBase):
     CALLBACK_VERSION = 2.0
@@ -45,8 +47,12 @@
         self.results = []
         self.output = []
         self.playbook = {}
+        logging_config = logconfig.load_job_config(
+            os.environ['ZUUL_JOB_LOG_CONFIG'])
+
         self.output_path = os.path.splitext(
-            os.environ['ZUUL_JOB_OUTPUT_FILE'])[0] + '.json'
+            logging_config.job_output_file)[0] + '.json'
+
         # For now, just read in the old file and write it all out again
         # This may well not scale from a memory perspective- but let's see how
         # it goes.
diff --git a/zuul/ansible/callback/zuul_stream.py b/zuul/ansible/callback/zuul_stream.py
index 4cfd19e..19b1192 100644
--- a/zuul/ansible/callback/zuul_stream.py
+++ b/zuul/ansible/callback/zuul_stream.py
@@ -21,6 +21,7 @@
 
 import datetime
 import logging
+import logging.config
 import json
 import os
 import socket
@@ -30,6 +31,8 @@
 
 from ansible.plugins.callback import default
 
+from zuul.ansible import logconfig
+
 LOG_STREAM_PORT = 19885
 
 
@@ -108,14 +111,13 @@
         # ansible appends timestamp, user and pid to the log lines emitted
         # to the log file. We're doing other things though, so we don't want
         # this.
-        path = os.environ['ZUUL_JOB_OUTPUT_FILE']
+        logging_config = logconfig.load_job_config(
+            os.environ['ZUUL_JOB_LOG_CONFIG'])
+
         if self._display.verbosity > 2:
-            level = logging.DEBUG
-        else:
-            level = logging.INFO
-        logging.basicConfig(filename=path, level=level, format='%(message)s')
-        # Squelch logging from ara so we don't get the initializing message
-        logging.getLogger('ara').setLevel(logging.ERROR)
+            logging_config.setDebug()
+
+        logging_config.apply()
 
         self._logger = logging.getLogger('zuul.executor.ansible')
 
diff --git a/zuul/ansible/logconfig.py b/zuul/ansible/logconfig.py
new file mode 100644
index 0000000..b37912d
--- /dev/null
+++ b/zuul/ansible/logconfig.py
@@ -0,0 +1,237 @@
+# Copyright 2017 Red Hat, Inc.
+#
+# Licensed under the Apache License, Version 2.0 (the "License"); you may
+# not use this file except in compliance with the License. You may obtain
+# a copy of the License at
+#
+#      http://www.apache.org/licenses/LICENSE-2.0
+#
+# Unless required by applicable law or agreed to in writing, software
+# distributed under the License is distributed on an "AS IS" BASIS, WITHOUT
+# WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. See the
+# License for the specific language governing permissions and limitations
+# under the License.
+
+import abc
+import logging.config
+import json
+import os
+
+import yaml
+
+_DEFAULT_JOB_LOGGING_CONFIG = {
+    'version': 1,
+    'formatters': {
+        'plain': {'format': '%(message)s'},
+        'result': {'format': 'RESULT %(message)s'},
+    },
+    'handlers': {
+        'console': {
+            'class': 'logging.StreamHandler',
+            'stream': 'ext://sys.stdout',
+            'level': 'INFO',
+            'formatter': 'plain',
+        },
+        'result': {
+            # result is returned to subprocess stdout and is used to pass
+            # control information from the callback back to the executor
+            'class': 'logging.StreamHandler',
+            'stream': 'ext://sys.stdout',
+            'level': 'INFO',
+            'formatter': 'result',
+        },
+        'jobfile': {
+            # used by executor to emit log file
+            'class': 'logging.FileHandler',
+            'level': 'INFO',
+            'formatter': 'plain',
+        },
+    },
+    'loggers': {
+        'zuul.executor.ansible.result': {
+            'handlers': ['result'],
+            'level': 'INFO',
+        },
+        'zuul.executor.ansible': {
+            'handlers': ['jobfile'],
+            'level': 'INFO',
+        },
+        'sqlalchemy.engine': {
+            'handlers': ['console'],
+            'level': 'WARN',
+        },
+        'alembic': {
+            'handlers': ['console'],
+            'level': 'WARN',
+        },
+    },
+    'root': {'handlers': []},
+}
+
+_DEFAULT_SERVER_LOGGING_CONFIG = {
+    'version': 1,
+    'formatters': {
+        'simple': {
+            'format': '%(asctime)s %(levelname)s %(name)s: %(message)s'
+        },
+    },
+    'handlers': {
+        'console': {
+            # Used for printing to stdout
+            'class': 'logging.StreamHandler',
+            'stream': 'ext://sys.stdout',
+            'level': 'INFO',
+            'formatter': 'simple',
+        },
+    },
+    'loggers': {
+        'zuul': {
+            'handlers': ['console'],
+            'level': 'INFO',
+        },
+        'sqlalchemy.engine': {
+            'handlers': ['console'],
+            'level': 'WARN',
+        },
+        'gerrit': {
+            'handlers': ['console'],
+            'level': 'INFO',
+        },
+        'gear': {
+            'handlers': ['console'],
+            'level': 'WARN',
+        },
+        'alembic': {
+            'handlers': ['console'],
+            'level': 'WARN',
+        },
+    },
+    'root': {'handlers': []},
+}
+
+_DEFAULT_SERVER_FILE_HANDLERS = {
+    'normal': {
+        # Used for writing normal log files
+        'class': 'logging.handlers.WatchedFileHandler',
+        # This will get set to something real by DictLoggingConfig.server
+        'filename': '/var/log/zuul/{server}.log',
+        'level': 'INFO',
+        'formatter': 'simple',
+    },
+}
+
+
+def _read_config_file(filename: str):
+    if not os.path.exists(filename):
+        raise ValueError("Unable to read logging config file at %s" % filename)
+
+    if os.path.splitext(filename)[1] in ('.yml', '.yaml', '.json'):
+        return yaml.safe_load(open(filename, 'r'))
+    return filename
+
+
+def load_config(filename: str):
+    config = _read_config_file(filename)
+    if isinstance(config, dict):
+        return DictLoggingConfig(config)
+    return FileLoggingConfig(filename)
+
+
+def load_job_config(filename: str):
+    return JobLoggingConfig(_read_config_file(filename))
+
+
+class LoggingConfig(object, metaclass=abc.ABCMeta):
+
+    @abc.abstractmethod
+    def apply(self):
+        """Apply the config information to the current logging config."""
+
+
+class DictLoggingConfig(LoggingConfig, metaclass=abc.ABCMeta):
+
+    def __init__(self, config):
+        self._config = config
+
+    def apply(self):
+        logging.config.dictConfig(self._config)
+
+    def writeJson(self, filename: str):
+        open(filename, 'w').write(json.dumps(self._config, indent=2))
+
+
+class JobLoggingConfig(DictLoggingConfig):
+
+    def __init__(self, config=None, job_output_file=None):
+        if not config:
+            config = _DEFAULT_JOB_LOGGING_CONFIG.copy()
+        super(JobLoggingConfig, self).__init__(config=config)
+        if job_output_file:
+            self.job_output_file = job_output_file
+
+    def setDebug(self):
+        # Set the level for zuul.executor.ansible to DEBUG
+        self._config['loggers']['zuul.executor.ansible']['level'] = 'DEBUG'
+
+    @property
+    def job_output_file(self) -> str:
+        return self._config['handlers']['jobfile']['filename']
+
+    @job_output_file.setter
+    def job_output_file(self, filename: str):
+        self._config['handlers']['jobfile']['filename'] = filename
+
+
+class ServerLoggingConfig(DictLoggingConfig):
+
+    def __init__(self, config=None, server=None):
+        if not config:
+            config = _DEFAULT_SERVER_LOGGING_CONFIG.copy()
+        super(ServerLoggingConfig, self).__init__(config=config)
+        if server:
+            self.server = server
+
+    @property
+    def server(self):
+        return self._server
+
+    @server.setter
+    def server(self, server):
+        self._server = server
+        # Add the normal file handler. It's not included in the default
+        # config above because we're templating out the filename. Also, we
+        # only want to add the handler if we're actually going to use it.
+        for name, handler in _DEFAULT_SERVER_FILE_HANDLERS.items():
+            server_handler = handler.copy()
+            server_handler['filename'] = server_handler['filename'].format(
+                server=server)
+            self._config['handlers'][name] = server_handler
+        # Change everything configured to write to stdout to write to
+        # log files instead.
+        for logger in self._config['loggers'].values():
+            if logger['handlers'] == ['console']:
+                logger['handlers'] = ['normal']
+
+    def setDebug(self):
+        # Change level from INFO to DEBUG
+        for section in ('handlers', 'loggers'):
+            for handler in self._config[section].values():
+                if handler.get('level') == 'INFO':
+                    handler['level'] = 'DEBUG'
+
+
+class FileLoggingConfig(LoggingConfig):
+
+    def __init__(self, filename):
+        self._filename = filename
+
+    def apply(self):
+        logging.config.fileConfig(self._filename)
+
+
+if __name__ == '__main__':
+    # Use this to emit a working logging output for testing zuul_stream
+    # locally.
+    config = JobLoggingConfig(
+        job_output_file=os.path.abspath('job-output.txt'))
+    config.writeJson('logging.json')
diff --git a/zuul/cmd/__init__.py b/zuul/cmd/__init__.py
index 8610114..a5db9a6 100755
--- a/zuul/cmd/__init__.py
+++ b/zuul/cmd/__init__.py
@@ -26,8 +26,8 @@
 
 yappi = extras.try_import('yappi')
 
+from zuul.ansible import logconfig
 import zuul.lib.connections
-from zuul.lib import yamlutil as yaml
 
 # Do not import modules that will pull in paramiko which must not be
 # imported until after the daemonization.
@@ -84,19 +84,19 @@
     def setup_logging(self, section, parameter):
         if self.config.has_option(section, parameter):
             fp = os.path.expanduser(self.config.get(section, parameter))
-            if not os.path.exists(fp):
-                raise Exception("Unable to read logging config file at %s" %
-                                fp)
-
-            if os.path.splitext(fp)[1] in ('.yml', '.yaml'):
-                with open(fp, 'r') as f:
-                    logging.config.dictConfig(yaml.safe_load(f))
-
-            else:
-                logging.config.fileConfig(fp)
-
+            logging_config = logconfig.load_config(fp)
         else:
-            logging.basicConfig(level=logging.DEBUG)
+            # If someone runs in the foreground and doesn't give a logging
+            # config, leave the config set to emit to stdout.
+            if hasattr(self.args, 'nodaemon') and self.args.nodaemon:
+                logging_config = logconfig.ServerLoggingConfig()
+                logging_config.setDebug()
+            else:
+                # Setting a server value updates the defaults to use
+                # WatchedFileHandler on /var/log/zuul/{server}-debug.log
+                # and /var/log/zuul/{server}.log
+                logging_config = logconfig.ServerLoggingConfig(server=section)
+        logging_config.apply()
 
     def configure_connections(self, source_only=False):
         self.connections = zuul.lib.connections.ConnectionRegistry()
diff --git a/zuul/cmd/scheduler.py b/zuul/cmd/scheduler.py
index b7b12fe..a9923c6 100755
--- a/zuul/cmd/scheduler.py
+++ b/zuul/cmd/scheduler.py
@@ -100,10 +100,12 @@
             statsd_host = os.environ.get('STATSD_HOST')
             statsd_port = int(os.environ.get('STATSD_PORT', 8125))
             host = get_default(self.config, 'gearman_server', 'listen_address')
+            port = int(get_default(self.config, 'gearman_server', 'port',
+                                   4730))
             ssl_key = get_default(self.config, 'gearman_server', 'ssl_key')
             ssl_cert = get_default(self.config, 'gearman_server', 'ssl_cert')
             ssl_ca = get_default(self.config, 'gearman_server', 'ssl_ca')
-            zuul.lib.gearserver.GearServer(4730,
+            zuul.lib.gearserver.GearServer(port,
                                            ssl_key=ssl_key,
                                            ssl_cert=ssl_cert,
                                            ssl_ca=ssl_ca,
diff --git a/zuul/configloader.py b/zuul/configloader.py
index f175bab..57fb7e2 100644
--- a/zuul/configloader.py
+++ b/zuul/configloader.py
@@ -1232,6 +1232,14 @@
                                  untrusted_projects, cached, tenant):
         config_projects_config = model.UnparsedTenantConfig()
         untrusted_projects_config = model.UnparsedTenantConfig()
+        # project -> config; these will replace
+        # project.unparsed_config if this method successfully
+        # completes
+        new_project_unparsed_config = {}
+        # project -> branch -> config; these will replace
+        # project.unparsed_branch_config if this method succesfully
+        # completes
+        new_project_unparsed_branch_config = {}
         jobs = []
 
         # In some cases, we can use cached data, but it's still
@@ -1251,7 +1259,7 @@
                 continue
             # Otherwise, prepare an empty unparsed config object to
             # hold cached data later.
-            project.unparsed_config = model.UnparsedTenantConfig()
+            new_project_unparsed_config[project] = model.UnparsedTenantConfig()
             # Get main config files.  These files are permitted the
             # full range of configuration.
             job = merger.getFiles(
@@ -1270,7 +1278,8 @@
                 continue
             # Otherwise, prepare an empty unparsed config object to
             # hold cached data later.
-            project.unparsed_config = model.UnparsedTenantConfig()
+            new_project_unparsed_config[project] = model.UnparsedTenantConfig()
+            new_project_unparsed_branch_config[project] = {}
             # Get in-project-repo config files which have a restricted
             # set of options.
             # For each branch in the repo, get the zuul.yaml for that
@@ -1278,7 +1287,7 @@
             # branch selector to each job there.  This makes the
             # in-repo configuration apply only to that branch.
             for branch in project.source.getProjectBranches(project, tenant):
-                project.unparsed_branch_config[branch] = \
+                new_project_unparsed_branch_config[project][branch] = \
                     model.UnparsedTenantConfig()
                 job = merger.getFiles(
                     project.source.connection.connection_name,
@@ -1337,9 +1346,19 @@
                         incdata = TenantParser._parseUntrustedProjectLayout(
                             job.files[fn], job.source_context)
                         untrusted_projects_config.extend(incdata)
-                    project.unparsed_config.extend(incdata)
-                    if branch in project.unparsed_branch_config:
-                        project.unparsed_branch_config[branch].extend(incdata)
+                    new_project_unparsed_config[project].extend(incdata)
+                    if branch in new_project_unparsed_branch_config.get(
+                            project, {}):
+                        new_project_unparsed_branch_config[project][branch].\
+                            extend(incdata)
+        # Now that we've successfully loaded all of the configuration,
+        # cache the unparsed data on the project objects.
+        for project, data in new_project_unparsed_config.items():
+            project.unparsed_config = data
+        for project, branch_config in \
+            new_project_unparsed_branch_config.items():
+            for branch, data in branch_config.items():
+                project.unparsed_branch_config[branch] = data
         return config_projects_config, untrusted_projects_config
 
     @staticmethod
diff --git a/zuul/driver/github/githubconnection.py b/zuul/driver/github/githubconnection.py
index f31df6a..5a40422 100644
--- a/zuul/driver/github/githubconnection.py
+++ b/zuul/driver/github/githubconnection.py
@@ -29,7 +29,7 @@
 import webob.dec
 import voluptuous as v
 import github3
-from github3.exceptions import MethodNotAllowed
+import github3.exceptions
 
 from zuul.connection import BaseConnection
 from zuul.model import Ref, Branch, Tag
@@ -75,7 +75,9 @@
             raise webob.exc.HTTPMethodNotAllowed(
                 'Only POST method is allowed.')
 
-        self.log.debug("Github Webhook Received.")
+        delivery = request.headers.get('X-GitHub-Delivery')
+        self.log.debug("Github Webhook Received: {delivery}".format(
+            delivery=delivery))
 
         self._validate_signature(request)
 
@@ -250,7 +252,8 @@
         action = body.get('action')
         if action == 'pending':
             return
-        pr_body = self.connection.getPullBySha(body['sha'])
+        project = body.get('name')
+        pr_body = self.connection.getPullBySha(body['sha'], project)
         if pr_body is None:
             return
 
@@ -348,6 +351,7 @@
     def _init_data(self):
         if self._data is None:
             user = self._github.user(self._username)
+            self.log.debug("Initialized data for user %s", self._username)
             log_rate_limit(self.log, self._github)
             self._data = {
                 'username': user.login,
@@ -365,6 +369,7 @@
         super(GithubConnection, self).__init__(
             driver, connection_name, connection_config)
         self._change_cache = {}
+        self._project_branch_cache = {}
         self.projects = {}
         self.git_ssh_key = self.connection_config.get('sshkey')
         self.server = self.connection_config.get('server', 'github.com')
@@ -421,6 +426,9 @@
         # anything going through requests to http/s goes through cache
         github.session.mount('http://', self.cache_adapter)
         github.session.mount('https://', self.cache_adapter)
+        # Add properties to store project and user for logging later
+        github._zuul_project = None
+        github._zuul_user_id = None
         return github
 
     def _authenticateGithubAPI(self):
@@ -507,6 +515,8 @@
         if use_app and project and self.app_id:
             github = self._createGithubClient()
             github.login(token=self._get_installation_key(project, user_id))
+            github._zuul_project = project
+            github._zuul_user_id = user_id
             return github
 
         # if we're using api_key authentication then this is already token
@@ -612,6 +622,7 @@
             prs.append(pr)
             seen.add(issue)
 
+        self.log.debug("Ran search issues: %s", query)
         log_rate_limit(self.log, github)
         return prs
 
@@ -716,13 +727,28 @@
         else:
             exclude_unprotected = tenant.exclude_unprotected_branches
 
-        github = self.getGithubClient()
-        owner, proj = project.name.split('/')
-        repository = github.repository(owner, proj)
-        branches = [branch.name for branch in repository.branches(
-            protected=exclude_unprotected)]
-        log_rate_limit(self.log, github)
-        return branches
+        # TODO(mordred) Does it work for Github Apps to get repository
+        # branches? If not, can we avoid doing this as an API for projects that
+        # aren't trying to exclude protected branches by doing a git command
+        # (gerrit driver does an upload pack over ssh)
+        github = self.getGithubClient(project.name)
+        try:
+            owner, proj = project.name.split('/')
+            repository = github.repository(owner, proj)
+            self._project_branch_cache[project.name] = [
+                branch.name for branch in repository.branches(
+                    protected=exclude_unprotected)]
+            self.log.debug('Got project branches for %s', project.name)
+            log_rate_limit(self.log, github)
+        except github3.exceptions.ForbiddenError as e:
+            self.log.error(str(e), exc_info=True)
+            rate_limit = github.rate_limit()
+            if rate_limit['resources']['core']['remaining'] == 0:
+                self.log.debug("Rate limit exceeded, using stale branch list")
+            else:
+                self.log.error(str(e), exc_info=True)
+
+        return self._project_branch_cache[project.name]
 
     def getPullUrl(self, project, number):
         return '%s/pull/%s' % (self.getGitwebUrl(project), number)
@@ -742,6 +768,7 @@
         pr = probj.as_dict()
         pr['files'] = [f.filename for f in probj.files()]
         pr['labels'] = [l.name for l in issueobj.labels()]
+        self.log.debug('Got PR %s#%s', project_name, number)
         log_rate_limit(self.log, github)
         return pr
 
@@ -758,24 +785,19 @@
         # For now, just send back a True value.
         return True
 
-    def getPullBySha(self, sha):
-        query = '%s type:pr is:open' % sha
+    def getPullBySha(self, sha, project):
         pulls = []
-        github = self.getGithubClient()
-        for issue in github.search_issues(query=query):
-            pr_url = issue.issue.pull_request().as_dict().get('url')
-            if not pr_url:
-                continue
-            # the issue provides no good description of the project :\
-            owner, project, _, number = pr_url.split('/')[-4:]
-            github = self.getGithubClient("%s/%s" % (owner, project))
-            pr = github.pull_request(owner, project, number)
+        owner, project = project.split('/')
+        github = self.getGithubClient("%s/%s" % (owner, project))
+        repo = github.repository(owner, project)
+        for pr in repo.pull_requests(state='open'):
             if pr.head.sha != sha:
                 continue
             if pr.as_dict() in pulls:
                 continue
             pulls.append(pr.as_dict())
 
+        self.log.debug('Got PR on project %s for sha %s', project, sha)
         log_rate_limit(self.log, github)
         if len(pulls) > 1:
             raise Exception('Multiple pulls found with head sha %s' % sha)
@@ -844,6 +866,7 @@
         reviews = [review.as_dict() for review in
                    github.pull_request(owner, project, number).reviews()]
 
+        self.log.debug('Got reviews for PR %s/%s#%s', owner, project, number)
         log_rate_limit(self.log, github)
         return reviews
 
@@ -872,6 +895,7 @@
         # Get the data
         perms = repository._get(url, headers=headers)
 
+        self.log.debug("Got repo permissions for %s/%s", owner, proj)
         log_rate_limit(self.log, github)
 
         # no known user, maybe deleted since review?
@@ -887,6 +911,7 @@
         repository = github.repository(owner, proj)
         pull_request = repository.issue(pr_number)
         pull_request.create_comment(message)
+        self.log.debug("Commented on PR %s/%s#%s", owner, proj, pr_number)
         log_rate_limit(self.log, github)
 
     def mergePull(self, project, pr_number, commit_message='', sha=None):
@@ -895,9 +920,11 @@
         pull_request = github.pull_request(owner, proj, pr_number)
         try:
             result = pull_request.merge(commit_message=commit_message, sha=sha)
-        except MethodNotAllowed as e:
+        except github3.exceptions.MethodNotAllowed as e:
             raise MergeFailure('Merge was not successful due to mergeability'
                                ' conflict, original error is %s' % e)
+
+        self.log.debug("Merged PR %s/%s#%s", owner, proj, pr_number)
         log_rate_limit(self.log, github)
         if not result:
             raise Exception('Pull request was not merged')
@@ -911,6 +938,7 @@
         # API transaction
         statuses = [status.as_dict() for status in commit.statuses()]
 
+        self.log.debug("Got commit statuses for sha %s on %s", sha, project)
         log_rate_limit(self.log, github)
         return statuses
 
@@ -920,6 +948,8 @@
         owner, proj = project.split('/')
         repository = github.repository(owner, proj)
         repository.create_status(sha, state, url, description, context)
+        self.log.debug("Set commit status to %s for sha %s on %s",
+                       state, sha, project)
         log_rate_limit(self.log, github)
 
     def labelPull(self, project, pr_number, label):
@@ -927,6 +957,7 @@
         owner, proj = project.split('/')
         pull_request = github.issue(owner, proj, pr_number)
         pull_request.add_labels(label)
+        self.log.debug("Added label %s to %s#%s", label, proj, pr_number)
         log_rate_limit(self.log, github)
 
     def unlabelPull(self, project, pr_number, label):
@@ -934,6 +965,7 @@
         owner, proj = project.split('/')
         pull_request = github.issue(owner, proj, pr_number)
         pull_request.remove_label(label)
+        self.log.debug("Removed label %s from %s#%s", label, proj, pr_number)
         log_rate_limit(self.log, github)
 
     def getPushedFileNames(self, event):
@@ -985,8 +1017,12 @@
         reset = rate_limit['resources']['core']['reset']
     except:
         return
-    log.debug('GitHub API rate limit remaining: %s reset: %s' %
-              (remaining, reset))
+    if github._zuul_user_id:
+        log.debug('GitHub API rate limit (%s, %s) remaining: %s reset: %s',
+                  github._zuul_project, github._zuul_user_id, remaining, reset)
+    else:
+        log.debug('GitHub API rate limit remaining: %s reset: %s',
+                  remaining, reset)
 
 
 def getSchema():
diff --git a/zuul/executor/client.py b/zuul/executor/client.py
index 64b635d..fcb6bb2 100644
--- a/zuul/executor/client.py
+++ b/zuul/executor/client.py
@@ -182,6 +182,7 @@
         if (hasattr(item.change, 'newrev') and item.change.newrev
             and item.change.newrev != '0' * 40):
             zuul_params['newrev'] = item.change.newrev
+        zuul_params['projects'] = []  # Set below
         zuul_params['items'] = []
         for i in all_items:
             d = dict()
@@ -200,7 +201,6 @@
                 d['branch'] = i.change.branch
             zuul_params['items'].append(d)
 
-        # Legacy environment variables
         params = dict()
         params['job'] = job.name
         params['timeout'] = job.timeout
@@ -261,6 +261,15 @@
                 params['projects'].append(make_project_dict(project))
                 projects.add(project)
 
+        for p in projects:
+            zuul_params['projects'].append(dict(
+                name=p.name,
+                short_name=p.name.split('/')[-1],
+                canonical_hostname=p.canonical_hostname,
+                canonical_name=p.canonical_name,
+                src_dir=os.path.join('src', p.canonical_name),
+            ))
+
         build = Build(job, uuid)
         build.parameters = params
 
diff --git a/zuul/executor/server.py b/zuul/executor/server.py
index cb139c6..3daafc7 100644
--- a/zuul/executor/server.py
+++ b/zuul/executor/server.py
@@ -280,6 +280,7 @@
         '''
         # root
         #   ansible (mounted in bwrap read-only)
+        #     logging.json
         #     inventory.yaml
         #   .ansible (mounted in bwrap read-write)
         #     fact-cache/localhost
@@ -336,6 +337,7 @@
             pass
         self.known_hosts = os.path.join(ssh_dir, 'known_hosts')
         self.inventory = os.path.join(self.ansible_root, 'inventory.yaml')
+        self.logging_json = os.path.join(self.ansible_root, 'logging.json')
         self.playbooks = []  # The list of candidate playbooks
         self.playbook = None  # A pointer to the candidate we have chosen
         self.pre_playbooks = []
@@ -1004,6 +1006,7 @@
         self.preparePlaybooks(args)
 
         self.prepareAnsibleFiles(args)
+        self.writeLoggingConfig()
 
         data = {
             # TODO(mordred) worker_name is needed as a unique name for the
@@ -1098,6 +1101,7 @@
         result = None
 
         pre_failed = False
+        success = False
         for index, playbook in enumerate(self.jobdir.pre_playbooks):
             # TODOv3(pabelanger): Implement pre-run timeout setting.
             pre_status, pre_code = self.runAnsiblePlaybook(
@@ -1106,26 +1110,28 @@
                 # These should really never fail, so return None and have
                 # zuul try again
                 pre_failed = True
-                success = False
                 break
 
         if not pre_failed:
             job_status, job_code = self.runAnsiblePlaybook(
                 self.jobdir.playbook, args['timeout'], phase='run')
-            if job_status == self.RESULT_TIMED_OUT:
-                return 'TIMED_OUT'
             if job_status == self.RESULT_ABORTED:
                 return 'ABORTED'
-            if job_status != self.RESULT_NORMAL:
+            elif job_status == self.RESULT_TIMED_OUT:
+                # Set the pre-failure flag so this doesn't get
+                # overridden by a post-failure.
+                pre_failed = True
+                result = 'TIMED_OUT'
+            elif job_status == self.RESULT_NORMAL:
+                success = (job_code == 0)
+                if success:
+                    result = 'SUCCESS'
+                else:
+                    result = 'FAILURE'
+            else:
                 # The result of the job is indeterminate.  Zuul will
                 # run it again.
-                return result
-
-            success = (job_code == 0)
-            if success:
-                result = 'SUCCESS'
-            else:
-                result = 'FAILURE'
+                return None
 
         for index, playbook in enumerate(self.jobdir.post_playbooks):
             # TODOv3(pabelanger): Implement post-run timeout setting.
@@ -1404,6 +1410,14 @@
                 for key in node['host_keys']:
                     known_hosts.write('%s\n' % key)
 
+    def writeLoggingConfig(self):
+        self.log.debug("Writing logging config for job %s %s",
+                       self.jobdir.job_output_file,
+                       self.jobdir.logging_json)
+        logging_config = zuul.ansible.logconfig.JobLoggingConfig(
+            job_output_file=self.jobdir.job_output_file)
+        logging_config.writeJson(self.jobdir.logging_json)
+
     def writeAnsibleConfig(self, jobdir_playbook, playbook):
         trusted = jobdir_playbook.trusted
 
@@ -1486,7 +1500,9 @@
         config_file = playbook.ansible_config
         env_copy = os.environ.copy()
         env_copy.update(self.ssh_agent.env)
-        env_copy['ZUUL_JOB_OUTPUT_FILE'] = self.jobdir.job_output_file
+        if ara_callbacks:
+            env_copy['ARA_LOG_CONFIG'] = self.jobdir.logging_json
+        env_copy['ZUUL_JOB_LOG_CONFIG'] = self.jobdir.logging_json
         env_copy['ZUUL_JOBDIR'] = self.jobdir.root
         pythonpath = env_copy.get('PYTHONPATH')
         if pythonpath:
@@ -1554,7 +1570,15 @@
                                 ("Ansible timeout exceeded",))
             watchdog.start()
         try:
-            for idx, line in enumerate(iter(self.proc.stdout.readline, b'')):
+            # Use manual idx instead of enumerate so that RESULT lines
+            # don't count towards BUFFER_LINES_FOR_SYNTAX; start at -1 so
+            idx = -1  # the first counted line checks as idx == 0
+            for line in iter(self.proc.stdout.readline, b''):
+                if line.startswith(b'RESULT'):
+                    # TODO(mordred) Process result commands if sent
+                    continue
+                else:
+                    idx += 1
                 if idx < BUFFER_LINES_FOR_SYNTAX:
                     syntax_buffer.append(line)
                 line = line[:1024].rstrip()