Merge "Avoid using apt-add-repository" into feature/zuulv3
diff --git a/README.rst b/README.rst
index 16e7385..52b89df 100644
--- a/README.rst
+++ b/README.rst
@@ -7,6 +7,9 @@
 preparation for the third major version of Zuul.  We call this effort
 `Zuul v3`_ and it is described in more detail below.
 
+The latest documentation for Zuul v3 is published at:
+https://docs.openstack.org/infra/zuul/feature/zuulv3/
+
 Contributing
 ------------
 
diff --git a/doc/source/admin/components.rst b/doc/source/admin/components.rst
index a24b833..26a85b2 100644
--- a/doc/source/admin/components.rst
+++ b/doc/source/admin/components.rst
@@ -323,3 +323,41 @@
 To enable or disable running Ansible in verbose mode (with the '-vvv'
 argument to ansible-playbook) run ``zuul-executor verbose`` and
 ``zuul-executor unverbose``.
+
+Web Server
+----------
+
+The Zuul web server currently acts as a websocket interface to live log
+streaming. Eventually, it will serve as the single process handling all
+HTTP interactions with Zuul.
+
+Configuration
+~~~~~~~~~~~~~
+
+In addition to the ``gearman`` common configuration section, the following
+sections of **zuul.conf** are used by the web server:
+
+web
+"""
+
+**listen_address**
+  IP address or domain name on which to listen (default: 127.0.0.1).
+  ``listen_address=127.0.0.1``
+
+**log_config**
+  Path to log config file for the web server process.
+  ``log_config=/etc/zuul/logging.yaml``
+
+**pidfile**
+  Path to PID lock file for the web server process.
+  ``pidfile=/var/run/zuul-web/zuul-web.pid``
+
+**port**
+  Port to use for the web server process.
+  ``port=9000``
+
+Operation
+~~~~~~~~~
+
+To start the web server, run ``zuul-web``.  To stop it, kill the
+process whose PID was saved in the pidfile specified in the configuration.
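
For reference, a ``zuul.conf`` fragment combining the example values documented
above might look like the following sketch (the ``[gearman]`` section is the
shared section mentioned earlier; ``server`` is assumed here to be its host
setting, and none of these values are required defaults)::

  [gearman]
  server=127.0.0.1

  [web]
  listen_address=127.0.0.1
  port=9000
  pidfile=/var/run/zuul-web/zuul-web.pid
  log_config=/etc/zuul/logging.yaml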
diff --git a/doc/source/admin/quick-start.rst b/doc/source/admin/quick-start.rst
index 9993775..77dee4a 100644
--- a/doc/source/admin/quick-start.rst
+++ b/doc/source/admin/quick-start.rst
@@ -33,6 +33,9 @@
       merger if dedicated mergers are not provided.  One or more of
       these must be run.
 
+    - **zuul-web**: A web server that currently provides websocket access to
+      live log streaming.
+
     - **gearman**: optional builtin gearman daemon provided by zuul-scheduler
 
 External components:
diff --git a/tests/fixtures/config/in-repo/git/org_project1/.zuul.yaml b/tests/fixtures/config/in-repo/git/org_project1/.zuul.yaml
new file mode 100644
index 0000000..3fd423b
--- /dev/null
+++ b/tests/fixtures/config/in-repo/git/org_project1/.zuul.yaml
@@ -0,0 +1,5 @@
+- project:
+    name: org/project1
+    tenant-one-gate:
+      jobs:
+        - project-test1
diff --git a/tests/unit/test_scheduler.py b/tests/unit/test_scheduler.py
index aa061ff..61bf9f8 100755
--- a/tests/unit/test_scheduler.py
+++ b/tests/unit/test_scheduler.py
@@ -2324,6 +2324,16 @@
                 uuid=status_jobs[2]['uuid']),
             status_jobs[2]['report_url'])
 
+        # check job dependencies
+        self.assertIsNotNone(status_jobs[0]['dependencies'])
+        self.assertIsNotNone(status_jobs[1]['dependencies'])
+        self.assertIsNotNone(status_jobs[2]['dependencies'])
+        self.assertEqual(len(status_jobs[0]['dependencies']), 0)
+        self.assertEqual(len(status_jobs[1]['dependencies']), 1)
+        self.assertEqual(len(status_jobs[2]['dependencies']), 1)
+        self.assertIn('project-merge', status_jobs[1]['dependencies'])
+        self.assertIn('project-merge', status_jobs[2]['dependencies'])
+
     def test_live_reconfiguration(self):
         "Test that live reconfiguration works"
         self.executor_server.hold_jobs_in_build = True
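
These assertions rely on the ``dependencies`` key that the ``zuul/model.py``
hunk later in this diff adds to each job entry in the status JSON. An abridged
entry for one of the dependent jobs might look like the following (field values
are illustrative only, not taken from a real run)::

  {
      "name": "project-test1",
      "dependencies": ["project-merge"],
      "elapsed_time": 1000,
      "remaining_time": 4000,
      "url": "...",
      "uuid": "...",
      "report_url": "..."
  }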
diff --git a/tests/unit/test_v3.py b/tests/unit/test_v3.py
index 112f48c..2b865cf 100644
--- a/tests/unit/test_v3.py
+++ b/tests/unit/test_v3.py
@@ -486,6 +486,62 @@
         self.assertIn('appears multiple times', A.messages[0],
                       "A should have a syntax error reported")
 
+    def test_multi_repo(self):
+        downstream_repo_conf = textwrap.dedent(
+            """
+            - project:
+                name: org/project1
+                tenant-one-gate:
+                  jobs:
+                    - project-test1
+
+            - job:
+                name: project1-test1
+                parent: project-test1
+            """)
+
+        file_dict = {'.zuul.yaml': downstream_repo_conf}
+        A = self.fake_gerrit.addFakeChange('org/project1', 'master', 'A',
+                                           files=file_dict)
+        A.addApproval('code-review', 2)
+        self.fake_gerrit.addEvent(A.addApproval('approved', 1))
+        self.waitUntilSettled()
+
+        self.assertEqual(A.data['status'], 'MERGED')
+        self.fake_gerrit.addEvent(A.getChangeMergedEvent())
+        self.waitUntilSettled()
+
+        upstream_repo_conf = textwrap.dedent(
+            """
+            - job:
+                name: project-test1
+
+            - job:
+                name: project-test2
+
+            - project:
+                name: org/project
+                tenant-one-gate:
+                  jobs:
+                    - project-test1
+            """)
+
+        file_dict = {'.zuul.yaml': upstream_repo_conf}
+        B = self.fake_gerrit.addFakeChange('org/project', 'master', 'B',
+                                           files=file_dict)
+        B.addApproval('code-review', 2)
+        self.fake_gerrit.addEvent(B.addApproval('approved', 1))
+        self.waitUntilSettled()
+
+        self.assertEqual(B.data['status'], 'MERGED')
+        self.fake_gerrit.addEvent(B.getChangeMergedEvent())
+        self.waitUntilSettled()
+
+        tenant = self.sched.abide.tenants.get('tenant-one')
+        # Ensure the latest change is reflected in the config; if it
+        # isn't, this will raise an exception.
+        tenant.layout.getJob('project-test2')
+
 
 class TestAnsible(AnsibleZuulTestCase):
     # A temporary class to hold new tests while others are disabled
diff --git a/zuul/configloader.py b/zuul/configloader.py
index de6c4bb..3c9ecf7 100644
--- a/zuul/configloader.py
+++ b/zuul/configloader.py
@@ -1109,13 +1109,20 @@
         untrusted_projects_config = model.UnparsedTenantConfig()
         jobs = []
 
+        # In some cases we can use cached data, but it is still
+        # important to process it in the same order as any cat jobs
+        # that we run.  This class is used to hold the cached data
+        # and is inserted into the ordered job list for later
+        # processing.
+        class CachedDataJob(object):
+            def __init__(self, config_project, project):
+                self.config_project = config_project
+                self.project = project
+
         for project in config_projects:
             # If we have cached data (this is a reconfiguration) use it.
             if cached and project.unparsed_config:
-                TenantParser.log.info(
-                    "Loading previously parsed configuration from %s" %
-                    (project,))
-                config_projects_config.extend(project.unparsed_config)
+                jobs.append(CachedDataJob(True, project))
                 continue
             # Otherwise, prepare an empty unparsed config object to
             # hold cached data later.
@@ -1134,10 +1141,7 @@
         for project in untrusted_projects:
             # If we have cached data (this is a reconfiguration) use it.
             if cached and project.unparsed_config:
-                TenantParser.log.info(
-                    "Loading previously parsed configuration from %s" %
-                    (project,))
-                untrusted_projects_config.extend(project.unparsed_config)
+                jobs.append(CachedDataJob(False, project))
                 continue
             # Otherwise, prepare an empty unparsed config object to
             # hold cached data later.
@@ -1165,6 +1169,17 @@
             # complete in the order they were executed which is the
             # same order they were defined in the main config file.
             # This is important for correct inheritance.
+            if isinstance(job, CachedDataJob):
+                TenantParser.log.info(
+                    "Loading previously parsed configuration from %s" %
+                    (job.project,))
+                if job.config_project:
+                    config_projects_config.extend(
+                        job.project.unparsed_config)
+                else:
+                    untrusted_projects_config.extend(
+                        job.project.unparsed_config)
+                continue
             TenantParser.log.debug("Waiting for cat job %s" % (job,))
             job.wait()
             TenantParser.log.debug("Cat job %s got files %s" %
diff --git a/zuul/executor/client.py b/zuul/executor/client.py
index 442d1c5..d17e47e 100644
--- a/zuul/executor/client.py
+++ b/zuul/executor/client.py
@@ -135,40 +135,6 @@
         self.gearman.shutdown()
         self.log.debug("Stopped")
 
-    def isJobRegistered(self, name):
-        if self.function_cache_time:
-            for connection in self.gearman.active_connections:
-                if connection.connect_time > self.function_cache_time:
-                    self.function_cache = set()
-                    self.function_cache_time = 0
-                    break
-        if name in self.function_cache:
-            self.log.debug("Function %s is registered" % name)
-            return True
-        if ((time.time() - self.function_cache_time) <
-            self.negative_function_cache_ttl):
-            self.log.debug("Function %s is not registered "
-                           "(negative ttl in effect)" % name)
-            return False
-        self.function_cache_time = time.time()
-        for connection in self.gearman.active_connections:
-            try:
-                req = gear.StatusAdminRequest()
-                connection.sendAdminRequest(req, timeout=300)
-            except Exception:
-                self.log.exception("Exception while checking functions")
-                continue
-            for line in req.response.split('\n'):
-                parts = [x.strip() for x in line.split()]
-                if not parts or parts[0] == '.':
-                    continue
-                self.function_cache.add(parts[0])
-        if name in self.function_cache:
-            self.log.debug("Function %s is registered" % name)
-            return True
-        self.log.debug("Function %s is not registered" % name)
-        return False
-
     def execute(self, job, item, pipeline, dependent_items=[],
                 merger_items=[]):
         tenant = pipeline.layout.tenant
diff --git a/zuul/executor/server.py b/zuul/executor/server.py
index 0b05711..bc30386 100644
--- a/zuul/executor/server.py
+++ b/zuul/executor/server.py
@@ -118,12 +118,12 @@
         env.update(self.env)
         key_path = os.path.expanduser(key_path)
         self.log.debug('Adding SSH Key {}'.format(key_path))
-        output = ''
         try:
-            output = subprocess.check_output(['ssh-add', key_path], env=env,
-                                             stderr=subprocess.PIPE)
-        except subprocess.CalledProcessError:
-            self.log.error('ssh-add failed: {}'.format(output))
+            subprocess.check_output(['ssh-add', key_path], env=env,
+                                    stderr=subprocess.PIPE)
+        except subprocess.CalledProcessError as e:
+            self.log.error('ssh-add failed. stdout: %s, stderr: %s',
+                           e.output, e.stderr)
             raise
         self.log.info('Added SSH Key {}'.format(key_path))
 
@@ -695,16 +695,12 @@
         self.running = False
         self.aborted = False
         self.thread = None
-        self.ssh_agent = None
-
         self.private_key_file = get_default(self.executor_server.config,
                                             'executor', 'private_key_file',
                                             '~/.ssh/id_rsa')
         self.ssh_agent = SshAgent()
 
     def run(self):
-        self.ssh_agent.start()
-        self.ssh_agent.add(self.private_key_file)
         self.running = True
         self.thread = threading.Thread(target=self.execute)
         self.thread.start()
@@ -717,6 +713,8 @@
 
     def execute(self):
         try:
+            self.ssh_agent.start()
+            self.ssh_agent.add(self.private_key_file)
             self.jobdir = JobDir(self.executor_server.jobdir_root,
                                  self.executor_server.keep_jobdir,
                                  str(self.job.unique))
@@ -726,19 +724,20 @@
             self.job.sendWorkException(traceback.format_exc())
         finally:
             self.running = False
-            try:
-                self.jobdir.cleanup()
-            except Exception:
-                self.log.exception("Error cleaning up jobdir:")
-            try:
-                self.executor_server.finishJob(self.job.unique)
-            except Exception:
-                self.log.exception("Error finalizing job thread:")
+            if self.jobdir:
+                try:
+                    self.jobdir.cleanup()
+                except Exception:
+                    self.log.exception("Error cleaning up jobdir:")
             if self.ssh_agent:
                 try:
                     self.ssh_agent.stop()
                 except Exception:
                     self.log.exception("Error stopping SSH agent:")
+            try:
+                self.executor_server.finishJob(self.job.unique)
+            except Exception:
+                self.log.exception("Error finalizing job thread:")
 
     def _execute(self):
         args = json.loads(self.job.arguments)
@@ -1084,11 +1083,8 @@
                 for entry in os.listdir(d):
                     self._blockPluginDirs(os.path.join(d, entry))
             return d
-        # We assume the repository itself is a collection of roles
-        if not trusted:
-            for entry in os.listdir(path):
-                self._blockPluginDirs(os.path.join(path, entry))
-        return path
+        # It is neither a bare role nor a collection of roles.
+        raise Exception("Unable to find role in %s" % (path,))
 
     def prepareZuulRole(self, args, role, root, trusted, untrusted):
         self.log.debug("Prepare zuul role for %s" % (role,))
diff --git a/zuul/model.py b/zuul/model.py
index 4744bbe..9d39a0c 100644
--- a/zuul/model.py
+++ b/zuul/model.py
@@ -1693,6 +1693,7 @@
 
             ret['jobs'].append({
                 'name': job.name,
+                'dependencies': list(job.dependencies),
                 'elapsed_time': elapsed,
                 'remaining_time': remaining,
                 'url': build_url,
diff --git a/zuul/web.py b/zuul/web.py
index 2ef65fe..ab16e11 100644
--- a/zuul/web.py
+++ b/zuul/web.py
@@ -138,6 +138,9 @@
                     break
                 elif msg.type == aiohttp.WSMsgType.CLOSED:
                     break
+        except asyncio.CancelledError:
+            self.log.debug("Websocket request handling cancelled")
+            pass
         except Exception as e:
             self.log.exception("Websocket exception:")
             await ws.close(code=4009, message=str(e).encode('utf-8'))
@@ -228,5 +231,6 @@
     logging.basicConfig(level=logging.DEBUG)
     loop = asyncio.get_event_loop()
     loop.set_debug(True)
-    z = ZuulWeb()
+    z = ZuulWeb(listen_address="127.0.0.1", listen_port=9000,
+                gear_server="127.0.0.1", gear_port=4730)
     z.run(loop)
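
The ``zuul/web.py`` hunk above handles ``asyncio.CancelledError`` separately so
that handlers cancelled during shutdown are not logged as errors. A standalone
sketch of that pattern for an aiohttp websocket handler, independent of Zuul's
log-streaming protocol (the echo body and the ``/ws`` route are illustrative
only), is::

  import asyncio
  import aiohttp
  from aiohttp import web

  async def websocket_handler(request):
      ws = web.WebSocketResponse()
      await ws.prepare(request)
      try:
          async for msg in ws:
              if msg.type == aiohttp.WSMsgType.TEXT:
                  # A real handler would stream log data; echo for the sketch.
                  await ws.send_str(msg.data)
              elif msg.type == aiohttp.WSMsgType.CLOSED:
                  break
      except asyncio.CancelledError:
          # The event loop cancels in-flight handlers on shutdown; treat this
          # as normal termination rather than logging it as an exception.
          pass
      except Exception as e:
          await ws.close(code=4009, message=str(e).encode('utf-8'))
      return ws

  app = web.Application()
  app.router.add_get('/ws', websocket_handler)
  # web.run_app(app, host='127.0.0.1', port=9000)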