Merge "Close logging config file after write" into feature/zuulv3
diff --git a/etc/status/public_html/jquery.zuul.js b/etc/status/public_html/jquery.zuul.js
index c2cf279..1937cd5 100644
--- a/etc/status/public_html/jquery.zuul.js
+++ b/etc/status/public_html/jquery.zuul.js
@@ -554,14 +554,11 @@
                         }
 
                         $.each(changes, function (change_i, change) {
-                            // Only add a change when it has jobs
-                            if (change.jobs.length > 0) {
-                                var $change_box =
-                                    format.change_with_status_tree(
-                                        change, change_queue);
-                                $html.append($change_box);
-                                format.display_patchset($change_box);
-                            }
+                            var $change_box =
+                                format.change_with_status_tree(
+                                    change, change_queue);
+                            $html.append($change_box);
+                            format.display_patchset($change_box);
                         });
                     });
                 });
diff --git a/tests/base.py b/tests/base.py
index c49e1ce..4214809 100755
--- a/tests/base.py
+++ b/tests/base.py
@@ -195,9 +195,16 @@
         if not large:
             for fn, content in files.items():
                 fn = os.path.join(path, fn)
-                with open(fn, 'w') as f:
-                    f.write(content)
-                repo.index.add([fn])
+                if content is None:
+                    os.unlink(fn)
+                    repo.index.remove([fn])
+                else:
+                    d = os.path.dirname(fn)
+                    if not os.path.exists(d):
+                        os.makedirs(d)
+                    with open(fn, 'w') as f:
+                        f.write(content)
+                    repo.index.add([fn])
         else:
             for fni in range(100):
                 fn = os.path.join(path, str(fni))
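
The hunk above extends the fake-repo files convention: a value of None now
deletes the file and removes it from the index, and nested paths get their
parent directories created on demand.  A minimal usage sketch, reusing the
fake Gerrit helper the tests below exercise ('org/project' is a placeholder):

    file_dict = {'zuul.yaml': None,           # None requests deletion
                 'zuul.d/main.yaml': 'data'}  # zuul.d/ is created on demand
    A = self.fake_gerrit.addFakeChange('org/project', 'master', 'A',
                                       files=file_dict)
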
diff --git a/tests/unit/test_v3.py b/tests/unit/test_v3.py
index 9d695aa..c681305 100755
--- a/tests/unit/test_v3.py
+++ b/tests/unit/test_v3.py
@@ -775,6 +775,76 @@
         # isn't, this will raise an exception.
         tenant.layout.getJob('project-test2')
 
+    def test_pipeline_error(self):
+        with open(os.path.join(FIXTURE_DIR,
+                               'config/in-repo/git/',
+                               'common-config/zuul.yaml')) as f:
+            base_common_config = f.read()
+
+        in_repo_conf_A = textwrap.dedent(
+            """
+            - pipeline:
+                name: periodic
+                foo: error
+            """)
+
+        file_dict = {'zuul.yaml': None,
+                     'zuul.d/main.yaml': base_common_config,
+                     'zuul.d/test1.yaml': in_repo_conf_A}
+        A = self.fake_gerrit.addFakeChange('common-config', 'master', 'A',
+                                           files=file_dict)
+        self.fake_gerrit.addEvent(A.getPatchsetCreatedEvent(1))
+        self.waitUntilSettled()
+        self.assertEqual(A.reported, 1,
+                         "A should report failure")
+        self.assertIn('syntax error',
+                      A.messages[0],
+                      "A should have an error reported")
+
+    def test_change_series_error(self):
+        with open(os.path.join(FIXTURE_DIR,
+                               'config/in-repo/git/',
+                               'common-config/zuul.yaml')) as f:
+            base_common_config = f.read()
+
+        in_repo_conf_A = textwrap.dedent(
+            """
+            - pipeline:
+                name: periodic
+                foo: error
+            """)
+
+        file_dict = {'zuul.yaml': None,
+                     'zuul.d/main.yaml': base_common_config,
+                     'zuul.d/test1.yaml': in_repo_conf_A}
+        A = self.fake_gerrit.addFakeChange('common-config', 'master', 'A',
+                                           files=file_dict)
+
+        in_repo_conf_B = textwrap.dedent(
+            """
+            - job:
+                name: project-test2
+                foo: error
+            """)
+
+        file_dict = {'zuul.yaml': None,
+                     'zuul.d/main.yaml': base_common_config,
+                     'zuul.d/test1.yaml': in_repo_conf_A,
+                     'zuul.d/test2.yaml': in_repo_conf_B}
+        B = self.fake_gerrit.addFakeChange('common-config', 'master', 'B',
+                                           files=file_dict)
+        B.setDependsOn(A, 1)
+        C = self.fake_gerrit.addFakeChange('common-config', 'master', 'C')
+        C.setDependsOn(B, 1)
+        self.fake_gerrit.addEvent(C.getPatchsetCreatedEvent(1))
+        self.waitUntilSettled()
+
+        self.assertEqual(C.reported, 1,
+                         "C should report failure")
+        self.assertIn('depends on a change that failed to merge',
+                      C.messages[0],
+                      "C should have an error reported")
+
 
 class TestAnsible(AnsibleZuulTestCase):
     # A temporary class to hold new tests while others are disabled
diff --git a/zuul/ansible/library/command.py b/zuul/ansible/library/command.py
index f701b48..0fc6129 100644
--- a/zuul/ansible/library/command.py
+++ b/zuul/ansible/library/command.py
@@ -159,9 +159,14 @@
         # Jenkins format but with microsecond resolution instead of
         # millisecond.  It is kept so log parsing/formatting remains
         # consistent.
-        ts = datetime.datetime.now()
-        outln = '%s | %s' % (ts, ln)
-        self.logfile.write(outln.encode('utf-8'))
+        ts = str(datetime.datetime.now()).encode('utf-8')
+        if not isinstance(ln, bytes):
+            try:
+                ln = ln.encode('utf-8')
+            except Exception:
+                ln = repr(ln).encode('utf-8') + b'\n'
+        outln = b'%s | %s' % (ts, ln)
+        self.logfile.write(outln)
 
 
 def follow(fd, log_uuid):
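
The rewritten log writer above works on bytes end to end, so command output
that is not valid UTF-8 can no longer break log capture.  The same pattern as
a standalone sketch (write_log_line is a hypothetical name, not part of the
module):

    import datetime

    def write_log_line(logfile, ln):
        # Timestamp rendered as text, then encoded; the line may already
        # be bytes when it comes straight off the process pipe.
        ts = str(datetime.datetime.now()).encode('utf-8')
        if not isinstance(ln, bytes):
            try:
                ln = ln.encode('utf-8')
            except Exception:
                # Fall back to repr() for objects that refuse encoding.
                ln = repr(ln).encode('utf-8') + b'\n'
        logfile.write(b'%s | %s' % (ts, ln))
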
diff --git a/zuul/configloader.py b/zuul/configloader.py
index 94c0d2a..3b09623 100644
--- a/zuul/configloader.py
+++ b/zuul/configloader.py
@@ -1471,6 +1471,8 @@
 
     @staticmethod
     def _parseLayout(base, tenant, data, scheduler, connections):
+        # Don't call this method from dynamic reconfiguration because
+        # it interacts with drivers and connections.
         layout = model.Layout(tenant)
 
         TenantParser._parseLayoutItems(layout, tenant, data,
@@ -1582,7 +1584,8 @@
                     config.extend(incdata)
 
     def createDynamicLayout(self, tenant, files,
-                            include_config_projects=False):
+                            include_config_projects=False,
+                            scheduler=None, connections=None):
         if include_config_projects:
             config = model.UnparsedTenantConfig()
             for project in tenant.config_projects:
@@ -1594,22 +1597,29 @@
             self._loadDynamicProjectData(config, project, files, False, tenant)
 
         layout = model.Layout(tenant)
-        # NOTE: the actual pipeline objects (complete with queues and
-        # enqueued items) are copied by reference here.  This allows
-        # our shadow dynamic configuration to continue to interact
-        # with all the other changes, each of which may have their own
-        # version of reality.  We do not support creating, updating,
-        # or deleting pipelines in dynamic layout changes.
-        layout.pipelines = tenant.layout.pipelines
+        if not include_config_projects:
+            # NOTE: the actual pipeline objects (complete with queues
+            # and enqueued items) are copied by reference here.  This
+            # allows our shadow dynamic configuration to continue to
+            # interact with all the other changes, each of which may
+            # have their own version of reality.  We do not support
+            # creating, updating, or deleting pipelines in dynamic
+            # layout changes.
+            layout.pipelines = tenant.layout.pipelines
 
-        # NOTE: the semaphore definitions are copied from the static layout
-        # here. For semaphores there should be no per patch max value but
-        # exactly one value at any time. So we do not support dynamic semaphore
-        # configuration changes.
-        layout.semaphores = tenant.layout.semaphores
+            # NOTE: the semaphore definitions are copied from the
+            # static layout here.  A semaphore must hold exactly one
+            # value at any time, never a per-patch maximum, so we
+            # do not support dynamic semaphore configuration
+            # changes.
+            layout.semaphores = tenant.layout.semaphores
+            skip_pipelines = skip_semaphores = True
+        else:
+            skip_pipelines = skip_semaphores = False
 
-        TenantParser._parseLayoutItems(layout, tenant, config, None, None,
-                                       skip_pipelines=True,
-                                       skip_semaphores=True)
+        TenantParser._parseLayoutItems(layout, tenant, config,
+                                       scheduler, connections,
+                                       skip_pipelines=skip_pipelines,
+                                       skip_semaphores=skip_semaphores)
 
         return layout
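
With this change createDynamicLayout has two modes.  When
include_config_projects is True (the speculative check of config projects),
pipelines and semaphores are parsed from scratch, which is why the real
scheduler and connections must now be passed in; when it is False, both are
carried over from the static layout by reference and parsing skips them.  A
sketch of the two calls as the pipeline manager makes them (see the
manager/__init__.py hunk below):

    # Trusted pass: validate changes to config projects; parsing
    # pipelines requires the scheduler and its connections.
    loader.createDynamicLayout(tenant, build_set.files,
                               include_config_projects=True,
                               scheduler=self.sched,
                               connections=self.sched.connections)

    # Untrusted pass: the layout actually used; pipelines and
    # semaphores are shared with the static layout by reference.
    layout = loader.createDynamicLayout(tenant, build_set.files)
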
diff --git a/zuul/driver/gerrit/gerritconnection.py b/zuul/driver/gerrit/gerritconnection.py
index 35137c7..ecf5f94 100644
--- a/zuul/driver/gerrit/gerritconnection.py
+++ b/zuul/driver/gerrit/gerritconnection.py
@@ -133,25 +133,42 @@
                 event.branch_deleted = True
                 event.branch = event.ref
 
-        if event.change_number:
-            # TODO(jhesketh): Check if the project exists?
-            # and self.connection.sched.getProject(event.project_name):
-
-            # Call _getChange for the side effect of updating the
-            # cache.  Note that this modifies Change objects outside
-            # the main thread.
-            # NOTE(jhesketh): Ideally we'd just remove the change from the
-            # cache to denote that it needs updating. However the change
-            # object is already used by Items and hence BuildSets etc. and
-            # we need to update those objects by reference so that they have
-            # the correct/new information and also avoid hitting gerrit
-            # multiple times.
-            self.connection._getChange(event.change_number,
-                                       event.patch_number,
-                                       refresh=True)
+        self._getChange(event)
         self.connection.logEvent(event)
         self.connection.sched.addEvent(event)
 
+    def _getChange(self, event):
+        # Grab the change if we are managing the project or if it exists
+        # in the cache, as it may be a dependency.
+        if event.change_number:
+            refresh = True
+            if event.change_number not in self.connection._change_cache:
+                refresh = False
+                for tenant in self.connection.sched.abide.tenants.values():
+                    # TODO(fungi): it would be better to have some simple means
+                    # of inferring the hostname from the connection, or at
+                    # least split this into separate method arguments, rather
+                    # than assembling and passing in a baked string.
+                    if (None, None) != tenant.getProject('/'.join((
+                            self.connection.canonical_hostname,
+                            event.project_name))):
+                        refresh = True
+                        break
+
+            if refresh:
+                # Call _getChange for the side effect of updating the
+                # cache.  Note that this modifies Change objects outside
+                # the main thread.
+                # NOTE(jhesketh): Ideally we'd just remove the change from the
+                # cache to denote that it needs updating. However the change
+                # object is already used by Items and hence BuildSets etc. and
+                # we need to update those objects by reference so that they
+                # have the correct/new information and also avoid hitting
+                # gerrit multiple times.
+                self.connection._getChange(event.change_number,
+                                           event.patch_number,
+                                           refresh=True)
+
     def run(self):
         while True:
             if self._stopped:
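
The pre-processing above now refreshes a change only when this Zuul is
actually managing the project, or when the change is already cached and may
therefore be a dependency of something queued.  The gating condensed into a
hypothetical helper (the name _should_refresh is illustrative only):

    def _should_refresh(connection, event):
        if event.change_number in connection._change_cache:
            return True  # cached: keep potential dependencies fresh
        name = '/'.join((connection.canonical_hostname,
                         event.project_name))
        return any(tenant.getProject(name) != (None, None)
                   for tenant in connection.sched.abide.tenants.values())
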
@@ -298,12 +315,17 @@
         # This lets the user supply a list of change objects that are
         # still in use.  Anything in our cache that isn't in the supplied
         # list should be safe to remove from the cache.
-        remove = []
-        for key, change in self._change_cache.items():
-            if change not in relevant:
-                remove.append(key)
-        for key in remove:
-            del self._change_cache[key]
+        remove = {}
+        for change_number, patchsets in self._change_cache.items():
+            for patchset, change in patchsets.items():
+                if change not in relevant:
+                    remove.setdefault(change_number, [])
+                    remove[change_number].append(patchset)
+        for change_number, patchsets in remove.items():
+            for patchset in patchsets:
+                del self._change_cache[change_number][patchset]
+            if not self._change_cache[change_number]:
+                del self._change_cache[change_number]
 
     def getChange(self, event, refresh=False):
         if event.change_number:
@@ -349,21 +371,22 @@
         return change
 
     def _getChange(self, number, patchset, refresh=False, history=None):
-        key = '%s,%s' % (number, patchset)
-        change = self._change_cache.get(key)
+        change = self._change_cache.get(number, {}).get(patchset)
         if change and not refresh:
             return change
         if not change:
             change = GerritChange(None)
             change.number = number
             change.patchset = patchset
-        key = '%s,%s' % (change.number, change.patchset)
-        self._change_cache[key] = change
+        self._change_cache.setdefault(change.number, {})
+        self._change_cache[change.number][change.patchset] = change
         try:
             self._updateChange(change, history)
         except Exception:
-            if key in self._change_cache:
-                del self._change_cache[key]
+            if self._change_cache.get(change.number, {}).get(change.patchset):
+                del self._change_cache[change.number][change.patchset]
+                if not self._change_cache[change.number]:
+                    del self._change_cache[change.number]
             raise
         return change
 
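
The change cache is now a two-level dictionary keyed by change number and
then by patchset, so every patchset of a change can be dropped together once
nothing references it.  The shape and the eviction steps, illustrated:

    change = object()                            # stands in for a GerritChange
    cache = {}                                   # {number: {patchset: change}}
    cache.setdefault(12345, {})['2'] = change    # insert
    hit = cache.get(12345, {}).get('2')          # lookup, None on a miss
    del cache[12345]['2']                        # evict one patchset
    if not cache[12345]:                         # last patchset gone:
        del cache[12345]                         #   drop the change entry too
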
diff --git a/zuul/executor/server.py b/zuul/executor/server.py
index de928db..daa029f 100644
--- a/zuul/executor/server.py
+++ b/zuul/executor/server.py
@@ -36,7 +36,7 @@
 import gear
 
 import zuul.merger.merger
-import zuul.ansible
+import zuul.ansible.logconfig
 from zuul.lib import commandsocket
 
 BUFFER_LINES_FOR_SYNTAX = 200
diff --git a/zuul/manager/__init__.py b/zuul/manager/__init__.py
index 8282f86..efd86eb 100644
--- a/zuul/manager/__init__.py
+++ b/zuul/manager/__init__.py
@@ -444,7 +444,9 @@
             loader.createDynamicLayout(
                 item.pipeline.layout.tenant,
                 build_set.files,
-                include_config_projects=True)
+                include_config_projects=True,
+                scheduler=self.sched,
+                connections=self.sched.connections)
 
             # Then create the config a second time but without changes
             # to config repos so that we actually use this config.
@@ -540,6 +542,7 @@
     def _processOneItem(self, item, nnfi):
         changed = False
         ready = False
+        dequeued = False
         failing_reasons = []  # Reasons this item is failing
 
         item_ahead = item.item_ahead
@@ -594,8 +597,14 @@
                     item.reported_start = True
                 if item.current_build_set.unable_to_merge:
                     failing_reasons.append("it has a merge conflict")
+                    if (not item.live) and (not dequeued):
+                        self.dequeueItem(item)
+                        changed = dequeued = True
                 if item.current_build_set.config_error:
                     failing_reasons.append("it has an invalid configuration")
+                    if (not item.live) and (not dequeued):
+                        self.dequeueItem(item)
+                        changed = dequeued = True
                 if ready and self.provisionNodes(item):
                     changed = True
         if ready and self.executeJobs(item):
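
Non-live items that hit a merge conflict or a broken configuration are now
dequeued immediately, and the new dequeued flag (initialized next to changed
at the top of _processOneItem) keeps every later dequeue site from firing a
second time.  The guard pattern, reduced to its core with a hypothetical
has_error stand-in for the build-set checks:

    dequeued = False
    if has_error and (not item.live) and (not dequeued):
        self.dequeueItem(item)
        changed = dequeued = True  # later sites test `not dequeued`
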
@@ -603,10 +612,10 @@
 
         if item.didAnyJobFail():
             failing_reasons.append("at least one job failed")
-        if (not item.live) and (not item.items_behind):
+        if (not item.live) and (not item.items_behind) and (not dequeued):
             failing_reasons.append("is a non-live item with no items behind")
             self.dequeueItem(item)
-            changed = True
+            changed = dequeued = True
         if ((not item_ahead) and item.areAllJobsComplete() and item.live):
             try:
                 self.reportItem(item)
@@ -618,7 +627,7 @@
                                   (item_behind.change, item))
                     self.cancelJobs(item_behind)
             self.dequeueItem(item)
-            changed = True
+            changed = dequeued = True
         elif not failing_reasons and item.live:
             nnfi = item
         item.current_build_set.failing_reasons = failing_reasons