Merge "Use {{ ansible_host }} for ssh-keyscan"
diff --git a/other-requirements.txt b/bindep.txt
similarity index 100%
rename from other-requirements.txt
rename to bindep.txt
diff --git a/tests/test_scheduler.py b/tests/test_scheduler.py
index 628775d..335f987 100755
--- a/tests/test_scheduler.py
+++ b/tests/test_scheduler.py
@@ -4267,6 +4267,25 @@
         self.waitUntilSettled()
         self.assertEqual(self.history[-1].changes, '3,2 2,1 1,2')
 
+    def test_crd_check_unknown(self):
+        "Test unknown projects in independent pipeline"
+        self.init_repo("org/unknown")
+        A = self.fake_gerrit.addFakeChange('org/project1', 'master', 'A')
+        B = self.fake_gerrit.addFakeChange('org/unknown', 'master', 'B')
+        # A Depends-On: B
+        A.data['commitMessage'] = '%s\n\nDepends-On: %s\n' % (
+            A.subject, B.data['id'])
+
+        # Make sure zuul has seen an event on B.
+        self.fake_gerrit.addEvent(B.getPatchsetCreatedEvent(1))
+        self.fake_gerrit.addEvent(A.getPatchsetCreatedEvent(1))
+        self.waitUntilSettled()
+
+        self.assertEqual(A.data['status'], 'NEW')
+        self.assertEqual(A.reported, 1)
+        self.assertEqual(B.data['status'], 'NEW')
+        self.assertEqual(B.reported, 0)
+
     def test_crd_cycle_join(self):
         "Test an updated change creates a cycle"
         A = self.fake_gerrit.addFakeChange('org/project2', 'master', 'A')
diff --git a/zuul/ansible/library/zuul_console.py b/zuul/ansible/library/zuul_console.py
index 1186235..e70dac8 100644
--- a/zuul/ansible/library/zuul_console.py
+++ b/zuul/ansible/library/zuul_console.py
@@ -60,28 +60,13 @@
 class Server(object):
     def __init__(self, path, port):
         self.path = path
-        s = None
-        for res in socket.getaddrinfo(None, port, socket.AF_UNSPEC,
-                                      socket.SOCK_STREAM, 0,
-                                      socket.AI_PASSIVE):
-            af, socktype, proto, canonname, sa = res
-            try:
-                s = socket.socket(af, socktype, proto)
-                s.setsockopt(socket.SOL_SOCKET,
-                             socket.SO_REUSEADDR, 1)
-            except socket.error:
-                s = None
-                continue
-            try:
-                s.bind(sa)
-                s.listen(1)
-            except socket.error:
-                s.close()
-                s = None
-                continue
-            break
-        if s is None:
-            sys.exit(1)
+
+        s = socket.socket(socket.AF_INET6, socket.SOCK_STREAM)
+        s.setsockopt(socket.SOL_SOCKET,
+                     socket.SO_REUSEADDR, 1)
+        s.bind(('::', port))
+        s.listen(1)
+
         self.socket = s
 
     def accept(self):
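
Note on the simplified listener above: a single AF_INET6 socket bound to '::'
also accepts IPv4 clients on hosts where net.ipv6.bindv6only is 0 (the Linux
default), which is what makes the old getaddrinfo() loop unnecessary. A
minimal standalone sketch of the same idea, with the dual-stack behaviour set
explicitly instead of relying on the sysctl (the port value is only an
example):

    import socket

    port = 19885  # example value; the real Server takes the port as a parameter

    s = socket.socket(socket.AF_INET6, socket.SOCK_STREAM)
    s.setsockopt(socket.SOL_SOCKET, socket.SO_REUSEADDR, 1)
    if hasattr(socket, 'IPV6_V6ONLY'):
        # 0 means IPv4 connections are also accepted (as IPv4-mapped addresses).
        s.setsockopt(socket.IPPROTO_IPV6, socket.IPV6_V6ONLY, 0)
    s.bind(('::', port))
    s.listen(1)
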
diff --git a/zuul/launcher/ansiblelaunchserver.py b/zuul/launcher/ansiblelaunchserver.py
index 819e878..40d0d5a 100644
--- a/zuul/launcher/ansiblelaunchserver.py
+++ b/zuul/launcher/ansiblelaunchserver.py
@@ -68,11 +68,16 @@
     MASS_DO = 101
 
     def sendMassDo(self, functions):
-        data = b'\x00'.join([gear.convert_to_bytes(x) for x in functions])
+        names = [gear.convert_to_bytes(x) for x in functions]
+        data = b'\x00'.join(names)
+        new_function_dict = {}
+        for name in names:
+            new_function_dict[name] = gear.FunctionRecord(name)
         self.broadcast_lock.acquire()
         try:
             p = gear.Packet(gear.constants.REQ, self.MASS_DO, data)
             self.broadcast(p)
+            self.functions = new_function_dict
         finally:
             self.broadcast_lock.release()
 
@@ -547,6 +552,11 @@
 
 
 class NodeWorker(object):
+    retry_args = dict(register='task_result',
+                      until='task_result.rc == 0',
+                      retries=3,
+                      delay=30)
+
     def __init__(self, config, jobs, builds, sites, name, host,
                  description, labels, manager_name, zmq_send_queue,
                  termination_queue, keep_jobdir, callback_dir,
@@ -596,11 +606,6 @@
             self.username = config.get('launcher', 'username')
         else:
             self.username = 'zuul'
-        if self.config.has_option('launcher', 'register_labels'):
-            self.register_labels = config.getboolean('launcher',
-                                                     'register_labels')
-        else:
-            self.register_labels = True
         self.callback_dir = callback_dir
         self.library_dir = library_dir
         self.options = options
@@ -740,9 +745,8 @@
             if not matching_labels:
                 return ret
         ret.add('build:%s' % (job['name'],))
-        if self.register_labels:
-            for label in matching_labels:
-                ret.add('build:%s:%s' % (job['name'], label))
+        for label in matching_labels:
+            ret.add('build:%s:%s' % (job['name'], label))
         return ret
 
     def register(self):
@@ -961,6 +965,8 @@
                                 dest=os.path.join(scproot, '_zuul_ansible'))
                 task = dict(copy=copyargs,
                             delegate_to='127.0.0.1')
+                # This is a local copy and should not fail, so does
+                # not need a retry stanza.
                 tasks.append(task)
 
                 # Fetch the console log from the remote host.
@@ -982,10 +988,12 @@
             task = dict(synchronize=syncargs)
             if not scpfile.get('copy-after-failure'):
                 task['when'] = 'success'
+            task.update(self.retry_args)
             tasks.append(task)
 
             task = self._makeSCPTaskLocalAction(
                 site, scpfile, scproot, parameters)
+            task.update(self.retry_args)
             tasks.append(task)
         return tasks
 
@@ -1053,6 +1061,7 @@
             syncargs['rsync_opts'] = rsync_opts
         task = dict(synchronize=syncargs,
                     when='success')
+        task.update(self.retry_args)
         tasks.append(task)
         task = dict(shell='lftp -f %s' % ftpscript,
                     when='success',
@@ -1075,6 +1084,7 @@
             script.write('open %s\n' % site['host'])
             script.write('user %s %s\n' % (site['user'], site['pass']))
             script.write('mirror -R %s %s\n' % (ftpsource, ftptarget))
+        task.update(self.retry_args)
         tasks.append(task)
         return tasks
 
@@ -1258,6 +1268,9 @@
             config.write('gathering = explicit\n')
             config.write('callback_plugins = %s\n' % self.callback_dir)
             config.write('library = %s\n' % self.library_dir)
+            # Bump the timeout because busy nodes may take more than
+            # 10s to respond.
+            config.write('timeout = 30\n')
 
             config.write('[ssh_connection]\n')
             ssh_args = "-o ControlMaster=auto -o ControlPersist=60s " \
diff --git a/zuul/scheduler.py b/zuul/scheduler.py
index b974762..b52931e 100644
--- a/zuul/scheduler.py
+++ b/zuul/scheduler.py
@@ -625,12 +625,12 @@
     def setMerger(self, merger):
         self.merger = merger
 
-    def getProject(self, name, create_foreign=False):
+    def getProject(self, name):
         self.layout_lock.acquire()
         p = None
         try:
             p = self.layout.projects.get(name)
-            if p is None and create_foreign:
+            if p is None:
                 self.log.info("Registering foreign project: %s" % name)
                 p = Project(name, foreign=True)
                 self.layout.projects[name] = p
diff --git a/zuul/source/gerrit.py b/zuul/source/gerrit.py
index 73cf726..463f315 100644
--- a/zuul/source/gerrit.py
+++ b/zuul/source/gerrit.py
@@ -243,11 +243,7 @@
 
         if 'project' not in data:
             raise exceptions.ChangeNotFound(change.number, change.patchset)
-        # If updated changed came as a dependent on
-        # and its project is not defined,
-        # then create a 'foreign' project for it in layout
-        change.project = self.sched.getProject(data['project'],
-                                               create_foreign=bool(history))
+        change.project = self.sched.getProject(data['project'])
         change.branch = data['branch']
         change.url = data['url']
         max_ps = 0
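
Taken together with the scheduler change above, a Depends-On that points at a
project missing from the layout no longer needs the caller to pass
create_foreign: any getProject() lookup of an unknown name now registers a
foreign Project, and (as test_crd_check_unknown shows) changes in that project
are tracked as dependencies but not reported. A minimal sketch of the new
behaviour, assuming the Project model stores the flag as .foreign and given a
configured scheduler instance sched:

    # 'org/unknown' is not defined in the layout.
    p = sched.getProject('org/unknown')
    assert p is not None                 # a foreign Project was created
    assert p.foreign                     # marked foreign; its changes are not reported
    assert 'org/unknown' in sched.layout.projects
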