Merge "Close command socket after sending stop" into feature/zuulv3
diff --git a/.zuul.yaml b/.zuul.yaml
index c4e3cf2..8522507 100644
--- a/.zuul.yaml
+++ b/.zuul.yaml
@@ -45,7 +45,7 @@
- tox-py35
post:
jobs:
- - publish-openstack-python-docs:
+ - publish-openstack-python-docs-infra:
vars:
afs_publisher_target: 'infra/zuul'
- publish-openstack-python-branch-tarball
diff --git a/etc/status/public_html/jquery.zuul.js b/etc/status/public_html/jquery.zuul.js
index c2cf279..1937cd5 100644
--- a/etc/status/public_html/jquery.zuul.js
+++ b/etc/status/public_html/jquery.zuul.js
@@ -554,14 +554,11 @@
}
$.each(changes, function (change_i, change) {
- // Only add a change when it has jobs
- if (change.jobs.length > 0) {
- var $change_box =
- format.change_with_status_tree(
- change, change_queue);
- $html.append($change_box);
- format.display_patchset($change_box);
- }
+ var $change_box =
+ format.change_with_status_tree(
+ change, change_queue);
+ $html.append($change_box);
+ format.display_patchset($change_box);
});
});
});
diff --git a/playbooks/zuul-stream/functional.yaml b/playbooks/zuul-stream/functional.yaml
index 9275037..6b67b05 100644
--- a/playbooks/zuul-stream/functional.yaml
+++ b/playbooks/zuul-stream/functional.yaml
@@ -22,8 +22,8 @@
- name: Validate output - shell task
shell: |
- egrep "^.*\| node1 \| link/loopback" job-output.txt
- egrep "^.*\| node2 \| link/loopback" job-output.txt
+ egrep "^.*\| node1 \| link/loopback" job-output.txt
+ egrep "^.*\| node2 \| link/loopback" job-output.txt
- name: Validate output - loop with items
shell: |
diff --git a/tests/base.py b/tests/base.py
index c49e1ce..fcc5e84 100755
--- a/tests/base.py
+++ b/tests/base.py
@@ -195,9 +195,16 @@
if not large:
for fn, content in files.items():
fn = os.path.join(path, fn)
- with open(fn, 'w') as f:
- f.write(content)
- repo.index.add([fn])
+ if content is None:
+ os.unlink(fn)
+ repo.index.remove([fn])
+ else:
+ d = os.path.dirname(fn)
+ if not os.path.exists(d):
+ os.makedirs(d)
+ with open(fn, 'w') as f:
+ f.write(content)
+ repo.index.add([fn])
else:
for fni in range(100):
fn = os.path.join(path, str(fni))
@@ -1188,7 +1195,7 @@
def __init__(self):
threading.Thread.__init__(self)
self.daemon = True
- self.sock = socket.socket(socket.AF_INET, socket.SOCK_DGRAM)
+ self.sock = socket.socket(socket.AF_INET6, socket.SOCK_DGRAM)
self.sock.bind(('', 0))
self.port = self.sock.getsockname()[1]
self.wake_read, self.wake_write = os.pipe()
@@ -2138,6 +2145,7 @@
def getGithubConnection(driver, name, config):
con = FakeGithubConnection(driver, name, config,
upstream_root=self.upstream_root)
+ self.event_queues.append(con.event_queue)
setattr(self, 'fake_' + name, con)
return con
diff --git a/tests/fixtures/config/in-repo-join/git/common-config/playbooks/common-config-test.yaml b/tests/fixtures/config/in-repo-join/git/common-config/playbooks/common-config-test.yaml
new file mode 100644
index 0000000..f679dce
--- /dev/null
+++ b/tests/fixtures/config/in-repo-join/git/common-config/playbooks/common-config-test.yaml
@@ -0,0 +1,2 @@
+- hosts: all
+ tasks: []
diff --git a/tests/fixtures/config/in-repo-join/git/common-config/zuul.yaml b/tests/fixtures/config/in-repo-join/git/common-config/zuul.yaml
new file mode 100644
index 0000000..561fc39
--- /dev/null
+++ b/tests/fixtures/config/in-repo-join/git/common-config/zuul.yaml
@@ -0,0 +1,46 @@
+- pipeline:
+ name: check
+ manager: independent
+ trigger:
+ gerrit:
+ - event: patchset-created
+ success:
+ gerrit:
+ Verified: 1
+ failure:
+ gerrit:
+ Verified: -1
+
+- pipeline:
+ name: gate
+ manager: dependent
+ success-message: Build succeeded (tenant-one-gate).
+ trigger:
+ gerrit:
+ - event: comment-added
+ approval:
+ - Approved: 1
+ success:
+ gerrit:
+ Verified: 2
+ submit: true
+ failure:
+ gerrit:
+ Verified: -2
+ start:
+ gerrit:
+ Verified: 0
+ precedence: high
+
+- job:
+ name: base
+ parent: null
+
+- job:
+ name: common-config-test
+
+- project:
+ name: org/project
+ check:
+ jobs:
+ - common-config-test
diff --git a/tests/fixtures/config/in-repo-join/git/org_project/.zuul.yaml b/tests/fixtures/config/in-repo-join/git/org_project/.zuul.yaml
new file mode 100644
index 0000000..280342c
--- /dev/null
+++ b/tests/fixtures/config/in-repo-join/git/org_project/.zuul.yaml
@@ -0,0 +1,2 @@
+- job:
+ name: project-test1
diff --git a/tests/fixtures/config/in-repo-join/git/org_project/README b/tests/fixtures/config/in-repo-join/git/org_project/README
new file mode 100644
index 0000000..9daeafb
--- /dev/null
+++ b/tests/fixtures/config/in-repo-join/git/org_project/README
@@ -0,0 +1 @@
+test
diff --git a/tests/fixtures/config/in-repo-join/git/org_project/playbooks/project-test1.yaml b/tests/fixtures/config/in-repo-join/git/org_project/playbooks/project-test1.yaml
new file mode 100644
index 0000000..f679dce
--- /dev/null
+++ b/tests/fixtures/config/in-repo-join/git/org_project/playbooks/project-test1.yaml
@@ -0,0 +1,2 @@
+- hosts: all
+ tasks: []
diff --git a/tests/fixtures/config/in-repo-join/main.yaml b/tests/fixtures/config/in-repo-join/main.yaml
new file mode 100644
index 0000000..208e274
--- /dev/null
+++ b/tests/fixtures/config/in-repo-join/main.yaml
@@ -0,0 +1,8 @@
+- tenant:
+ name: tenant-one
+ source:
+ gerrit:
+ config-projects:
+ - common-config
+ untrusted-projects:
+ - org/project
diff --git a/tests/fixtures/config/in-repo/git/common-config/zuul.yaml b/tests/fixtures/config/in-repo/git/common-config/zuul.yaml
index ff4268b..5623467 100644
--- a/tests/fixtures/config/in-repo/git/common-config/zuul.yaml
+++ b/tests/fixtures/config/in-repo/git/common-config/zuul.yaml
@@ -78,6 +78,8 @@
- project:
name: common-config
+ check:
+ jobs: []
tenant-one-gate:
jobs:
- common-config-test
diff --git a/tests/fixtures/config/in-repo/git/org_project/.zuul.yaml b/tests/fixtures/config/in-repo/git/org_project/.zuul.yaml
index 60cd434..e1c27bb 100644
--- a/tests/fixtures/config/in-repo/git/org_project/.zuul.yaml
+++ b/tests/fixtures/config/in-repo/git/org_project/.zuul.yaml
@@ -3,6 +3,8 @@
- project:
name: org/project
+ check:
+ jobs: []
tenant-one-gate:
jobs:
- project-test1
diff --git a/tests/unit/test_github_driver.py b/tests/unit/test_github_driver.py
index a088236..ebb5e1c 100644
--- a/tests/unit/test_github_driver.py
+++ b/tests/unit/test_github_driver.py
@@ -17,6 +17,7 @@
from testtools.matchers import MatchesRegex, StartsWith
import urllib
import time
+from unittest import skip
import git
@@ -685,6 +686,8 @@
# New timestamp should be greater than the old timestamp
self.assertLess(old, new)
+ # TODO(jlk): Make this a more generic test for unknown project
+ @skip("Skipped for rewrite of webhook handler")
@simple_layout('layouts/basic-github.yaml', driver='github')
def test_ping_event(self):
# Test valid ping
diff --git a/tests/unit/test_log_streamer.py b/tests/unit/test_log_streamer.py
index f47a8c8..03d3563 100644
--- a/tests/unit/test_log_streamer.py
+++ b/tests/unit/test_log_streamer.py
@@ -22,6 +22,7 @@
import os.path
import socket
import tempfile
+import testtools
import threading
import time
@@ -46,16 +47,13 @@
streamer = self.startStreamer(port)
self.addCleanup(streamer.stop)
- s = socket.socket(socket.AF_INET, socket.SOCK_STREAM)
- self.addCleanup(s.close)
- self.assertEqual(0, s.connect_ex((self.host, port)))
+ s = socket.create_connection((self.host, port))
s.close()
streamer.stop()
- s = socket.socket(socket.AF_INET, socket.SOCK_STREAM)
- self.addCleanup(s.close)
- self.assertNotEqual(0, s.connect_ex((self.host, port)))
+ with testtools.ExpectedException(ConnectionRefusedError):
+ s = socket.create_connection((self.host, port))
s.close()
@@ -80,8 +78,7 @@
root = tempfile.gettempdir()
self.streamer = zuul.lib.log_streamer.LogStreamer(None, self.host,
port, root)
- s = socket.socket(socket.AF_INET, socket.SOCK_STREAM)
- s.connect((self.host, port))
+ s = socket.create_connection((self.host, port))
self.addCleanup(s.close)
req = '%s\n' % build_uuid
@@ -237,8 +234,11 @@
self.addCleanup(web_server.stop)
# Wait until web server is started
- with socket.socket(socket.AF_INET, socket.SOCK_STREAM) as s:
- while s.connect_ex((self.host, 9000)):
+ while True:
+ try:
+ with socket.create_connection((self.host, 9000)):
+ break
+ except ConnectionRefusedError:
time.sleep(0.1)
# Start a thread with the websocket client
diff --git a/tests/unit/test_v3.py b/tests/unit/test_v3.py
index 9d695aa..2b27b0e 100755
--- a/tests/unit/test_v3.py
+++ b/tests/unit/test_v3.py
@@ -371,55 +371,6 @@
dict(name='project-test1', result='SUCCESS', changes='1,2'),
dict(name='project-test2', result='SUCCESS', changes='1,2')])
- def test_dynamic_dependent_pipeline(self):
- # Test dynamically adding a project to a
- # dependent pipeline for the first time
- self.executor_server.hold_jobs_in_build = True
-
- tenant = self.sched.abide.tenants.get('tenant-one')
- gate_pipeline = tenant.layout.pipelines['gate']
-
- in_repo_conf = textwrap.dedent(
- """
- - job:
- name: project-test1
-
- - job:
- name: project-test2
-
- - project:
- name: org/project
- gate:
- jobs:
- - project-test2
- """)
-
- in_repo_playbook = textwrap.dedent(
- """
- - hosts: all
- tasks: []
- """)
-
- file_dict = {'.zuul.yaml': in_repo_conf,
- 'playbooks/project-test2.yaml': in_repo_playbook}
- A = self.fake_gerrit.addFakeChange('org/project', 'master', 'A',
- files=file_dict)
- A.addApproval('Approved', 1)
- self.fake_gerrit.addEvent(A.addApproval('Code-Review', 2))
- self.waitUntilSettled()
-
- items = gate_pipeline.getAllItems()
- self.assertEqual(items[0].change.number, '1')
- self.assertEqual(items[0].change.patchset, '1')
- self.assertTrue(items[0].live)
-
- self.executor_server.hold_jobs_in_build = False
- self.executor_server.release()
- self.waitUntilSettled()
-
- # Make sure the dynamic queue got cleaned up
- self.assertEqual(gate_pipeline.queues, [])
-
def test_in_repo_branch(self):
in_repo_conf = textwrap.dedent(
"""
@@ -544,6 +495,84 @@
dict(name='project-test2', result='SUCCESS', changes='1,1 2,1'),
])
+ def test_yaml_list_error(self):
+ in_repo_conf = textwrap.dedent(
+ """
+ job: foo
+ """)
+
+ file_dict = {'.zuul.yaml': in_repo_conf}
+ A = self.fake_gerrit.addFakeChange('org/project', 'master', 'A',
+ files=file_dict)
+ A.addApproval('Code-Review', 2)
+ self.fake_gerrit.addEvent(A.addApproval('Approved', 1))
+ self.waitUntilSettled()
+
+ self.assertEqual(A.data['status'], 'NEW')
+ self.assertEqual(A.reported, 1,
+ "A should report failure")
+ self.assertIn('not a list', A.messages[0],
+ "A should have a syntax error reported")
+
+ def test_yaml_dict_error(self):
+ in_repo_conf = textwrap.dedent(
+ """
+ - job
+ """)
+
+ file_dict = {'.zuul.yaml': in_repo_conf}
+ A = self.fake_gerrit.addFakeChange('org/project', 'master', 'A',
+ files=file_dict)
+ A.addApproval('Code-Review', 2)
+ self.fake_gerrit.addEvent(A.addApproval('Approved', 1))
+ self.waitUntilSettled()
+
+ self.assertEqual(A.data['status'], 'NEW')
+ self.assertEqual(A.reported, 1,
+ "A should report failure")
+ self.assertIn('not a dictionary', A.messages[0],
+ "A should have a syntax error reported")
+
+ def test_yaml_key_error(self):
+ in_repo_conf = textwrap.dedent(
+ """
+ - job:
+ name: project-test2
+ """)
+
+ file_dict = {'.zuul.yaml': in_repo_conf}
+ A = self.fake_gerrit.addFakeChange('org/project', 'master', 'A',
+ files=file_dict)
+ A.addApproval('Code-Review', 2)
+ self.fake_gerrit.addEvent(A.addApproval('Approved', 1))
+ self.waitUntilSettled()
+
+ self.assertEqual(A.data['status'], 'NEW')
+ self.assertEqual(A.reported, 1,
+ "A should report failure")
+ self.assertIn('has more than one key', A.messages[0],
+ "A should have a syntax error reported")
+
+ def test_yaml_unknown_error(self):
+ in_repo_conf = textwrap.dedent(
+ """
+ - foobar:
+ foo: bar
+ """)
+
+ file_dict = {'.zuul.yaml': in_repo_conf}
+ A = self.fake_gerrit.addFakeChange('org/project', 'master', 'A',
+ files=file_dict)
+ A.addApproval('Code-Review', 2)
+ self.fake_gerrit.addEvent(A.addApproval('Approved', 1))
+ self.waitUntilSettled()
+
+ self.assertEqual(A.data['status'], 'NEW')
+ self.assertEqual(A.reported, 1,
+ "A should report failure")
+ self.assertIn('not recognized', A.messages[0],
+ "A should have a syntax error reported")
+
def test_untrusted_syntax_error(self):
in_repo_conf = textwrap.dedent(
"""
@@ -775,6 +804,194 @@
# isn't this will raise an exception.
tenant.layout.getJob('project-test2')
+ def test_pipeline_error(self):
+ with open(os.path.join(FIXTURE_DIR,
+ 'config/in-repo/git/',
+ 'common-config/zuul.yaml')) as f:
+ base_common_config = f.read()
+
+ in_repo_conf_A = textwrap.dedent(
+ """
+ - pipeline:
+ name: periodic
+ foo: error
+ """)
+
+ file_dict = {'zuul.yaml': None,
+ 'zuul.d/main.yaml': base_common_config,
+ 'zuul.d/test1.yaml': in_repo_conf_A}
+ A = self.fake_gerrit.addFakeChange('common-config', 'master', 'A',
+ files=file_dict)
+ self.fake_gerrit.addEvent(A.getPatchsetCreatedEvent(1))
+ self.waitUntilSettled()
+ self.assertEqual(A.reported, 1,
+ "A should report failure")
+ self.assertIn('syntax error',
+ A.messages[0],
+ "A should have an error reported")
+
+ def test_change_series_error(self):
+ with open(os.path.join(FIXTURE_DIR,
+ 'config/in-repo/git/',
+ 'common-config/zuul.yaml')) as f:
+ base_common_config = f.read()
+
+ in_repo_conf_A = textwrap.dedent(
+ """
+ - pipeline:
+ name: periodic
+ foo: error
+ """)
+
+ file_dict = {'zuul.yaml': None,
+ 'zuul.d/main.yaml': base_common_config,
+ 'zuul.d/test1.yaml': in_repo_conf_A}
+ A = self.fake_gerrit.addFakeChange('common-config', 'master', 'A',
+ files=file_dict)
+
+ in_repo_conf_B = textwrap.dedent(
+ """
+ - job:
+ name: project-test2
+ foo: error
+ """)
+
+ file_dict = {'zuul.yaml': None,
+ 'zuul.d/main.yaml': base_common_config,
+ 'zuul.d/test1.yaml': in_repo_conf_A,
+ 'zuul.d/test2.yaml': in_repo_conf_B}
+ B = self.fake_gerrit.addFakeChange('common-config', 'master', 'B',
+ files=file_dict)
+ B.setDependsOn(A, 1)
+ C = self.fake_gerrit.addFakeChange('common-config', 'master', 'C')
+ C.setDependsOn(B, 1)
+ self.fake_gerrit.addEvent(C.getPatchsetCreatedEvent(1))
+ self.waitUntilSettled()
+
+ self.assertEqual(C.reported, 1,
+ "C should report failure")
+ self.assertIn('depends on a change that failed to merge',
+ C.messages[0],
+ "C should have an error reported")
+
+
+class TestInRepoJoin(ZuulTestCase):
+ # In this config, org/project is not a member of any pipelines, so
+ # that we may test the changes that cause it to join them.
+
+ tenant_config_file = 'config/in-repo-join/main.yaml'
+
+ def test_dynamic_dependent_pipeline(self):
+ # Test dynamically adding a project to a
+ # dependent pipeline for the first time
+ self.executor_server.hold_jobs_in_build = True
+
+ tenant = self.sched.abide.tenants.get('tenant-one')
+ gate_pipeline = tenant.layout.pipelines['gate']
+
+ in_repo_conf = textwrap.dedent(
+ """
+ - job:
+ name: project-test1
+
+ - job:
+ name: project-test2
+
+ - project:
+ name: org/project
+ gate:
+ jobs:
+ - project-test2
+ """)
+
+ in_repo_playbook = textwrap.dedent(
+ """
+ - hosts: all
+ tasks: []
+ """)
+
+ file_dict = {'.zuul.yaml': in_repo_conf,
+ 'playbooks/project-test2.yaml': in_repo_playbook}
+ A = self.fake_gerrit.addFakeChange('org/project', 'master', 'A',
+ files=file_dict)
+ A.addApproval('Code-Review', 2)
+ self.fake_gerrit.addEvent(A.addApproval('Approved', 1))
+ self.waitUntilSettled()
+
+ items = gate_pipeline.getAllItems()
+ self.assertEqual(items[0].change.number, '1')
+ self.assertEqual(items[0].change.patchset, '1')
+ self.assertTrue(items[0].live)
+
+ self.executor_server.hold_jobs_in_build = False
+ self.executor_server.release()
+ self.waitUntilSettled()
+
+ # Make sure the dynamic queue got cleaned up
+ self.assertEqual(gate_pipeline.queues, [])
+
+ def test_dynamic_dependent_pipeline_failure(self):
+ # Test that a change behind a failing change adding a project
+ # to a dependent pipeline is dequeued.
+ self.executor_server.hold_jobs_in_build = True
+
+ in_repo_conf = textwrap.dedent(
+ """
+ - job:
+ name: project-test1
+
+ - project:
+ name: org/project
+ gate:
+ jobs:
+ - project-test1
+ """)
+
+ file_dict = {'.zuul.yaml': in_repo_conf}
+ A = self.fake_gerrit.addFakeChange('org/project', 'master', 'A',
+ files=file_dict)
+ self.executor_server.failJob('project-test1', A)
+ A.addApproval('Code-Review', 2)
+ self.fake_gerrit.addEvent(A.addApproval('Approved', 1))
+ self.waitUntilSettled()
+
+ B = self.fake_gerrit.addFakeChange('org/project', 'master', 'B')
+ B.addApproval('Code-Review', 2)
+ self.fake_gerrit.addEvent(B.addApproval('Approved', 1))
+ self.waitUntilSettled()
+
+ self.orderedRelease()
+ self.waitUntilSettled()
+ self.assertEqual(A.reported, 2,
+ "A should report start and failure")
+ self.assertEqual(A.data['status'], 'NEW')
+ self.assertEqual(B.reported, 1,
+ "B should report start")
+ self.assertHistory([
+ dict(name='project-test1', result='FAILURE', changes='1,1'),
+ dict(name='project-test1', result='ABORTED', changes='1,1 2,1'),
+ ], ordered=False)
+
+ def test_dynamic_dependent_pipeline_absent(self):
+ # Test that a series of dependent changes don't report merge
+ # failures to a pipeline they aren't in.
+ A = self.fake_gerrit.addFakeChange('org/project', 'master', 'A')
+ B = self.fake_gerrit.addFakeChange('org/project', 'master', 'B')
+ B.setDependsOn(A, 1)
+
+ A.addApproval('Code-Review', 2)
+ A.addApproval('Approved', 1)
+ B.addApproval('Code-Review', 2)
+ self.fake_gerrit.addEvent(B.addApproval('Approved', 1))
+ self.waitUntilSettled()
+ self.assertEqual(A.reported, 0,
+ "A should not report")
+ self.assertEqual(A.data['status'], 'NEW')
+ self.assertEqual(B.reported, 0,
+ "B should not report")
+ self.assertEqual(B.data['status'], 'NEW')
+ self.assertHistory([])
+
class TestAnsible(AnsibleZuulTestCase):
# A temporary class to hold new tests while others are disabled
diff --git a/tools/zuul-cloner-shim.py b/tools/zuul-cloner-shim.py
new file mode 100755
index 0000000..3d1b2ae
--- /dev/null
+++ b/tools/zuul-cloner-shim.py
@@ -0,0 +1,157 @@
+#!/usr/bin/env python
+# Copyright 2017 Red Hat
+#
+# Licensed under the Apache License, Version 2.0 (the "License"); you may
+# not use this file except in compliance with the License. You may obtain
+# a copy of the License at
+#
+# http://www.apache.org/licenses/LICENSE-2.0
+#
+# Unless required by applicable law or agreed to in writing, software
+# distributed under the License is distributed on an "AS IS" BASIS, WITHOUT
+# WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. See the
+# License for the specific language governing permissions and limitations
+# under the License.
+
+import argparse
+import os
+import re
+import sys
+import yaml
+
+from collections import defaultdict
+from collections import OrderedDict
+
+REPO_SRC_DIR = "~zuul/src/git.openstack.org/"
+
+
+# Class copied from zuul/lib/clonemapper.py with minor logging changes
+class CloneMapper(object):
+
+ def __init__(self, clonemap, projects):
+ self.clonemap = clonemap
+ self.projects = projects
+
+ def expand(self, workspace):
+ print("Workspace path set to: %s" % workspace)
+
+ is_valid = True
+ ret = OrderedDict()
+ errors = []
+ for project in self.projects:
+ dests = []
+ for mapping in self.clonemap:
+ if re.match(r'^%s$' % mapping['name'], project):
+ # Might be matched more than one time
+ dests.append(
+ re.sub(mapping['name'], mapping['dest'], project))
+
+ if len(dests) > 1:
+ errors.append(
+ "Duplicate destinations for %s: %s." % (project, dests))
+ is_valid = False
+ elif len(dests) == 0:
+ print("Using %s as destination (unmatched)" % project)
+ ret[project] = [project]
+ else:
+ ret[project] = dests
+
+ if not is_valid:
+ raise Exception("Expansion errors: %s" % errors)
+
+ print("Mapping projects to workspace...")
+ for project, dest in ret.items():
+ dest = os.path.normpath(os.path.join(workspace, dest[0]))
+ ret[project] = dest
+ print(" %s -> %s" % (project, dest))
+
+ print("Checking overlap in destination directories...")
+ check = defaultdict(list)
+ for project, dest in ret.items():
+ check[dest].append(project)
+
+ dupes = dict((d, p) for (d, p) in check.items() if len(p) > 1)
+ if dupes:
+ raise Exception("Some projects share the same destination: %s",
+ dupes)
+
+ print("Expansion completed.")
+ return ret
+
+
+def parseArgs():
+ ZUUL_ENV_SUFFIXES = ('branch', 'ref', 'url', 'project', 'newrev')
+
+ parser = argparse.ArgumentParser()
+
+ # Ignored arguments
+ parser.add_argument('-v', '--verbose', dest='verbose',
+ action='store_true', help='IGNORED')
+ parser.add_argument('--color', dest='color', action='store_true',
+ help='IGNORED')
+ parser.add_argument('--cache-dir', dest='cache_dir', help='IGNORED')
+ parser.add_argument('git_base_url', help='IGNORED')
+ parser.add_argument('--branch', help='IGNORED')
+ parser.add_argument('--project-branch', nargs=1, action='append',
+ metavar='PROJECT=BRANCH', help='IGNORED')
+ for zuul_suffix in ZUUL_ENV_SUFFIXES:
+ env_name = 'ZUUL_%s' % zuul_suffix.upper()
+ parser.add_argument(
+ '--zuul-%s' % zuul_suffix, metavar='$' + env_name,
+ help='IGNORED'
+ )
+
+ # Active arguments
+ parser.add_argument('-m', '--map', dest='clone_map_file',
+ help='specify clone map file')
+ parser.add_argument('--workspace', dest='workspace',
+ default=os.getcwd(),
+ help='where to clone repositories to')
+ parser.add_argument('projects', nargs='+',
+ help='list of Gerrit projects to clone')
+
+ return parser.parse_args()
+
+
+def readCloneMap(clone_map):
+ clone_map_file = os.path.expanduser(clone_map)
+ if not os.path.exists(clone_map_file):
+ raise Exception("Unable to read clone map file at %s." %
+ clone_map_file)
+ clone_map_file = open(clone_map_file)
+ clone_map = yaml.safe_load(clone_map_file).get('clonemap')
+ return clone_map
+
+
+def main():
+ args = parseArgs()
+
+ clone_map = []
+ if args.clone_map_file:
+ clone_map = readCloneMap(args.clone_map_file)
+
+ mapper = CloneMapper(clone_map, args.projects)
+ dests = mapper.expand(workspace=args.workspace)
+
+ for project in args.projects:
+ src = os.path.join(os.path.expanduser(REPO_SRC_DIR), project)
+ dst = dests[project]
+
+ # Remove the tail end of the path (since the copy operation will
+ # automatically create that)
+ d = dst.rstrip('/')
+ d, base = os.path.split(d)
+ if not os.path.exists(d):
+ print("Creating %s" % d)
+ os.makedirs(d)
+
+ # Create hard link copy of the source directory
+ cmd = "cp -al %s %s" % (src, dst)
+ print("%s" % cmd)
+ if os.system(cmd):
+ print("Error executing: %s" % cmd)
+ sys.exit(1)
+
+
+if __name__ == "__main__":
+ main()
diff --git a/zuul/ansible/callback/zuul_stream.py b/zuul/ansible/callback/zuul_stream.py
index 6aec86b..db58951 100644
--- a/zuul/ansible/callback/zuul_stream.py
+++ b/zuul/ansible/callback/zuul_stream.py
@@ -122,6 +122,7 @@
self._logger = logging.getLogger('zuul.executor.ansible')
def _log(self, msg, ts=None, job=True, executor=False, debug=False):
+ msg = msg.rstrip()
if job:
now = ts or datetime.datetime.now()
self._logger.info("{now} | {msg}".format(now=now, msg=msg))
@@ -154,7 +155,6 @@
pass
else:
ts, ln = line.split(' | ', 1)
- ln = ln.strip()
self._log("%s | %s " % (host, ln), ts=ts)
@@ -201,10 +201,11 @@
msg = u"PLAY [{name}]".format(name=name)
self._log(msg)
- # Log an extra blank line to get space after each play
- self._log("")
def v2_playbook_on_task_start(self, task, is_conditional):
+ # Log an extra blank line to get space before each task
+ self._log("")
+
self._task = task
if self._play.strategy != 'free':
@@ -275,7 +276,7 @@
if is_localhost:
for line in stdout_lines:
hostname = self._get_hostname(result)
- self._log("%s | %s " % (hostname, line.strip()))
+ self._log("%s | %s " % (hostname, line))
def v2_runner_on_failed(self, result, ignore_errors=False):
result_dict = dict(result._result)
@@ -302,8 +303,6 @@
result=result, status='ERROR', result_dict=result_dict)
if ignore_errors:
self._log_message(result, "Ignoring Errors", status="ERROR")
- # Log an extra blank line to get space after each task
- self._log("")
def v2_runner_on_skipped(self, result):
if result._task.loop:
@@ -314,8 +313,6 @@
if reason:
# No reason means it's an item, which we'll log differently
self._log_message(result, status='skipping', msg=reason)
- # Log an extra blank line to get space after each skip
- self._log("")
def v2_runner_item_on_skipped(self, result):
reason = result._result.get('skip_reason')
@@ -399,8 +396,6 @@
result,
"Runtime: {delta} Start: {start} End: {end}".format(
**result_dict))
- # Log an extra blank line to get space after each task
- self._log("")
def v2_runner_item_on_ok(self, result):
result_dict = dict(result._result)
@@ -462,10 +457,11 @@
if self._deferred_result:
self._process_deferred(result)
- # Log an extra blank line to get space after each task
- self._log("")
def v2_playbook_on_stats(self, stats):
+ # Add a spacer line before the stats so that there will be a line
+ # between the last task and the recap
+ self._log("")
self._log("PLAY RECAP")
@@ -554,7 +550,7 @@
msg = result_dict['msg']
result_dict = None
if msg:
- msg_lines = msg.strip().split('\n')
+ msg_lines = msg.rstrip().split('\n')
if len(msg_lines) > 1:
self._log("{host} | {status}:".format(
host=hostname, status=status))
diff --git a/zuul/ansible/library/command.py b/zuul/ansible/library/command.py
index f701b48..0fc6129 100644
--- a/zuul/ansible/library/command.py
+++ b/zuul/ansible/library/command.py
@@ -159,9 +159,14 @@
# Jenkins format but with microsecond resolution instead of
# millisecond. It is kept so log parsing/formatting remains
# consistent.
- ts = datetime.datetime.now()
- outln = '%s | %s' % (ts, ln)
- self.logfile.write(outln.encode('utf-8'))
+ ts = str(datetime.datetime.now()).encode('utf-8')
+ if not isinstance(ln, bytes):
+ try:
+ ln = ln.encode('utf-8')
+ except Exception:
+ ln = repr(ln).encode('utf-8') + b'\n'
+ outln = b'%s | %s' % (ts, ln)
+ self.logfile.write(outln)
def follow(fd, log_uuid):
diff --git a/zuul/ansible/logconfig.py b/zuul/ansible/logconfig.py
index 7c3507b..7ef43a8 100644
--- a/zuul/ansible/logconfig.py
+++ b/zuul/ansible/logconfig.py
@@ -13,6 +13,7 @@
# under the License.
import abc
+import copy
import logging.config
import json
import os
@@ -161,14 +162,15 @@
logging.config.dictConfig(self._config)
def writeJson(self, filename: str):
- open(filename, 'w').write(json.dumps(self._config, indent=2))
+ with open(filename, 'w') as f:
+ f.write(json.dumps(self._config, indent=2))
class JobLoggingConfig(DictLoggingConfig):
def __init__(self, config=None, job_output_file=None):
if not config:
- config = _DEFAULT_JOB_LOGGING_CONFIG.copy()
+ config = copy.deepcopy(_DEFAULT_JOB_LOGGING_CONFIG)
super(JobLoggingConfig, self).__init__(config=config)
if job_output_file:
self.job_output_file = job_output_file
@@ -190,7 +192,7 @@
def __init__(self, config=None, server=None):
if not config:
- config = _DEFAULT_SERVER_LOGGING_CONFIG.copy()
+ config = copy.deepcopy(_DEFAULT_SERVER_LOGGING_CONFIG)
super(ServerLoggingConfig, self).__init__(config=config)
if server:
self.server = server
@@ -206,7 +208,7 @@
# config above because we're templating out the filename. Also, we
# only want to add the handler if we're actually going to use it.
for name, handler in _DEFAULT_SERVER_FILE_HANDLERS.items():
- server_handler = handler.copy()
+ server_handler = copy.deepcopy(handler)
server_handler['filename'] = server_handler['filename'].format(
server=server)
self._config['handlers'][name] = server_handler
diff --git a/zuul/configloader.py b/zuul/configloader.py
index 94c0d2a..a923fca 100644
--- a/zuul/configloader.py
+++ b/zuul/configloader.py
@@ -120,6 +120,30 @@
@contextmanager
+def early_configuration_exceptions(context):
+ try:
+ yield
+ except ConfigurationSyntaxError:
+ raise
+ except Exception as e:
+ intro = textwrap.fill(textwrap.dedent("""\
+ Zuul encountered a syntax error while parsing its configuration in the
+ repo {repo} on branch {branch}. The error was:""".format(
+ repo=context.project.name,
+ branch=context.branch,
+ )))
+
+ m = textwrap.dedent("""\
+ {intro}
+
+ {error}""")
+
+ m = m.format(intro=intro,
+ error=indent(str(e)))
+ raise ConfigurationSyntaxError(m)
+
+
+@contextmanager
def configuration_exceptions(stanza, conf):
try:
yield
@@ -1367,13 +1391,15 @@
def _parseConfigProjectLayout(data, source_context):
# This is the top-level configuration for a tenant.
config = model.UnparsedTenantConfig()
- config.extend(safe_load_yaml(data, source_context))
+ with early_configuration_exceptions(source_context):
+ config.extend(safe_load_yaml(data, source_context))
return config
@staticmethod
def _parseUntrustedProjectLayout(data, source_context):
config = model.UnparsedTenantConfig()
- config.extend(safe_load_yaml(data, source_context))
+ with early_configuration_exceptions(source_context):
+ config.extend(safe_load_yaml(data, source_context))
if config.pipelines:
with configuration_exceptions('pipeline', config.pipelines[0]):
raise PipelineNotPermittedError()
@@ -1471,6 +1497,8 @@
@staticmethod
def _parseLayout(base, tenant, data, scheduler, connections):
+ # Don't call this method from dynamic reconfiguration because
+ # it interacts with drivers and connections.
layout = model.Layout(tenant)
TenantParser._parseLayoutItems(layout, tenant, data,
@@ -1582,7 +1610,8 @@
config.extend(incdata)
def createDynamicLayout(self, tenant, files,
- include_config_projects=False):
+ include_config_projects=False,
+ scheduler=None, connections=None):
if include_config_projects:
config = model.UnparsedTenantConfig()
for project in tenant.config_projects:
@@ -1594,22 +1623,29 @@
self._loadDynamicProjectData(config, project, files, False, tenant)
layout = model.Layout(tenant)
- # NOTE: the actual pipeline objects (complete with queues and
- # enqueued items) are copied by reference here. This allows
- # our shadow dynamic configuration to continue to interact
- # with all the other changes, each of which may have their own
- # version of reality. We do not support creating, updating,
- # or deleting pipelines in dynamic layout changes.
- layout.pipelines = tenant.layout.pipelines
+ if not include_config_projects:
+ # NOTE: the actual pipeline objects (complete with queues
+ # and enqueued items) are copied by reference here. This
+ # allows our shadow dynamic configuration to continue to
+ # interact with all the other changes, each of which may
+ # have their own version of reality. We do not support
+ # creating, updating, or deleting pipelines in dynamic
+ # layout changes.
+ layout.pipelines = tenant.layout.pipelines
- # NOTE: the semaphore definitions are copied from the static layout
- # here. For semaphores there should be no per patch max value but
- # exactly one value at any time. So we do not support dynamic semaphore
- # configuration changes.
- layout.semaphores = tenant.layout.semaphores
+ # NOTE: the semaphore definitions are copied from the
+ # static layout here. For semaphores there should be no
+ # per patch max value but exactly one value at any
+ # time. So we do not support dynamic semaphore
+ # configuration changes.
+ layout.semaphores = tenant.layout.semaphores
+ skip_pipelines = skip_semaphores = True
+ else:
+ skip_pipelines = skip_semaphores = False
- TenantParser._parseLayoutItems(layout, tenant, config, None, None,
- skip_pipelines=True,
- skip_semaphores=True)
+ TenantParser._parseLayoutItems(layout, tenant, config,
+ scheduler, connections,
+ skip_pipelines=skip_pipelines,
+ skip_semaphores=skip_semaphores)
return layout
diff --git a/zuul/driver/gerrit/gerritconnection.py b/zuul/driver/gerrit/gerritconnection.py
index 35137c7..ecf5f94 100644
--- a/zuul/driver/gerrit/gerritconnection.py
+++ b/zuul/driver/gerrit/gerritconnection.py
@@ -133,25 +133,42 @@
event.branch_deleted = True
event.branch = event.ref
- if event.change_number:
- # TODO(jhesketh): Check if the project exists?
- # and self.connection.sched.getProject(event.project_name):
-
- # Call _getChange for the side effect of updating the
- # cache. Note that this modifies Change objects outside
- # the main thread.
- # NOTE(jhesketh): Ideally we'd just remove the change from the
- # cache to denote that it needs updating. However the change
- # object is already used by Items and hence BuildSets etc. and
- # we need to update those objects by reference so that they have
- # the correct/new information and also avoid hitting gerrit
- # multiple times.
- self.connection._getChange(event.change_number,
- event.patch_number,
- refresh=True)
+ self._getChange(event)
self.connection.logEvent(event)
self.connection.sched.addEvent(event)
+ def _getChange(self, event):
+ # Grab the change if we are managing the project or if it exists in the
+ # cache as it may be a dependency
+ if event.change_number:
+ refresh = True
+ if event.change_number not in self.connection._change_cache:
+ refresh = False
+ for tenant in self.connection.sched.abide.tenants.values():
+ # TODO(fungi): it would be better to have some simple means
+ # of inferring the hostname from the connection, or at
+ # least split this into separate method arguments, rather
+ # than assembling and passing in a baked string.
+ if (None, None) != tenant.getProject('/'.join((
+ self.connection.canonical_hostname,
+ event.project_name))):
+ refresh = True
+ break
+
+ if refresh:
+ # Call _getChange for the side effect of updating the
+ # cache. Note that this modifies Change objects outside
+ # the main thread.
+ # NOTE(jhesketh): Ideally we'd just remove the change from the
+ # cache to denote that it needs updating. However the change
+ # object is already used by Items and hence BuildSets etc. and
+ # we need to update those objects by reference so that they
+ # have the correct/new information and also avoid hitting
+ # gerrit multiple times.
+ self.connection._getChange(event.change_number,
+ event.patch_number,
+ refresh=True)
+
def run(self):
while True:
if self._stopped:
@@ -298,12 +315,17 @@
# This lets the user supply a list of change objects that are
# still in use. Anything in our cache that isn't in the supplied
# list should be safe to remove from the cache.
- remove = []
- for key, change in self._change_cache.items():
- if change not in relevant:
- remove.append(key)
- for key in remove:
- del self._change_cache[key]
+ remove = {}
+ for change_number, patchsets in self._change_cache.items():
+ for patchset, change in patchsets.items():
+ if change not in relevant:
+ remove.setdefault(change_number, [])
+ remove[change_number].append(patchset)
+ for change_number, patchsets in remove.items():
+ for patchset in patchsets:
+ del self._change_cache[change_number][patchset]
+ if not self._change_cache[change_number]:
+ del self._change_cache[change_number]
def getChange(self, event, refresh=False):
if event.change_number:
@@ -349,21 +371,22 @@
return change
def _getChange(self, number, patchset, refresh=False, history=None):
- key = '%s,%s' % (number, patchset)
- change = self._change_cache.get(key)
+ change = self._change_cache.get(number, {}).get(patchset)
if change and not refresh:
return change
if not change:
change = GerritChange(None)
change.number = number
change.patchset = patchset
- key = '%s,%s' % (change.number, change.patchset)
- self._change_cache[key] = change
+ self._change_cache.setdefault(change.number, {})
+ self._change_cache[change.number][change.patchset] = change
try:
self._updateChange(change, history)
except Exception:
- if key in self._change_cache:
- del self._change_cache[key]
+ if self._change_cache.get(change.number, {}).get(change.patchset):
+ del self._change_cache[change.number][change.patchset]
+ if not self._change_cache[change.number]:
+ del self._change_cache[change.number]
raise
return change
diff --git a/zuul/driver/github/githubconnection.py b/zuul/driver/github/githubconnection.py
index 0ce6ef5..fca36c8 100644
--- a/zuul/driver/github/githubconnection.py
+++ b/zuul/driver/github/githubconnection.py
@@ -17,6 +17,8 @@
import logging
import hmac
import hashlib
+import queue
+import threading
import time
import re
@@ -80,11 +82,10 @@
delivery=delivery))
self._validate_signature(request)
+ # TODO(jlk): Validate project in the request is a project we know
try:
self.__dispatch_event(request)
- except webob.exc.HTTPNotFound:
- raise
except:
self.log.exception("Exception handling Github event:")
@@ -98,20 +99,58 @@
'header.')
try:
- method = getattr(self, '_event_' + event)
- except AttributeError:
- message = "Unhandled X-Github-Event: {0}".format(event)
- self.log.debug(message)
- # Returns empty 200 on unhandled events
- raise webob.exc.HTTPOk()
-
- try:
json_body = request.json_body
+ self.connection.addEvent(json_body, event)
except:
message = 'Exception deserializing JSON body'
self.log.exception(message)
raise webob.exc.HTTPBadRequest(message)
+ def _validate_signature(self, request):
+ secret = self.connection.connection_config.get('webhook_token', None)
+ if secret is None:
+ raise RuntimeError("webhook_token is required")
+
+ body = request.body
+ try:
+ request_signature = request.headers['X-Hub-Signature']
+ except KeyError:
+ raise webob.exc.HTTPUnauthorized(
+ 'Please specify a X-Hub-Signature header with secret.')
+
+ payload_signature = _sign_request(body, secret)
+
+ self.log.debug("Payload Signature: {0}".format(str(payload_signature)))
+ self.log.debug("Request Signature: {0}".format(str(request_signature)))
+ if not hmac.compare_digest(
+ str(payload_signature), str(request_signature)):
+ raise webob.exc.HTTPUnauthorized(
+ 'Request signature does not match calculated payload '
+ 'signature. Check that secret is correct.')
+
+ return True
+
+
+class GithubEventConnector(threading.Thread):
+ """Move events from GitHub into the scheduler"""
+
+ log = logging.getLogger("zuul.GithubEventConnector")
+
+ def __init__(self, connection):
+ super(GithubEventConnector, self).__init__()
+ self.daemon = True
+ self.connection = connection
+ self._stopped = False
+
+ def stop(self):
+ self._stopped = True
+ self.connection.addEvent(None)
+
+ def _handleEvent(self):
+ json_body, event_type = self.connection.getEvent()
+ if self._stopped:
+ return
+
# If there's any installation mapping information in the body then
# update the project mapping before any requests are made.
installation_id = json_body.get('installation', {}).get('id')
@@ -127,9 +166,17 @@
self.connection.installation_map[project_name] = installation_id
try:
+ method = getattr(self, '_event_' + event_type)
+ except AttributeError:
+ # TODO(jlk): Gracefully handle event types we don't care about
+ # instead of logging an exception.
+ message = "Unhandled X-Github-Event: {0}".format(event_type)
+ self.log.debug(message)
+ # Returns empty on unhandled events
+ return
+
+ try:
event = method(json_body)
- except webob.exc.HTTPNotFound:
- raise
except:
self.log.exception('Exception when handling event:')
event = None
@@ -240,14 +287,6 @@
event.action = body.get('action')
return event
- def _event_ping(self, body):
- project_name = body['repository']['full_name']
- if not self.connection.getProject(project_name):
- self.log.warning("Ping received for unknown project %s" %
- project_name)
- raise webob.exc.HTTPNotFound("Sorry, this project is not "
- "registered")
-
def _event_status(self, body):
action = body.get('action')
if action == 'pending':
@@ -277,30 +316,6 @@
(number, project_name))
return pr_body
- def _validate_signature(self, request):
- secret = self.connection.connection_config.get('webhook_token', None)
- if secret is None:
- raise RuntimeError("webhook_token is required")
-
- body = request.body
- try:
- request_signature = request.headers['X-Hub-Signature']
- except KeyError:
- raise webob.exc.HTTPUnauthorized(
- 'Please specify a X-Hub-Signature header with secret.')
-
- payload_signature = _sign_request(body, secret)
-
- self.log.debug("Payload Signature: {0}".format(str(payload_signature)))
- self.log.debug("Request Signature: {0}".format(str(request_signature)))
- if not hmac.compare_digest(
- str(payload_signature), str(request_signature)):
- raise webob.exc.HTTPUnauthorized(
- 'Request signature does not match calculated payload '
- 'signature. Check that secret is correct.')
-
- return True
-
def _pull_request_to_event(self, pr_body):
event = GithubTriggerEvent()
event.trigger_name = 'github'
@@ -327,6 +342,17 @@
if login:
return self.connection.getUser(login)
+ def run(self):
+ while True:
+ if self._stopped:
+ return
+ try:
+ self._handleEvent()
+ except:
+ self.log.exception("Exception moving GitHub event:")
+ finally:
+ self.connection.eventDone()
+
class GithubUser(collections.Mapping):
log = logging.getLogger('zuul.GithubUser')
@@ -376,6 +402,7 @@
self.canonical_hostname = self.connection_config.get(
'canonical_hostname', self.server)
self.source = driver.getSource(self)
+ self.event_queue = queue.Queue()
# ssl verification must default to true
verify_ssl = self.connection_config.get('verify_ssl', 'true')
@@ -408,9 +435,20 @@
self.registerHttpHandler(self.payload_path,
webhook_listener.handle_request)
self._authenticateGithubAPI()
+ self._start_event_connector()
def onStop(self):
self.unregisterHttpHandler(self.payload_path)
+ self._stop_event_connector()
+
+ def _start_event_connector(self):
+ self.github_event_connector = GithubEventConnector(self)
+ self.github_event_connector.start()
+
+ def _stop_event_connector(self):
+ if self.github_event_connector:
+ self.github_event_connector.stop()
+ self.github_event_connector.join()
def _createGithubClient(self):
if self.server != 'github.com':
@@ -504,6 +542,15 @@
return token
+ def addEvent(self, data, event=None):
+ return self.event_queue.put((data, event))
+
+ def getEvent(self):
+ return self.event_queue.get()
+
+ def eventDone(self):
+ self.event_queue.task_done()
+
def getGithubClient(self,
project=None,
user_id=None,
diff --git a/zuul/executor/server.py b/zuul/executor/server.py
index de928db..daa029f 100644
--- a/zuul/executor/server.py
+++ b/zuul/executor/server.py
@@ -36,7 +36,7 @@
import gear
import zuul.merger.merger
-import zuul.ansible
+import zuul.ansible.logconfig
from zuul.lib import commandsocket
BUFFER_LINES_FOR_SYNTAX = 200
diff --git a/zuul/manager/__init__.py b/zuul/manager/__init__.py
index 8282f86..98c7350 100644
--- a/zuul/manager/__init__.py
+++ b/zuul/manager/__init__.py
@@ -444,7 +444,9 @@
loader.createDynamicLayout(
item.pipeline.layout.tenant,
build_set.files,
- include_config_projects=True)
+ include_config_projects=True,
+ scheduler=self.sched,
+ connections=self.sched.connections)
# Then create the config a second time but without changes
# to config repos so that we actually use this config.
@@ -527,11 +529,12 @@
if not item.job_graph:
try:
+ self.log.debug("Freezing job graph for %s" % (item,))
item.freezeJobGraph()
except Exception as e:
# TODOv3(jeblair): nicify this exception as it will be reported
self.log.exception("Error freezing job graph for %s" %
- item)
+ (item,))
item.setConfigError("Unable to freeze job graph: %s" %
(str(e)))
return False
@@ -540,6 +543,7 @@
def _processOneItem(self, item, nnfi):
changed = False
ready = False
+ dequeued = False
failing_reasons = [] # Reasons this item is failing
item_ahead = item.item_ahead
@@ -594,8 +598,14 @@
item.reported_start = True
if item.current_build_set.unable_to_merge:
failing_reasons.append("it has a merge conflict")
+ if (not item.live) and (not dequeued):
+ self.dequeueItem(item)
+ changed = dequeued = True
if item.current_build_set.config_error:
failing_reasons.append("it has an invalid configuration")
+ if (not item.live) and (not dequeued):
+ self.dequeueItem(item)
+ changed = dequeued = True
if ready and self.provisionNodes(item):
changed = True
if ready and self.executeJobs(item):
@@ -603,10 +613,10 @@
if item.didAnyJobFail():
failing_reasons.append("at least one job failed")
- if (not item.live) and (not item.items_behind):
+ if (not item.live) and (not item.items_behind) and (not dequeued):
failing_reasons.append("is a non-live item with no items behind")
self.dequeueItem(item)
- changed = True
+ changed = dequeued = True
if ((not item_ahead) and item.areAllJobsComplete() and item.live):
try:
self.reportItem(item)
@@ -618,7 +628,7 @@
(item_behind.change, item))
self.cancelJobs(item_behind)
self.dequeueItem(item)
- changed = True
+ changed = dequeued = True
elif not failing_reasons and item.live:
nnfi = item
item.current_build_set.failing_reasons = failing_reasons
@@ -743,9 +753,12 @@
layout = (item.current_build_set.layout or
self.pipeline.layout)
- if not layout.hasProject(item.change.project):
+ project_in_pipeline = True
+ if not layout.getProjectPipelineConfig(item.change.project,
+ self.pipeline):
self.log.debug("Project %s not in pipeline %s for change %s" % (
item.change.project, self.pipeline, item.change))
+ project_in_pipeline = False
actions = []
elif item.getConfigError():
self.log.debug("Invalid config for change %s" % item.change)
@@ -771,7 +784,7 @@
actions = self.pipeline.failure_actions
item.setReportedResult('FAILURE')
self.pipeline._consecutive_failures += 1
- if layout.hasProject(item.change.project) and self.pipeline._disabled:
+ if project_in_pipeline and self.pipeline._disabled:
actions = self.pipeline.disabled_actions
# Check here if we should disable so that we only use the disabled
# reporters /after/ the last disable_at failure is still reported as
diff --git a/zuul/model.py b/zuul/model.py
index 850bbe2..1ef8d3a 100644
--- a/zuul/model.py
+++ b/zuul/model.py
@@ -21,6 +21,7 @@
import time
from uuid import uuid4
import urllib.parse
+import textwrap
MERGER_MERGE = 1 # "git merge"
MERGER_MERGE_RESOLVE = 2 # "git merge -s resolve"
@@ -1374,6 +1375,7 @@
self.quiet = False
self.active = False # Whether an item is within an active window
self.live = True # Whether an item is intended to be processed at all
+ # TODO(jeblair): move job_graph to buildset
self.job_graph = None
def __repr__(self):
@@ -1391,6 +1393,7 @@
old.next_build_set = self.current_build_set
self.current_build_set.previous_build_set = old
self.build_sets.append(self.current_build_set)
+ self.job_graph = None
def addBuild(self, build):
self.current_build_set.addBuild(build)
@@ -2093,6 +2096,83 @@
self.private_key_file = None
+class ConfigItemNotListError(Exception):
+ def __init__(self):
+ message = textwrap.dedent("""\
+ Configuration file is not a list. Each zuul.yaml configuration
+ file must be a list of items, for example:
+
+ - job:
+ name: foo
+
+ - project:
+ name: bar
+
+ Ensure that every item starts with "- " so that it is parsed as a
+ YAML list.
+ """)
+ super(ConfigItemNotListError, self).__init__(message)
+
+
+class ConfigItemNotDictError(Exception):
+ def __init__(self):
+ message = textwrap.dedent("""\
+ Configuration item is not a dictionary. Each zuul.yaml
+ configuration file must be a list of dictionaries, for
+ example:
+
+ - job:
+ name: foo
+
+ - project:
+ name: bar
+
+ Ensure that every item in the list is a dictionary with one
+ key (in this example, 'job' and 'project').
+ """)
+ super(ConfigItemNotDictError, self).__init__(message)
+
+
+class ConfigItemMultipleKeysError(Exception):
+ def __init__(self):
+ message = textwrap.dedent("""\
+ Configuration item has more than one key. Each zuul.yaml
+ configuration file must be a list of dictionaries with a
+ single key, for example:
+
+ - job:
+ name: foo
+
+ - project:
+ name: bar
+
+ Ensure that every item in the list is a dictionary with only
+ one key (in this example, 'job' and 'project'). This error
+ may be caused by insufficient indentation of the keys under
+ the configuration item ('name' in this example).
+ """)
+ super(ConfigItemMultipleKeysError, self).__init__(message)
+
+
+class ConfigItemUnknownError(Exception):
+ def __init__(self):
+ message = textwrap.dedent("""\
+ Configuration item not recognized. Each zuul.yaml
+ configuration file must be a list of dictionaries, for
+ example:
+
+ - job:
+ name: foo
+
+ - project:
+ name: bar
+
+ The dictionary keys must match one of the configuration item
+ types recognized by zuul (for example, 'job' or 'project').
+ """)
+ super(ConfigItemUnknownError, self).__init__(message)
+
+
class UnparsedAbideConfig(object):
"""A collection of yaml lists that has not yet been parsed into objects.
@@ -2109,25 +2189,18 @@
return
if not isinstance(conf, list):
- raise Exception("Configuration items must be in the form of "
- "a list of dictionaries (when parsing %s)" %
- (conf,))
+ raise ConfigItemNotListError()
+
for item in conf:
if not isinstance(item, dict):
- raise Exception("Configuration items must be in the form of "
- "a list of dictionaries (when parsing %s)" %
- (conf,))
+ raise ConfigItemNotDictError()
if len(item.keys()) > 1:
- raise Exception("Configuration item dictionaries must have "
- "a single key (when parsing %s)" %
- (conf,))
+ raise ConfigItemMultipleKeysError()
key, value = list(item.items())[0]
if key == 'tenant':
self.tenants.append(value)
else:
- raise Exception("Configuration item not recognized "
- "(when parsing %s)" %
- (conf,))
+ raise ConfigItemUnknownError()
class UnparsedTenantConfig(object):
@@ -2166,19 +2239,13 @@
return
if not isinstance(conf, list):
- raise Exception("Configuration items must be in the form of "
- "a list of dictionaries (when parsing %s)" %
- (conf,))
+ raise ConfigItemNotListError()
for item in conf:
if not isinstance(item, dict):
- raise Exception("Configuration items must be in the form of "
- "a list of dictionaries (when parsing %s)" %
- (conf,))
+ raise ConfigItemNotDictError()
if len(item.keys()) > 1:
- raise Exception("Configuration item dictionaries must have "
- "a single key (when parsing %s)" %
- (conf,))
+ raise ConfigItemMultipleKeysError()
key, value = list(item.items())[0]
if key == 'project':
name = value['name']
@@ -2196,9 +2263,7 @@
elif key == 'semaphore':
self.semaphores.append(value)
else:
- raise Exception("Configuration item `%s` not recognized "
- "(when parsing %s)" %
- (item, conf,))
+ raise ConfigItemUnknownError()
class Layout(object):
@@ -2331,19 +2396,21 @@
job_graph.addJob(frozen_job)
def createJobGraph(self, item):
- project_config = self.project_configs.get(
- item.change.project.canonical_name, None)
- ret = JobGraph()
# NOTE(pabelanger): It is possible for a foreign project not to have a
# configured pipeline, if so return an empty JobGraph.
- if project_config and item.pipeline.name in project_config.pipelines:
- project_job_list = \
- project_config.pipelines[item.pipeline.name].job_list
- self._createJobGraph(item, project_job_list, ret)
+ ret = JobGraph()
+ ppc = self.getProjectPipelineConfig(item.change.project,
+ item.pipeline)
+ if ppc:
+ self._createJobGraph(item, ppc.job_list, ret)
return ret
- def hasProject(self, project):
- return project.canonical_name in self.project_configs
+ def getProjectPipelineConfig(self, project, pipeline):
+ project_config = self.project_configs.get(
+ project.canonical_name, None)
+ if not project_config:
+ return None
+ return project_config.pipelines.get(pipeline.name, None)
class Semaphore(object):
diff --git a/zuul/scheduler.py b/zuul/scheduler.py
index 52b34ec..269e049 100644
--- a/zuul/scheduler.py
+++ b/zuul/scheduler.py
@@ -585,8 +585,7 @@
self._reenqueueTenant(old_tenant, tenant)
- # TODOv3(jeblair): update for tenants
- # self.maintainConnectionCache()
+ self.maintainConnectionCache()
self.connections.reconfigureDrivers(tenant)
# TODOv3(jeblair): remove postconfig calls?
@@ -719,7 +718,6 @@
self.run_handler_lock.release()
def maintainConnectionCache(self):
- # TODOv3(jeblair): update for tenants
relevant = set()
for tenant in self.abide.tenants.values():
for pipeline in tenant.layout.pipelines.values():
@@ -729,7 +727,7 @@
for item in pipeline.getAllItems():
relevant.add(item.change)
relevant.update(item.change.getRelatedChanges())
- for connection in self.connections.values():
+ for connection in self.connections.connections.values():
connection.maintainCache(relevant)
self.log.debug(
"End maintain connection cache for: %s" % connection)