Merge "Fail execution if a role repo is a bare collection of roles" into feature/zuulv3
diff --git a/tests/base.py b/tests/base.py
index f210591..9709bf7 100755
--- a/tests/base.py
+++ b/tests/base.py
@@ -2148,6 +2148,8 @@
# Make sure we set up an RSA key for the project so that we
# don't spend time generating one:
+ if isinstance(project, dict):
+ project = list(project.keys())[0]
key_root = os.path.join(self.state_root, 'keys')
if not os.path.isdir(key_root):
os.mkdir(key_root, 0o700)
@@ -2340,6 +2342,11 @@
if count is not None and i >= count:
break
+ def getSortedBuilds(self):
+ "Return the list of currently running builds sorted by name"
+
+ return sorted(self.builds, key=lambda x: x.name)
+
def release(self, job):
if isinstance(job, FakeBuild):
job.release()
diff --git a/tests/fixtures/config/ansible/git/common-config/playbooks/hello-post.yaml b/tests/fixtures/config/ansible/git/common-config/playbooks/hello-post.yaml
index d528be1..36a22e4 100644
--- a/tests/fixtures/config/ansible/git/common-config/playbooks/hello-post.yaml
+++ b/tests/fixtures/config/ansible/git/common-config/playbooks/hello-post.yaml
@@ -10,3 +10,7 @@
that:
- st.stat.exists
- st.stat.isreg
+
+ - name: Simple shell task.
+ shell: |+
+ echo "Hello world"
diff --git a/tests/fixtures/config/in-repo/git/org_project1/.zuul.yaml b/tests/fixtures/config/in-repo/git/org_project1/.zuul.yaml
new file mode 100644
index 0000000..3fd423b
--- /dev/null
+++ b/tests/fixtures/config/in-repo/git/org_project1/.zuul.yaml
@@ -0,0 +1,5 @@
+- project:
+ name: org/project1
+ tenant-one-gate:
+ jobs:
+ - project-test1
diff --git a/tests/fixtures/config/shadow/git/local-config/playbooks/base.yaml b/tests/fixtures/config/shadow/git/local-config/playbooks/base.yaml
new file mode 100644
index 0000000..f679dce
--- /dev/null
+++ b/tests/fixtures/config/shadow/git/local-config/playbooks/base.yaml
@@ -0,0 +1,2 @@
+- hosts: all
+ tasks: []
diff --git a/tests/fixtures/config/shadow/git/local-config/playbooks/test2.yaml b/tests/fixtures/config/shadow/git/local-config/playbooks/test2.yaml
new file mode 100644
index 0000000..f679dce
--- /dev/null
+++ b/tests/fixtures/config/shadow/git/local-config/playbooks/test2.yaml
@@ -0,0 +1,2 @@
+- hosts: all
+ tasks: []
diff --git a/tests/fixtures/config/shadow/git/local-config/zuul.yaml b/tests/fixtures/config/shadow/git/local-config/zuul.yaml
new file mode 100644
index 0000000..756e843
--- /dev/null
+++ b/tests/fixtures/config/shadow/git/local-config/zuul.yaml
@@ -0,0 +1,25 @@
+- pipeline:
+ name: check
+ manager: independent
+ trigger:
+ gerrit:
+ - event: patchset-created
+ success:
+ gerrit:
+ verified: 1
+ failure:
+ gerrit:
+ verified: -1
+
+- job:
+ name: base
+
+- job:
+ name: test2
+
+- project:
+ name: org/project
+ check:
+ jobs:
+ - test1
+ - test2
diff --git a/tests/fixtures/config/shadow/git/org_project/README b/tests/fixtures/config/shadow/git/org_project/README
new file mode 100644
index 0000000..9daeafb
--- /dev/null
+++ b/tests/fixtures/config/shadow/git/org_project/README
@@ -0,0 +1 @@
+test
diff --git a/tests/fixtures/config/shadow/git/stdlib/.zuul.yaml b/tests/fixtures/config/shadow/git/stdlib/.zuul.yaml
new file mode 100644
index 0000000..6a6f9c9
--- /dev/null
+++ b/tests/fixtures/config/shadow/git/stdlib/.zuul.yaml
@@ -0,0 +1,10 @@
+- job:
+ name: base
+
+- job:
+ name: test1
+ parent: base
+
+- job:
+ name: test2
+ parent: base
diff --git a/tests/fixtures/config/shadow/git/stdlib/README b/tests/fixtures/config/shadow/git/stdlib/README
new file mode 100644
index 0000000..9daeafb
--- /dev/null
+++ b/tests/fixtures/config/shadow/git/stdlib/README
@@ -0,0 +1 @@
+test
diff --git a/tests/fixtures/config/shadow/git/stdlib/playbooks/base.yaml b/tests/fixtures/config/shadow/git/stdlib/playbooks/base.yaml
new file mode 100644
index 0000000..f679dce
--- /dev/null
+++ b/tests/fixtures/config/shadow/git/stdlib/playbooks/base.yaml
@@ -0,0 +1,2 @@
+- hosts: all
+ tasks: []
diff --git a/tests/fixtures/config/shadow/git/stdlib/playbooks/test1.yaml b/tests/fixtures/config/shadow/git/stdlib/playbooks/test1.yaml
new file mode 100644
index 0000000..f679dce
--- /dev/null
+++ b/tests/fixtures/config/shadow/git/stdlib/playbooks/test1.yaml
@@ -0,0 +1,2 @@
+- hosts: all
+ tasks: []
diff --git a/tests/fixtures/config/shadow/git/stdlib/playbooks/test2.yaml b/tests/fixtures/config/shadow/git/stdlib/playbooks/test2.yaml
new file mode 100644
index 0000000..f679dce
--- /dev/null
+++ b/tests/fixtures/config/shadow/git/stdlib/playbooks/test2.yaml
@@ -0,0 +1,2 @@
+- hosts: all
+ tasks: []
diff --git a/tests/fixtures/config/shadow/main.yaml b/tests/fixtures/config/shadow/main.yaml
new file mode 100644
index 0000000..f148a84
--- /dev/null
+++ b/tests/fixtures/config/shadow/main.yaml
@@ -0,0 +1,10 @@
+- tenant:
+ name: tenant-one
+ source:
+ gerrit:
+ config-projects:
+ - local-config
+ untrusted-projects:
+ - stdlib:
+ shadow: local-config
+ - org/project
diff --git a/tests/unit/test_configloader.py b/tests/unit/test_configloader.py
index f0e606a..573ccbf 100644
--- a/tests/unit/test_configloader.py
+++ b/tests/unit/test_configloader.py
@@ -38,12 +38,16 @@
[x.name for x in tenant.config_projects])
self.assertEqual(['org/project1', 'org/project2'],
[x.name for x in tenant.untrusted_projects])
- self.assertEqual(self.CONFIG_SET,
- tenant.config_projects[0].load_classes)
- self.assertEqual(self.UNTRUSTED_SET,
- tenant.untrusted_projects[0].load_classes)
- self.assertEqual(self.UNTRUSTED_SET,
- tenant.untrusted_projects[1].load_classes)
+
+ project = tenant.config_projects[0]
+ tpc = tenant.project_configs[project.canonical_name]
+ self.assertEqual(self.CONFIG_SET, tpc.load_classes)
+ project = tenant.untrusted_projects[0]
+ tpc = tenant.project_configs[project.canonical_name]
+ self.assertEqual(self.UNTRUSTED_SET, tpc.load_classes)
+ project = tenant.untrusted_projects[1]
+ tpc = tenant.project_configs[project.canonical_name]
+ self.assertEqual(self.UNTRUSTED_SET, tpc.load_classes)
self.assertTrue('common-config-job' in tenant.layout.jobs)
self.assertTrue('project1-job' in tenant.layout.jobs)
self.assertTrue('project2-job' in tenant.layout.jobs)
@@ -70,12 +74,16 @@
[x.name for x in tenant.config_projects])
self.assertEqual(['org/project1', 'org/project2'],
[x.name for x in tenant.untrusted_projects])
- self.assertEqual(self.CONFIG_SET,
- tenant.config_projects[0].load_classes)
+ project = tenant.config_projects[0]
+ tpc = tenant.project_configs[project.canonical_name]
+ self.assertEqual(self.CONFIG_SET, tpc.load_classes)
+ project = tenant.untrusted_projects[0]
+ tpc = tenant.project_configs[project.canonical_name]
self.assertEqual(self.UNTRUSTED_SET - set(['project']),
- tenant.untrusted_projects[0].load_classes)
- self.assertEqual(set(['job']),
- tenant.untrusted_projects[1].load_classes)
+ tpc.load_classes)
+ project = tenant.untrusted_projects[1]
+ tpc = tenant.project_configs[project.canonical_name]
+ self.assertEqual(set(['job']), tpc.load_classes)
self.assertTrue('common-config-job' in tenant.layout.jobs)
self.assertTrue('project1-job' in tenant.layout.jobs)
self.assertTrue('project2-job' in tenant.layout.jobs)
@@ -102,12 +110,17 @@
[x.name for x in tenant.config_projects])
self.assertEqual(['org/project1', 'org/project2'],
[x.name for x in tenant.untrusted_projects])
- self.assertEqual(self.CONFIG_SET,
- tenant.config_projects[0].load_classes)
+ project = tenant.config_projects[0]
+ tpc = tenant.project_configs[project.canonical_name]
+ self.assertEqual(self.CONFIG_SET, tpc.load_classes)
+ project = tenant.untrusted_projects[0]
+ tpc = tenant.project_configs[project.canonical_name]
self.assertEqual(self.UNTRUSTED_SET - set(['project']),
- tenant.untrusted_projects[0].load_classes)
+ tpc.load_classes)
+ project = tenant.untrusted_projects[1]
+ tpc = tenant.project_configs[project.canonical_name]
self.assertEqual(self.UNTRUSTED_SET - set(['project']),
- tenant.untrusted_projects[1].load_classes)
+ tpc.load_classes)
self.assertTrue('common-config-job' in tenant.layout.jobs)
self.assertTrue('project1-job' in tenant.layout.jobs)
self.assertTrue('project2-job' in tenant.layout.jobs)
@@ -134,12 +147,17 @@
[x.name for x in tenant.config_projects])
self.assertEqual(['org/project1', 'org/project2'],
[x.name for x in tenant.untrusted_projects])
- self.assertEqual(self.CONFIG_SET,
- tenant.config_projects[0].load_classes)
+ project = tenant.config_projects[0]
+ tpc = tenant.project_configs[project.canonical_name]
+ self.assertEqual(self.CONFIG_SET, tpc.load_classes)
+ project = tenant.untrusted_projects[0]
+ tpc = tenant.project_configs[project.canonical_name]
self.assertEqual(self.UNTRUSTED_SET - set(['project']),
- tenant.untrusted_projects[0].load_classes)
+ tpc.load_classes)
+ project = tenant.untrusted_projects[1]
+ tpc = tenant.project_configs[project.canonical_name]
self.assertEqual(self.UNTRUSTED_SET - set(['project', 'job']),
- tenant.untrusted_projects[1].load_classes)
+ tpc.load_classes)
self.assertTrue('common-config-job' in tenant.layout.jobs)
self.assertTrue('project1-job' in tenant.layout.jobs)
self.assertFalse('project2-job' in tenant.layout.jobs)
@@ -166,12 +184,15 @@
[x.name for x in tenant.config_projects])
self.assertEqual(['org/project1', 'org/project2'],
[x.name for x in tenant.untrusted_projects])
- self.assertEqual(self.CONFIG_SET,
- tenant.config_projects[0].load_classes)
- self.assertEqual(set(['job']),
- tenant.untrusted_projects[0].load_classes)
- self.assertEqual(set(['project', 'job']),
- tenant.untrusted_projects[1].load_classes)
+ project = tenant.config_projects[0]
+ tpc = tenant.project_configs[project.canonical_name]
+ self.assertEqual(self.CONFIG_SET, tpc.load_classes)
+ project = tenant.untrusted_projects[0]
+ tpc = tenant.project_configs[project.canonical_name]
+ self.assertEqual(set(['job']), tpc.load_classes)
+ project = tenant.untrusted_projects[1]
+ tpc = tenant.project_configs[project.canonical_name]
+ self.assertEqual(set(['project', 'job']), tpc.load_classes)
self.assertTrue('common-config-job' in tenant.layout.jobs)
self.assertTrue('project1-job' in tenant.layout.jobs)
self.assertTrue('project2-job' in tenant.layout.jobs)
diff --git a/tests/unit/test_executor.py b/tests/unit/test_executor.py
old mode 100644
new mode 100755
index 39b6070..7b76802
--- a/tests/unit/test_executor.py
+++ b/tests/unit/test_executor.py
@@ -18,6 +18,9 @@
import logging
import time
+import zuul.executor.server
+import zuul.model
+
from tests.base import ZuulTestCase, simple_layout
@@ -305,3 +308,27 @@
]
self.assertBuildStates(states, projects)
+
+
+class TestAnsibleJob(ZuulTestCase):
+ tenant_config_file = 'config/ansible/main.yaml'
+
+ def setUp(self):
+ super(TestAnsibleJob, self).setUp()
+ job = zuul.model.Job('test')
+ job.unique = 'test'
+ self.test_job = zuul.executor.server.AnsibleJob(self.executor_server,
+ job)
+
+ def test_getHostList_host_keys(self):
+ # Test without ssh_port set
+ node = {'name': 'fake-host',
+ 'host_keys': ['fake-host-key'],
+ 'interface_ip': 'localhost'}
+ keys = self.test_job.getHostList({'nodes': [node]})[0]['host_keys']
+ self.assertEqual(keys[0], 'localhost fake-host-key')
+
+ # Test with custom ssh_port set
+ node['ssh_port'] = 22022
+ keys = self.test_job.getHostList({'nodes': [node]})[0]['host_keys']
+ self.assertEqual(keys[0], '[localhost]:22022 fake-host-key')
diff --git a/tests/unit/test_model.py b/tests/unit/test_model.py
index f4ca96f..7fe101e 100644
--- a/tests/unit/test_model.py
+++ b/tests/unit/test_model.py
@@ -40,9 +40,10 @@
self.source = Dummy(canonical_hostname='git.example.com',
connection=self.connection)
self.tenant = model.Tenant('tenant')
- self.layout = model.Layout()
+ self.layout = model.Layout(self.tenant)
self.project = model.Project('project', self.source)
- self.tenant.addUntrustedProject(self.project)
+ self.tpc = model.TenantProjectConfig(self.project)
+ self.tenant.addUntrustedProject(self.tpc)
self.pipeline = model.Pipeline('gate', self.layout)
self.layout.addPipeline(self.pipeline)
self.queue = model.ChangeQueue(self.pipeline)
@@ -58,7 +59,7 @@
@property
def job(self):
tenant = model.Tenant('tenant')
- layout = model.Layout()
+ layout = model.Layout(tenant)
job = configloader.JobParser.fromYaml(tenant, layout, {
'_source_context': self.context,
'_start_mark': self.start_mark,
@@ -169,13 +170,14 @@
def test_job_inheritance_configloader(self):
# TODO(jeblair): move this to a configloader test
tenant = model.Tenant('tenant')
- layout = model.Layout()
+ layout = model.Layout(tenant)
pipeline = model.Pipeline('gate', layout)
layout.addPipeline(pipeline)
queue = model.ChangeQueue(pipeline)
project = model.Project('project', self.source)
- tenant.addUntrustedProject(project)
+ tpc = model.TenantProjectConfig(project)
+ tenant.addUntrustedProject(tpc)
base = configloader.JobParser.fromYaml(tenant, layout, {
'_source_context': self.context,
@@ -331,8 +333,8 @@
'playbooks/base'])
def test_job_auth_inheritance(self):
- tenant = model.Tenant('tenant')
- layout = model.Layout()
+ tenant = self.tenant
+ layout = self.layout
conf = yaml.safe_load('''
- secret:
@@ -357,7 +359,7 @@
secret = configloader.SecretParser.fromYaml(layout, conf)
layout.addSecret(secret)
- base = configloader.JobParser.fromYaml(tenant, layout, {
+ base = configloader.JobParser.fromYaml(self.tenant, self.layout, {
'_source_context': self.context,
'_start_mark': self.start_mark,
'name': 'base',
@@ -441,8 +443,9 @@
def test_job_inheritance_job_tree(self):
tenant = model.Tenant('tenant')
- layout = model.Layout()
- tenant.addUntrustedProject(self.project)
+ layout = model.Layout(tenant)
+ tpc = model.TenantProjectConfig(self.project)
+ tenant.addUntrustedProject(tpc)
pipeline = model.Pipeline('gate', layout)
layout.addPipeline(pipeline)
@@ -517,13 +520,14 @@
def test_inheritance_keeps_matchers(self):
tenant = model.Tenant('tenant')
- layout = model.Layout()
+ layout = model.Layout(tenant)
pipeline = model.Pipeline('gate', layout)
layout.addPipeline(pipeline)
queue = model.ChangeQueue(pipeline)
project = model.Project('project', self.source)
- tenant.addUntrustedProject(project)
+ tpc = model.TenantProjectConfig(project)
+ tenant.addUntrustedProject(tpc)
base = configloader.JobParser.fromYaml(tenant, layout, {
'_source_context': self.context,
@@ -567,11 +571,13 @@
self.assertEqual([], item.getJobs())
def test_job_source_project(self):
- tenant = model.Tenant('tenant')
- layout = model.Layout()
+ tenant = self.tenant
+ layout = self.layout
base_project = model.Project('base_project', self.source)
base_context = model.SourceContext(base_project, 'master',
'test', True)
+ tpc = model.TenantProjectConfig(base_project)
+ tenant.addUntrustedProject(tpc)
base = configloader.JobParser.fromYaml(tenant, layout, {
'_source_context': base_context,
@@ -583,6 +589,8 @@
other_project = model.Project('other_project', self.source)
other_context = model.SourceContext(other_project, 'master',
'test', True)
+ tpc = model.TenantProjectConfig(other_project)
+ tenant.addUntrustedProject(tpc)
base2 = configloader.JobParser.fromYaml(tenant, layout, {
'_source_context': other_context,
'_start_mark': self.start_mark,
@@ -604,7 +612,8 @@
self.layout.addJob(job)
project2 = model.Project('project2', self.source)
- self.tenant.addUntrustedProject(project2)
+ tpc2 = model.TenantProjectConfig(project2)
+ self.tenant.addUntrustedProject(tpc2)
context2 = model.SourceContext(project2, 'master',
'test', True)
@@ -805,7 +814,8 @@
connection=connection1)
source1_project1 = model.Project('project1', source1)
- tenant.addConfigProject(source1_project1)
+ source1_project1_tpc = model.TenantProjectConfig(source1_project1)
+ tenant.addConfigProject(source1_project1_tpc)
d = {'project1':
{'git1.example.com': source1_project1}}
self.assertEqual(d, tenant.projects)
@@ -815,7 +825,8 @@
tenant.getProject('git1.example.com/project1'))
source1_project2 = model.Project('project2', source1)
- tenant.addUntrustedProject(source1_project2)
+ tpc = model.TenantProjectConfig(source1_project2)
+ tenant.addUntrustedProject(tpc)
d = {'project1':
{'git1.example.com': source1_project1},
'project2':
@@ -832,7 +843,8 @@
connection=connection2)
source2_project1 = model.Project('project1', source2)
- tenant.addUntrustedProject(source2_project1)
+ tpc = model.TenantProjectConfig(source2_project1)
+ tenant.addUntrustedProject(tpc)
d = {'project1':
{'git1.example.com': source1_project1,
'git2.example.com': source2_project1},
@@ -851,7 +863,8 @@
tenant.getProject('git2.example.com/project1'))
source2_project2 = model.Project('project2', source2)
- tenant.addConfigProject(source2_project2)
+ tpc = model.TenantProjectConfig(source2_project2)
+ tenant.addConfigProject(tpc)
d = {'project1':
{'git1.example.com': source1_project1,
'git2.example.com': source2_project1},
@@ -877,7 +890,8 @@
tenant.getProject('git2.example.com/project2'))
source1_project2b = model.Project('subpath/project2', source1)
- tenant.addConfigProject(source1_project2b)
+ tpc = model.TenantProjectConfig(source1_project2b)
+ tenant.addConfigProject(tpc)
d = {'project1':
{'git1.example.com': source1_project1,
'git2.example.com': source2_project1},
@@ -898,7 +912,8 @@
tenant.getProject('git1.example.com/subpath/project2'))
source2_project2b = model.Project('subpath/project2', source2)
- tenant.addConfigProject(source2_project2b)
+ tpc = model.TenantProjectConfig(source2_project2b)
+ tenant.addConfigProject(tpc)
d = {'project1':
{'git1.example.com': source1_project1,
'git2.example.com': source2_project1},
@@ -927,4 +942,4 @@
with testtools.ExpectedException(
Exception,
"Project project1 is already in project index"):
- tenant._addProject(source1_project1)
+ tenant._addProject(source1_project1_tpc)
diff --git a/tests/unit/test_scheduler.py b/tests/unit/test_scheduler.py
index 1d24585..aa061ff 100755
--- a/tests/unit/test_scheduler.py
+++ b/tests/unit/test_scheduler.py
@@ -3367,8 +3367,9 @@
# Only C's test jobs are queued because window is still 1.
self.assertEqual(len(self.builds), 2)
- self.assertEqual(self.builds[0].name, 'project-test1')
- self.assertEqual(self.builds[1].name, 'project-test2')
+ builds = self.getSortedBuilds()
+ self.assertEqual(builds[0].name, 'project-test1')
+ self.assertEqual(builds[1].name, 'project-test2')
self.executor_server.release('project-.*')
self.waitUntilSettled()
diff --git a/tests/unit/test_v3.py b/tests/unit/test_v3.py
index 327f745..2b865cf 100644
--- a/tests/unit/test_v3.py
+++ b/tests/unit/test_v3.py
@@ -486,6 +486,62 @@
self.assertIn('appears multiple times', A.messages[0],
"A should have a syntax error reported")
+ def test_multi_repo(self):
+ downstream_repo_conf = textwrap.dedent(
+ """
+ - project:
+ name: org/project1
+ tenant-one-gate:
+ jobs:
+ - project-test1
+
+ - job:
+ name: project1-test1
+ parent: project-test1
+ """)
+
+ file_dict = {'.zuul.yaml': downstream_repo_conf}
+ A = self.fake_gerrit.addFakeChange('org/project1', 'master', 'A',
+ files=file_dict)
+ A.addApproval('code-review', 2)
+ self.fake_gerrit.addEvent(A.addApproval('approved', 1))
+ self.waitUntilSettled()
+
+ self.assertEqual(A.data['status'], 'MERGED')
+ self.fake_gerrit.addEvent(A.getChangeMergedEvent())
+ self.waitUntilSettled()
+
+ upstream_repo_conf = textwrap.dedent(
+ """
+ - job:
+ name: project-test1
+
+ - job:
+ name: project-test2
+
+ - project:
+ name: org/project
+ tenant-one-gate:
+ jobs:
+ - project-test1
+ """)
+
+ file_dict = {'.zuul.yaml': upstream_repo_conf}
+ B = self.fake_gerrit.addFakeChange('org/project', 'master', 'B',
+ files=file_dict)
+ B.addApproval('code-review', 2)
+ self.fake_gerrit.addEvent(B.addApproval('approved', 1))
+ self.waitUntilSettled()
+
+ self.assertEqual(B.data['status'], 'MERGED')
+ self.fake_gerrit.addEvent(B.getChangeMergedEvent())
+ self.waitUntilSettled()
+
+ tenant = self.sched.abide.tenants.get('tenant-one')
+ # Ensure the latest change is reflected in the config; if it
+ # isn't this will raise an exception.
+ tenant.layout.getJob('project-test2')
+
class TestAnsible(AnsibleZuulTestCase):
# A temporary class to hold new tests while others are disabled
@@ -630,3 +686,17 @@
self.assertHistory([
dict(name='project-test', result='SUCCESS', changes='1,1 2,1'),
])
+
+
+class TestShadow(ZuulTestCase):
+ tenant_config_file = 'config/shadow/main.yaml'
+
+ def test_shadow(self):
+ # Test that a repo is allowed to shadow another's job definitions.
+ A = self.fake_gerrit.addFakeChange('org/project', 'master', 'A')
+ self.fake_gerrit.addEvent(A.getPatchsetCreatedEvent(1))
+ self.waitUntilSettled()
+ self.assertHistory([
+ dict(name='test1', result='SUCCESS', changes='1,1'),
+ dict(name='test2', result='SUCCESS', changes='1,1'),
+ ])
diff --git a/zuul/ansible/library/command.py b/zuul/ansible/library/command.py
index 00020c7..f701b48 100644
--- a/zuul/ansible/library/command.py
+++ b/zuul/ansible/library/command.py
@@ -356,6 +356,10 @@
if umask:
old_umask = os.umask(umask)
+ t = None
+ fail_json_kwargs = None
+ ret = None
+
try:
if self._debug:
self.log('Executing: ' + clean_args)
@@ -394,11 +398,27 @@
except (OSError, IOError):
e = get_exception()
self.log("Error Executing CMD:%s Exception:%s" % (clean_args, to_native(e)))
- self.fail_json(rc=e.errno, msg=to_native(e), cmd=clean_args)
+ fail_json_kwargs = dict(rc=e.errno, msg=str(e), cmd=clean_args)
except Exception:
e = get_exception()
self.log("Error Executing CMD:%s Exception:%s" % (clean_args, to_native(traceback.format_exc())))
- self.fail_json(rc=257, msg=to_native(e), exception=traceback.format_exc(), cmd=clean_args)
+ fail_json_kwargs = dict(rc=257, msg=str(e), exception=traceback.format_exc(), cmd=clean_args)
+ finally:
+ if t:
+ with Console(zuul_log_id) as console:
+ if t.isAlive():
+ console.addLine("[Zuul] standard output/error still open "
+ "after child exited")
+ if ret is None and fail_json_kwargs:
+ ret = fail_json_kwargs['rc']
+ elif ret is None and not fail_json_kwargs:
+ ret = -1
+ console.addLine("[Zuul] Task exit code: %s\n" % ret)
+ if ret == -1 and not fail_json_kwargs:
+ self.fail_json(rc=ret, msg="Something went horribly wrong during task execution")
+
+ if fail_json_kwargs:
+ self.fail_json(**fail_json_kwargs)
# Restore env settings
for key, val in old_env_vals.items():
diff --git a/zuul/configloader.py b/zuul/configloader.py
index 627ebdd..3c9ecf7 100644
--- a/zuul/configloader.py
+++ b/zuul/configloader.py
@@ -887,6 +887,7 @@
project_dict = {str: {
'include': to_list(classes),
'exclude': to_list(classes),
+ 'shadow': to_list(str),
}}
project = vs.Any(str, project_dict)
@@ -932,13 +933,18 @@
tenant = model.Tenant(conf['name'])
tenant.unparsed_config = conf
unparsed_config = model.UnparsedTenantConfig()
- config_projects, untrusted_projects = \
+ # tpcs is TenantProjectConfigs
+ config_tpcs, untrusted_tpcs = \
TenantParser._loadTenantProjects(
project_key_dir, connections, conf)
- for project in config_projects:
- tenant.addConfigProject(project)
- for project in untrusted_projects:
- tenant.addUntrustedProject(project)
+ for tpc in config_tpcs:
+ tenant.addConfigProject(tpc)
+ for tpc in untrusted_tpcs:
+ tenant.addUntrustedProject(tpc)
+
+ for tpc in config_tpcs + untrusted_tpcs:
+ TenantParser._resolveShadowProjects(tenant, tpc)
+
tenant.config_projects_config, tenant.untrusted_projects_config = \
TenantParser._loadTenantInRepoLayouts(merger, connections,
tenant.config_projects,
@@ -953,6 +959,13 @@
return tenant
@staticmethod
+ def _resolveShadowProjects(tenant, tpc):
+ shadow_projects = []
+ for sp in tpc.shadow_projects:
+ shadow_projects.append(tenant.getProject(sp)[1])
+ tpc.shadow_projects = frozenset(shadow_projects)
+
+ @staticmethod
def _loadProjectKeys(project_key_dir, connection_name, project):
project.private_key_file = (
os.path.join(project_key_dir, connection_name,
@@ -1007,9 +1020,11 @@
# Return a project object whether conf is a dict or a str
project = source.getProject(conf)
project_include = current_include
+ shadow_projects = []
else:
project_name = list(conf.keys())[0]
project = source.getProject(project_name)
+ shadow_projects = as_list(conf[project_name].get('shadow', []))
project_include = frozenset(
as_list(conf[project_name].get('include', [])))
@@ -1020,8 +1035,11 @@
if project_exclude:
project_include = frozenset(project_include - project_exclude)
- project.load_classes = frozenset(project_include)
- return project
+ tenant_project_config = model.TenantProjectConfig(project)
+ tenant_project_config.load_classes = frozenset(project_include)
+ tenant_project_config.shadow_projects = shadow_projects
+
+ return tenant_project_config
@staticmethod
def _getProjects(source, conf, current_include):
@@ -1065,21 +1083,22 @@
current_include = default_include
for conf_repo in conf_source.get('config-projects', []):
- projects = TenantParser._getProjects(source, conf_repo,
- current_include)
- for project in projects:
+ # tpcs = TenantProjectConfigs
+ tpcs = TenantParser._getProjects(source, conf_repo,
+ current_include)
+ for tpc in tpcs:
TenantParser._loadProjectKeys(
- project_key_dir, source_name, project)
- config_projects.append(project)
+ project_key_dir, source_name, tpc.project)
+ config_projects.append(tpc)
current_include = frozenset(default_include - set(['pipeline']))
for conf_repo in conf_source.get('untrusted-projects', []):
- projects = TenantParser._getProjects(source, conf_repo,
- current_include)
- for project in projects:
+ tpcs = TenantParser._getProjects(source, conf_repo,
+ current_include)
+ for tpc in tpcs:
TenantParser._loadProjectKeys(
- project_key_dir, source_name, project)
- untrusted_projects.append(project)
+ project_key_dir, source_name, tpc.project)
+ untrusted_projects.append(tpc)
return config_projects, untrusted_projects
@@ -1090,13 +1109,20 @@
untrusted_projects_config = model.UnparsedTenantConfig()
jobs = []
+ # In some cases, we can use cached data, but it's still
+ # important that we process that in the same order along with
+ # any jobs that we run. This class is used to hold the cached
+ # data and is inserted in the ordered jobs list for later
+ # processing.
+ class CachedDataJob(object):
+ def __init__(self, config_project, project):
+ self.config_project = config_project
+ self.project = project
+
for project in config_projects:
# If we have cached data (this is a reconfiguration) use it.
if cached and project.unparsed_config:
- TenantParser.log.info(
- "Loading previously parsed configuration from %s" %
- (project,))
- config_projects_config.extend(project.unparsed_config)
+ jobs.append(CachedDataJob(True, project))
continue
# Otherwise, prepare an empty unparsed config object to
# hold cached data later.
@@ -1115,10 +1141,7 @@
for project in untrusted_projects:
# If we have cached data (this is a reconfiguration) use it.
if cached and project.unparsed_config:
- TenantParser.log.info(
- "Loading previously parsed configuration from %s" %
- (project,))
- untrusted_projects_config.extend(project.unparsed_config)
+ jobs.append(CachedDataJob(False, project))
continue
# Otherwise, prepare an empty unparsed config object to
# hold cached data later.
@@ -1146,8 +1169,21 @@
# complete in the order they were executed which is the
# same order they were defined in the main config file.
# This is important for correct inheritance.
+ if isinstance(job, CachedDataJob):
+ TenantParser.log.info(
+ "Loading previously parsed configuration from %s" %
+ (job.project,))
+ if job.config_project:
+ config_projects_config.extend(
+ job.project.unparsed_config)
+ else:
+ untrusted_projects_config.extend(
+ job.project.unparsed_config)
+ continue
TenantParser.log.debug("Waiting for cat job %s" % (job,))
job.wait()
+ TenantParser.log.debug("Cat job %s got files %s" %
+ (job, job.files))
loaded = False
files = sorted(job.files.keys())
for conf_root in ['zuul.yaml', '.zuul.yaml', 'zuul.d', '.zuul.d']:
@@ -1198,12 +1234,18 @@
return config
@staticmethod
+ def _getLoadClasses(tenant, conf_object):
+ project = conf_object['_source_context'].project
+ tpc = tenant.project_configs[project.canonical_name]
+ return tpc.load_classes
+
+ @staticmethod
def _parseLayoutItems(layout, tenant, data, scheduler, connections,
skip_pipelines=False, skip_semaphores=False):
if not skip_pipelines:
for config_pipeline in data.pipelines:
- classes = config_pipeline['_source_context'].\
- project.load_classes
+ classes = TenantParser._getLoadClasses(
+ tenant, config_pipeline)
if 'pipeline' not in classes:
continue
layout.addPipeline(PipelineParser.fromYaml(
@@ -1211,7 +1253,7 @@
scheduler, config_pipeline))
for config_nodeset in data.nodesets:
- classes = config_nodeset['_source_context'].project.load_classes
+ classes = TenantParser._getLoadClasses(tenant, config_nodeset)
if 'nodeset' not in classes:
continue
with configuration_exceptions('nodeset', config_nodeset):
@@ -1219,29 +1261,33 @@
layout, config_nodeset))
for config_secret in data.secrets:
- classes = config_secret['_source_context'].project.load_classes
+ classes = TenantParser._getLoadClasses(tenant, config_secret)
if 'secret' not in classes:
continue
layout.addSecret(SecretParser.fromYaml(layout, config_secret))
for config_job in data.jobs:
- classes = config_job['_source_context'].project.load_classes
+ classes = TenantParser._getLoadClasses(tenant, config_job)
if 'job' not in classes:
continue
with configuration_exceptions('job', config_job):
job = JobParser.fromYaml(tenant, layout, config_job)
- layout.addJob(job)
+ added = layout.addJob(job)
+ if not added:
+ TenantParser.log.debug(
+ "Skipped adding job %s which shadows an existing job" %
+ (job,))
if not skip_semaphores:
for config_semaphore in data.semaphores:
- classes = config_semaphore['_source_context'].\
- project.load_classes
+ classes = TenantParser._getLoadClasses(
+ tenant, config_semaphore)
if 'semaphore' not in classes:
continue
layout.addSemaphore(SemaphoreParser.fromYaml(config_semaphore))
for config_template in data.project_templates:
- classes = config_template['_source_context'].project.load_classes
+ classes = TenantParser._getLoadClasses(tenant, config_template)
if 'project-template' not in classes:
continue
layout.addProjectTemplate(ProjectTemplateParser.fromYaml(
@@ -1255,10 +1301,11 @@
# each of the project stanzas. Each one may be (should
# be!) from a different repo, so filter them according to
# the include/exclude rules before parsing them.
- filtered_projects = [
- p for p in config_projects if
- 'project' in p['_source_context'].project.load_classes
- ]
+ filtered_projects = []
+ for config_project in config_projects:
+ classes = TenantParser._getLoadClasses(tenant, config_project)
+ if 'project' in classes:
+ filtered_projects.append(config_project)
if not filtered_projects:
continue
@@ -1268,13 +1315,11 @@
@staticmethod
def _parseLayout(base, tenant, data, scheduler, connections):
- layout = model.Layout()
+ layout = model.Layout(tenant)
TenantParser._parseLayoutItems(layout, tenant, data,
scheduler, connections)
- layout.tenant = tenant
-
for pipeline in layout.pipelines.values():
pipeline.manager._postConfig(layout)
@@ -1390,7 +1435,7 @@
for project in tenant.untrusted_projects:
self._loadDynamicProjectData(config, project, files, False)
- layout = model.Layout()
+ layout = model.Layout(tenant)
# NOTE: the actual pipeline objects (complete with queues and
# enqueued items) are copied by reference here. This allows
# our shadow dynamic configuration to continue to interact
diff --git a/zuul/executor/client.py b/zuul/executor/client.py
index 3612eae..442d1c5 100644
--- a/zuul/executor/client.py
+++ b/zuul/executor/client.py
@@ -284,6 +284,7 @@
host_keys=node.host_keys,
provider=node.provider,
region=node.region,
+ ssh_port=node.ssh_port,
interface_ip=node.interface_ip,
public_ipv6=node.public_ipv6,
public_ipv4=node.public_ipv4))
diff --git a/zuul/executor/server.py b/zuul/executor/server.py
index d90ba92..fc9acb3 100644
--- a/zuul/executor/server.py
+++ b/zuul/executor/server.py
@@ -118,12 +118,12 @@
env.update(self.env)
key_path = os.path.expanduser(key_path)
self.log.debug('Adding SSH Key {}'.format(key_path))
- output = ''
try:
- output = subprocess.check_output(['ssh-add', key_path], env=env,
- stderr=subprocess.PIPE)
- except subprocess.CalledProcessError:
- self.log.error('ssh-add failed: {}'.format(output))
+ subprocess.check_output(['ssh-add', key_path], env=env,
+ stderr=subprocess.PIPE)
+ except subprocess.CalledProcessError as e:
+ self.log.error('ssh-add failed. stdout: %s, stderr: %s',
+ e.output, e.stderr)
raise
self.log.info('Added SSH Key {}'.format(key_path))
@@ -613,7 +613,12 @@
def executeJob(self, job):
self.job_workers[job.unique] = AnsibleJob(self, job)
- self.job_workers[job.unique].run()
+
+ try:
+ self.job_workers[job.unique].run()
+ except Exception:
+ del self.job_workers[job.unique]
+ raise
def finishJob(self, unique):
del(self.job_workers[unique])
@@ -704,7 +709,13 @@
def run(self):
self.ssh_agent.start()
- self.ssh_agent.add(self.private_key_file)
+
+ try:
+ self.ssh_agent.add(self.private_key_file)
+ except Exception:
+ self.ssh_agent.stop()
+ raise
+
self.running = True
self.thread = threading.Thread(target=self.execute)
self.thread.start()
@@ -884,10 +895,10 @@
result = None
pre_failed = False
- for playbook in self.jobdir.pre_playbooks:
+ for count, playbook in enumerate(self.jobdir.pre_playbooks):
# TODOv3(pabelanger): Implement pre-run timeout setting.
pre_status, pre_code = self.runAnsiblePlaybook(
- playbook, args['timeout'])
+ playbook, args['timeout'], phase='pre', count=count)
if pre_status != self.RESULT_NORMAL or pre_code != 0:
# These should really never fail, so return None and have
# zuul try again
@@ -897,7 +908,7 @@
if not pre_failed:
job_status, job_code = self.runAnsiblePlaybook(
- self.jobdir.playbook, args['timeout'])
+ self.jobdir.playbook, args['timeout'], phase='run')
if job_status == self.RESULT_TIMED_OUT:
return 'TIMED_OUT'
if job_status == self.RESULT_ABORTED:
@@ -913,10 +924,10 @@
else:
result = 'FAILURE'
- for playbook in self.jobdir.post_playbooks:
+ for count, playbook in enumerate(self.jobdir.post_playbooks):
# TODOv3(pabelanger): Implement post-run timeout setting.
post_status, post_code = self.runAnsiblePlaybook(
- playbook, args['timeout'], success)
+ playbook, args['timeout'], success, phase='post', count=count)
if post_status != self.RESULT_NORMAL or post_code != 0:
# If we encountered a pre-failure, that takes
# precedence over the post result.
@@ -936,9 +947,11 @@
# results in the wrong thing being in interface_ip
# TODO(jeblair): Move this notice to the docs.
ip = node.get('interface_ip')
+ port = node.get('ssh_port', 22)
host_vars = dict(
ansible_host=ip,
ansible_user=self.executor_server.default_username,
+ ansible_port=port,
nodepool=dict(
az=node.get('az'),
provider=node.get('provider'),
@@ -946,7 +959,10 @@
host_keys = []
for key in node.get('host_keys'):
- host_keys.append("%s %s" % (ip, key))
+ if port != 22:
+ host_keys.append("[%s]:%s %s" % (ip, port, key))
+ else:
+ host_keys.append("%s %s" % (ip, key))
hosts.append(dict(
name=node['name'],
@@ -1368,7 +1384,8 @@
return (self.RESULT_NORMAL, ret)
- def runAnsiblePlaybook(self, playbook, timeout, success=None):
+ def runAnsiblePlaybook(self, playbook, timeout, success=None,
+ phase=None, count=None):
env_copy = os.environ.copy()
env_copy['LOGNAME'] = 'zuul'
@@ -1382,6 +1399,12 @@
if success is not None:
cmd.extend(['-e', 'success=%s' % str(bool(success))])
+ if phase:
+ cmd.extend(['-e', 'zuul_execution_phase=%s' % phase])
+
+ if count is not None:
+ cmd.extend(['-e', 'zuul_execution_phase_count=%s' % count])
+
result, code = self.runAnsible(
cmd=cmd, timeout=timeout, trusted=playbook.trusted)
self.log.debug("Ansible complete, result %s code %s" % (
diff --git a/zuul/model.py b/zuul/model.py
index f901c55..4744bbe 100644
--- a/zuul/model.py
+++ b/zuul/model.py
@@ -331,9 +331,6 @@
self.foreign = foreign
self.unparsed_config = None
self.unparsed_branch_config = {} # branch -> UnparsedTenantConfig
- # Configuration object classes to include or exclude when
- # loading zuul config files.
- self.load_classes = frozenset()
def __str__(self):
return self.name
@@ -361,6 +358,7 @@
self.public_ipv4 = None
self.private_ipv4 = None
self.public_ipv6 = None
+ self.ssh_port = 22
self._keys = []
self.az = None
self.provider = None
@@ -2000,6 +1998,20 @@
self.merge_mode = None
+class TenantProjectConfig(object):
+ """A project in the context of a tenant.
+
+ A Project is globally unique in the system; however, when used in
+ a tenant, some metadata about the project local to the tenant is
+ stored in a TenantProjectConfig.
+ """
+
+ def __init__(self, project):
+ self.project = project
+ self.load_classes = set()
+ self.shadow_projects = set()
+
+
class ProjectConfig(object):
# Represents a project configuration
def __init__(self, name):
@@ -2011,6 +2023,7 @@
class UnparsedAbideConfig(object):
+
"""A collection of yaml lists that has not yet been parsed into objects.
An Abide is a collection of tenants.
@@ -2120,8 +2133,8 @@
class Layout(object):
"""Holds all of the Pipelines."""
- def __init__(self):
- self.tenant = None
+ def __init__(self, tenant):
+ self.tenant = tenant
self.project_configs = {}
self.project_templates = {}
self.pipelines = OrderedDict()
@@ -2150,6 +2163,18 @@
prior_jobs = [j for j in self.getJobs(job.name) if
j.source_context.project !=
job.source_context.project]
+ # Unless the repo is permitted to shadow another. If so, and
+ # the job we are adding is from a repo that is permitted to
+ # shadow the one with the older jobs, skip adding this job.
+ job_project = job.source_context.project
+ job_tpc = self.tenant.project_configs[job_project.canonical_name]
+ skip_add = False
+ for prior_job in prior_jobs[:]:
+ prior_project = prior_job.source_context.project
+ if prior_project in job_tpc.shadow_projects:
+ prior_jobs.remove(prior_job)
+ skip_add = True
+
if prior_jobs:
raise Exception("Job %s in %s is not permitted to shadow "
"job %s in %s" % (
@@ -2157,11 +2182,13 @@
job.source_context.project,
prior_jobs[0],
prior_jobs[0].source_context.project))
-
+ if skip_add:
+ return False
if job.name in self.jobs:
self.jobs[job.name].append(job)
else:
self.jobs[job.name] = [job]
+ return True
def addNodeSet(self, nodeset):
if nodeset.name in self.nodesets:
@@ -2358,6 +2385,9 @@
# The unparsed config from those projects.
self.untrusted_projects_config = None
self.semaphore_handler = SemaphoreHandler()
+ # Metadata about projects for this tenant
+ # canonical project name -> TenantProjectConfig
+ self.project_configs = {}
# A mapping of project names to projects. project_name ->
# VALUE where VALUE is a further dictionary of
@@ -2365,17 +2395,21 @@
self.projects = {}
self.canonical_hostnames = set()
- def _addProject(self, project):
+ def _addProject(self, tpc):
"""Add a project to the project index
- :arg Project project: The project to add.
+ :arg TenantProjectConfig tpc: The TenantProjectConfig (with
+ associated project) to add.
+
"""
+ project = tpc.project
self.canonical_hostnames.add(project.canonical_hostname)
hostname_dict = self.projects.setdefault(project.name, {})
if project.canonical_hostname in hostname_dict:
raise Exception("Project %s is already in project index" %
(project,))
hostname_dict[project.canonical_hostname] = project
+ self.project_configs[project.canonical_name] = tpc
def getProject(self, name):
"""Return a project given its name.
@@ -2422,13 +2456,13 @@
raise Exception("Project %s is neither trusted nor untrusted" %
(project,))
- def addConfigProject(self, project):
- self.config_projects.append(project)
- self._addProject(project)
+ def addConfigProject(self, tpc):
+ self.config_projects.append(tpc.project)
+ self._addProject(tpc)
- def addUntrustedProject(self, project):
- self.untrusted_projects.append(project)
- self._addProject(project)
+ def addUntrustedProject(self, tpc):
+ self.untrusted_projects.append(tpc.project)
+ self._addProject(tpc)
def getSafeAttributes(self):
return Attributes(name=self.name)
diff --git a/zuul/scheduler.py b/zuul/scheduler.py
index fe6a673..dd0846d 100644
--- a/zuul/scheduler.py
+++ b/zuul/scheduler.py
@@ -779,6 +779,7 @@
self.log.error("Unable to handle event %s" % event)
event.done()
except Exception:
+ self.log.exception("Exception in management event:")
event.exception(sys.exc_info())
self.management_event_queue.task_done()
diff --git a/zuul/web.py b/zuul/web.py
index 2ef65fe..6ca491a 100644
--- a/zuul/web.py
+++ b/zuul/web.py
@@ -138,6 +138,9 @@
break
elif msg.type == aiohttp.WSMsgType.CLOSED:
break
+ except asyncio.CancelledError:
+ self.log.debug("Websocket request handling cancelled")
+ pass
except Exception as e:
self.log.exception("Websocket exception:")
await ws.close(code=4009, message=str(e).encode('utf-8'))