Merge "Remove ansible files at startup before copy" into feature/zuulv3
diff --git a/doc/source/developer/datamodel.rst b/doc/source/developer/datamodel.rst
index acb8612..c4ff4a0 100644
--- a/doc/source/developer/datamodel.rst
+++ b/doc/source/developer/datamodel.rst
@@ -8,7 +8,7 @@
Pipelines have a configured
:py:class:`~zuul.manager.PipelineManager` which controlls how
-the :py:class:`Change <zuul.model.Changeish>` objects are enqueued and
+the :py:class:`Ref <zuul.model.Ref>` objects are enqueued and
processed.
There are currently two,
@@ -35,7 +35,7 @@
.. autoclass:: zuul.model.Build
The :py:class:`~zuul.manager.base.PipelineManager` enqueues each
-:py:class:`Change <zuul.model.Changeish>` into the
+:py:class:`Ref <zuul.model.Ref>` into the
:py:class:`~zuul.model.ChangeQueue` in a :py:class:`~zuul.model.QueueItem`.
.. autoclass:: zuul.model.QueueItem
diff --git a/etc/zuul.conf-sample b/etc/zuul.conf-sample
index bf19895..1065cec 100644
--- a/etc/zuul.conf-sample
+++ b/etc/zuul.conf-sample
@@ -18,6 +18,9 @@
;git_user_name=zuul
zuul_url=http://zuul.example.com/p
+[executor]
+default_username=zuul
+
[webapp]
listen_address=0.0.0.0
port=8001
diff --git a/tests/base.py b/tests/base.py
index 65ded50..d8f88b7 100755
--- a/tests/base.py
+++ b/tests/base.py
@@ -69,6 +69,7 @@
import zuul.merger.client
import zuul.merger.merger
import zuul.merger.server
+import zuul.model
import zuul.nodepool
import zuul.zk
from zuul.exceptions import MergeFailure
@@ -531,6 +532,18 @@
def _start_watcher_thread(self, *args, **kw):
pass
+ def _uploadPack(self, project):
+ ret = ('00a31270149696713ba7e06f1beb760f20d359c4abed HEAD\x00'
+ 'multi_ack thin-pack side-band side-band-64k ofs-delta '
+ 'shallow no-progress include-tag multi_ack_detailed no-done\n')
+ path = os.path.join(self.upstream_root, project.name)
+ repo = git.Repo(path)
+ for ref in repo.refs:
+ r = ref.object.hexsha + ' ' + ref.path + '\n'
+ ret += '%04x%s' % (len(r) + 4, r)
+ ret += '0000'
+ return ret
+
def getGitUrl(self, project):
return os.path.join(self.upstream_root, project.name)
@@ -1046,28 +1059,6 @@
(self.result, self.name, self.uuid, self.changes))
-class FakeURLOpener(object):
- def __init__(self, upstream_root, url):
- self.upstream_root = upstream_root
- self.url = url
-
- def read(self):
- res = urllib.parse.urlparse(self.url)
- path = res.path
- project = '/'.join(path.split('/')[2:-2])
- ret = '001e# service=git-upload-pack\n'
- ret += ('000000a31270149696713ba7e06f1beb760f20d359c4abed HEAD\x00'
- 'multi_ack thin-pack side-band side-band-64k ofs-delta '
- 'shallow no-progress include-tag multi_ack_detailed no-done\n')
- path = os.path.join(self.upstream_root, project)
- repo = git.Repo(path)
- for ref in repo.refs:
- r = ref.object.hexsha + ' ' + ref.path + '\n'
- ret += '%04x%s' % (len(r) + 4, r)
- ret += '0000'
- return ret
-
-
class FakeStatsd(threading.Thread):
def __init__(self):
threading.Thread.__init__(self)
@@ -1927,14 +1918,6 @@
self.configure_connections()
self.sched.registerConnections(self.connections, self.webapp)
- def URLOpenerFactory(*args, **kw):
- if isinstance(args[0], urllib.request.Request):
- return old_urlopen(*args, **kw)
- return FakeURLOpener(self.upstream_root, *args, **kw)
-
- old_urlopen = urllib.request.urlopen
- urllib.request.urlopen = URLOpenerFactory
-
self.executor_server = RecordingExecutorServer(
self.config, self.connections,
jobdir_root=self.test_root,
@@ -2457,6 +2440,12 @@
jobs = filter(lambda x: x.result == result, jobs)
return len(list(jobs))
+ def getBuildByName(self, name):
+ for build in self.builds:
+ if build.name == name:
+ return build
+ raise Exception("Unable to find build %s" % name)
+
def getJobFromHistory(self, name, project=None):
for job in self.history:
if (job.name == name and
diff --git a/tests/fixtures/config/inventory/git/common-config/playbooks/group-inventory.yaml b/tests/fixtures/config/inventory/git/common-config/playbooks/group-inventory.yaml
new file mode 100644
index 0000000..f679dce
--- /dev/null
+++ b/tests/fixtures/config/inventory/git/common-config/playbooks/group-inventory.yaml
@@ -0,0 +1,2 @@
+- hosts: all
+ tasks: []
diff --git a/tests/fixtures/config/inventory/git/common-config/playbooks/single-inventory.yaml b/tests/fixtures/config/inventory/git/common-config/playbooks/single-inventory.yaml
new file mode 100644
index 0000000..f679dce
--- /dev/null
+++ b/tests/fixtures/config/inventory/git/common-config/playbooks/single-inventory.yaml
@@ -0,0 +1,2 @@
+- hosts: all
+ tasks: []
diff --git a/tests/fixtures/config/inventory/git/common-config/zuul.yaml b/tests/fixtures/config/inventory/git/common-config/zuul.yaml
new file mode 100644
index 0000000..184bd80
--- /dev/null
+++ b/tests/fixtures/config/inventory/git/common-config/zuul.yaml
@@ -0,0 +1,42 @@
+- pipeline:
+ name: check
+ manager: independent
+ allow-secrets: true
+ trigger:
+ gerrit:
+ - event: patchset-created
+ success:
+ gerrit:
+ verified: 1
+ failure:
+ gerrit:
+ verified: -1
+
+- nodeset:
+ name: nodeset1
+ nodes:
+ - name: controller
+ image: controller-image
+ - name: compute1
+ image: compute-image
+ - name: compute2
+ image: compute-image
+ groups:
+ - name: ceph-osd
+ nodes:
+ - controller
+ - name: ceph-monitor
+ nodes:
+ - controller
+ - compute1
+ - compute2
+
+- job:
+ name: single-inventory
+ nodes:
+ - name: ubuntu-xenial
+ image: ubuntu-xenial
+
+- job:
+ name: group-inventory
+ nodes: nodeset1
diff --git a/tests/fixtures/config/inventory/git/org_project/.zuul.yaml b/tests/fixtures/config/inventory/git/org_project/.zuul.yaml
new file mode 100644
index 0000000..26310a0
--- /dev/null
+++ b/tests/fixtures/config/inventory/git/org_project/.zuul.yaml
@@ -0,0 +1,6 @@
+- project:
+ name: org/project
+ check:
+ jobs:
+ - single-inventory
+ - group-inventory
diff --git a/tests/fixtures/config/inventory/git/org_project/README b/tests/fixtures/config/inventory/git/org_project/README
new file mode 100644
index 0000000..9daeafb
--- /dev/null
+++ b/tests/fixtures/config/inventory/git/org_project/README
@@ -0,0 +1 @@
+test
diff --git a/tests/fixtures/config/inventory/main.yaml b/tests/fixtures/config/inventory/main.yaml
new file mode 100644
index 0000000..208e274
--- /dev/null
+++ b/tests/fixtures/config/inventory/main.yaml
@@ -0,0 +1,8 @@
+- tenant:
+ name: tenant-one
+ source:
+ gerrit:
+ config-projects:
+ - common-config
+ untrusted-projects:
+ - org/project
diff --git a/tests/unit/test_executor.py b/tests/unit/test_executor.py
index 100e4ec..39b6070 100644
--- a/tests/unit/test_executor.py
+++ b/tests/unit/test_executor.py
@@ -61,6 +61,19 @@
'not have change %s' % (
project, build, number, change.subject))
+ def assertBuildStates(self, states, projects):
+ for number, build in enumerate(self.builds):
+ work = build.getWorkspaceRepos(projects)
+ state = states[number]
+
+ for project in projects:
+ self.assertRepoState(work[project], state[project],
+ project, build, number)
+
+ self.executor_server.hold_jobs_in_build = False
+ self.executor_server.release()
+ self.waitUntilSettled()
+
@simple_layout('layouts/repo-checkout-two-project.yaml')
def test_one_branch(self):
self.executor_server.hold_jobs_in_build = True
@@ -90,18 +103,7 @@
},
]
- for number, build in enumerate(self.builds):
- self.log.debug("Build parameters: %s", build.parameters)
- work = build.getWorkspaceRepos(projects)
- state = states[number]
-
- for project in projects:
- self.assertRepoState(work[project], state[project],
- project, build, number)
-
- self.executor_server.hold_jobs_in_build = False
- self.executor_server.release()
- self.waitUntilSettled()
+ self.assertBuildStates(states, projects)
@simple_layout('layouts/repo-checkout-four-project.yaml')
def test_multi_branch(self):
@@ -156,18 +158,7 @@
},
]
- for number, build in enumerate(self.builds):
- self.log.debug("Build parameters: %s", build.parameters)
- work = build.getWorkspaceRepos(projects)
- state = states[number]
-
- for project in projects:
- self.assertRepoState(work[project], state[project],
- project, build, number)
-
- self.executor_server.hold_jobs_in_build = False
- self.executor_server.release()
- self.waitUntilSettled()
+ self.assertBuildStates(states, projects)
@simple_layout('layouts/repo-checkout-six-project.yaml')
def test_project_override(self):
@@ -252,18 +243,7 @@
},
]
- for number, build in enumerate(self.builds):
- self.log.debug("Build parameters: %s", build.parameters)
- work = build.getWorkspaceRepos(projects)
- state = states[number]
-
- for project in projects:
- self.assertRepoState(work[project], state[project],
- project, build, number)
-
- self.executor_server.hold_jobs_in_build = False
- self.executor_server.release()
- self.waitUntilSettled()
+ self.assertBuildStates(states, projects)
def test_periodic(self):
# This test can not use simple_layout because it must start
@@ -300,18 +280,7 @@
},
]
- for number, build in enumerate(self.builds):
- self.log.debug("Build parameters: %s", build.parameters)
- work = build.getWorkspaceRepos(projects)
- state = states[number]
-
- for project in projects:
- self.assertRepoState(work[project], state[project],
- project, build, number)
-
- self.executor_server.hold_jobs_in_build = False
- self.executor_server.release()
- self.waitUntilSettled()
+ self.assertBuildStates(states, projects)
@simple_layout('layouts/repo-checkout-post.yaml')
def test_post_and_master_checkout(self):
@@ -335,15 +304,4 @@
},
]
- for number, build in enumerate(self.builds):
- self.log.debug("Build parameters: %s", build.parameters)
- work = build.getWorkspaceRepos(projects)
- state = states[number]
-
- for project in projects:
- self.assertRepoState(work[project], state[project],
- project, build, number)
-
- self.executor_server.hold_jobs_in_build = False
- self.executor_server.release()
- self.waitUntilSettled()
+ self.assertBuildStates(states, projects)
diff --git a/tests/unit/test_inventory.py b/tests/unit/test_inventory.py
new file mode 100644
index 0000000..2835d30
--- /dev/null
+++ b/tests/unit/test_inventory.py
@@ -0,0 +1,82 @@
+#!/usr/bin/env python
+
+# Copyright 2017 Red Hat, Inc.
+#
+# Licensed under the Apache License, Version 2.0 (the "License"); you may
+# not use this file except in compliance with the License. You may obtain
+# a copy of the License at
+#
+# http://www.apache.org/licenses/LICENSE-2.0
+#
+# Unless required by applicable law or agreed to in writing, software
+# distributed under the License is distributed on an "AS IS" BASIS, WITHOUT
+# WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. See the
+# License for the specific language governing permissions and limitations
+# under the License.
+
+import os
+
+import yaml
+
+from tests.base import ZuulTestCase
+
+
+class TestInventory(ZuulTestCase):
+
+ tenant_config_file = 'config/inventory/main.yaml'
+
+ def setUp(self):
+ super(TestInventory, self).setUp()
+ self.executor_server.hold_jobs_in_build = True
+ A = self.fake_gerrit.addFakeChange('org/project', 'master', 'A')
+ self.fake_gerrit.addEvent(A.getPatchsetCreatedEvent(1))
+ self.waitUntilSettled()
+
+ def _get_build_inventory(self, name):
+ build = self.getBuildByName(name)
+ inv_path = os.path.join(build.jobdir.root, 'ansible', 'inventory.yaml')
+ return yaml.safe_load(open(inv_path, 'r'))
+
+ def test_single_inventory(self):
+
+ inventory = self._get_build_inventory('single-inventory')
+
+ all_nodes = ('ubuntu-xenial',)
+ self.assertIn('all', inventory)
+ self.assertIn('hosts', inventory['all'])
+ self.assertIn('vars', inventory['all'])
+ for node_name in all_nodes:
+ self.assertIn(node_name, inventory['all']['hosts'])
+ self.assertIn('zuul', inventory['all']['vars'])
+ z_vars = inventory['all']['vars']['zuul']
+ self.assertIn('executor', z_vars)
+ self.assertIn('src_root', z_vars['executor'])
+ self.assertIn('job', z_vars)
+ self.assertEqual(z_vars['job'], 'single-inventory')
+
+ self.executor_server.release()
+ self.waitUntilSettled()
+
+ def test_group_inventory(self):
+
+ inventory = self._get_build_inventory('group-inventory')
+
+ all_nodes = ('controller', 'compute1', 'compute2')
+ self.assertIn('all', inventory)
+ self.assertIn('hosts', inventory['all'])
+ self.assertIn('vars', inventory['all'])
+ for group_name in ('ceph-osd', 'ceph-monitor'):
+ self.assertIn(group_name, inventory)
+ for node_name in all_nodes:
+ self.assertIn(node_name, inventory['all']['hosts'])
+ self.assertIn(node_name,
+ inventory['ceph-monitor']['hosts'])
+ self.assertIn('zuul', inventory['all']['vars'])
+ z_vars = inventory['all']['vars']['zuul']
+ self.assertIn('executor', z_vars)
+ self.assertIn('src_root', z_vars['executor'])
+ self.assertIn('job', z_vars)
+ self.assertEqual(z_vars['job'], 'group-inventory')
+
+ self.executor_server.release()
+ self.waitUntilSettled()
diff --git a/tests/unit/test_scheduler.py b/tests/unit/test_scheduler.py
index 1f057ee..0ac42c1 100755
--- a/tests/unit/test_scheduler.py
+++ b/tests/unit/test_scheduler.py
@@ -63,7 +63,11 @@
self.assertIsNone(self.getJobFromHistory('project-test2').node)
# TODOv3(jeblair): we may want to report stats by tenant (also?).
- self.assertReportedStat('gerrit.event.comment-added', value='1|c')
+ # Per-driver
+ self.assertReportedStat('zuul.event.gerrit.comment-added', value='1|c')
+ # Per-driver per-connection
+ self.assertReportedStat('zuul.event.gerrit.gerrit.comment-added',
+ value='1|c')
self.assertReportedStat('zuul.pipeline.gate.current_changes',
value='1|g')
self.assertReportedStat('zuul.pipeline.gate.job.project-merge.SUCCESS',
diff --git a/tests/unit/test_v3.py b/tests/unit/test_v3.py
index 18a49db..707515a 100644
--- a/tests/unit/test_v3.py
+++ b/tests/unit/test_v3.py
@@ -328,6 +328,46 @@
self.assertIn('not permitted to shadow', A.messages[0],
"A should have a syntax error reported")
+ def test_untrusted_pipeline_error(self):
+ in_repo_conf = textwrap.dedent(
+ """
+ - pipeline:
+ name: test
+ """)
+
+ file_dict = {'.zuul.yaml': in_repo_conf}
+ A = self.fake_gerrit.addFakeChange('org/project', 'master', 'A',
+ files=file_dict)
+ A.addApproval('code-review', 2)
+ self.fake_gerrit.addEvent(A.addApproval('approved', 1))
+ self.waitUntilSettled()
+
+ self.assertEqual(A.data['status'], 'NEW')
+ self.assertEqual(A.reported, 1,
+ "A should report failure")
+ self.assertIn('Pipelines may not be defined', A.messages[0],
+ "A should have a syntax error reported")
+
+ def test_untrusted_project_error(self):
+ in_repo_conf = textwrap.dedent(
+ """
+ - project:
+ name: org/project1
+ """)
+
+ file_dict = {'.zuul.yaml': in_repo_conf}
+ A = self.fake_gerrit.addFakeChange('org/project', 'master', 'A',
+ files=file_dict)
+ A.addApproval('code-review', 2)
+ self.fake_gerrit.addEvent(A.addApproval('approved', 1))
+ self.waitUntilSettled()
+
+ self.assertEqual(A.data['status'], 'NEW')
+ self.assertEqual(A.reported, 1,
+ "A should report failure")
+ self.assertIn('the only project definition permitted', A.messages[0],
+ "A should have a syntax error reported")
+
class TestAnsible(AnsibleZuulTestCase):
# A temporary class to hold new tests while others are disabled
diff --git a/zuul/configloader.py b/zuul/configloader.py
index c0267ed..f78e8a4 100644
--- a/zuul/configloader.py
+++ b/zuul/configloader.py
@@ -47,6 +47,16 @@
pass
+class NodeFromGroupNotFoundError(Exception):
+ def __init__(self, nodeset, node, group):
+ message = textwrap.dedent("""\
+ In nodeset {nodeset} the group {group} contains a
+ node named {node} which is not defined in the nodeset.""")
+ message = textwrap.fill(message.format(nodeset=nodeset,
+ node=node, group=group))
+ super(NodeFromGroupNotFoundError, self).__init__(message)
+
+
class ProjectNotFoundError(Exception):
def __init__(self, project):
message = textwrap.dedent("""\
@@ -58,6 +68,24 @@
super(ProjectNotFoundError, self).__init__(message)
+class PipelineNotPermittedError(Exception):
+ def __init__(self):
+ message = textwrap.dedent("""\
+ Pipelines may not be defined in untrusted repos,
+ they may only be defined in config repos.""")
+ message = textwrap.fill(message)
+ super(PipelineNotPermittedError, self).__init__(message)
+
+
+class ProjectNotPermittedError(Exception):
+ def __init__(self):
+ message = textwrap.dedent("""\
+ Within an untrusted project, the only project definition
+ permitted is that of the project itself.""")
+ message = textwrap.fill(message)
+ super(ProjectNotPermittedError, self).__init__(message)
+
+
def indent(s):
return '\n'.join([' ' + x for x in s.split('\n')])
@@ -169,8 +197,13 @@
vs.Required('image'): str,
}
+ group = {vs.Required('name'): str,
+ vs.Required('nodes'): [str]
+ }
+
nodeset = {vs.Required('name'): str,
vs.Required('nodes'): [node],
+ 'groups': [group],
'_source_context': model.SourceContext,
'_start_mark': yaml.Mark,
}
@@ -182,9 +215,18 @@
with configuration_exceptions('nodeset', conf):
NodeSetParser.getSchema()(conf)
ns = model.NodeSet(conf['name'])
+ node_names = []
for conf_node in as_list(conf['nodes']):
node = model.Node(conf_node['name'], conf_node['image'])
ns.addNode(node)
+ node_names.append(conf_node['name'])
+ for conf_group in as_list(conf.get('groups', [])):
+ for node_name in conf_group['nodes']:
+ if node_name not in node_names:
+ raise NodeFromGroupNotFoundError(conf['name'], node_name,
+ conf_group['name'])
+ group = model.Group(conf_group['name'], conf_group['nodes'])
+ ns.addGroup(group)
return ns
@@ -562,6 +604,11 @@
configs = []
for conf in conf_list:
+ with configuration_exceptions('project', conf):
+ if not conf['_source_context'].trusted:
+ if project != conf['_source_context'].project:
+ raise ProjectNotPermittedError()
+
# Make a copy since we modify this later via pop
conf = copy.deepcopy(conf)
conf_templates = conf.pop('templates', [])
@@ -773,12 +820,12 @@
for source_name, require_config in conf.get('require', {}).items():
source = connections.getSource(source_name)
- manager.changeish_filters.extend(
+ manager.ref_filters.extend(
source.getRequireFilters(require_config))
for source_name, reject_config in conf.get('reject', {}).items():
source = connections.getSource(source_name)
- manager.changeish_filters.extend(
+ manager.ref_filters.extend(
source.getRejectFilters(reject_config))
for trigger_name, trigger_config in conf.get('trigger').items():
@@ -879,7 +926,7 @@
key_dir = os.path.dirname(project.private_key_file)
if not os.path.isdir(key_dir):
- os.makedirs(key_dir)
+ os.makedirs(key_dir, 0o700)
TenantParser.log.info(
"Generating RSA keypair for project %s" % (project.name,)
@@ -896,6 +943,9 @@
with open(project.private_key_file, 'wb') as f:
f.write(pem_private_key)
+ # Ensure private key is read/write for zuul user only.
+ os.chmod(project.private_key_file, 0o600)
+
@staticmethod
def _loadKeys(project):
# Check the key files specified are there
@@ -1032,10 +1082,11 @@
@staticmethod
def _parseUntrustedProjectLayout(data, source_context):
- # TODOv3(jeblair): this should implement some rules to protect
- # aspects of the config that should not be changed in-repo
config = model.UnparsedTenantConfig()
config.extend(safe_load_yaml(data, source_context))
+ if config.pipelines:
+ with configuration_exceptions('pipeline', config.pipelines[0]):
+ raise PipelineNotPermittedError()
return config
@staticmethod
diff --git a/zuul/connection/__init__.py b/zuul/connection/__init__.py
index 49624d7..90ab39c 100644
--- a/zuul/connection/__init__.py
+++ b/zuul/connection/__init__.py
@@ -14,6 +14,7 @@
import abc
+import extras
import six
@@ -43,6 +44,26 @@
self.driver = driver
self.connection_name = connection_name
self.connection_config = connection_config
+ self.statsd = extras.try_import('statsd.statsd')
+
+ def logEvent(self, event):
+ self.log.debug(
+ 'Scheduling {driver} event from {connection}: {event}'.format(
+ driver=self.driver.name,
+ connection=self.connection_name,
+ event=event.type))
+ try:
+ if self.statsd:
+ self.statsd.incr(
+ 'zuul.event.{driver}.{event}'.format(
+ driver=self.driver.name, event=event.type))
+ self.statsd.incr(
+ 'zuul.event.{driver}.{connection}.{event}'.format(
+ driver=self.driver.name,
+ connection=self.connection_name,
+ event=event.type))
+ except:
+ self.log.exception("Exception reporting event stats")
def onLoad(self):
pass
diff --git a/zuul/driver/gerrit/gerritconnection.py b/zuul/driver/gerrit/gerritconnection.py
index bc587b9..a5e1f22 100644
--- a/zuul/driver/gerrit/gerritconnection.py
+++ b/zuul/driver/gerrit/gerritconnection.py
@@ -19,7 +19,6 @@
import threading
import time
from six.moves import queue as Queue
-from six.moves import urllib
from six.moves import shlex_quote
import paramiko
import logging
@@ -143,6 +142,7 @@
self.connection._getChange(event.change_number,
event.patch_number,
refresh=True)
+ self.connection.logEvent(event)
self.connection.sched.addEvent(event)
def run(self):
@@ -699,6 +699,11 @@
chunk, more_changes = _query_chunk("%s %s" % (query, resume))
return alldata
+ def _uploadPack(self, project_name):
+ cmd = "git-upload-pack %s" % project_name
+ out, err = self._ssh(cmd, "0000")
+ return out
+
def _open(self):
client = paramiko.SSHClient()
client.load_system_host_keys()
@@ -738,19 +743,13 @@
return (out, err)
def getInfoRefs(self, project):
- url = "%s/p/%s/info/refs?service=git-upload-pack" % (
- self.baseurl, project.name)
try:
- data = urllib.request.urlopen(url).read()
+ data = self._uploadPack(project)
except:
- self.log.error("Cannot get references from %s" % url)
- raise # keeps urllib error informations
+ self.log.error("Cannot get references from %s" % project)
+ raise # keeps error information
ret = {}
- read_headers = False
read_advertisement = False
- if data[4] != '#':
- raise Exception("Gerrit repository does not support "
- "git-upload-pack")
i = 0
while i < len(data):
if len(data) - i < 4:
@@ -766,10 +765,6 @@
raise Exception("Invalid data in info/refs")
line = data[i:i + plen]
i += plen
- if not read_headers:
- if plen == 0:
- read_headers = True
- continue
if not read_advertisement:
read_advertisement = True
continue
diff --git a/zuul/driver/gerrit/gerritreporter.py b/zuul/driver/gerrit/gerritreporter.py
index f8e8b03..90c95e3 100644
--- a/zuul/driver/gerrit/gerritreporter.py
+++ b/zuul/driver/gerrit/gerritreporter.py
@@ -25,7 +25,7 @@
name = 'gerrit'
log = logging.getLogger("zuul.GerritReporter")
- def report(self, pipeline, item):
+ def report(self, item):
"""Send a message to gerrit."""
# If the source is no GerritSource we cannot report anything here.
@@ -38,7 +38,7 @@
self.connection.canonical_hostname:
return
- message = self._formatItemReport(pipeline, item)
+ message = self._formatItemReport(item)
self.log.debug("Report change %s, params %s, message: %s" %
(item.change, self.config, message))
diff --git a/zuul/driver/github/githubconnection.py b/zuul/driver/github/githubconnection.py
index 02c795e..6a3c09e 100644
--- a/zuul/driver/github/githubconnection.py
+++ b/zuul/driver/github/githubconnection.py
@@ -119,7 +119,7 @@
if event:
event.project_hostname = self.connection.canonical_hostname
- self.log.debug('Scheduling github event: {0}'.format(event.type))
+ self.connection.logEvent(event)
self.connection.sched.addEvent(event)
def _event_push(self, body):
diff --git a/zuul/driver/github/githubreporter.py b/zuul/driver/github/githubreporter.py
index fc3b64d..29edb8a 100644
--- a/zuul/driver/github/githubreporter.py
+++ b/zuul/driver/github/githubreporter.py
@@ -39,25 +39,25 @@
if not isinstance(self._unlabels, list):
self._unlabels = [self._unlabels]
- def report(self, pipeline, item):
+ def report(self, item):
"""Comment on PR and set commit status."""
if self._create_comment:
- self.addPullComment(pipeline, item)
+ self.addPullComment(item)
if (self._commit_status is not None and
hasattr(item.change, 'patchset') and
item.change.patchset is not None):
- self.setPullStatus(pipeline, item)
+ self.setPullStatus(item)
if (self._merge and
hasattr(item.change, 'number')):
self.mergePull(item)
if not item.change.is_merged:
- msg = self._formatItemReportMergeFailure(pipeline, item)
- self.addPullComment(pipeline, item, msg)
+ msg = self._formatItemReportMergeFailure(item)
+ self.addPullComment(item, msg)
if self._labels or self._unlabels:
self.setLabels(item)
- def addPullComment(self, pipeline, item, comment=None):
- message = comment or self._formatItemReport(pipeline, item)
+ def addPullComment(self, item, comment=None):
+ message = comment or self._formatItemReport(item)
project = item.change.project.name
pr_number = item.change.number
self.log.debug(
@@ -65,10 +65,11 @@
(item.change, self.config, message))
self.connection.commentPull(project, pr_number, message)
- def setPullStatus(self, pipeline, item):
+ def setPullStatus(self, item):
project = item.change.project.name
sha = item.change.patchset
- context = '%s/%s' % (pipeline.layout.tenant.name, pipeline.name)
+ context = '%s/%s' % (item.pipeline.layout.tenant.name,
+ item.pipeline.name)
state = self._commit_status
url_pattern = self.config.get('status-url')
@@ -79,8 +80,8 @@
url = item.formatUrlPattern(url_pattern) if url_pattern else ''
description = ''
- if pipeline.description:
- description = pipeline.description
+ if item.pipeline.description:
+ description = item.pipeline.description
self.log.debug(
'Reporting change %s, params %s, status:\n'
diff --git a/zuul/driver/smtp/smtpreporter.py b/zuul/driver/smtp/smtpreporter.py
index 35eb69f..1f232e9 100644
--- a/zuul/driver/smtp/smtpreporter.py
+++ b/zuul/driver/smtp/smtpreporter.py
@@ -24,9 +24,9 @@
name = 'smtp'
log = logging.getLogger("zuul.SMTPReporter")
- def report(self, pipeline, item):
+ def report(self, item):
"""Send the compiled report message via smtp."""
- message = self._formatItemReport(pipeline, item)
+ message = self._formatItemReport(item)
self.log.debug("Report change %s, params %s, message: %s" %
(item.change, self.config, message))
diff --git a/zuul/driver/sql/sqlconnection.py b/zuul/driver/sql/sqlconnection.py
index 4b1b1a2..e478d33 100644
--- a/zuul/driver/sql/sqlconnection.py
+++ b/zuul/driver/sql/sqlconnection.py
@@ -43,6 +43,8 @@
self.engine = sa.create_engine(self.dburi)
self._migrate()
self._setup_tables()
+ self.zuul_buildset_table, self.zuul_build_table \
+ = self._setup_tables()
self.tables_established = True
except sa.exc.NoSuchModuleError:
self.log.exception(
@@ -68,10 +70,11 @@
alembic.command.upgrade(config, 'head')
- def _setup_tables(self):
+ @staticmethod
+ def _setup_tables():
metadata = sa.MetaData()
- self.zuul_buildset_table = sa.Table(
+ zuul_buildset_table = sa.Table(
BUILDSET_TABLE, metadata,
sa.Column('id', sa.Integer, primary_key=True),
sa.Column('zuul_ref', sa.String(255)),
@@ -84,7 +87,7 @@
sa.Column('message', sa.TEXT()),
)
- self.zuul_build_table = sa.Table(
+ zuul_build_table = sa.Table(
BUILD_TABLE, metadata,
sa.Column('id', sa.Integer, primary_key=True),
sa.Column('buildset_id', sa.Integer,
@@ -99,6 +102,8 @@
sa.Column('node_name', sa.String(255)),
)
+ return zuul_buildset_table, zuul_build_table
+
def getSchema():
sql_connection = v.Any(str, v.Schema(dict))
diff --git a/zuul/driver/sql/sqlreporter.py b/zuul/driver/sql/sqlreporter.py
index 349abe8..5f93ce8 100644
--- a/zuul/driver/sql/sqlreporter.py
+++ b/zuul/driver/sql/sqlreporter.py
@@ -31,7 +31,7 @@
# TODO(jeblair): document this is stored as NULL if unspecified
self.result_score = config.get('score', None)
- def report(self, pipeline, item):
+ def report(self, item):
"""Create an entry into a database."""
if not self.connection.tables_established:
@@ -51,7 +51,7 @@
ref=refspec,
score=self.result_score,
message=self._formatItemReport(
- pipeline, item, with_jobs=False),
+ item, with_jobs=False),
)
buildset_ins_result = conn.execute(buildset_ins)
build_inserts = []
diff --git a/zuul/executor/client.py b/zuul/executor/client.py
index 5a1820e..cf8d973 100644
--- a/zuul/executor/client.py
+++ b/zuul/executor/client.py
@@ -274,8 +274,9 @@
params['post_playbooks'] = [x.toDict() for x in job.post_run]
params['roles'] = [x.toDict() for x in job.roles]
+ nodeset = item.current_build_set.getJobNodeSet(job.name)
nodes = []
- for node in item.current_build_set.getJobNodeSet(job.name).getNodes():
+ for node in nodeset.getNodes():
nodes.append(dict(name=node.name, image=node.image,
az=node.az,
host_keys=node.host_keys,
@@ -285,6 +286,7 @@
public_ipv6=node.public_ipv6,
public_ipv4=node.public_ipv4))
params['nodes'] = nodes
+ params['groups'] = [group.toDict() for group in nodeset.getGroups()]
params['vars'] = copy.deepcopy(job.variables)
if job.auth:
for secret in job.auth.secrets:
diff --git a/zuul/executor/server.py b/zuul/executor/server.py
index 36b17e1..f71bb92 100644
--- a/zuul/executor/server.py
+++ b/zuul/executor/server.py
@@ -182,8 +182,7 @@
self.ansible_root = os.path.join(self.root, 'ansible')
os.makedirs(self.ansible_root)
self.known_hosts = os.path.join(self.ansible_root, 'known_hosts')
- self.inventory = os.path.join(self.ansible_root, 'inventory')
- self.vars = os.path.join(self.ansible_root, 'vars.yaml')
+ self.inventory = os.path.join(self.ansible_root, 'inventory.yaml')
self.playbooks = [] # The list of candidate playbooks
self.playbook = None # A pointer to the candidate we have chosen
self.pre_playbooks = []
@@ -312,6 +311,31 @@
shutil.copy(os.path.join(library_path, fn), target_dir)
+def make_inventory_dict(nodes, groups, all_vars):
+
+ hosts = {}
+ for node in nodes:
+ hosts[node['name']] = node['host_vars']
+
+ inventory = {
+ 'all': {
+ 'hosts': hosts,
+ 'vars': all_vars,
+ }
+ }
+
+ for group in groups:
+ group_hosts = {}
+        for node_name in group['nodes']:
+            # children is a dict with None as values because we don't have
+            # any per-group variables. If we did, None would be a dict
+            # with the per-group variables
+ group_hosts[node_name] = None
+ inventory[group['name']] = {'hosts': group_hosts}
+
+ return inventory
+
+
class ExecutorMergeWorker(gear.TextWorker):
def __init__(self, executor_server, *args, **kw):
self.zuul_executor_server = executor_server
@@ -353,6 +377,12 @@
else:
self.merge_root = '/var/lib/zuul/executor-git'
+ if self.config.has_option('executor', 'default_username'):
+ self.default_username = self.config.get('executor',
+ 'default_username')
+ else:
+ self.default_username = 'zuul'
+
if self.config.has_option('merger', 'git_user_email'):
self.merge_email = self.config.get('merger', 'git_user_email')
else:
@@ -884,6 +914,7 @@
ip = node.get('interface_ip')
host_vars = dict(
ansible_host=ip,
+ ansible_user=self.executor_server.default_username,
nodepool_az=node.get('az'),
nodepool_provider=node.get('provider'),
nodepool_region=node.get('region'))
@@ -1122,28 +1153,24 @@
self.jobdir.trusted_roles_path.append(trusted_role_path)
def prepareAnsibleFiles(self, args):
- keys = []
- with open(self.jobdir.inventory, 'w') as inventory:
- for item in self.getHostList(args):
- inventory.write(item['name'])
- for k, v in item['host_vars'].items():
- inventory.write(' %s="%s"' % (k, v))
- inventory.write('\n')
- for key in item['host_keys']:
- keys.append(key)
+ all_vars = dict(args['vars'])
+ all_vars['zuul']['executor'] = dict(
+ hostname=self.executor_server.hostname,
+ src_root=self.jobdir.src_root,
+ log_root=self.jobdir.log_root)
+
+ nodes = self.getHostList(args)
+ inventory = make_inventory_dict(nodes, args['groups'], all_vars)
+
+ with open(self.jobdir.inventory, 'w') as inventory_yaml:
+ inventory_yaml.write(
+ yaml.safe_dump(inventory, default_flow_style=False))
with open(self.jobdir.known_hosts, 'w') as known_hosts:
- for key in keys:
- known_hosts.write('%s\n' % key)
+ for node in nodes:
+ for key in node['host_keys']:
+ known_hosts.write('%s\n' % key)
- with open(self.jobdir.vars, 'w') as vars_yaml:
- zuul_vars = dict(args['vars'])
- zuul_vars['zuul']['executor'] = dict(
- hostname=self.executor_server.hostname,
- src_root=self.jobdir.src_root,
- log_root=self.jobdir.log_root)
- vars_yaml.write(
- yaml.safe_dump(zuul_vars, default_flow_style=False))
self.writeAnsibleConfig(self.jobdir.untrusted_config)
self.writeAnsibleConfig(self.jobdir.trusted_config, trusted=True)
@@ -1297,12 +1324,10 @@
else:
verbose = '-v'
- cmd = ['ansible-playbook', playbook.path]
+ cmd = ['ansible-playbook', verbose, playbook.path]
if success is not None:
cmd.extend(['-e', 'success=%s' % str(bool(success))])
- cmd.extend(['-e@%s' % self.jobdir.vars, verbose])
-
return self.runAnsible(
cmd=cmd, timeout=timeout, trusted=playbook.trusted)
diff --git a/zuul/manager/__init__.py b/zuul/manager/__init__.py
index 4005b01..c3958d7 100644
--- a/zuul/manager/__init__.py
+++ b/zuul/manager/__init__.py
@@ -47,7 +47,7 @@
self.sched = sched
self.pipeline = pipeline
self.event_filters = []
- self.changeish_filters = []
+ self.ref_filters = []
def __str__(self):
return "<%s %s>" % (self.__class__.__name__, self.pipeline.name)
@@ -55,7 +55,7 @@
def _postConfig(self, layout):
self.log.info("Configured Pipeline Manager %s" % self.pipeline.name)
self.log.info(" Requirements:")
- for f in self.changeish_filters:
+ for f in self.ref_filters:
self.log.info(" %s" % f)
self.log.info(" Events:")
for e in self.event_filters:
@@ -165,7 +165,7 @@
report_errors = []
if len(action_reporters) > 0:
for reporter in action_reporters:
- ret = reporter.report(self.pipeline, item)
+ ret = reporter.report(item)
if ret:
report_errors.append(ret)
if len(report_errors) == 0:
@@ -281,7 +281,7 @@
return False
if not ignore_requirements:
- for f in self.changeish_filters:
+ for f in self.ref_filters:
if f.connection_name != change.project.connection_name:
self.log.debug("Filter %s skipped for change %s due "
"to mismatched connections" % (f, change))
diff --git a/zuul/model.py b/zuul/model.py
index b6c6366..0d92301 100644
--- a/zuul/model.py
+++ b/zuul/model.py
@@ -410,6 +410,37 @@
self._keys = keys
+class Group(object):
+ """A logical group of nodes for use by a job.
+
+ A Group is a named set of node names that will be provided to
+ jobs in the inventory to describe logical units on which some
+ subset of tasks run.
+ """
+
+ def __init__(self, name, nodes):
+ self.name = name
+ self.nodes = nodes
+
+ def __repr__(self):
+ return '<Group %s %s>' % (self.name, str(self.nodes))
+
+ def __ne__(self, other):
+ return not self.__eq__(other)
+
+ def __eq__(self, other):
+ if not isinstance(other, Group):
+ return False
+ return (self.name == other.name and
+ self.nodes == other.nodes)
+
+ def toDict(self):
+ return {
+ 'name': self.name,
+ 'nodes': self.nodes
+ }
+
+
class NodeSet(object):
"""A set of nodes.
@@ -423,6 +454,7 @@
def __init__(self, name=None):
self.name = name or ''
self.nodes = OrderedDict()
+ self.groups = OrderedDict()
def __ne__(self, other):
return not self.__eq__(other)
@@ -437,6 +469,8 @@
n = NodeSet(self.name)
for name, node in self.nodes.items():
n.addNode(Node(node.name, node.image))
+ for name, group in self.groups.items():
+ n.addGroup(Group(group.name, group.nodes[:]))
return n
def addNode(self, node):
@@ -447,12 +481,20 @@
def getNodes(self):
return list(self.nodes.values())
+ def addGroup(self, group):
+ if group.name in self.groups:
+ raise Exception("Duplicate group in %s" % (self,))
+ self.groups[group.name] = group
+
+ def getGroups(self):
+ return list(self.groups.values())
+
def __repr__(self):
if self.name:
name = self.name + ' '
else:
name = ''
- return '<NodeSet %s%s>' % (name, self.nodes)
+ return '<NodeSet %s%s%s>' % (name, self.nodes, self.groups)
class NodeRequest(object):
@@ -1280,7 +1322,7 @@
def __init__(self, queue, change):
self.pipeline = queue.pipeline
self.queue = queue
- self.change = change # a changeish
+ self.change = change # a ref
self.build_sets = []
self.dequeued_needing_change = False
self.current_build_set = BuildSet(self)
@@ -1589,15 +1631,14 @@
return (result, url)
def formatJSON(self):
- changeish = self.change
ret = {}
ret['active'] = self.active
ret['live'] = self.live
- if hasattr(changeish, 'url') and changeish.url is not None:
- ret['url'] = changeish.url
+ if hasattr(self.change, 'url') and self.change.url is not None:
+ ret['url'] = self.change.url
else:
ret['url'] = None
- ret['id'] = changeish._id()
+ ret['id'] = self.change._id()
if self.item_ahead:
ret['item_ahead'] = self.item_ahead.change._id()
else:
@@ -1605,8 +1646,8 @@
ret['items_behind'] = [i.change._id() for i in self.items_behind]
ret['failing_reasons'] = self.current_build_set.failing_reasons
ret['zuul_ref'] = self.current_build_set.ref
- if changeish.project:
- ret['project'] = changeish.project.name
+ if self.change.project:
+ ret['project'] = self.change.project.name
else:
# For cross-project dependencies with the depends-on
# project not known to zuul, the project is None
@@ -1614,8 +1655,8 @@
ret['project'] = "Unknown Project"
ret['enqueue_time'] = int(self.enqueue_time * 1000)
ret['jobs'] = []
- if hasattr(changeish, 'owner'):
- ret['owner'] = changeish.owner
+ if hasattr(self.change, 'owner'):
+ ret['owner'] = self.change.owner
else:
ret['owner'] = None
max_remaining = 0
@@ -1683,20 +1724,19 @@
return ret
def formatStatus(self, indent=0, html=False):
- changeish = self.change
indent_str = ' ' * indent
ret = ''
- if html and hasattr(changeish, 'url') and changeish.url is not None:
+ if html and getattr(self.change, 'url', None) is not None:
ret += '%sProject %s change <a href="%s">%s</a>\n' % (
indent_str,
- changeish.project.name,
- changeish.url,
- changeish._id())
+ self.change.project.name,
+ self.change.url,
+ self.change._id())
else:
ret += '%sProject %s change %s based on %s\n' % (
indent_str,
- changeish.project.name,
- changeish._id(),
+ self.change.project.name,
+ self.change._id(),
self.item_ahead)
for job in self.getJobs():
build = self.current_build_set.getBuild(job.name)
diff --git a/zuul/reporter/__init__.py b/zuul/reporter/__init__.py
index 9c8e953..dc99c8b 100644
--- a/zuul/reporter/__init__.py
+++ b/zuul/reporter/__init__.py
@@ -37,7 +37,7 @@
self._action = action
@abc.abstractmethod
- def report(self, pipeline, item):
+ def report(self, item):
"""Send the compiled report message."""
def getSubmitAllowNeeds(self):
@@ -61,57 +61,55 @@
}
return format_methods[self._action]
- # TODOv3(jeblair): Consider removing pipeline argument in favor of
- # item.pipeline
- def _formatItemReport(self, pipeline, item, with_jobs=True):
+ def _formatItemReport(self, item, with_jobs=True):
"""Format a report from the given items. Usually to provide results to
a reporter taking free-form text."""
- ret = self._getFormatter()(pipeline, item, with_jobs)
+ ret = self._getFormatter()(item, with_jobs)
- if pipeline.footer_message:
- ret += '\n' + pipeline.footer_message
+ if item.pipeline.footer_message:
+ ret += '\n' + item.pipeline.footer_message
return ret
- def _formatItemReportStart(self, pipeline, item, with_jobs=True):
+ def _formatItemReportStart(self, item, with_jobs=True):
status_url = ''
if self.connection.sched.config.has_option('zuul', 'status_url'):
status_url = self.connection.sched.config.get('zuul',
'status_url')
- return pipeline.start_message.format(pipeline=pipeline,
- status_url=status_url)
+ return item.pipeline.start_message.format(pipeline=item.pipeline,
+ status_url=status_url)
- def _formatItemReportSuccess(self, pipeline, item, with_jobs=True):
- msg = pipeline.success_message
+ def _formatItemReportSuccess(self, item, with_jobs=True):
+ msg = item.pipeline.success_message
if with_jobs:
- msg += '\n\n' + self._formatItemReportJobs(pipeline, item)
+ msg += '\n\n' + self._formatItemReportJobs(item)
return msg
- def _formatItemReportFailure(self, pipeline, item, with_jobs=True):
+ def _formatItemReportFailure(self, item, with_jobs=True):
if item.dequeued_needing_change:
msg = 'This change depends on a change that failed to merge.\n'
elif item.didMergerFail():
- msg = pipeline.merge_failure_message
+ msg = item.pipeline.merge_failure_message
elif item.getConfigError():
msg = item.getConfigError()
else:
- msg = pipeline.failure_message
+ msg = item.pipeline.failure_message
if with_jobs:
- msg += '\n\n' + self._formatItemReportJobs(pipeline, item)
+ msg += '\n\n' + self._formatItemReportJobs(item)
return msg
- def _formatItemReportMergeFailure(self, pipeline, item, with_jobs=True):
- return pipeline.merge_failure_message
+ def _formatItemReportMergeFailure(self, item, with_jobs=True):
+ return item.pipeline.merge_failure_message
- def _formatItemReportDisabled(self, pipeline, item, with_jobs=True):
+ def _formatItemReportDisabled(self, item, with_jobs=True):
if item.current_build_set.result == 'SUCCESS':
- return self._formatItemReportSuccess(pipeline, item)
+ return self._formatItemReportSuccess(item)
elif item.current_build_set.result == 'FAILURE':
- return self._formatItemReportFailure(pipeline, item)
+ return self._formatItemReportFailure(item)
else:
- return self._formatItemReport(pipeline, item)
+ return self._formatItemReport(item)
- def _formatItemReportJobs(self, pipeline, item):
+ def _formatItemReportJobs(self, item):
# Return the list of jobs portion of the report
ret = ''
diff --git a/zuul/scheduler.py b/zuul/scheduler.py
index 40d5eb7..61f1e5f 100644
--- a/zuul/scheduler.py
+++ b/zuul/scheduler.py
@@ -258,11 +258,6 @@
def addEvent(self, event):
self.log.debug("Adding trigger event: %s" % event)
- try:
- if self.statsd:
- self.statsd.incr('gerrit.event.%s' % event.type)
- except:
- self.log.exception("Exception reporting event stats")
self.trigger_event_queue.put(event)
self.wake_event.set()
self.log.debug("Done adding trigger event: %s" % event)