Merge "Allow per-repo selection of configuration classes to load" into feature/zuulv3
diff --git a/doc/source/developer/datamodel.rst b/doc/source/developer/datamodel.rst
index acb8612..c4ff4a0 100644
--- a/doc/source/developer/datamodel.rst
+++ b/doc/source/developer/datamodel.rst
@@ -8,7 +8,7 @@
Pipelines have a configured
:py:class:`~zuul.manager.PipelineManager` which controls how
-the :py:class:`Change <zuul.model.Changeish>` objects are enqueued and
+the :py:class:`Ref <zuul.model.Ref>` objects are enqueued and
processed.
There are currently two,
@@ -35,7 +35,7 @@
.. autoclass:: zuul.model.Build
The :py:class:`~zuul.manager.base.PipelineManager` enqueues each
-:py:class:`Change <zuul.model.Changeish>` into the
+:py:class:`Ref <zuul.model.Ref>` into the
:py:class:`~zuul.model.ChangeQueue` in a :py:class:`~zuul.model.QueueItem`.
.. autoclass:: zuul.model.QueueItem
diff --git a/tests/base.py b/tests/base.py
index d8f88b7..76d604f 100755
--- a/tests/base.py
+++ b/tests/base.py
@@ -605,21 +605,6 @@
def getPullRequestClosedEvent(self):
return self._getPullRequestEvent('closed')
- def getPushEvent(self, old_sha, ref='refs/heads/master'):
- name = 'push'
- data = {
- 'ref': ref,
- 'before': old_sha,
- 'after': self.head_sha,
- 'repository': {
- 'full_name': self.project
- },
- 'sender': {
- 'login': 'ghuser'
- }
- }
- return (name, data)
-
def addComment(self, message):
self.comments.append(message)
self._updateTimeStamp()
@@ -909,7 +894,8 @@
self.pull_requests.append(pull_request)
return pull_request
- def getPushEvent(self, project, ref, old_rev=None, new_rev=None):
+ def getPushEvent(self, project, ref, old_rev=None, new_rev=None,
+ added_files=[], removed_files=[], modified_files=[]):
if not old_rev:
old_rev = '00000000000000000000000000000000'
if not new_rev:
@@ -921,7 +907,14 @@
'after': new_rev,
'repository': {
'full_name': project
- }
+ },
+ 'commits': [
+ {
+ 'added': added_files,
+ 'removed': removed_files,
+ 'modified': modified_files
+ }
+ ]
}
return (name, data)
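
For reference, the (name, data) tuple the extended getPushEvent() now returns carries a commits list describing which files the push touched. A standalone sketch of that payload shape; fake_push_payload is a hypothetical helper, not part of the fixture:

    def fake_push_payload(project, ref, old_rev, new_rev,
                          added_files=(), removed_files=(), modified_files=()):
        # Mirrors the fields built by the getPushEvent() fixture above.
        return ('push', {
            'ref': ref,
            'before': old_rev,
            'after': new_rev,
            'repository': {'full_name': project},
            'commits': [{
                'added': list(added_files),
                'removed': list(removed_files),
                'modified': list(modified_files),
            }],
        })

    name, data = fake_push_payload('org/project1', 'refs/heads/master',
                                   '0' * 40, '1' * 40,
                                   modified_files=['zuul.yaml'])
    assert data['commits'][0]['modified'] == ['zuul.yaml']
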
diff --git a/tests/fixtures/config/roles/git/bare-role/tasks/main.yaml b/tests/fixtures/config/roles/git/bare-role/tasks/main.yaml
new file mode 100644
index 0000000..75943b1
--- /dev/null
+++ b/tests/fixtures/config/roles/git/bare-role/tasks/main.yaml
@@ -0,0 +1,3 @@
+- file:
+ path: "{{zuul._test.test_root}}/{{zuul.uuid}}.bare-role.flag"
+ state: touch
diff --git a/tests/fixtures/config/roles/git/common-config/playbooks/common-config-test.yaml b/tests/fixtures/config/roles/git/common-config/playbooks/common-config-test.yaml
new file mode 100644
index 0000000..f679dce
--- /dev/null
+++ b/tests/fixtures/config/roles/git/common-config/playbooks/common-config-test.yaml
@@ -0,0 +1,2 @@
+- hosts: all
+ tasks: []
diff --git a/tests/fixtures/config/roles/git/common-config/zuul.yaml b/tests/fixtures/config/roles/git/common-config/zuul.yaml
new file mode 100644
index 0000000..1fdaf2e
--- /dev/null
+++ b/tests/fixtures/config/roles/git/common-config/zuul.yaml
@@ -0,0 +1,42 @@
+- pipeline:
+ name: check
+ manager: independent
+ trigger:
+ gerrit:
+ - event: patchset-created
+ success:
+ gerrit:
+ verified: 1
+ failure:
+ gerrit:
+ verified: -1
+
+- pipeline:
+ name: tenant-one-gate
+ manager: dependent
+ success-message: Build succeeded (tenant-one-gate).
+ trigger:
+ gerrit:
+ - event: comment-added
+ approval:
+ - approved: 1
+ success:
+ gerrit:
+ verified: 2
+ submit: true
+ failure:
+ gerrit:
+ verified: -2
+ start:
+ gerrit:
+ verified: 0
+ precedence: high
+
+- job:
+ name: common-config-test
+
+- project:
+ name: common-config
+ tenant-one-gate:
+ jobs:
+ - common-config-test
diff --git a/tests/fixtures/config/roles/git/org_project/.zuul.yaml b/tests/fixtures/config/roles/git/org_project/.zuul.yaml
new file mode 100644
index 0000000..35c2153
--- /dev/null
+++ b/tests/fixtures/config/roles/git/org_project/.zuul.yaml
@@ -0,0 +1,10 @@
+- job:
+ name: project-test
+ roles:
+ - zuul: bare-role
+
+- project:
+ name: org/project
+ check:
+ jobs:
+ - project-test
diff --git a/tests/fixtures/config/roles/git/org_project/README b/tests/fixtures/config/roles/git/org_project/README
new file mode 100644
index 0000000..9daeafb
--- /dev/null
+++ b/tests/fixtures/config/roles/git/org_project/README
@@ -0,0 +1 @@
+test
diff --git a/tests/fixtures/config/roles/git/org_project/playbooks/project-test.yaml b/tests/fixtures/config/roles/git/org_project/playbooks/project-test.yaml
new file mode 100644
index 0000000..f679dce
--- /dev/null
+++ b/tests/fixtures/config/roles/git/org_project/playbooks/project-test.yaml
@@ -0,0 +1,2 @@
+- hosts: all
+ tasks: []
diff --git a/tests/fixtures/config/roles/main.yaml b/tests/fixtures/config/roles/main.yaml
new file mode 100644
index 0000000..9ccece9
--- /dev/null
+++ b/tests/fixtures/config/roles/main.yaml
@@ -0,0 +1,9 @@
+- tenant:
+ name: tenant-one
+ source:
+ gerrit:
+ config-projects:
+ - common-config
+ untrusted-projects:
+ - org/project
+ - bare-role
diff --git a/tests/fixtures/layout-merge-failure.yaml b/tests/fixtures/layout-merge-failure.yaml
deleted file mode 100644
index 72bc9c9..0000000
--- a/tests/fixtures/layout-merge-failure.yaml
+++ /dev/null
@@ -1,56 +0,0 @@
-pipelines:
- - name: check
- manager: IndependentPipelineManager
- trigger:
- gerrit:
- - event: patchset-created
- success:
- gerrit:
- verified: 1
- failure:
- gerrit:
- verified: -1
-
- - name: post
- manager: IndependentPipelineManager
- trigger:
- gerrit:
- - event: ref-updated
- ref: ^(?!refs/).*$
-
- - name: gate
- manager: DependentPipelineManager
- failure-message: Build failed. For information on how to proceed, see http://wiki.example.org/Test_Failures
- merge-failure-message: "The merge failed! For more information..."
- trigger:
- gerrit:
- - event: comment-added
- approval:
- - approved: 1
- success:
- gerrit:
- verified: 2
- submit: true
- failure:
- gerrit:
- verified: -2
- merge-failure:
- gerrit:
- verified: -1
- smtp:
- to: you@example.com
- start:
- gerrit:
- verified: 0
- precedence: high
-
-projects:
- - name: org/project
- check:
- - project-merge:
- - project-test1
- - project-test2
- gate:
- - project-merge:
- - project-test1
- - project-test2
diff --git a/tests/fixtures/layouts/merge-failure.yaml b/tests/fixtures/layouts/merge-failure.yaml
new file mode 100644
index 0000000..228963f
--- /dev/null
+++ b/tests/fixtures/layouts/merge-failure.yaml
@@ -0,0 +1,74 @@
+- pipeline:
+ name: check
+ manager: independent
+ trigger:
+ gerrit:
+ - event: patchset-created
+ success:
+ gerrit:
+ verified: 1
+ failure:
+ gerrit:
+ verified: -1
+
+- pipeline:
+ name: post
+ manager: independent
+ trigger:
+ gerrit:
+ - event: ref-updated
+ ref: ^(?!refs/).*$
+
+- pipeline:
+ name: gate
+ manager: dependent
+ failure-message: Build failed. For information on how to proceed, see http://wiki.example.org/Test_Failures
+ merge-failure-message: "The merge failed! For more information..."
+ trigger:
+ gerrit:
+ - event: comment-added
+ approval:
+ - approved: 1
+ success:
+ gerrit:
+ verified: 2
+ submit: true
+ failure:
+ gerrit:
+ verified: -2
+ merge-failure:
+ gerrit:
+ verified: -1
+ smtp:
+ to: you@example.com
+ start:
+ gerrit:
+ verified: 0
+ precedence: high
+
+- job:
+ name: project-merge
+ hold-following-changes: true
+
+- job:
+ name: project-test1
+
+- job:
+ name: project-test2
+
+- project:
+ name: org/project
+ check:
+ jobs:
+ - project-merge
+ - project-test1:
+ dependencies: project-merge
+ - project-test2:
+ dependencies: project-merge
+ gate:
+ jobs:
+ - project-merge
+ - project-test1:
+ dependencies: project-merge
+ - project-test2:
+ dependencies: project-merge
diff --git a/tests/fixtures/layouts/reporting-github.yaml b/tests/fixtures/layouts/reporting-github.yaml
index 8dd35b0..d054df7 100644
--- a/tests/fixtures/layouts/reporting-github.yaml
+++ b/tests/fixtures/layouts/reporting-github.yaml
@@ -29,7 +29,7 @@
github:
comment: false
status: 'success'
- status-url: http://logs.example.com/{pipeline.name}/{change.project}/{change.number}/{change.patchset}/
+ status-url: http://logs.example.com/{tenant.name}/{pipeline.name}/{change.project}/{change.number}/{buildset.uuid}/
failure:
github:
comment: false
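
The status-url template now includes {tenant.name} and {buildset.uuid}. A minimal sketch of how such a pattern expands, using SimpleNamespace stand-ins for the safe-attribute objects; the real code builds them with getSafeAttributes(), added to zuul/model.py further down:

    from types import SimpleNamespace
    from uuid import uuid4

    # Stand-ins for the objects handed to str.format(); values are
    # illustrative only.
    tenant = SimpleNamespace(name='tenant-one')
    pipeline = SimpleNamespace(name='reporting')
    change = SimpleNamespace(project='org/project1', number=1)
    buildset = SimpleNamespace(uuid=uuid4().hex)

    pattern = ('http://logs.example.com/{tenant.name}/{pipeline.name}/'
               '{change.project}/{change.number}/{buildset.uuid}/')
    url = pattern.format(tenant=tenant, pipeline=pipeline,
                         change=change, buildset=buildset)
    assert url.startswith('http://logs.example.com/tenant-one/reporting/')
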
diff --git a/tests/unit/test_github_driver.py b/tests/unit/test_github_driver.py
index 6cc010e..4979087 100644
--- a/tests/unit/test_github_driver.py
+++ b/tests/unit/test_github_driver.py
@@ -13,7 +13,7 @@
# under the License.
import re
-from testtools.matchers import MatchesRegex
+from testtools.matchers import MatchesRegex, StartsWith
import time
from tests.base import ZuulTestCase, simple_layout, random_sha1
@@ -300,9 +300,20 @@
self.assertEqual('tenant-one/reporting', report_status['context'])
self.assertEqual('success', report_status['state'])
self.assertEqual(2, len(A.comments))
- report_url = ('http://logs.example.com/reporting/%s/%s/%s/' %
- (A.project, A.number, A.head_sha))
- self.assertEqual(report_url, report_status['url'])
+
+ base = 'http://logs.example.com/tenant-one/reporting/%s/%s/' % (
+ A.project, A.number)
+
+ # Deconstructing the URL because we don't save the BuildSet UUID
+ # anywhere to do a direct comparison and doing regexp matches on a full
+ # URL is painful.
+
+ # The first part of the URL matches the easy base string
+ self.assertThat(report_status['url'], StartsWith(base))
+
+ # The rest of the URL is a UUID and a trailing slash.
+ self.assertThat(report_status['url'][len(base):],
+                        MatchesRegex('^[a-fA-F0-9]{32}/$'))
@simple_layout('layouts/merging-github.yaml', driver='github')
def test_report_pull_merge(self):
@@ -513,3 +524,18 @@
self.assertNotIn('merge', A.labels)
self.assertNotIn('merge', B.labels)
self.assertNotIn('merge', C.labels)
+
+ @simple_layout('layouts/basic-github.yaml', driver='github')
+ def test_push_event_reconfigure(self):
+ pevent = self.fake_github.getPushEvent(project='common-config',
+ ref='refs/heads/master',
+ modified_files=['zuul.yaml'])
+
+ # record previous tenant reconfiguration time, which may not be set
+ old = self.sched.tenant_last_reconfigured.get('tenant-one', 0)
+ time.sleep(1)
+ self.fake_github.emitEvent(pevent)
+ self.waitUntilSettled()
+ new = self.sched.tenant_last_reconfigured.get('tenant-one', 0)
+ # New timestamp should be greater than the old timestamp
+ self.assertLess(old, new)
diff --git a/tests/unit/test_push_reqs.py b/tests/unit/test_push_reqs.py
index 657d9b8..d3a1feb 100644
--- a/tests/unit/test_push_reqs.py
+++ b/tests/unit/test_push_reqs.py
@@ -28,7 +28,10 @@
# Create a github change, add a change and emit a push event
A = self.fake_github.openFakePullRequest('org/project1', 'master', 'A')
old_sha = A.head_sha
- self.fake_github.emitEvent(A.getPushEvent(old_sha))
+ pevent = self.fake_github.getPushEvent(project='org/project1',
+ ref='refs/heads/master',
+ old_rev=old_sha)
+ self.fake_github.emitEvent(pevent)
self.waitUntilSettled()
diff --git a/tests/unit/test_scheduler.py b/tests/unit/test_scheduler.py
index 0ac42c1..de8246c 100755
--- a/tests/unit/test_scheduler.py
+++ b/tests/unit/test_scheduler.py
@@ -29,6 +29,7 @@
import testtools
import zuul.change_matcher
+from zuul.driver.gerrit import gerritreporter
import zuul.scheduler
import zuul.rpcclient
import zuul.model
@@ -3333,7 +3334,6 @@
self.assertEqual(queue.window_floor, 1)
self.assertEqual(C.data['status'], 'MERGED')
- @skip("Disabled for early v3 development")
def test_worker_update_metadata(self):
"Test if a worker can send back metadata about itself"
self.executor_server.hold_jobs_in_build = True
@@ -3343,17 +3343,17 @@
self.fake_gerrit.addEvent(A.addApproval('approved', 1))
self.waitUntilSettled()
- self.assertEqual(len(self.executor.builds), 1)
+ self.assertEqual(len(self.executor_client.builds), 1)
self.log.debug('Current builds:')
- self.log.debug(self.executor.builds)
+ self.log.debug(self.executor_client.builds)
start = time.time()
while True:
if time.time() - start > 10:
raise Exception("Timeout waiting for gearman server to report "
+ "back to the client")
- build = list(self.executor.builds.values())[0]
+ build = list(self.executor_client.builds.values())[0]
if build.worker.name == "My Worker":
break
else:
@@ -3361,12 +3361,6 @@
self.log.debug(build)
self.assertEqual("My Worker", build.worker.name)
- self.assertEqual("localhost", build.worker.hostname)
- self.assertEqual(['127.0.0.1', '192.168.1.1'], build.worker.ips)
- self.assertEqual("zuul.example.org", build.worker.fqdn)
- self.assertEqual("FakeBuilder", build.worker.program)
- self.assertEqual("v1.1", build.worker.version)
- self.assertEqual({'something': 'else'}, build.worker.extra)
self.executor_server.hold_jobs_in_build = False
self.executor_server.release()
@@ -3410,49 +3404,45 @@
self.assertEqual(0, len(A.messages))
- @skip("Disabled for early v3 development")
+ @simple_layout('layouts/merge-failure.yaml')
def test_merge_failure_reporters(self):
"""Check that the config is set up correctly"""
- self.updateConfigLayout(
- 'tests/fixtures/layout-merge-failure.yaml')
- self.sched.reconfigure(self.config)
- self.registerJobs()
-
+ tenant = self.sched.abide.tenants.get('tenant-one')
self.assertEqual(
"Merge Failed.\n\nThis change or one of its cross-repo "
"dependencies was unable to be automatically merged with the "
"current state of its repository. Please rebase the change and "
"upload a new patchset.",
- self.sched.layout.pipelines['check'].merge_failure_message)
+ tenant.layout.pipelines['check'].merge_failure_message)
self.assertEqual(
"The merge failed! For more information...",
- self.sched.layout.pipelines['gate'].merge_failure_message)
+ tenant.layout.pipelines['gate'].merge_failure_message)
self.assertEqual(
- len(self.sched.layout.pipelines['check'].merge_failure_actions), 1)
+ len(tenant.layout.pipelines['check'].merge_failure_actions), 1)
self.assertEqual(
- len(self.sched.layout.pipelines['gate'].merge_failure_actions), 2)
+ len(tenant.layout.pipelines['gate'].merge_failure_actions), 2)
self.assertTrue(isinstance(
- self.sched.layout.pipelines['check'].merge_failure_actions[0],
- zuul.reporter.gerrit.GerritReporter))
+ tenant.layout.pipelines['check'].merge_failure_actions[0],
+ gerritreporter.GerritReporter))
self.assertTrue(
(
- isinstance(self.sched.layout.pipelines['gate'].
+ isinstance(tenant.layout.pipelines['gate'].
merge_failure_actions[0],
- zuul.reporter.smtp.SMTPReporter) and
- isinstance(self.sched.layout.pipelines['gate'].
+ zuul.driver.smtp.smtpreporter.SMTPReporter) and
+ isinstance(tenant.layout.pipelines['gate'].
merge_failure_actions[1],
- zuul.reporter.gerrit.GerritReporter)
+ gerritreporter.GerritReporter)
) or (
- isinstance(self.sched.layout.pipelines['gate'].
+ isinstance(tenant.layout.pipelines['gate'].
merge_failure_actions[0],
- zuul.reporter.gerrit.GerritReporter) and
- isinstance(self.sched.layout.pipelines['gate'].
+ gerritreporter.GerritReporter) and
+ isinstance(tenant.layout.pipelines['gate'].
merge_failure_actions[1],
- zuul.reporter.smtp.SMTPReporter)
+ zuul.driver.smtp.smtpreporter.SMTPReporter)
)
)
@@ -3563,7 +3553,7 @@
self.assertEqual(False, job['retry'])
self.assertEqual('https://server/job/project-merge/0/',
job['url'])
- self.assertEqual(7, len(job['worker']))
+ self.assertEqual(2, len(job['worker']))
self.assertEqual(False, job['canceled'])
self.assertEqual(True, job['voting'])
self.assertIsNone(job['result'])
@@ -3868,9 +3858,12 @@
self.assertEqual(B.reported, 0)
self.assertEqual(len(self.history), 0)
- # Simulate change B being gated outside this layout
- self.fake_gerrit.addEvent(B.addApproval('approved', 1))
+        # Simulate change B being gated outside this layout. Set the
+ # change merged before submitting the event so that when the
+ # event triggers a gerrit query to update the change, we get
+ # the information that it was merged.
B.setMerged()
+ self.fake_gerrit.addEvent(B.addApproval('approved', 1))
self.waitUntilSettled()
self.assertEqual(len(self.history), 0)
diff --git a/tests/unit/test_v3.py b/tests/unit/test_v3.py
index 707515a..3854804 100644
--- a/tests/unit/test_v3.py
+++ b/tests/unit/test_v3.py
@@ -456,3 +456,21 @@
# Make sure it's the right length
self.assertEqual(4096, private_key.key_size)
+
+
+class TestRoles(ZuulTestCase):
+ tenant_config_file = 'config/roles/main.yaml'
+
+ def test_role(self):
+ # This exercises a proposed change to a role being checked out
+ # and used.
+ A = self.fake_gerrit.addFakeChange('bare-role', 'master', 'A')
+        B = self.fake_gerrit.addFakeChange('org/project', 'master', 'B')
+ B.data['commitMessage'] = '%s\n\nDepends-On: %s\n' % (
+ B.subject, A.data['id'])
+ self.fake_gerrit.addEvent(A.getPatchsetCreatedEvent(1))
+ self.fake_gerrit.addEvent(B.getPatchsetCreatedEvent(1))
+ self.waitUntilSettled()
+ self.assertHistory([
+ dict(name='project-test', result='SUCCESS', changes='1,1 2,1'),
+ ])
diff --git a/zuul/ansible/callback/zuul_stream.py b/zuul/ansible/callback/zuul_stream.py
index 904316c..8882c33 100644
--- a/zuul/ansible/callback/zuul_stream.py
+++ b/zuul/ansible/callback/zuul_stream.py
@@ -13,10 +13,15 @@
# You should have received a copy of the GNU General Public License
# along with Ansible. If not, see <http://www.gnu.org/licenses/>.
-import os
+from __future__ import absolute_import
+
+import datetime
import multiprocessing
+import logging
+import os
import socket
import time
+import uuid
from ansible.plugins.callback import default
@@ -59,7 +64,7 @@
of cmd it'll echo what the command was for folks.
"""
- for key in ('changed', 'cmd',
+ for key in ('changed', 'cmd', 'zuul_log_id',
'stderr', 'stderr_lines',
'stdout', 'stdout_lines'):
result.pop(key, None)
@@ -82,25 +87,56 @@
super(CallbackModule, self).__init__()
self._task = None
self._daemon_running = False
- self._daemon_stamp = 'daemon-stamp-%s'
self._host_dict = {}
+ self._play = None
+ self._streamer = None
+ self.configure_logger()
- def _read_log(self, host, ip):
- self._display.display("[%s] starting to log" % host)
+ def configure_logger(self):
+ # ansible appends timestamp, user and pid to the log lines emitted
+ # to the log file. We're doing other things though, so we don't want
+ # this.
+ path = os.environ['ZUUL_JOB_OUTPUT_FILE']
+ if self._display.verbosity > 2:
+ level = logging.DEBUG
+ else:
+ level = logging.INFO
+ logging.basicConfig(filename=path, level=level, format='%(message)s')
+ self._log = logging.getLogger('zuul.executor.ansible')
+
+ def _read_log(self, host, ip, log_id):
+ self._log.debug("[%s] Starting to log" % host)
s = socket.socket(socket.AF_INET, socket.SOCK_STREAM)
while True:
try:
s.connect((ip, LOG_STREAM_PORT))
except Exception:
- self._display.display("[%s] Waiting on logger" % host)
+ self._log.debug("[%s] Waiting on logger" % host)
time.sleep(0.1)
continue
+ s.send(log_id + '\n')
for line in linesplit(s):
- self._display.display("[%s] %s " % (host, line.strip()))
+ if "[Zuul] Task exit code" in line:
+ return
+ else:
+ ts, ln = line.strip().split(' | ', 1)
+
+ self._log.info("%s | %s | %s " % (ts, host, ln))
+
+ def v2_playbook_on_start(self, playbook):
+ self._playbook_name = os.path.splitext(playbook._file_name)[0]
def v2_playbook_on_play_start(self, play):
self._play = play
- super(CallbackModule, self).v2_playbook_on_play_start(play)
+ name = play.get_name().strip()
+ now = datetime.datetime.now()
+ if not name:
+ msg = u"{now} | PLAY".format(now=now)
+ else:
+ msg = u"{now} | PLAY [{playbook} : {name}]".format(
+ playbook=self._playbook_name, now=now, name=name)
+
+ self._log.info(msg)
def v2_playbook_on_task_start(self, task, is_conditional):
self._task = task
@@ -108,6 +144,8 @@
if self._play.strategy != 'free':
self._print_task_banner(task)
if task.action == 'command':
+ log_id = uuid.uuid4().hex
+ task.args['zuul_log_id'] = log_id
play_vars = self._play._variable_manager._hostvars
hosts = self._play.hosts
@@ -119,47 +157,109 @@
hosts = play_vars.keys()
for host in hosts:
- ip = play_vars[host]['ansible_host']
- daemon_stamp = self._daemon_stamp % host
- if not os.path.exists(daemon_stamp):
- self._host_dict[host] = ip
- # Touch stamp file
- open(daemon_stamp, 'w').close()
- p = multiprocessing.Process(
- target=self._read_log, args=(host, ip))
- p.daemon = True
- p.start()
+ ip = play_vars[host].get(
+ 'ansible_host', play_vars[host].get(
+ 'ansible_inventory_host'))
+ self._host_dict[host] = ip
+ self._streamer = multiprocessing.Process(
+ target=self._read_log, args=(host, ip, log_id))
+ self._streamer.daemon = True
+ self._streamer.start()
def v2_runner_on_failed(self, result, ignore_errors=False):
+ if self._streamer:
+ self._streamer.join()
if result._task.action in ('command', 'shell'):
zuul_filter_result(result._result)
- super(CallbackModule, self).v2_runner_on_failed(
- result, ignore_errors=ignore_errors)
-
- def v2_runner_on_ok(self, result):
- if result._task.action in ('command', 'shell'):
- zuul_filter_result(result._result)
- else:
- return super(CallbackModule, self).v2_runner_on_ok(result)
-
- if self._play.strategy == 'free':
- return super(CallbackModule, self).v2_runner_on_ok(result)
-
- delegated_vars = result._result.get('_ansible_delegated_vars', None)
-
- if delegated_vars:
- msg = "ok: [{host} -> {delegated_host} %s]".format(
- host=result._host.get_name(),
- delegated_host=delegated_vars['ansible_host'])
- else:
- msg = "ok: [{host}]".format(host=result._host.get_name())
+ self._handle_exception(result._result)
if result._task.loop and 'results' in result._result:
self._process_items(result)
else:
- msg += " Runtime: {delta} Start: {start} End: {end}".format(
- **result._result)
+ self._log_message(
+ result=result,
+ msg="Results: => {results}".format(
+ results=self._dump_results(result._result)),
+ status='ERROR')
+ if ignore_errors:
+ self._log_message(result, "Ignoring Errors", status="ERROR")
+
+ def v2_runner_on_ok(self, result):
+ if (self._play.strategy == 'free'
+ and self._last_task_banner != result._task._uuid):
+ self._print_task_banner(result._task)
+
+ self._clean_results(result._result, result._task.action)
+
+ if result._task.action in ('include', 'include_role'):
+ return
+
+ if self._streamer:
+ self._streamer.join()
+
+ if result._result.get('changed', False):
+ status = 'changed'
+ else:
+ status = 'ok'
+
+ if result._task.action in ('command', 'shell'):
+ zuul_filter_result(result._result)
+
self._handle_warnings(result._result)
- self._display.display(msg)
+ if result._task.loop and 'results' in result._result:
+ self._process_items(result)
+ elif result._task.action not in ('command', 'shell'):
+ self._log_message(
+ result=result,
+ msg="Results: => {results}".format(
+ results=self._dump_results(result._result)),
+ status=status)
+ else:
+ self._log_message(
+ result,
+ "Runtime: {delta} Start: {start} End: {end}".format(
+ **result._result))
+
+ def _print_task_banner(self, task):
+
+ task_name = task.get_name().strip()
+
+ args = ''
+ task_args = task.args.copy()
+ is_shell = task_args.pop('_uses_shell', False)
+ if is_shell and task_name == 'command':
+ task_name = 'shell'
+ raw_params = task_args.pop('_raw_params', '').split('\n')
+ # If there's just a single line, go ahead and print it
+ if len(raw_params) == 1 and task_name in ('shell', 'command'):
+ task_name = '{name}: {command}'.format(
+ name=task_name, command=raw_params[0])
+
+ if not task.no_log and task_args:
+ args = u', '.join(u'%s=%s' % a for a in task_args.items())
+ args = u' %s' % args
+
+ msg = "{now} | TASK [{task}{args}]".format(
+ now=datetime.datetime.now(),
+ task=task_name,
+ args=args)
+ self._log.info(msg)
+
+ def _log_message(self, result, msg, status="ok"):
+ now = datetime.datetime.now()
+ hostname = self._get_hostname(result)
+ self._log.info("{now} | {host} | {status}: {msg}".format(
+ host=hostname, now=now, status=status, msg=msg))
+
+ def _get_hostname(self, result):
+ delegated_vars = result._result.get('_ansible_delegated_vars', None)
+ if delegated_vars:
+ return "{host} -> {delegated_host}".format(
+ host=result._host.get_name(),
+ delegated_host=delegated_vars['ansible_host'])
+ else:
+ return result._host.get_name()
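
The callback now writes message-only lines of the form "<timestamp> | <host> | <text>" into the file named by ZUUL_JOB_OUTPUT_FILE. A standalone sketch of that convention; the fallback file name and host are illustrative only:

    import datetime
    import logging
    import os

    # A message-only logger like the one configure_logger() sets up above.
    logging.basicConfig(
        filename=os.environ.get('ZUUL_JOB_OUTPUT_FILE', 'job-output.txt'),
        level=logging.INFO, format='%(message)s')
    log = logging.getLogger('zuul.executor.ansible')

    log.info("{now} | {host} | {status}: {msg}".format(
        now=datetime.datetime.now(), host='node1', status='ok',
        msg='Runtime: 0:00:01.000000'))
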
diff --git a/zuul/ansible/library/command.py b/zuul/ansible/library/command.py
index 52de5a4..4b3a30f 100644
--- a/zuul/ansible/library/command.py
+++ b/zuul/ansible/library/command.py
@@ -121,15 +121,18 @@
from ast import literal_eval
-LOG_STREAM_FILE = '/tmp/console.log'
+LOG_STREAM_FILE = '/tmp/console-{log_uuid}.log'
PASSWD_ARG_RE = re.compile(r'^[-]{0,2}pass[-]?(word|wd)?')
# List to save stdout log lines in as we collect them
_log_lines = []
class Console(object):
+ def __init__(self, log_uuid):
+ self.logfile_name = LOG_STREAM_FILE.format(log_uuid=log_uuid)
+
def __enter__(self):
- self.logfile = open(LOG_STREAM_FILE, 'a', 0)
+ self.logfile = open(self.logfile_name, 'a', 0)
return self
def __exit__(self, etype, value, tb):
@@ -145,9 +148,9 @@
self.logfile.write(outln)
-def follow(fd):
+def follow(fd, log_uuid):
newline_warning = False
- with Console() as console:
+ with Console(log_uuid) as console:
while True:
line = fd.readline()
if not line:
@@ -163,7 +166,7 @@
# Taken from ansible/module_utils/basic.py ... forking the method for now
# so that we can dive in and figure out how to make appropriate hook points
-def zuul_run_command(self, args, check_rc=False, close_fds=True, executable=None, data=None, binary_data=False, path_prefix=None, cwd=None, use_unsafe_shell=False, prompt_regex=None, environ_update=None):
+def zuul_run_command(self, args, zuul_log_id, check_rc=False, close_fds=True, executable=None, data=None, binary_data=False, path_prefix=None, cwd=None, use_unsafe_shell=False, prompt_regex=None, environ_update=None):
'''
Execute a command, returns rc, stdout, and stderr.
@@ -312,7 +315,7 @@
self.log('Executing: ' + running)
    # ZUUL: Replaced the execution loop with the zuul_runner run function
cmd = subprocess.Popen(args, **kwargs)
- t = threading.Thread(target=follow, args=(cmd.stdout,))
+ t = threading.Thread(target=follow, args=(cmd.stdout, zuul_log_id))
t.daemon = True
t.start()
ret = cmd.wait()
@@ -321,7 +324,7 @@
    # likely stuck in readline() because it spawned a child that is
# holding stdout or stderr open.
t.join(10)
- with Console() as console:
+ with Console(zuul_log_id) as console:
if t.isAlive():
console.addLine("[Zuul] standard output/error still open "
"after child exited")
@@ -397,6 +400,7 @@
removes = dict(type='path'),
warn = dict(type='bool', default=True),
environ = dict(type='dict', default=None),
+ zuul_log_id = dict(type='str'),
)
)
@@ -408,6 +412,7 @@
removes = module.params['removes']
warn = module.params['warn']
environ = module.params['environ']
+ zuul_log_id = module.params['zuul_log_id']
if args.strip() == '':
module.fail_json(rc=256, msg="no command given")
@@ -448,7 +453,7 @@
args = shlex.split(args)
startd = datetime.datetime.now()
- rc, out, err = zuul_run_command(module, args, executable=executable, use_unsafe_shell=shell, environ_update=environ)
+ rc, out, err = zuul_run_command(module, args, zuul_log_id, executable=executable, use_unsafe_shell=shell, environ_update=environ)
endd = datetime.datetime.now()
delta = endd - startd
@@ -467,7 +472,8 @@
end = str(endd),
delta = str(delta),
changed = True,
- warnings = warnings
+ warnings = warnings,
+ zuul_log_id = zuul_log_id
)
if __name__ == '__main__':
diff --git a/zuul/ansible/library/zuul_console.py b/zuul/ansible/library/zuul_console.py
index b1dc2d9..7f8a1b6 100644
--- a/zuul/ansible/library/zuul_console.py
+++ b/zuul/ansible/library/zuul_console.py
@@ -22,7 +22,7 @@
import threading
import time
-LOG_STREAM_FILE = '/tmp/console.log'
+LOG_STREAM_FILE = '/tmp/console-{log_uuid}.log'
LOG_STREAM_PORT = 19885
@@ -63,6 +63,10 @@
class Server(object):
+
+ MAX_REQUEST_LEN = 1024
+ REQUEST_TIMEOUT = 10
+
def __init__(self, path, port):
self.path = path
@@ -85,9 +89,9 @@
t.daemon = True
t.start()
- def chunkConsole(self, conn):
+ def chunkConsole(self, conn, log_uuid):
try:
- console = Console(self.path)
+ console = Console(self.path.format(log_uuid=log_uuid))
except Exception:
return
while True:
@@ -132,7 +136,40 @@
return True
console.size = st.st_size
+ def get_command(self, conn):
+ poll = select.poll()
+ bitmask = (select.POLLIN | select.POLLERR |
+ select.POLLHUP | select.POLLNVAL)
+ poll.register(conn, bitmask)
+ buffer = b''
+ ret = None
+ start = time.time()
+ while True:
+ elapsed = time.time() - start
+ timeout = max(self.REQUEST_TIMEOUT - elapsed, 0)
+ if not timeout:
+ raise Exception("Timeout while waiting for input")
+ for fd, event in poll.poll(timeout):
+ if event & select.POLLIN:
+ buffer += conn.recv(self.MAX_REQUEST_LEN)
+ else:
+ raise Exception("Received error event")
+ if len(buffer) >= self.MAX_REQUEST_LEN:
+ raise Exception("Request too long")
+ try:
+ ret = buffer.decode('utf-8')
+ x = ret.find('\n')
+ if x > 0:
+ return ret[:x]
+ except UnicodeDecodeError:
+ pass
+
def handleOneConnection(self, conn):
+ log_uuid = self.get_command(conn)
+        # use path split to make sure the input isn't trying to be clever
+ # and construct some path like /tmp/console-/../../something
+ log_uuid = os.path.split(log_uuid.rstrip())[-1]
+
# FIXME: this won't notice disconnects until it tries to send
console = None
try:
@@ -143,7 +180,7 @@
except:
pass
while True:
- console = self.chunkConsole(conn)
+ console = self.chunkConsole(conn, log_uuid)
if console:
break
time.sleep(0.5)
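
Taken together, command.py writes each task's output to /tmp/console-<uuid>.log and zuul_console.py streams that file to any client that sends the log UUID plus a newline on port 19885. A standalone sketch of the client half of the handshake, mirroring _read_log() in the callback above; ip, log_id and handle_line are placeholders:

    import socket

    LOG_STREAM_PORT = 19885

    def stream_task_console(ip, log_id, handle_line):
        # Send the per-task log id, then relay streamed lines until the
        # console daemon reports that the task exited.
        s = socket.socket(socket.AF_INET, socket.SOCK_STREAM)
        s.connect((ip, LOG_STREAM_PORT))
        # The callback above sends a plain str (Python 2); encode here for
        # Python 3 compatibility.
        s.sendall((log_id + '\n').encode('utf-8'))
        buff = b''
        while True:
            chunk = s.recv(4096)
            if not chunk:
                return
            buff += chunk
            while b'\n' in buff:
                line, buff = buff.split(b'\n', 1)
                text = line.decode('utf-8', 'replace')
                if '[Zuul] Task exit code' in text:
                    return
                handle_line(text)
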
diff --git a/zuul/cmd/client.py b/zuul/cmd/client.py
index 487096f..3f67a38 100644
--- a/zuul/cmd/client.py
+++ b/zuul/cmd/client.py
@@ -284,22 +284,6 @@
'worker.hostname': {
'title': 'Worker Hostname'
},
- 'worker.ips': {
- 'title': 'Worker IPs',
- 'transform': self._format_list
- },
- 'worker.fqdn': {
- 'title': 'Worker Domain'
- },
- 'worker.program': {
- 'title': 'Worker Program'
- },
- 'worker.version': {
- 'title': 'Worker Version'
- },
- 'worker.extra': {
- 'title': 'Worker Extra'
- },
}
diff --git a/zuul/cmd/executor.py b/zuul/cmd/executor.py
index 931639f..bd14b74 100755
--- a/zuul/cmd/executor.py
+++ b/zuul/cmd/executor.py
@@ -121,6 +121,10 @@
if self.config.has_option('zuul', 'jobroot_dir'):
self.jobroot_dir = os.path.expanduser(
self.config.get('zuul', 'jobroot_dir'))
+ if not os.path.isdir(self.jobroot_dir):
+ print("Invalid jobroot_dir: {jobroot_dir}".format(
+ jobroot_dir=self.jobroot_dir))
+ sys.exit(1)
else:
self.jobroot_dir = tempfile.gettempdir()
diff --git a/zuul/configloader.py b/zuul/configloader.py
index b1b1c82..688bd2b 100644
--- a/zuul/configloader.py
+++ b/zuul/configloader.py
@@ -820,12 +820,12 @@
for source_name, require_config in conf.get('require', {}).items():
source = connections.getSource(source_name)
- manager.changeish_filters.extend(
+ manager.ref_filters.extend(
source.getRequireFilters(require_config))
for source_name, reject_config in conf.get('reject', {}).items():
source = connections.getSource(source_name)
- manager.changeish_filters.extend(
+ manager.ref_filters.extend(
source.getRejectFilters(reject_config))
for trigger_name, trigger_config in conf.get('trigger').items():
diff --git a/zuul/driver/gerrit/gerritconnection.py b/zuul/driver/gerrit/gerritconnection.py
index a5e1f22..fa43e66 100644
--- a/zuul/driver/gerrit/gerritconnection.py
+++ b/zuul/driver/gerrit/gerritconnection.py
@@ -75,6 +75,14 @@
time.sleep(max((ts + self.delay) - now, 0.0))
event = GerritTriggerEvent()
event.type = data.get('type')
+ # This catches when a change is merged, as it could potentially
+ # have merged layout info which will need to be read in.
+ # Ideally this would be done with a refupdate event so as to catch
+ # directly pushed things as well as full changes being merged.
+ # But we do not yet get files changed data for pure refupdate events.
+ # TODO(jlk): handle refupdated events instead of just changes
+ if event.type == 'change-merged':
+ event.branch_updated = True
event.trigger_name = 'gerrit'
change = data.get('change')
event.project_hostname = self.connection.canonical_hostname
diff --git a/zuul/driver/github/githubconnection.py b/zuul/driver/github/githubconnection.py
index 6a3c09e..659d88b 100644
--- a/zuul/driver/github/githubconnection.py
+++ b/zuul/driver/github/githubconnection.py
@@ -129,10 +129,12 @@
event.trigger_name = 'github'
event.project_name = base_repo.get('full_name')
event.type = 'push'
+ event.branch_updated = True
event.ref = body.get('ref')
event.oldrev = body.get('before')
event.newrev = body.get('after')
+ event.commits = body.get('commits')
ref_parts = event.ref.split('/') # ie, ['refs', 'heads', 'master']
@@ -490,6 +492,7 @@
change.newrev = event.newrev
change.url = self.getGitwebUrl(project, sha=event.newrev)
change.source_event = event
+ change.files = self.getPushedFileNames(event)
else:
change = Ref(project)
return change
@@ -728,6 +731,13 @@
pr = self.getPull(project, number)
return pr.get('head').get('sha') == sha
+ def getPushedFileNames(self, event):
+ files = set()
+ for c in event.commits:
+ for f in c.get('added') + c.get('modified') + c.get('removed'):
+ files.add(f)
+ return list(files)
+
def _ghTimestampToDate(self, timestamp):
return time.strptime(timestamp, '%Y-%m-%dT%H:%M:%SZ')
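
getPushedFileNames() flattens the webhook's commits list into a de-duplicated file list, which becomes change.files on the Ref and feeds updatesConfig(). A standalone sketch of that flattening with illustrative file names:

    # Illustrative payload; real data comes from the push webhook body.
    commits = [
        {'added': ['playbooks/new.yaml'], 'removed': [],
         'modified': ['zuul.yaml']},
        {'added': [], 'removed': ['old.txt'], 'modified': ['zuul.yaml']},
    ]

    files = set()
    for c in commits:
        for f in c.get('added') + c.get('modified') + c.get('removed'):
            files.add(f)

    assert sorted(files) == ['old.txt', 'playbooks/new.yaml', 'zuul.yaml']
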
diff --git a/zuul/executor/client.py b/zuul/executor/client.py
index cf8d973..52074a1 100644
--- a/zuul/executor/client.py
+++ b/zuul/executor/client.py
@@ -193,9 +193,11 @@
canonical_name=item.change.project.canonical_name)
zuul_params = dict(uuid=uuid,
+ ref=item.current_build_set.ref,
pipeline=pipeline.name,
job=job.name,
project=project,
+ tenant=tenant.name,
tags=' '.join(sorted(job.tags)))
if hasattr(item.change, 'branch'):
diff --git a/zuul/executor/server.py b/zuul/executor/server.py
index 8d2d577..f44fd50 100644
--- a/zuul/executor/server.py
+++ b/zuul/executor/server.py
@@ -193,7 +193,7 @@
self.untrusted_config = os.path.join(
self.ansible_root, 'untrusted.cfg')
self.trusted_config = os.path.join(self.ansible_root, 'trusted.cfg')
- self.ansible_log = os.path.join(self.log_root, 'ansible_log.txt')
+ self.job_output_file = os.path.join(self.log_root, 'job-output.txt')
def addPrePlaybook(self):
count = len(self.pre_playbooks)
@@ -395,7 +395,7 @@
if self.config.has_option('executor', 'untrusted_wrapper'):
untrusted_wrapper_name = self.config.get(
- 'executor', 'untrusted_wrapper').split()
+ 'executor', 'untrusted_wrapper').strip()
else:
untrusted_wrapper_name = 'bubblewrap'
self.untrusted_wrapper = connections.drivers[untrusted_wrapper_name]
@@ -417,12 +417,13 @@
self.command_socket = commandsocket.CommandSocket(path)
ansible_dir = os.path.join(state_dir, 'ansible')
self.ansible_dir = ansible_dir
+ if os.path.exists(ansible_dir):
+ shutil.rmtree(ansible_dir)
zuul_dir = os.path.join(ansible_dir, 'zuul')
plugin_dir = os.path.join(zuul_dir, 'ansible')
- if not os.path.exists(plugin_dir):
- os.makedirs(plugin_dir)
+ os.makedirs(plugin_dir, mode=0o0755)
self.library_dir = os.path.join(plugin_dir, 'library')
self.action_dir = os.path.join(plugin_dir, 'action')
@@ -803,17 +804,6 @@
'worker_name': 'My Worker',
}
- # TODOv3:
- # 'name': self.name,
- # 'manager': self.executor_server.hostname,
- # 'worker_name': 'My Worker',
- # 'worker_hostname': 'localhost',
- # 'worker_ips': ['127.0.0.1', '192.168.1.1'],
- # 'worker_fqdn': 'zuul.example.org',
- # 'worker_program': 'FakeBuilder',
- # 'worker_version': 'v1.1',
- # 'worker_extra': {'something': 'else'}
-
self.job.sendWorkData(json.dumps(data))
self.job.sendWorkStatus(0, 100)
@@ -1183,7 +1173,6 @@
self.jobdir.root)
config.write('private_key_file = %s\n' % self.private_key_file)
config.write('retry_files_enabled = False\n')
- config.write('log_path = %s\n' % self.jobdir.ansible_log)
config.write('gathering = explicit\n')
config.write('library = %s\n'
% self.executor_server.library_dir)
@@ -1249,6 +1238,7 @@
env_copy = os.environ.copy()
env_copy.update(self.ssh_agent.env)
env_copy['LOGNAME'] = 'zuul'
+ env_copy['ZUUL_JOB_OUTPUT_FILE'] = self.jobdir.job_output_file
pythonpath = env_copy.get('PYTHONPATH')
if pythonpath:
pythonpath = [pythonpath]
diff --git a/zuul/lib/log_streamer.py b/zuul/lib/log_streamer.py
index de072b6..59d5240 100644
--- a/zuul/lib/log_streamer.py
+++ b/zuul/lib/log_streamer.py
@@ -95,7 +95,7 @@
return
# check if log file exists
- log_file = os.path.join(job_dir, 'ansible', 'ansible_log.txt')
+ log_file = os.path.join(job_dir, 'work', 'logs', 'job-output.txt')
if not os.path.exists(log_file):
msg = 'Log not found for build ID %s' % build_uuid
self.request.sendall(msg.encode("utf-8"))
diff --git a/zuul/manager/__init__.py b/zuul/manager/__init__.py
index 3728c73..c3958d7 100644
--- a/zuul/manager/__init__.py
+++ b/zuul/manager/__init__.py
@@ -47,7 +47,7 @@
self.sched = sched
self.pipeline = pipeline
self.event_filters = []
- self.changeish_filters = []
+ self.ref_filters = []
def __str__(self):
return "<%s %s>" % (self.__class__.__name__, self.pipeline.name)
@@ -55,7 +55,7 @@
def _postConfig(self, layout):
self.log.info("Configured Pipeline Manager %s" % self.pipeline.name)
self.log.info(" Requirements:")
- for f in self.changeish_filters:
+ for f in self.ref_filters:
self.log.info(" %s" % f)
self.log.info(" Events:")
for e in self.event_filters:
@@ -281,7 +281,7 @@
return False
if not ignore_requirements:
- for f in self.changeish_filters:
+ for f in self.ref_filters:
if f.connection_name != change.project.connection_name:
self.log.debug("Filter %s skipped for change %s due "
"to mismatched connections" % (f, change))
diff --git a/zuul/model.py b/zuul/model.py
index 3c24ec5..25f69d7 100644
--- a/zuul/model.py
+++ b/zuul/model.py
@@ -1111,21 +1111,11 @@
def __init__(self):
self.name = "Unknown"
self.hostname = None
- self.ips = []
- self.fqdn = None
- self.program = None
- self.version = None
- self.extra = {}
def updateFromData(self, data):
"""Update worker information if contained in the WORK_DATA response."""
self.name = data.get('worker_name', self.name)
self.hostname = data.get('worker_hostname', self.hostname)
- self.ips = data.get('worker_ips', self.ips)
- self.fqdn = data.get('worker_fqdn', self.fqdn)
- self.program = data.get('worker_program', self.program)
- self.version = data.get('worker_version', self.version)
- self.extra = data.get('worker_extra', self.extra)
def __repr__(self):
return '<Worker %s>' % self.name
@@ -1197,7 +1187,7 @@
self.result = None
self.next_build_set = None
self.previous_build_set = None
- self.ref = None
+ self.uuid = None
self.commit = None
self.zuul_url = None
self.dependent_items = None
@@ -1213,6 +1203,13 @@
self.layout = None
self.tries = {}
+ @property
+ def ref(self):
+        # NOTE(jamielennox): The concept of buildset ref is to be removed
+        # and a buildset UUID identifier made available instead. Currently
+        # the ref is checked to see if the BuildSet has been configured.
+ return 'Z' + self.uuid if self.uuid else None
+
def __repr__(self):
return '<BuildSet item: %s #builds: %s merge state: %s>' % (
self.item,
@@ -1230,8 +1227,8 @@
items.append(next_item)
next_item = next_item.item_ahead
self.dependent_items = items
- if not self.ref:
- self.ref = 'Z' + uuid4().hex
+ if not self.uuid:
+ self.uuid = uuid4().hex
if self.merger_items is None:
items = [self.item] + self.dependent_items
items.reverse()
@@ -1312,6 +1309,9 @@
return project_config.merge_mode
return MERGER_MERGE_RESOLVE
+ def getSafeAttributes(self):
+ return Attributes(uuid=self.uuid)
+
class QueueItem(object):
"""Represents the position of a Change in a ChangeQueue.
@@ -1325,7 +1325,7 @@
def __init__(self, queue, change):
self.pipeline = queue.pipeline
self.queue = queue
- self.change = change # a changeish
+ self.change = change # a ref
self.build_sets = []
self.dequeued_needing_change = False
self.current_build_set = BuildSet(self)
@@ -1591,11 +1591,15 @@
# secrets, etc.
safe_change = self.change.getSafeAttributes()
safe_pipeline = self.pipeline.getSafeAttributes()
+ safe_tenant = self.pipeline.layout.tenant.getSafeAttributes()
+ safe_buildset = self.current_build_set.getSafeAttributes()
safe_job = job.getSafeAttributes() if job else {}
safe_build = build.getSafeAttributes() if build else {}
try:
url = url_pattern.format(change=safe_change,
pipeline=safe_pipeline,
+ tenant=safe_tenant,
+ buildset=safe_buildset,
job=safe_job,
build=safe_build)
except KeyError as e:
@@ -1634,15 +1638,14 @@
return (result, url)
def formatJSON(self):
- changeish = self.change
ret = {}
ret['active'] = self.active
ret['live'] = self.live
- if hasattr(changeish, 'url') and changeish.url is not None:
- ret['url'] = changeish.url
+ if hasattr(self.change, 'url') and self.change.url is not None:
+ ret['url'] = self.change.url
else:
ret['url'] = None
- ret['id'] = changeish._id()
+ ret['id'] = self.change._id()
if self.item_ahead:
ret['item_ahead'] = self.item_ahead.change._id()
else:
@@ -1650,8 +1653,8 @@
ret['items_behind'] = [i.change._id() for i in self.items_behind]
ret['failing_reasons'] = self.current_build_set.failing_reasons
ret['zuul_ref'] = self.current_build_set.ref
- if changeish.project:
- ret['project'] = changeish.project.name
+ if self.change.project:
+ ret['project'] = self.change.project.name
else:
# For cross-project dependencies with the depends-on
# project not known to zuul, the project is None
@@ -1659,8 +1662,8 @@
ret['project'] = "Unknown Project"
ret['enqueue_time'] = int(self.enqueue_time * 1000)
ret['jobs'] = []
- if hasattr(changeish, 'owner'):
- ret['owner'] = changeish.owner
+ if hasattr(self.change, 'owner'):
+ ret['owner'] = self.change.owner
else:
ret['owner'] = None
max_remaining = 0
@@ -1691,11 +1694,6 @@
worker = {
'name': build.worker.name,
'hostname': build.worker.hostname,
- 'ips': build.worker.ips,
- 'fqdn': build.worker.fqdn,
- 'program': build.worker.program,
- 'version': build.worker.version,
- 'extra': build.worker.extra
}
if remaining and remaining > max_remaining:
max_remaining = remaining
@@ -1728,20 +1726,19 @@
return ret
def formatStatus(self, indent=0, html=False):
- changeish = self.change
indent_str = ' ' * indent
ret = ''
- if html and hasattr(changeish, 'url') and changeish.url is not None:
+ if html and getattr(self.change, 'url', None) is not None:
ret += '%sProject %s change <a href="%s">%s</a>\n' % (
indent_str,
- changeish.project.name,
- changeish.url,
- changeish._id())
+ self.change.project.name,
+ self.change.url,
+ self.change._id())
else:
ret += '%sProject %s change %s based on %s\n' % (
indent_str,
- changeish.project.name,
- changeish._id(),
+ self.change.project.name,
+ self.change._id(),
self.item_ahead)
for job in self.getJobs():
build = self.current_build_set.getBuild(job.name)
@@ -1812,6 +1809,8 @@
self.oldrev = None
self.newrev = None
+ self.files = []
+
def getBasePath(self):
base_path = ''
if hasattr(self, 'ref'):
@@ -1854,6 +1853,8 @@
return set()
def updatesConfig(self):
+ if 'zuul.yaml' in self.files or '.zuul.yaml' in self.files:
+ return True
return False
def getSafeAttributes(self):
@@ -1873,7 +1874,6 @@
self.patchset = None
self.refspec = None
- self.files = []
self.needs_changes = []
self.needed_by_changes = []
self.is_current_patchset = True
@@ -1921,11 +1921,6 @@
related.update(c.getRelatedChanges())
return related
- def updatesConfig(self):
- if 'zuul.yaml' in self.files or '.zuul.yaml' in self.files:
- return True
- return False
-
def getSafeAttributes(self):
return Attributes(project=self.project,
number=self.number,
@@ -1939,6 +1934,7 @@
self.data = None
# common
self.type = None
+ self.branch_updated = False
# For management events (eg: enqueue / promote)
self.tenant_name = None
self.project_hostname = None
@@ -2437,6 +2433,9 @@
self.untrusted_projects.append(project)
self._addProject(project)
+ def getSafeAttributes(self):
+ return Attributes(name=self.name)
+
class Abide(object):
def __init__(self):
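
BuildSet now stores a bare UUID and derives the legacy ref by prefixing it with 'Z'; getSafeAttributes() exposes only the UUID for URL formatting. A standalone sketch of that behaviour; BuildSetSketch is a stand-in, not the real class:

    from uuid import uuid4

    class BuildSetSketch(object):
        # Stand-in for the identifier behaviour added to zuul.model.BuildSet.
        def __init__(self):
            self.uuid = None

        @property
        def ref(self):
            # A None uuid means the build set has not been configured yet.
            return 'Z' + self.uuid if self.uuid else None

    bs = BuildSetSketch()
    assert bs.ref is None
    bs.uuid = uuid4().hex
    assert bs.ref == 'Z' + bs.uuid
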
diff --git a/zuul/scheduler.py b/zuul/scheduler.py
index 61f1e5f..a63d270 100644
--- a/zuul/scheduler.py
+++ b/zuul/scheduler.py
@@ -228,6 +228,7 @@
self.zuul_version = zuul_version.version_info.release_string()
self.last_reconfigured = None
+ self.tenant_last_reconfigured = {}
def stop(self):
self._stopped = True
@@ -590,6 +591,7 @@
trigger.postConfig(pipeline)
for reporter in pipeline.actions:
reporter.postConfig()
+ self.tenant_last_reconfigured[tenant.name] = int(time.time())
if self.statsd:
try:
for pipeline in tenant.layout.pipelines.values():
@@ -747,7 +749,7 @@
"source %s",
e.change, project.source)
continue
- if (event.type == 'change-merged' and
+ if (event.branch_updated and
hasattr(change, 'files') and
change.updatesConfig()):
# The change that just landed updates the config.
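
The net effect of the scheduler change: a branch-updating event whose change touched an in-repo config file now triggers a tenant reconfiguration. A standalone sketch of that check; RefSketch and EventSketch are stand-ins for the real model classes:

    class RefSketch(object):
        # Stand-in for zuul.model.Ref with the updatesConfig() check above.
        def __init__(self, files):
            self.files = files

        def updatesConfig(self):
            return 'zuul.yaml' in self.files or '.zuul.yaml' in self.files

    class EventSketch(object):
        # Stand-in for a trigger event carrying the new branch_updated flag.
        def __init__(self, branch_updated):
            self.branch_updated = branch_updated

    def needs_reconfigure(event, change):
        return bool(event.branch_updated and
                    hasattr(change, 'files') and
                    change.updatesConfig())

    assert needs_reconfigure(EventSketch(True), RefSketch(['zuul.yaml']))
    assert not needs_reconfigure(EventSketch(True), RefSketch(['README']))
    assert not needs_reconfigure(EventSketch(False), RefSketch(['zuul.yaml']))
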