Merge "Append ansible yaml parse errors to job log file" into feature/zuulv3
diff --git a/doc/source/executors.rst b/doc/source/executors.rst
index 5f85f58..f309a9c 100644
--- a/doc/source/executors.rst
+++ b/doc/source/executors.rst
@@ -273,26 +273,11 @@
metadata:
**worker_name** (optional)
- The name of the worker.
+ The unique name of the worker.
**worker_hostname** (optional)
The hostname of the worker.
-**worker_ips** (optional)
- A list of IPs for the worker.
-
-**worker_fqdn** (optional)
- The FQDN of the worker.
-
-**worker_program** (optional)
- The program name of the worker. For example Jenkins or turbo-hipster.
-
-**worker_version** (optional)
- The version of the software running the job.
-
-**worker_extra** (optional)
- A dictionary of any extra metadata you may want to pass along.
-
It should then immediately send a WORK_STATUS packet with a value of 0
percent complete. It may then optionally send subsequent WORK_STATUS
packets with updated completion values.
diff --git a/etc/zuul.conf-sample b/etc/zuul.conf-sample
index 1aee1fa..2909ea6 100644
--- a/etc/zuul.conf-sample
+++ b/etc/zuul.conf-sample
@@ -26,6 +26,8 @@
[executor]
default_username=zuul
+trusted_ro_dirs=/opt/zuul-scripts:/var/cache
+trusted_rw_dirs=/opt/zuul-logs
[webapp]
listen_address=0.0.0.0
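
The two new sample options above (trusted_ro_dirs / trusted_rw_dirs, with untrusted_* counterparts) take colon-separated lists of paths that the executor bind-mounts into the wrapped Ansible environment. A minimal sketch, mirroring the executor change further down in this diff, of how such a value becomes the lists handed to setMountsMap; the variable names here are illustrative only:

ro_dirs_opt = '/opt/zuul-scripts:/var/cache'   # e.g. trusted_ro_dirs from zuul.conf
rw_dirs_opt = '/opt/zuul-logs'                 # e.g. trusted_rw_dirs from zuul.conf

ro_dirs = ro_dirs_opt.split(':') if ro_dirs_opt else []
rw_dirs = rw_dirs_opt.split(':') if rw_dirs_opt else []
# ro_dirs == ['/opt/zuul-scripts', '/var/cache']; rw_dirs == ['/opt/zuul-logs']
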
diff --git a/requirements.txt b/requirements.txt
index 81f930e..5caa1b5 100644
--- a/requirements.txt
+++ b/requirements.txt
@@ -16,7 +16,7 @@
apscheduler>=3.0
PrettyTable>=0.6,<0.8
babel>=1.0
-ansible>=2.0.0.1
+ansible>=2.3.0.0
kazoo
sqlalchemy
alembic
diff --git a/tests/base.py b/tests/base.py
index d89469f..ff1f531 100755
--- a/tests/base.py
+++ b/tests/base.py
@@ -933,7 +933,8 @@
'full_name': pr.project
}
},
- 'files': pr.files
+ 'files': pr.files,
+ 'labels': pr.labels
}
return data
@@ -2021,6 +2022,8 @@
project = reponame.replace('_', '/')
self.copyDirToRepo(project,
os.path.join(git_path, reponame))
+ # Make test_root persist after ansible run for .flag test
+ self.config.set('executor', 'trusted_rw_dirs', self.test_root)
self.setupAllProjectKeys()
def setupSimpleLayout(self):
diff --git a/tests/fixtures/config/ansible/git/common-config/zuul.yaml b/tests/fixtures/config/ansible/git/common-config/zuul.yaml
index b31c148..fd3fc6d 100644
--- a/tests/fixtures/config/ansible/git/common-config/zuul.yaml
+++ b/tests/fixtures/config/ansible/git/common-config/zuul.yaml
@@ -51,8 +51,8 @@
- job:
name: python27
- pre-run: pre
- post-run: post
+ pre-run: playbooks/pre
+ post-run: playbooks/post
vars:
flagpath: '{{zuul._test.test_root}}/{{zuul.uuid}}.flag'
roles:
@@ -75,4 +75,4 @@
- job:
name: hello
- post-run: hello-post
+ post-run: playbooks/hello-post
diff --git a/tests/fixtures/config/streamer/git/common-config/playbooks/python27.yaml b/tests/fixtures/config/streamer/git/common-config/playbooks/python27.yaml
new file mode 100644
index 0000000..753e7e2
--- /dev/null
+++ b/tests/fixtures/config/streamer/git/common-config/playbooks/python27.yaml
@@ -0,0 +1,11 @@
+# NOTE(Shrews): Do not run any tasks that will need zuul_console to stream
+# output because that will not work. Since we just need any output in our
+# ansible log, the test coordination tasks should be sufficient.
+- hosts: all
+ tasks:
+ - debug: var=waitpath
+
+ # Do not finish until test creates the flag file
+ - wait_for:
+ state: present
+ path: "{{waitpath}}"
diff --git a/tests/fixtures/config/streamer/git/common-config/zuul.yaml b/tests/fixtures/config/streamer/git/common-config/zuul.yaml
new file mode 100644
index 0000000..d8df96a
--- /dev/null
+++ b/tests/fixtures/config/streamer/git/common-config/zuul.yaml
@@ -0,0 +1,17 @@
+- pipeline:
+ name: check
+ manager: independent
+ trigger:
+ gerrit:
+ - event: patchset-created
+ success:
+ gerrit:
+ verified: 1
+ failure:
+ gerrit:
+ verified: -1
+
+- job:
+ name: python27
+ vars:
+ waitpath: '{{zuul._test.test_root}}/{{zuul.uuid}}/test_wait'
diff --git a/tests/fixtures/config/streamer/git/org_project/.zuul.yaml b/tests/fixtures/config/streamer/git/org_project/.zuul.yaml
new file mode 100644
index 0000000..89a5674
--- /dev/null
+++ b/tests/fixtures/config/streamer/git/org_project/.zuul.yaml
@@ -0,0 +1,5 @@
+- project:
+ name: org/project
+ check:
+ jobs:
+ - python27
diff --git a/tests/fixtures/config/streamer/git/org_project/README b/tests/fixtures/config/streamer/git/org_project/README
new file mode 100644
index 0000000..9daeafb
--- /dev/null
+++ b/tests/fixtures/config/streamer/git/org_project/README
@@ -0,0 +1 @@
+test
diff --git a/tests/fixtures/config/streamer/main.yaml b/tests/fixtures/config/streamer/main.yaml
new file mode 100644
index 0000000..208e274
--- /dev/null
+++ b/tests/fixtures/config/streamer/main.yaml
@@ -0,0 +1,8 @@
+- tenant:
+ name: tenant-one
+ source:
+ gerrit:
+ config-projects:
+ - common-config
+ untrusted-projects:
+ - org/project
diff --git a/tests/fixtures/layouts/requirements-github.yaml b/tests/fixtures/layouts/requirements-github.yaml
index 9933f27..891a366 100644
--- a/tests/fixtures/layouts/requirements-github.yaml
+++ b/tests/fixtures/layouts/requirements-github.yaml
@@ -168,6 +168,21 @@
github:
comment: true
+- pipeline:
+ name: require_label
+ manager: independent
+ require:
+ github:
+ label: approved
+ trigger:
+ github:
+ - event: pull_request
+ action: comment
+ comment: 'test me'
+ success:
+ github:
+ comment: true
+
- job:
name: project1-pipeline
- job:
@@ -186,6 +201,8 @@
name: project8-requireopen
- job:
name: project9-requirecurrent
+- job:
+ name: project10-label
- project:
name: org/project1
@@ -243,3 +260,9 @@
require_current:
jobs:
- project9-requirecurrent
+
+- project:
+ name: org/project10
+ require_label:
+ jobs:
+ - project10-label
diff --git a/tests/unit/test_bubblewrap.py b/tests/unit/test_bubblewrap.py
index b274944..675221e 100644
--- a/tests/unit/test_bubblewrap.py
+++ b/tests/unit/test_bubblewrap.py
@@ -15,6 +15,7 @@
import subprocess
import tempfile
import testtools
+import os
from zuul.driver import bubblewrap
from zuul.executor.server import SshAgent
@@ -30,17 +31,14 @@
def test_bubblewrap_wraps(self):
bwrap = bubblewrap.BubblewrapDriver()
work_dir = tempfile.mkdtemp()
- ansible_dir = tempfile.mkdtemp()
ssh_agent = SshAgent()
self.addCleanup(ssh_agent.stop)
ssh_agent.start()
po = bwrap.getPopen(work_dir=work_dir,
- ansible_dir=ansible_dir,
ssh_auth_sock=ssh_agent.env['SSH_AUTH_SOCK'])
self.assertTrue(po.passwd_r > 2)
self.assertTrue(po.group_r > 2)
self.assertTrue(work_dir in po.command)
- self.assertTrue(ansible_dir in po.command)
# Now run /usr/bin/id to verify passwd/group entries made it in
true_proc = po(['/usr/bin/id'], stdout=subprocess.PIPE,
stderr=subprocess.PIPE)
@@ -52,3 +50,23 @@
# Make sure the _r's are closed
self.assertIsNone(po.passwd_r)
self.assertIsNone(po.group_r)
+
+ def test_bubblewrap_leak(self):
+ bwrap = bubblewrap.BubblewrapDriver()
+ work_dir = tempfile.mkdtemp()
+ ansible_dir = tempfile.mkdtemp()
+ ssh_agent = SshAgent()
+ self.addCleanup(ssh_agent.stop)
+ ssh_agent.start()
+ po = bwrap.getPopen(work_dir=work_dir,
+ ansible_dir=ansible_dir,
+ ssh_auth_sock=ssh_agent.env['SSH_AUTH_SOCK'])
+ leak_time = 7
+ # Use hexadecimal notation to avoid false positives

+ true_proc = po(['bash', '-c', 'sleep 0x%X & disown' % leak_time])
+ self.assertEqual(0, true_proc.wait())
+ cmdline = "sleep\x000x%X\x00" % leak_time
+ sleep_proc = [pid for pid in os.listdir("/proc") if
+ os.path.isfile("/proc/%s/cmdline" % pid) and
+ open("/proc/%s/cmdline" % pid).read() == cmdline]
+ self.assertEqual(len(sleep_proc), 0, "Processes leaked")
diff --git a/tests/unit/test_github_requirements.py b/tests/unit/test_github_requirements.py
index 43bdfc2..135f7ab 100644
--- a/tests/unit/test_github_requirements.py
+++ b/tests/unit/test_github_requirements.py
@@ -350,3 +350,28 @@
self.waitUntilSettled()
# Event hash is not current, should not trigger
self.assertEqual(len(self.history), 1)
+
+ @simple_layout('layouts/requirements-github.yaml', driver='github')
+ def test_pipeline_require_label(self):
+ "Test pipeline requirement: label"
+ A = self.fake_github.openFakePullRequest('org/project10', 'master',
+ 'A')
+ # A comment event that we will keep submitting to trigger
+ comment = A.getCommentAddedEvent('test me')
+ self.fake_github.emitEvent(comment)
+ self.waitUntilSettled()
+ # No label so should not be enqueued
+ self.assertEqual(len(self.history), 0)
+
+ # A derp label should not cause it to be enqueued
+ A.addLabel('derp')
+ self.fake_github.emitEvent(comment)
+ self.waitUntilSettled()
+ self.assertEqual(len(self.history), 0)
+
+ # An approved label goes in
+ A.addLabel('approved')
+ self.fake_github.emitEvent(comment)
+ self.waitUntilSettled()
+ self.assertEqual(len(self.history), 1)
+ self.assertEqual(self.history[0].name, 'project10-label')
diff --git a/tests/unit/test_log_streamer.py b/tests/unit/test_log_streamer.py
index 3ea5a8e..b0ef2c2 100644
--- a/tests/unit/test_log_streamer.py
+++ b/tests/unit/test_log_streamer.py
@@ -14,9 +14,12 @@
# License for the specific language governing permissions and limitations
# under the License.
-import logging
+import os
+import os.path
import socket
import tempfile
+import threading
+import time
import zuul.lib.log_streamer
import tests.base
@@ -24,8 +27,6 @@
class TestLogStreamer(tests.base.BaseTestCase):
- log = logging.getLogger("zuul.test.cloner")
-
def setUp(self):
super(TestLogStreamer, self).setUp()
self.host = '0.0.0.0'
@@ -51,3 +52,103 @@
self.addCleanup(s.close)
self.assertNotEqual(0, s.connect_ex((self.host, port)))
s.close()
+
+
+class TestStreaming(tests.base.AnsibleZuulTestCase):
+
+ tenant_config_file = 'config/streamer/main.yaml'
+
+ def setUp(self):
+ super(TestStreaming, self).setUp()
+ self.host = '0.0.0.0'
+ self.streamer = None
+ self.stop_streamer = False
+ self.streaming_data = ''
+ self.test_streaming_event = threading.Event()
+
+ def stopStreamer(self):
+ self.stop_streamer = True
+
+ def startStreamer(self, port, build_uuid, root=None):
+ if not root:
+ root = tempfile.gettempdir()
+ self.streamer = zuul.lib.log_streamer.LogStreamer(None, self.host,
+ port, root)
+ s = socket.socket(socket.AF_INET, socket.SOCK_STREAM)
+ s.connect((self.host, port))
+ self.addCleanup(s.close)
+
+ req = '%s\n' % build_uuid
+ s.sendall(req.encode('utf-8'))
+ self.test_streaming_event.set()
+
+ while not self.stop_streamer:
+ data = s.recv(2048)
+ if not data:
+ break
+ self.streaming_data += data.decode('utf-8')
+
+ s.shutdown(socket.SHUT_RDWR)
+ s.close()
+ self.streamer.stop()
+
+ def test_streaming(self):
+ A = self.fake_gerrit.addFakeChange('org/project', 'master', 'A')
+ self.fake_gerrit.addEvent(A.getPatchsetCreatedEvent(1))
+
+ # We don't have any real synchronization for the ansible jobs, so
+ # just wait until we get our running build.
+ while not len(self.builds):
+ time.sleep(0.1)
+ build = self.builds[0]
+ self.assertEqual(build.name, 'python27')
+
+ build_dir = os.path.join(self.executor_server.jobdir_root, build.uuid)
+ while not os.path.exists(build_dir):
+ time.sleep(0.1)
+
+ # Need to wait to make sure that jobdir gets set
+ while build.jobdir is None:
+ time.sleep(0.1)
+ build = self.builds[0]
+
+ # Wait for the job to begin running and create the ansible log file.
+ # The job waits to complete until the flag file exists, so we can
+ # safely access the log here. We only open it (to force a file handle
+ # to be kept open for it after the job finishes) but wait to read the
+ # contents until the job is done.
+ ansible_log = os.path.join(build.jobdir.log_root, 'job-output.txt')
+ while not os.path.exists(ansible_log):
+ time.sleep(0.1)
+ logfile = open(ansible_log, 'r')
+ self.addCleanup(logfile.close)
+
+ # Create a thread to stream the log. We need this to be happening
+ # before we create the flag file to tell the job to complete.
+ port = 7901
+ streamer_thread = threading.Thread(
+ target=self.startStreamer,
+ args=(port, build.uuid, self.executor_server.jobdir_root,)
+ )
+ streamer_thread.start()
+ self.addCleanup(self.stopStreamer)
+ self.test_streaming_event.wait()
+
+ # Allow the job to complete, which should close the streaming
+ # connection (and terminate the thread) as well since the log file
+ # gets closed/deleted.
+ flag_file = os.path.join(build_dir, 'test_wait')
+ open(flag_file, 'w').close()
+ self.waitUntilSettled()
+ streamer_thread.join()
+
+ # Now that the job is finished, the log file has been closed by the
+ # job and deleted. However, we still have a file handle to it, so we
+ # can make sure that we read the entire contents at this point.
+ # Compact the returned lines into a single string for easy comparison.
+ file_contents = ''.join(logfile.readlines())
+ logfile.close()
+
+ self.log.debug("\n\nFile contents: %s\n\n", file_contents)
+ self.log.debug("\n\nStreamed: %s\n\n", self.streaming_data)
+ self.assertEqual(file_contents, self.streaming_data)
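
For reference, the test above exercises the finger log-streaming protocol end to end: a client connects to the streamer port, sends the build UUID terminated by a newline, then reads the log until the connection closes. A minimal client sketch along those lines (the function name and buffer size are illustrative, not part of this change):

import socket

def fetch_build_log(host, port, build_uuid):
    # Connect, request the build's log by UUID, and read until EOF.
    s = socket.create_connection((host, port))
    s.sendall(('%s\n' % build_uuid).encode('utf-8'))
    chunks = []
    while True:
        data = s.recv(4096)
        if not data:
            break
        chunks.append(data.decode('utf-8'))
    s.close()
    return ''.join(chunks)
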
diff --git a/tests/unit/test_model.py b/tests/unit/test_model.py
index 7a4d53e..f4ca96f 100644
--- a/tests/unit/test_model.py
+++ b/tests/unit/test_model.py
@@ -266,11 +266,11 @@
self.assertEqual(len(nodes), 1)
self.assertEqual(nodes[0].label, 'new')
self.assertEqual([x.path for x in job.pre_run],
- ['playbooks/base-pre',
- 'playbooks/py27-pre'])
+ ['base-pre',
+ 'py27-pre'])
self.assertEqual([x.path for x in job.post_run],
- ['playbooks/py27-post',
- 'playbooks/base-post'])
+ ['py27-post',
+ 'base-post'])
self.assertEqual([x.path for x in job.run],
['playbooks/python27',
'playbooks/base'])
@@ -294,15 +294,15 @@
self.assertEqual(len(nodes), 1)
self.assertEqual(nodes[0].label, 'old')
self.assertEqual([x.path for x in job.pre_run],
- ['playbooks/base-pre',
- 'playbooks/py27-pre',
- 'playbooks/py27-diablo-pre'])
+ ['base-pre',
+ 'py27-pre',
+ 'py27-diablo-pre'])
self.assertEqual([x.path for x in job.post_run],
- ['playbooks/py27-diablo-post',
- 'playbooks/py27-post',
- 'playbooks/base-post'])
+ ['py27-diablo-post',
+ 'py27-post',
+ 'base-post'])
self.assertEqual([x.path for x in job.run],
- ['playbooks/py27-diablo']),
+ ['py27-diablo']),
# Test essex
change.branch = 'stable/essex'
@@ -319,13 +319,13 @@
job = item.getJobs()[0]
self.assertEqual(job.name, 'python27')
self.assertEqual([x.path for x in job.pre_run],
- ['playbooks/base-pre',
- 'playbooks/py27-pre',
- 'playbooks/py27-essex-pre'])
+ ['base-pre',
+ 'py27-pre',
+ 'py27-essex-pre'])
self.assertEqual([x.path for x in job.post_run],
- ['playbooks/py27-essex-post',
- 'playbooks/py27-post',
- 'playbooks/base-post'])
+ ['py27-essex-post',
+ 'py27-post',
+ 'base-post'])
self.assertEqual([x.path for x in job.run],
['playbooks/python27',
'playbooks/base'])
diff --git a/tests/unit/test_scheduler.py b/tests/unit/test_scheduler.py
index eb17966..e402342 100755
--- a/tests/unit/test_scheduler.py
+++ b/tests/unit/test_scheduler.py
@@ -3380,13 +3380,13 @@
raise Exception("Timeout waiting for gearman server to report "
+ "back to the client")
build = list(self.executor_client.builds.values())[0]
- if build.worker.name == "My Worker":
+ if build.worker.name == self.executor_server.hostname:
break
else:
time.sleep(0)
self.log.debug(build)
- self.assertEqual("My Worker", build.worker.name)
+ self.assertEqual(self.executor_server.hostname, build.worker.name)
self.executor_server.hold_jobs_in_build = False
self.executor_server.release()
@@ -3553,7 +3553,7 @@
raise Exception("Timeout waiting for gearman server to report "
+ "back to the client")
build = list(self.executor_client.builds.values())[0]
- if build.worker.name == "My Worker":
+ if build.worker.name == self.executor_server.hostname:
break
else:
time.sleep(0)
diff --git a/zuul/cmd/client.py b/zuul/cmd/client.py
old mode 100644
new mode 100755
index 94414f2..dec15e7
--- a/zuul/cmd/client.py
+++ b/zuul/cmd/client.py
@@ -25,6 +25,7 @@
import zuul.rpcclient
import zuul.cmd
+from zuul.lib.config import get_default
class Client(zuul.cmd.ZuulApp):
@@ -122,22 +123,10 @@
self.setup_logging()
self.server = self.config.get('gearman', 'server')
- if self.config.has_option('gearman', 'port'):
- self.port = self.config.get('gearman', 'port')
- else:
- self.port = 4730
- if self.config.has_option('gearman', 'ssl_key'):
- self.ssl_key = self.config.get('gearman', 'ssl_key')
- else:
- self.ssl_key = None
- if self.config.has_option('gearman', 'ssl_cert'):
- self.ssl_cert = self.config.get('gearman', 'ssl_cert')
- else:
- self.ssl_cert = None
- if self.config.has_option('gearman', 'ssl_ca'):
- self.ssl_ca = self.config.get('gearman', 'ssl_ca')
- else:
- self.ssl_ca = None
+ self.port = get_default(self.config, 'gearman', 'port', 4730)
+ self.ssl_key = get_default(self.config, 'gearman', 'ssl_key')
+ self.ssl_cert = get_default(self.config, 'gearman', 'ssl_cert')
+ self.ssl_ca = get_default(self.config, 'gearman', 'ssl_ca')
if self.args.func():
sys.exit(0)
diff --git a/zuul/cmd/executor.py b/zuul/cmd/executor.py
index 7cc8dd8..44a7d3f 100755
--- a/zuul/cmd/executor.py
+++ b/zuul/cmd/executor.py
@@ -32,6 +32,7 @@
import zuul.cmd
import zuul.executor.server
+from zuul.lib.config import get_default
# No zuul imports that pull in paramiko here; it must not be
# imported until after the daemonization.
@@ -39,9 +40,6 @@
# Similar situation with gear and statsd.
-DEFAULT_FINGER_PORT = 79
-
-
class Executor(zuul.cmd.ZuulApp):
def parse_arguments(self):
@@ -63,11 +61,8 @@
self.args = parser.parse_args()
def send_command(self, cmd):
- if self.config.has_option('zuul', 'state_dir'):
- state_dir = os.path.expanduser(
- self.config.get('zuul', 'state_dir'))
- else:
- state_dir = '/var/lib/zuul'
+ state_dir = get_default(self.config, 'zuul', 'state_dir',
+ '/var/lib/zuul', expand_user=True)
path = os.path.join(state_dir, 'executor.socket')
s = socket.socket(socket.AF_UNIX, socket.SOCK_STREAM)
s.connect(path)
@@ -114,10 +109,7 @@
def main(self, daemon=True):
# See comment at top of file about zuul imports
- if self.config.has_option('executor', 'user'):
- self.user = self.config.get('executor', 'user')
- else:
- self.user = 'zuul'
+ self.user = get_default(self.config, 'executor', 'user', 'zuul')
if self.config.has_option('zuul', 'jobroot_dir'):
self.jobroot_dir = os.path.expanduser(
@@ -132,10 +124,10 @@
self.setup_logging('executor', 'log_config')
self.log = logging.getLogger("zuul.Executor")
- if self.config.has_option('executor', 'finger_port'):
- self.finger_port = int(self.config.get('executor', 'finger_port'))
- else:
- self.finger_port = DEFAULT_FINGER_PORT
+ self.finger_port = int(
+ get_default(self.config, 'executor', 'finger_port',
+ zuul.executor.server.DEFAULT_FINGER_PORT)
+ )
self.start_log_streamer()
self.change_privs()
@@ -143,7 +135,8 @@
ExecutorServer = zuul.executor.server.ExecutorServer
self.executor = ExecutorServer(self.config, self.connections,
jobdir_root=self.jobroot_dir,
- keep_jobdir=self.args.keep_jobdir)
+ keep_jobdir=self.args.keep_jobdir,
+ log_streaming_port=self.finger_port)
self.executor.start()
signal.signal(signal.SIGUSR2, zuul.cmd.stack_dump_handler)
@@ -170,10 +163,9 @@
server.configure_connections(source_only=True)
- if server.config.has_option('executor', 'pidfile'):
- pid_fn = os.path.expanduser(server.config.get('executor', 'pidfile'))
- else:
- pid_fn = '/var/run/zuul-executor/zuul-executor.pid'
+ pid_fn = get_default(server.config, 'executor', 'pidfile',
+ '/var/run/zuul-executor/zuul-executor.pid',
+ expand_user=True)
pid = pid_file_module.TimeoutPIDLockFile(pid_fn, 10)
if server.args.nodaemon:
diff --git a/zuul/cmd/merger.py b/zuul/cmd/merger.py
index 686f34a..97f208c 100755
--- a/zuul/cmd/merger.py
+++ b/zuul/cmd/merger.py
@@ -27,6 +27,7 @@
import signal
import zuul.cmd
+from zuul.lib.config import get_default
# No zuul imports here because they pull in paramiko which must not be
# imported until after the daemonization.
@@ -79,10 +80,8 @@
server.read_config()
server.configure_connections(source_only=True)
- if server.config.has_option('zuul', 'state_dir'):
- state_dir = os.path.expanduser(server.config.get('zuul', 'state_dir'))
- else:
- state_dir = '/var/lib/zuul'
+ state_dir = get_default(server.config, 'zuul', 'state_dir',
+ '/var/lib/zuul', expand_user=True)
test_fn = os.path.join(state_dir, 'test')
try:
f = open(test_fn, 'w')
@@ -92,10 +91,9 @@
print("\nUnable to write to state directory: %s\n" % state_dir)
raise
- if server.config.has_option('merger', 'pidfile'):
- pid_fn = os.path.expanduser(server.config.get('merger', 'pidfile'))
- else:
- pid_fn = '/var/run/zuul-merger/zuul-merger.pid'
+ pid_fn = get_default(server.config, 'merger', 'pidfile',
+ '/var/run/zuul-merger/zuul-merger.pid',
+ expand_user=True)
pid = pid_file_module.TimeoutPIDLockFile(pid_fn, 10)
if server.args.nodaemon:
diff --git a/zuul/cmd/scheduler.py b/zuul/cmd/scheduler.py
index d16eb17..b32deaf 100755
--- a/zuul/cmd/scheduler.py
+++ b/zuul/cmd/scheduler.py
@@ -28,6 +28,7 @@
import signal
import zuul.cmd
+from zuul.lib.config import get_default
# No zuul imports here because they pull in paramiko which must not be
# imported until after the daemonization.
@@ -98,22 +99,10 @@
import zuul.lib.gearserver
statsd_host = os.environ.get('STATSD_HOST')
statsd_port = int(os.environ.get('STATSD_PORT', 8125))
- if self.config.has_option('gearman_server', 'listen_address'):
- host = self.config.get('gearman_server', 'listen_address')
- else:
- host = None
- if self.config.has_option('gearman_server', 'ssl_key'):
- ssl_key = self.config.get('gearman_server', 'ssl_key')
- else:
- ssl_key = None
- if self.config.has_option('gearman_server', 'ssl_cert'):
- ssl_cert = self.config.get('gearman_server', 'ssl_cert')
- else:
- ssl_cert = None
- if self.config.has_option('gearman_server', 'ssl_ca'):
- ssl_ca = self.config.get('gearman_server', 'ssl_ca')
- else:
- ssl_ca = None
+ host = get_default(self.config, 'gearman_server', 'listen_address')
+ ssl_key = get_default(self.config, 'gearman_server', 'ssl_key')
+ ssl_cert = get_default(self.config, 'gearman_server', 'ssl_cert')
+ ssl_ca = get_default(self.config, 'gearman_server', 'ssl_ca')
zuul.lib.gearserver.GearServer(4730,
ssl_key=ssl_key,
ssl_cert=ssl_cert,
@@ -161,27 +150,16 @@
nodepool = zuul.nodepool.Nodepool(self.sched)
zookeeper = zuul.zk.ZooKeeper()
- if self.config.has_option('zuul', 'zookeeper_hosts'):
- zookeeper_hosts = self.config.get('zuul', 'zookeeper_hosts')
- else:
- zookeeper_hosts = '127.0.0.1:2181'
+ zookeeper_hosts = get_default(self.config, 'zuul', 'zookeeper_hosts',
+ '127.0.0.1:2181')
zookeeper.connect(zookeeper_hosts)
- if self.config.has_option('zuul', 'status_expiry'):
- cache_expiry = self.config.getint('zuul', 'status_expiry')
- else:
- cache_expiry = 1
+ cache_expiry = get_default(self.config, 'zuul', 'status_expiry', 1)
- if self.config.has_option('webapp', 'listen_address'):
- listen_address = self.config.get('webapp', 'listen_address')
- else:
- listen_address = '0.0.0.0'
-
- if self.config.has_option('webapp', 'port'):
- port = self.config.getint('webapp', 'port')
- else:
- port = 8001
+ listen_address = get_default(self.config, 'webapp', 'listen_address',
+ '0.0.0.0')
+ port = get_default(self.config, 'webapp', 'port', 8001)
webapp = zuul.webapp.WebApp(
self.sched, port=port, cache_expiry=cache_expiry,
@@ -230,10 +208,9 @@
if scheduler.args.validate:
sys.exit(scheduler.test_config())
- if scheduler.config.has_option('zuul', 'pidfile'):
- pid_fn = os.path.expanduser(scheduler.config.get('zuul', 'pidfile'))
- else:
- pid_fn = '/var/run/zuul-scheduler/zuul-scheduler.pid'
+ pid_fn = get_default(scheduler.config, 'zuul', 'pidfile',
+ '/var/run/zuul-scheduler/zuul-scheduler.pid',
+ expand_user=True)
pid = pid_file_module.TimeoutPIDLockFile(pid_fn, 10)
if scheduler.args.nodaemon:
diff --git a/zuul/configloader.py b/zuul/configloader.py
index d4f7c43..84227f8 100644
--- a/zuul/configloader.py
+++ b/zuul/configloader.py
@@ -401,18 +401,15 @@
job.inheritFrom(parent)
for pre_run_name in as_list(conf.get('pre-run')):
- full_pre_run_name = os.path.join('playbooks', pre_run_name)
pre_run = model.PlaybookContext(job.source_context,
- full_pre_run_name)
+ pre_run_name)
job.pre_run = job.pre_run + (pre_run,)
for post_run_name in as_list(conf.get('post-run')):
- full_post_run_name = os.path.join('playbooks', post_run_name)
post_run = model.PlaybookContext(job.source_context,
- full_post_run_name)
+ post_run_name)
job.post_run = (post_run,) + job.post_run
if 'run' in conf:
- run_name = os.path.join('playbooks', conf['run'])
- run = model.PlaybookContext(job.source_context, run_name)
+ run = model.PlaybookContext(job.source_context, conf['run'])
job.run = (run,)
else:
if not project_pipeline:
diff --git a/zuul/driver/__init__.py b/zuul/driver/__init__.py
index b6c34e7..c78283d 100644
--- a/zuul/driver/__init__.py
+++ b/zuul/driver/__init__.py
@@ -270,3 +270,13 @@
:rtype: Callable
"""
pass
+
+ @abc.abstractmethod
+ def setMountsMap(self, state_dir, ro_dirs=[], rw_dirs=[]):
+ """Add additional mount point to the execution environment.
+
+ :arg str state_dir: the state directory to be read write
+ :arg list ro_dirs: read only directories paths
+ :arg list rw_dirs: read write directories paths
+ """
+ pass
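
The intended call pattern, pieced together from the executor changes later in this diff: the executor looks up the wrapper driver, calls setMountsMap() with the state dir plus any configured extra paths, and only then asks for a Popen factory. A hedged sketch under those assumptions (the mount paths and work dir are illustrative; setMountsMap/getPopen are the interface methods above):

import os
from zuul.driver import bubblewrap

wrapper = bubblewrap.BubblewrapDriver()
wrapper.setMountsMap('/var/lib/zuul',                  # state_dir, mounted rw
                     ro_dirs=['/opt/zuul-scripts'],
                     rw_dirs=['/opt/zuul-logs'])
popen = wrapper.getPopen(work_dir='/tmp/zuul-job',     # illustrative work dir
                         ssh_auth_sock=os.environ.get('SSH_AUTH_SOCK'))
proc = popen(['ansible-playbook', 'playbook.yaml'])
proc.wait()
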
diff --git a/zuul/driver/bubblewrap/__init__.py b/zuul/driver/bubblewrap/__init__.py
index c165727..95b09e0 100644
--- a/zuul/driver/bubblewrap/__init__.py
+++ b/zuul/driver/bubblewrap/__init__.py
@@ -83,20 +83,20 @@
'--ro-bind', '/bin', '/bin',
'--ro-bind', '/sbin', '/sbin',
'--ro-bind', '/etc/resolv.conf', '/etc/resolv.conf',
- '--ro-bind', '{ansible_dir}', '{ansible_dir}',
'--ro-bind', '{ssh_auth_sock}', '{ssh_auth_sock}',
'--dir', '{work_dir}',
'--bind', '{work_dir}', '{work_dir}',
'--dev', '/dev',
- '--dir', '{user_home}',
'--chdir', '{work_dir}',
'--unshare-all',
'--share-net',
+ '--die-with-parent',
'--uid', '{uid}',
'--gid', '{gid}',
'--file', '{uid_fd}', '/etc/passwd',
'--file', '{gid_fd}', '/etc/group',
]
+ mounts_map = {'rw': [], 'ro': []}
def reconfigure(self, tenant):
pass
@@ -104,6 +104,9 @@
def stop(self):
pass
+ def setMountsMap(self, state_dir, ro_dirs=[], rw_dirs=[]):
+ self.mounts_map = {'ro': ro_dirs, 'rw': [state_dir] + rw_dirs}
+
def getPopen(self, **kwargs):
# Set zuul_dir if it was not passed in
if 'zuul_dir' in kwargs:
@@ -117,9 +120,16 @@
if not zuul_dir.startswith('/usr'):
bwrap_command.extend(['--ro-bind', zuul_dir, zuul_dir])
+ for mount_type in ('ro', 'rw'):
+ bind_arg = '--ro-bind' if mount_type == 'ro' else '--bind'
+ for bind in self.mounts_map[mount_type]:
+ bwrap_command.extend([bind_arg, bind, bind])
+
# Need users and groups
uid = os.getuid()
- passwd = pwd.getpwuid(uid)
+ passwd = list(pwd.getpwuid(uid))
+ # Replace our user's actual home directory with the work dir.
+ passwd = passwd[:5] + [kwargs['work_dir']] + passwd[6:]
passwd_bytes = b':'.join(
['{}'.format(x).encode('utf8') for x in passwd])
(passwd_r, passwd_w) = os.pipe()
@@ -141,7 +151,6 @@
kwargs['gid'] = gid
kwargs['uid_fd'] = passwd_r
kwargs['gid_fd'] = group_r
- kwargs['user_home'] = passwd.pw_dir
command = [x.format(**kwargs) for x in bwrap_command]
self.log.debug("Bubblewrap command: %s",
@@ -159,14 +168,12 @@
parser = argparse.ArgumentParser()
parser.add_argument('work_dir')
- parser.add_argument('ansible_dir')
parser.add_argument('run_args', nargs='+')
cli_args = parser.parse_args()
ssh_auth_sock = os.environ.get('SSH_AUTH_SOCK')
popen = driver.getPopen(work_dir=cli_args.work_dir,
- ansible_dir=cli_args.ansible_dir,
ssh_auth_sock=ssh_auth_sock)
x = popen(cli_args.run_args)
x.wait()
diff --git a/zuul/driver/gerrit/gerritconnection.py b/zuul/driver/gerrit/gerritconnection.py
index 39a81bc..924a42f 100644
--- a/zuul/driver/gerrit/gerritconnection.py
+++ b/zuul/driver/gerrit/gerritconnection.py
@@ -237,6 +237,7 @@
class GerritConnection(BaseConnection):
driver_name = 'gerrit'
log = logging.getLogger("zuul.GerritConnection")
+ iolog = logging.getLogger("zuul.GerritConnection.io")
depends_on_re = re.compile(r"^Depends-On: (I[0-9a-f]{40})\s*$",
re.MULTILINE | re.IGNORECASE)
replication_timeout = 300
@@ -631,8 +632,8 @@
data = json.loads(lines[0])
if not data:
return False
- self.log.debug("Received data from Gerrit query: \n%s" %
- (pprint.pformat(data)))
+ self.iolog.debug("Received data from Gerrit query: \n%s" %
+ (pprint.pformat(data)))
return data
def simpleQuery(self, query):
@@ -662,8 +663,8 @@
if not data:
return False, more_changes
- self.log.debug("Received data from Gerrit query: \n%s" %
- (pprint.pformat(data)))
+ self.iolog.debug("Received data from Gerrit query: \n%s" %
+ (pprint.pformat(data)))
return data, more_changes
# gerrit returns 500 results by default, so implement paging
@@ -717,14 +718,17 @@
stdin.write(stdin_data)
out = stdout.read().decode('utf-8')
- self.log.debug("SSH received stdout:\n%s" % out)
+ self.iolog.debug("SSH received stdout:\n%s" % out)
ret = stdout.channel.recv_exit_status()
self.log.debug("SSH exit status: %s" % ret)
err = stderr.read().decode('utf-8')
- self.log.debug("SSH received stderr:\n%s" % err)
+ if err.strip():
+ self.log.debug("SSH received stderr:\n%s" % err)
+
if ret:
+ self.log.debug("SSH received stdout:\n%s" % out)
raise Exception("Gerrit error executing %s" % command)
return (out, err)
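
The bulky query results and SSH stdout now go to a dedicated zuul.GerritConnection.io logger, so deployments can keep connection debugging while filtering out the payload dumps. A minimal sketch of doing that programmatically (a logging config file achieves the same; this snippet is not part of the change):

import logging

logging.getLogger('zuul.GerritConnection').setLevel(logging.DEBUG)
# Silence only the payload dumps (query results, SSH stdout):
logging.getLogger('zuul.GerritConnection.io').setLevel(logging.INFO)
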
diff --git a/zuul/driver/github/githubconnection.py b/zuul/driver/github/githubconnection.py
index f9f1c27..2e5d49a 100644
--- a/zuul/driver/github/githubconnection.py
+++ b/zuul/driver/github/githubconnection.py
@@ -427,7 +427,7 @@
data = {'iat': now, 'exp': expiry, 'iss': self.app_id}
app_token = jwt.encode(data,
self.app_key,
- algorithm='RS256')
+ algorithm='RS256').decode('utf-8')
url = ACCESS_TOKEN_URL % installation_id
headers = {'Accept': PREVIEW_JSON_ACCEPT,
@@ -524,6 +524,7 @@
change.patchset)
change.reviews = self.getPullReviews(change.project,
change.number)
+ change.labels = change.pr.get('labels')
return change
@@ -572,8 +573,11 @@
self.log.warning("Pull request #%s of %s/%s returned None!" % (
number, owner, proj))
time.sleep(1)
+ # Get the issue obj so we can get the labels (this is silly)
+ issueobj = probj.issue()
pr = probj.as_dict()
pr['files'] = [f.filename for f in probj.files()]
+ pr['labels'] = [l.name for l in issueobj.labels()]
log_rate_limit(self.log, github)
return pr
@@ -599,7 +603,7 @@
if not pr_url:
continue
# the issue provides no good description of the project :\
- owner, project, _, number = pr_url.split('/')[4:]
+ owner, project, _, number = pr_url.split('/')[-4:]
github = self.getGithubClient("%s/%s" % (owner, project))
pr = github.pull_request(owner, project, number)
if pr.head.sha != sha:
@@ -682,7 +686,8 @@
headers = {'Accept': 'application/vnd.github.korra-preview'}
# Create a repo object
- repository = github.repository(owner, project)
+ repository = github.repository(owner, proj)
+
# Build up a URL
url = repository._build_url('collaborators', login, 'permission',
base_url=repository._api)
diff --git a/zuul/driver/github/githubmodel.py b/zuul/driver/github/githubmodel.py
index cfd1bc0..db119f0 100644
--- a/zuul/driver/github/githubmodel.py
+++ b/zuul/driver/github/githubmodel.py
@@ -28,9 +28,13 @@
class PullRequest(Change):
def __init__(self, project):
super(PullRequest, self).__init__(project)
+ self.project = None
+ self.pr = None
self.updated_at = None
self.title = None
self.reviews = []
+ self.files = []
+ self.labels = []
def isUpdateOf(self, other):
if (hasattr(other, 'number') and self.number == other.number and
@@ -284,7 +288,8 @@
class GithubRefFilter(RefFilter, GithubCommonFilter):
def __init__(self, connection_name, statuses=[], required_reviews=[],
- reject_reviews=[], open=None, current_patchset=None):
+ reject_reviews=[], open=None, current_patchset=None,
+ labels=[]):
RefFilter.__init__(self, connection_name)
GithubCommonFilter.__init__(self, required_reviews=required_reviews,
@@ -293,6 +298,7 @@
self.statuses = statuses
self.open = open
self.current_patchset = current_patchset
+ self.labels = labels
def __repr__(self):
ret = '<GithubRefFilter'
@@ -310,6 +316,8 @@
ret += ' open: %s' % self.open
if self.current_patchset:
ret += ' current-patchset: %s' % self.current_patchset
+ if self.labels:
+ ret += ' labels: %s' % self.labels
ret += '>'
@@ -341,4 +349,9 @@
if not self.matchesReviews(change):
return False
+ # required labels are ANDed
+ for label in self.labels:
+ if label not in change.labels:
+ return False
+
return True
diff --git a/zuul/driver/github/githubsource.py b/zuul/driver/github/githubsource.py
index 519ebf1..1bd280f 100644
--- a/zuul/driver/github/githubsource.py
+++ b/zuul/driver/github/githubsource.py
@@ -97,6 +97,7 @@
required_reviews=to_list(config.get('review')),
open=config.get('open'),
current_patchset=config.get('current-patchset'),
+ labels=to_list(config.get('label')),
)
return [f]
@@ -121,7 +122,8 @@
require = {'status': scalar_or_list(str),
'review': scalar_or_list(review),
'open': bool,
- 'current-patchset': bool}
+ 'current-patchset': bool,
+ 'label': scalar_or_list(str)}
return require
diff --git a/zuul/driver/nullwrap/__init__.py b/zuul/driver/nullwrap/__init__.py
index ebcd1da..50fea27 100644
--- a/zuul/driver/nullwrap/__init__.py
+++ b/zuul/driver/nullwrap/__init__.py
@@ -26,3 +26,6 @@
def getPopen(self, **kwargs):
return subprocess.Popen
+
+ def setMountsMap(self, **kwargs):
+ pass
diff --git a/zuul/executor/client.py b/zuul/executor/client.py
index 6ecb27c..aaef34e 100644
--- a/zuul/executor/client.py
+++ b/zuul/executor/client.py
@@ -21,6 +21,7 @@
from uuid import uuid4
import zuul.model
+from zuul.lib.config import get_default
from zuul.model import Build
@@ -115,23 +116,10 @@
self.meta_jobs = {} # A list of meta-jobs like stop or describe
server = config.get('gearman', 'server')
- if config.has_option('gearman', 'port'):
- port = config.get('gearman', 'port')
- else:
- port = 4730
- if self.config.has_option('gearman', 'ssl_key'):
- ssl_key = self.config.get('gearman', 'ssl_key')
- else:
- ssl_key = None
- if self.config.has_option('gearman', 'ssl_cert'):
- ssl_cert = self.config.get('gearman', 'ssl_cert')
- else:
- ssl_cert = None
- if self.config.has_option('gearman', 'ssl_ca'):
- ssl_ca = self.config.get('gearman', 'ssl_ca')
- else:
- ssl_ca = None
-
+ port = get_default(self.config, 'gearman', 'port', 4730)
+ ssl_key = get_default(self.config, 'gearman', 'ssl_key')
+ ssl_cert = get_default(self.config, 'gearman', 'ssl_cert')
+ ssl_ca = get_default(self.config, 'gearman', 'ssl_ca')
self.gearman = ZuulGearmanClient(self)
self.gearman.addServer(server, port, ssl_key, ssl_cert, ssl_ca)
@@ -349,7 +337,7 @@
gearman_job = gear.TextJob('executor:execute', json.dumps(params),
unique=uuid)
build.__gearman_job = gearman_job
- build.__gearman_manager = None
+ build.__gearman_worker = None
self.builds[uuid] = build
# NOTE(pabelanger): Rather than looping forever, check to see if job
@@ -456,7 +444,7 @@
if not started:
self.log.info("Build %s started" % job)
- build.__gearman_manager = data.get('manager')
+ build.__gearman_worker = data.get('worker_name')
self.sched.onBuildStarted(build)
else:
self.log.error("Unable to find build %s" % job.unique)
@@ -485,12 +473,12 @@
return False
def cancelRunningBuild(self, build):
- if not build.__gearman_manager:
+ if not build.__gearman_worker:
self.log.error("Build %s has no manager while canceling" %
(build,))
stop_uuid = str(uuid4().hex)
data = dict(uuid=build.__gearman_job.unique)
- stop_job = gear.TextJob("executor:stop:%s" % build.__gearman_manager,
+ stop_job = gear.TextJob("executor:stop:%s" % build.__gearman_worker,
json.dumps(data), unique=stop_uuid)
self.meta_jobs[stop_uuid] = stop_job
self.log.debug("Submitting stop job: %s", stop_job)
diff --git a/zuul/executor/server.py b/zuul/executor/server.py
index 0a43ca8..e47230c 100644
--- a/zuul/executor/server.py
+++ b/zuul/executor/server.py
@@ -27,6 +27,7 @@
import time
import traceback
from zuul.lib.yamlutil import yaml
+from zuul.lib.config import get_default
import gear
@@ -36,7 +37,8 @@
BUFFER_LINES_FOR_SYNTAX = 200
COMMANDS = ['stop', 'pause', 'unpause', 'graceful', 'verbose',
- 'unverbose']
+ 'unverbose', 'keep', 'nokeep']
+DEFAULT_FINGER_PORT = 79
class Watchdog(object):
@@ -358,13 +360,14 @@
log = logging.getLogger("zuul.ExecutorServer")
def __init__(self, config, connections={}, jobdir_root=None,
- keep_jobdir=False):
+ keep_jobdir=False, log_streaming_port=DEFAULT_FINGER_PORT):
self.config = config
self.keep_jobdir = keep_jobdir
self.jobdir_root = jobdir_root
# TODOv3(mordred): make the executor name more unique --
# perhaps hostname+pid.
self.hostname = socket.gethostname()
+ self.log_streaming_port = log_streaming_port
self.zuul_url = config.get('merger', 'zuul_url')
self.merger_lock = threading.Lock()
self.verbose = False
@@ -375,35 +378,19 @@
graceful=self.graceful,
verbose=self.verboseOn,
unverbose=self.verboseOff,
+ keep=self.keep,
+ nokeep=self.nokeep,
)
- if self.config.has_option('executor', 'git_dir'):
- self.merge_root = self.config.get('executor', 'git_dir')
- else:
- self.merge_root = '/var/lib/zuul/executor-git'
-
- if self.config.has_option('executor', 'default_username'):
- self.default_username = self.config.get('executor',
- 'default_username')
- else:
- self.default_username = 'zuul'
-
- if self.config.has_option('merger', 'git_user_email'):
- self.merge_email = self.config.get('merger', 'git_user_email')
- else:
- self.merge_email = None
-
- if self.config.has_option('merger', 'git_user_name'):
- self.merge_name = self.config.get('merger', 'git_user_name')
- else:
- self.merge_name = None
-
- if self.config.has_option('executor', 'untrusted_wrapper'):
- untrusted_wrapper_name = self.config.get(
- 'executor', 'untrusted_wrapper').strip()
- else:
- untrusted_wrapper_name = 'bubblewrap'
- self.untrusted_wrapper = connections.drivers[untrusted_wrapper_name]
+ self.merge_root = get_default(self.config, 'executor', 'git_dir',
+ '/var/lib/zuul/executor-git')
+ self.default_username = get_default(self.config, 'executor',
+ 'default_username', 'zuul')
+ self.merge_email = get_default(self.config, 'merger', 'git_user_email')
+ self.merge_name = get_default(self.config, 'merger', 'git_user_name')
+ execution_wrapper_name = get_default(self.config, 'executor',
+ 'execution_wrapper', 'bubblewrap')
+ self.execution_wrapper = connections.drivers[execution_wrapper_name]
self.connections = connections
# This merger and its git repos are used to maintain
@@ -413,11 +400,8 @@
self.merger = self._getMerger(self.merge_root)
self.update_queue = DeduplicateQueue()
- if self.config.has_option('zuul', 'state_dir'):
- state_dir = os.path.expanduser(
- self.config.get('zuul', 'state_dir'))
- else:
- state_dir = '/var/lib/zuul'
+ state_dir = get_default(self.config, 'zuul', 'state_dir',
+ '/var/lib/zuul', expand_user=True)
path = os.path.join(state_dir, 'executor.socket')
self.command_socket = commandsocket.CommandSocket(path)
ansible_dir = os.path.join(state_dir, 'ansible')
@@ -459,22 +443,10 @@
self._running = True
self._command_running = True
server = self.config.get('gearman', 'server')
- if self.config.has_option('gearman', 'port'):
- port = self.config.get('gearman', 'port')
- else:
- port = 4730
- if self.config.has_option('gearman', 'ssl_key'):
- ssl_key = self.config.get('gearman', 'ssl_key')
- else:
- ssl_key = None
- if self.config.has_option('gearman', 'ssl_cert'):
- ssl_cert = self.config.get('gearman', 'ssl_cert')
- else:
- ssl_cert = None
- if self.config.has_option('gearman', 'ssl_ca'):
- ssl_ca = self.config.get('gearman', 'ssl_ca')
- else:
- ssl_ca = None
+ port = get_default(self.config, 'gearman', 'port', 4730)
+ ssl_key = get_default(self.config, 'gearman', 'ssl_key')
+ ssl_cert = get_default(self.config, 'gearman', 'ssl_cert')
+ ssl_ca = get_default(self.config, 'gearman', 'ssl_ca')
self.merger_worker = ExecutorMergeWorker(self, 'Zuul Executor Merger')
self.merger_worker.addServer(server, port, ssl_key, ssl_cert, ssl_ca)
self.executor_worker = gear.TextWorker('Zuul Executor Server')
@@ -544,6 +516,12 @@
def verboseOff(self):
self.verbose = False
+ def keep(self):
+ self.keep_jobdir = True
+
+ def nokeep(self):
+ self.keep_jobdir = False
+
def join(self):
self.update_thread.join()
self.merger_thread.join()
@@ -717,12 +695,9 @@
self.thread = None
self.ssh_agent = None
- if self.executor_server.config.has_option(
- 'executor', 'private_key_file'):
- self.private_key_file = self.executor_server.config.get(
- 'executor', 'private_key_file')
- else:
- self.private_key_file = '~/.ssh/id_rsa'
+ self.private_key_file = get_default(self.executor_server.config,
+ 'executor', 'private_key_file',
+ '~/.ssh/id_rsa')
self.ssh_agent = SshAgent()
def run(self):
@@ -771,11 +746,27 @@
(self.job.unique, self.jobdir.root))
args = json.loads(self.job.arguments)
tasks = []
+ projects = set()
+
+ # Make sure all projects used by the job are updated...
for project in args['projects']:
self.log.debug("Job %s: updating project %s" %
(self.job.unique, project))
tasks.append(self.executor_server.update(
project['connection'], project['name']))
+ projects.add((project['connection'], project['name']))
+
+ # ...as well as all playbook and role projects.
+ repos = (args['pre_playbooks'] + args['playbooks'] +
+ args['post_playbooks'] + args['roles'])
+ for repo in repos:
+ self.log.debug("Job %s: updating playbook or role %s" %
+ (self.job.unique, repo))
+ key = (repo['connection'], repo['project'])
+ if key not in projects:
+ tasks.append(self.executor_server.update(*key))
+ projects.add(key)
+
for task in tasks:
task.wait()
@@ -821,12 +812,24 @@
self.prepareAnsibleFiles(args)
data = {
- 'manager': self.executor_server.hostname,
- 'url': 'finger://{server}/{unique}'.format(
- unique=self.job.unique,
- server=self.executor_server.hostname),
- 'worker_name': 'My Worker',
+ # TODO(mordred) worker_name is needed as a unique name for the
+ # client to use for cancelling jobs on an executor. It's defaulting
+ # to the hostname for now, but in the future we should allow
+ # setting a per-executor override so that one can run more than
+ # one executor on a host.
+ 'worker_name': self.executor_server.hostname,
+ 'worker_hostname': self.executor_server.hostname,
+ 'worker_log_port': self.executor_server.log_streaming_port
}
+ if self.executor_server.log_streaming_port != DEFAULT_FINGER_PORT:
+ data['url'] = "finger://{hostname}:{port}/{uuid}".format(
+ hostname=data['worker_hostname'],
+ port=data['worker_log_port'],
+ uuid=self.job.unique)
+ else:
+ data['url'] = 'finger://{hostname}/{uuid}'.format(
+ hostname=data['worker_hostname'],
+ uuid=self.job.unique)
self.job.sendWorkData(json.dumps(data))
self.job.sendWorkStatus(0, 100)
@@ -1275,16 +1278,29 @@
if trusted:
config_file = self.jobdir.trusted_config
- popen = subprocess.Popen
+ opt_prefix = 'trusted'
else:
config_file = self.jobdir.untrusted_config
- driver = self.executor_server.untrusted_wrapper
- popen = driver.getPopen(
- work_dir=self.jobdir.root,
- ansible_dir=self.executor_server.ansible_dir,
- ssh_auth_sock=env_copy.get('SSH_AUTH_SOCK'))
+ opt_prefix = 'untrusted'
+ ro_dirs = get_default(self.executor_server.config, 'executor',
+ '%s_ro_dirs' % opt_prefix)
+ rw_dirs = get_default(self.executor_server.config, 'executor',
+ '%s_rw_dirs' % opt_prefix)
+ state_dir = get_default(self.executor_server.config, 'zuul',
+ 'state_dir', '/var/lib/zuul', expand_user=True)
+ ro_dirs = ro_dirs.split(":") if ro_dirs else []
+ rw_dirs = rw_dirs.split(":") if rw_dirs else []
+ self.executor_server.execution_wrapper.setMountsMap(state_dir, ro_dirs,
+ rw_dirs)
+
+ popen = self.executor_server.execution_wrapper.getPopen(
+ work_dir=self.jobdir.root,
+ ssh_auth_sock=env_copy.get('SSH_AUTH_SOCK'))
env_copy['ANSIBLE_CONFIG'] = config_file
+ # NOTE(pabelanger): Default the HOME variable to jobdir.work_root, as it is
+ # possible we don't bind mount the current zuul user's home directory.
+ env_copy['HOME'] = self.jobdir.work_root
with self.proc_lock:
if self.aborted:
diff --git a/zuul/lib/config.py b/zuul/lib/config.py
new file mode 100644
index 0000000..9cdf66e
--- /dev/null
+++ b/zuul/lib/config.py
@@ -0,0 +1,23 @@
+# Licensed under the Apache License, Version 2.0 (the "License"); you may
+# not use this file except in compliance with the License. You may obtain
+# a copy of the License at
+#
+# http://www.apache.org/licenses/LICENSE-2.0
+#
+# Unless required by applicable law or agreed to in writing, software
+# distributed under the License is distributed on an "AS IS" BASIS, WITHOUT
+# WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. See the
+# License for the specific language governing permissions and limitations
+# under the License.
+
+import os
+
+
+def get_default(config, section, option, default=None, expand_user=False):
+ if config.has_option(section, option):
+ value = config.get(section, option)
+ else:
+ value = default
+ if expand_user and value:
+ return os.path.expanduser(value)
+ return value
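
get_default collapses the repeated has_option/get/else pattern removed throughout this diff into a single call. A small usage sketch against an assumed zuul.conf path:

import configparser
from zuul.lib.config import get_default

config = configparser.ConfigParser()
config.read('/etc/zuul/zuul.conf')                    # illustrative path

port = get_default(config, 'gearman', 'port', 4730)   # 4730 when unset
state_dir = get_default(config, 'zuul', 'state_dir',
                        '/var/lib/zuul', expand_user=True)
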
diff --git a/zuul/lib/log_streamer.py b/zuul/lib/log_streamer.py
index 6695723..c76b057 100644
--- a/zuul/lib/log_streamer.py
+++ b/zuul/lib/log_streamer.py
@@ -21,14 +21,10 @@
import re
import select
import socket
+import socketserver
import threading
import time
-try:
- import SocketServer as ss # python 2.x
-except ImportError:
- import socketserver as ss # python 3
-
class Log(object):
@@ -39,7 +35,7 @@
self.size = self.stat.st_size
-class RequestHandler(ss.BaseRequestHandler):
+class RequestHandler(socketserver.BaseRequestHandler):
'''
Class to handle a single log streaming request.
@@ -165,7 +161,7 @@
return False
-class CustomForkingTCPServer(ss.ForkingTCPServer):
+class CustomForkingTCPServer(socketserver.ForkingTCPServer):
'''
Custom version that allows us to drop privileges after port binding.
'''
@@ -174,7 +170,7 @@
self.jobdir_root = kwargs.pop('jobdir_root')
# For some reason, setting custom attributes does not work if we
# call the base class __init__ first. Wha??
- ss.ForkingTCPServer.__init__(self, *args, **kwargs)
+ socketserver.ForkingTCPServer.__init__(self, *args, **kwargs)
def change_privs(self):
'''
@@ -190,7 +186,7 @@
def server_bind(self):
self.allow_reuse_address = True
- ss.ForkingTCPServer.server_bind(self)
+ socketserver.ForkingTCPServer.server_bind(self)
if self.user:
self.change_privs()
diff --git a/zuul/merger/client.py b/zuul/merger/client.py
index 4054df6..e92d9fd 100644
--- a/zuul/merger/client.py
+++ b/zuul/merger/client.py
@@ -20,6 +20,7 @@
import gear
import zuul.model
+from zuul.lib.config import get_default
def getJobData(job):
@@ -75,22 +76,10 @@
self.config = config
self.sched = sched
server = self.config.get('gearman', 'server')
- if self.config.has_option('gearman', 'port'):
- port = self.config.get('gearman', 'port')
- else:
- port = 4730
- if self.config.has_option('gearman', 'ssl_key'):
- ssl_key = self.config.get('gearman', 'ssl_key')
- else:
- ssl_key = None
- if self.config.has_option('gearman', 'ssl_cert'):
- ssl_cert = self.config.get('gearman', 'ssl_cert')
- else:
- ssl_cert = None
- if self.config.has_option('gearman', 'ssl_ca'):
- ssl_ca = self.config.get('gearman', 'ssl_ca')
- else:
- ssl_ca = None
+ port = get_default(self.config, 'gearman', 'port', 4730)
+ ssl_key = get_default(self.config, 'gearman', 'ssl_key')
+ ssl_cert = get_default(self.config, 'gearman', 'ssl_cert')
+ ssl_ca = get_default(self.config, 'gearman', 'ssl_ca')
self.log.debug("Connecting to gearman at %s:%s" % (server, port))
self.gearman = MergeGearmanClient(self)
self.gearman.addServer(server, port, ssl_key, ssl_cert, ssl_ca)
diff --git a/zuul/merger/server.py b/zuul/merger/server.py
index 7d7e771..cbc4cb8 100644
--- a/zuul/merger/server.py
+++ b/zuul/merger/server.py
@@ -19,6 +19,7 @@
import gear
+from zuul.lib.config import get_default
from zuul.merger import merger
@@ -29,20 +30,10 @@
self.config = config
self.zuul_url = config.get('merger', 'zuul_url')
- if self.config.has_option('merger', 'git_dir'):
- merge_root = self.config.get('merger', 'git_dir')
- else:
- merge_root = '/var/lib/zuul/merger-git'
-
- if self.config.has_option('merger', 'git_user_email'):
- merge_email = self.config.get('merger', 'git_user_email')
- else:
- merge_email = None
-
- if self.config.has_option('merger', 'git_user_name'):
- merge_name = self.config.get('merger', 'git_user_name')
- else:
- merge_name = None
+ merge_root = get_default(self.config, 'merger', 'git_dir',
+ '/var/lib/zuul/merger-git')
+ merge_email = get_default(self.config, 'merger', 'git_user_email')
+ merge_name = get_default(self.config, 'merger', 'git_user_name')
self.merger = merger.Merger(merge_root, connections, merge_email,
merge_name)
@@ -50,22 +41,10 @@
def start(self):
self._running = True
server = self.config.get('gearman', 'server')
- if self.config.has_option('gearman', 'port'):
- port = self.config.get('gearman', 'port')
- else:
- port = 4730
- if self.config.has_option('gearman', 'ssl_key'):
- ssl_key = self.config.get('gearman', 'ssl_key')
- else:
- ssl_key = None
- if self.config.has_option('gearman', 'ssl_cert'):
- ssl_cert = self.config.get('gearman', 'ssl_cert')
- else:
- ssl_cert = None
- if self.config.has_option('gearman', 'ssl_ca'):
- ssl_ca = self.config.get('gearman', 'ssl_ca')
- else:
- ssl_ca = None
+ port = get_default(self.config, 'gearman', 'port', 4730)
+ ssl_key = get_default(self.config, 'gearman', 'ssl_key')
+ ssl_cert = get_default(self.config, 'gearman', 'ssl_cert')
+ ssl_ca = get_default(self.config, 'gearman', 'ssl_ca')
self.worker = gear.TextWorker('Zuul Merger')
self.worker.addServer(server, port, ssl_key, ssl_cert, ssl_ca)
self.log.debug("Waiting for server")
diff --git a/zuul/model.py b/zuul/model.py
index a89c6d1..dc04e59 100644
--- a/zuul/model.py
+++ b/zuul/model.py
@@ -1105,11 +1105,13 @@
def __init__(self):
self.name = "Unknown"
self.hostname = None
+ self.log_port = None
def updateFromData(self, data):
"""Update worker information if contained in the WORK_DATA response."""
self.name = data.get('worker_name', self.name)
self.hostname = data.get('worker_hostname', self.hostname)
+ self.log_port = data.get('worker_log_port', self.log_port)
def __repr__(self):
return '<Worker %s>' % self.name
diff --git a/zuul/rpclistener.py b/zuul/rpclistener.py
index c1ee50e..be3b7d1 100644
--- a/zuul/rpclistener.py
+++ b/zuul/rpclistener.py
@@ -21,6 +21,7 @@
import gear
from zuul import model
+from zuul.lib.config import get_default
class RPCListener(object):
@@ -33,22 +34,10 @@
def start(self):
self._running = True
server = self.config.get('gearman', 'server')
- if self.config.has_option('gearman', 'port'):
- port = self.config.get('gearman', 'port')
- else:
- port = 4730
- if self.config.has_option('gearman', 'ssl_key'):
- ssl_key = self.config.get('gearman', 'ssl_key')
- else:
- ssl_key = None
- if self.config.has_option('gearman', 'ssl_cert'):
- ssl_cert = self.config.get('gearman', 'ssl_cert')
- else:
- ssl_cert = None
- if self.config.has_option('gearman', 'ssl_ca'):
- ssl_ca = self.config.get('gearman', 'ssl_ca')
- else:
- ssl_ca = None
+ port = get_default(self.config, 'gearman', 'port', 4730)
+ ssl_key = get_default(self.config, 'gearman', 'ssl_key')
+ ssl_cert = get_default(self.config, 'gearman', 'ssl_cert')
+ ssl_ca = get_default(self.config, 'gearman', 'ssl_ca')
self.worker = gear.TextWorker('Zuul RPC Listener')
self.worker.addServer(server, port, ssl_key, ssl_cert, ssl_ca)
self.log.debug("Waiting for server")
diff --git a/zuul/scheduler.py b/zuul/scheduler.py
index 2076163..c762309 100644
--- a/zuul/scheduler.py
+++ b/zuul/scheduler.py
@@ -30,6 +30,7 @@
from zuul import model
from zuul import exceptions
from zuul import version as zuul_version
+from zuul.lib.config import get_default
class ManagementEvent(object):
@@ -371,30 +372,21 @@
self.log.debug("Waiting for exit")
def _get_queue_pickle_file(self):
- if self.config.has_option('zuul', 'state_dir'):
- state_dir = os.path.expanduser(self.config.get('zuul',
- 'state_dir'))
- else:
- state_dir = '/var/lib/zuul'
+ state_dir = get_default(self.config, 'zuul', 'state_dir',
+ '/var/lib/zuul', expand_user=True)
return os.path.join(state_dir, 'queue.pickle')
def _get_time_database_dir(self):
- if self.config.has_option('zuul', 'state_dir'):
- state_dir = os.path.expanduser(self.config.get('zuul',
- 'state_dir'))
- else:
- state_dir = '/var/lib/zuul'
+ state_dir = get_default(self.config, 'zuul', 'state_dir',
+ '/var/lib/zuul', expand_user=True)
d = os.path.join(state_dir, 'times')
if not os.path.exists(d):
os.mkdir(d)
return d
def _get_project_key_dir(self):
- if self.config.has_option('zuul', 'state_dir'):
- state_dir = os.path.expanduser(self.config.get('zuul',
- 'state_dir'))
- else:
- state_dir = '/var/lib/zuul'
+ state_dir = get_default(self.config, 'zuul', 'state_dir',
+ '/var/lib/zuul', expand_user=True)
key_dir = os.path.join(state_dir, 'keys')
if not os.path.exists(key_dir):
os.mkdir(key_dir, 0o700)