Merge "Make test-logs.sh more convenient to use"
diff --git a/.zuul.yaml b/.zuul.yaml
index caef296..8b5ccb9 100644
--- a/.zuul.yaml
+++ b/.zuul.yaml
@@ -44,6 +44,8 @@
- zuul/cmd/migrate.py
- playbooks/zuul-migrate/.*
- zuul-stream-functional
+ - nodepool-zuul-functional:
+ voting: false
gate:
jobs:
- build-sphinx-docs:
diff --git a/bindep.txt b/bindep.txt
index 3dcc3e7..11ebdf5 100644
--- a/bindep.txt
+++ b/bindep.txt
@@ -13,7 +13,7 @@
openssl-devel [platform:rpm]
libffi-dev [platform:dpkg]
libffi-devel [platform:rpm]
-python-dev [platform:dpkg]
-python-devel [platform:rpm]
+python3-dev [platform:dpkg]
+python3-devel [platform:rpm]
bubblewrap [platform:rpm]
redhat-rpm-config [platform:rpm]
diff --git a/doc/source/user/config.rst b/doc/source/user/config.rst
index 0932c56..36cd68c 100644
--- a/doc/source/user/config.rst
+++ b/doc/source/user/config.rst
@@ -877,14 +877,14 @@
same name will override a previously defined variable, but new
variable names will be added to the set of defined variables.
- .. attr:: host_vars
+ .. attr:: host-vars
A dictionary of host variables to supply to Ansible. The keys
of this dictionary are node names as defined in a
:ref:`nodeset`, and the values are dictionaries of variables,
just as in :attr:`job.vars`.
- .. attr:: group_vars
+ .. attr:: group-vars
A dictionary of group variables to supply to Ansible. The keys
of this dictionary are node groups as defined in a
@@ -912,10 +912,10 @@
- api2
vars:
foo: "this variable is visible to all nodes"
- host_vars:
+ host-vars:
controller:
bar: "this variable is visible only on the controller node"
- group_vars:
+ group-vars:
api:
baz: "this variable is visible on api1 and api2"
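
The documentation change above tracks the loader: both attributes are now spelled with hyphens, matching the hyphenated style of the rest of the job schema. A minimal sketch (plain PyYAML, values taken from the documented example) of how the hyphenated keys come back from parsing:

import yaml

job_conf = yaml.safe_load("""
vars:
  foo: "this variable is visible to all nodes"
host-vars:
  controller:
    bar: "this variable is visible only on the controller node"
group-vars:
  api:
    baz: "this variable is visible on api1 and api2"
""")

# The loader looks these values up under the hyphenated names now.
host_variables = job_conf.get('host-vars', None)
group_variables = job_conf.get('group-vars', None)
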
diff --git a/requirements.txt b/requirements.txt
index 47c0f5e..115b096 100644
--- a/requirements.txt
+++ b/requirements.txt
@@ -4,7 +4,6 @@
# is in a release
git+https://github.com/sigmavirus24/github3.py.git@develop#egg=Github3.py
PyYAML>=3.1.0
-Paste
paramiko>=2.0.1
GitPython>=2.1.8
python-daemon>=2.0.4,<2.1.0
diff --git a/tests/base.py b/tests/base.py
index be8c17b..013a6e1 100755
--- a/tests/base.py
+++ b/tests/base.py
@@ -1857,7 +1857,7 @@
# from libraries that zuul depends on such as gear.
log_defaults_from_env = os.environ.get(
'OS_LOG_DEFAULTS',
- 'git.cmd=INFO,kazoo.client=WARNING,gear=INFO,paste=INFO')
+ 'git.cmd=INFO,kazoo.client=WARNING,gear=INFO')
if log_defaults_from_env:
for default in log_defaults_from_env.split(','):
diff --git a/tests/fixtures/config/ansible/git/common-config/zuul.yaml b/tests/fixtures/config/ansible/git/common-config/zuul.yaml
index 13a19da..abd77ec 100644
--- a/tests/fixtures/config/ansible/git/common-config/zuul.yaml
+++ b/tests/fixtures/config/ansible/git/common-config/zuul.yaml
@@ -138,10 +138,10 @@
- host3
vars:
allvar: all
- host_vars:
+ host-vars:
host1:
hostvar: host
- group_vars:
+ group-vars:
group1:
groupvar: group
diff --git a/tests/nodepool/test_nodepool_integration.py b/tests/nodepool/test_nodepool_integration.py
index bd22da3..2f36154 100644
--- a/tests/nodepool/test_nodepool_integration.py
+++ b/tests/nodepool/test_nodepool_integration.py
@@ -55,7 +55,7 @@
# Test a simple node request
nodeset = model.NodeSet()
- nodeset.addNode(model.Node('controller', 'fake-label'))
+ nodeset.addNode(model.Node(['controller'], 'fake-label'))
job = model.Job('testjob')
job.nodeset = nodeset
request = self.nodepool.requestNodes(None, job)
@@ -64,7 +64,7 @@
self.assertEqual(request.state, model.STATE_FULFILLED)
# Accept the nodes
- self.nodepool.acceptNodes(request)
+ self.nodepool.acceptNodes(request, request.id)
nodeset = request.nodeset
for node in nodeset.getNodes():
@@ -85,7 +85,7 @@
def test_invalid_node_request(self):
# Test requests with an invalid node type fail
nodeset = model.NodeSet()
- nodeset.addNode(model.Node('controller', 'invalid-label'))
+ nodeset.addNode(model.Node(['controller'], 'invalid-label'))
job = model.Job('testjob')
job.nodeset = nodeset
request = self.nodepool.requestNodes(None, job)
@@ -98,8 +98,8 @@
# Test that node requests are re-submitted after disconnect
nodeset = model.NodeSet()
- nodeset.addNode(model.Node('controller', 'ubuntu-xenial'))
- nodeset.addNode(model.Node('compute', 'ubuntu-xenial'))
+ nodeset.addNode(model.Node(['controller'], 'ubuntu-xenial'))
+ nodeset.addNode(model.Node(['compute'], 'ubuntu-xenial'))
job = model.Job('testjob')
job.nodeset = nodeset
self.fake_nodepool.paused = True
@@ -116,8 +116,8 @@
# Test that node requests can be canceled
nodeset = model.NodeSet()
- nodeset.addNode(model.Node('controller', 'ubuntu-xenial'))
- nodeset.addNode(model.Node('compute', 'ubuntu-xenial'))
+ nodeset.addNode(model.Node(['controller'], 'ubuntu-xenial'))
+ nodeset.addNode(model.Node(['compute'], 'ubuntu-xenial'))
job = model.Job('testjob')
job.nodeset = nodeset
self.fake_nodepool.paused = True
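
The test updates above follow two API changes: model.Node is constructed with a list of names rather than a single name, and acceptNodes() takes the request ID as a second argument. A minimal sketch of building a request's nodeset under the new calling convention (zuul.model only; the nodepool calls are shown as comments because they need a live connection):

from zuul import model

nodeset = model.NodeSet()
# Each Node now takes a list of names, so one node entry can answer to
# several inventory names.
nodeset.addNode(model.Node(['controller'], 'ubuntu-xenial'))
nodeset.addNode(model.Node(['compute'], 'ubuntu-xenial'))

job = model.Job('testjob')
job.nodeset = nodeset

# With a running Nodepool the request would then be submitted and, once
# fulfilled, accepted together with its ID:
#     request = nodepool.requestNodes(None, job)
#     nodepool.acceptNodes(request, request.id)
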
diff --git a/tests/unit/test_model.py b/tests/unit/test_model.py
index dcef666..6ec5232 100644
--- a/tests/unit/test_model.py
+++ b/tests/unit/test_model.py
@@ -23,6 +23,7 @@
from zuul import configloader
from zuul.lib import encryption
from zuul.lib import yamlutil as yaml
+import zuul.lib.connections
from tests.base import BaseTestCase, FIXTURE_DIR
@@ -36,6 +37,8 @@
class TestJob(BaseTestCase):
def setUp(self):
super(TestJob, self).setUp()
+ self.connections = zuul.lib.connections.ConnectionRegistry()
+ self.addCleanup(self.connections.stop)
self.connection = Dummy(connection_name='dummy_connection')
self.source = Dummy(canonical_hostname='git.example.com',
connection=self.connection)
@@ -48,7 +51,8 @@
self.layout.addPipeline(self.pipeline)
self.queue = model.ChangeQueue(self.pipeline)
self.pcontext = configloader.ParseContext(
- None, None, self.tenant, self.layout)
+ self.connections, None, self.tenant, self.layout)
+ self.pcontext.setPipelines()
private_key_file = os.path.join(FIXTURE_DIR, 'private.pem')
with open(private_key_file, "rb") as f:
diff --git a/tests/unit/test_scheduler.py b/tests/unit/test_scheduler.py
index c833fa2..b640e33 100755
--- a/tests/unit/test_scheduler.py
+++ b/tests/unit/test_scheduler.py
@@ -3577,6 +3577,56 @@
self.assertEqual(len(self.history), 0)
self.assertEqual(len(self.builds), 0)
+ def test_client_enqueue_ref_negative(self):
+ "Test that the RPC client returns errors"
+ client = zuul.rpcclient.RPCClient('127.0.0.1',
+ self.gearman_server.port)
+ self.addCleanup(client.shutdown)
+ with testtools.ExpectedException(zuul.rpcclient.RPCFailure,
+ "New rev must be 40 character sha1"):
+ r = client.enqueue_ref(
+ tenant='tenant-one',
+ pipeline='post',
+ project='org/project',
+ trigger='gerrit',
+ ref='master',
+ oldrev='90f173846e3af9154517b88543ffbd1691f31366',
+ newrev='10054041')
+ self.assertEqual(r, False)
+ with testtools.ExpectedException(zuul.rpcclient.RPCFailure,
+ "Old rev must be 40 character sha1"):
+ r = client.enqueue_ref(
+ tenant='tenant-one',
+ pipeline='post',
+ project='org/project',
+ trigger='gerrit',
+ ref='master',
+ oldrev='10054041',
+ newrev='90f173846e3af9154517b88543ffbd1691f31366')
+ self.assertEqual(r, False)
+ with testtools.ExpectedException(zuul.rpcclient.RPCFailure,
+ "New rev must be base16 hash"):
+ r = client.enqueue_ref(
+ tenant='tenant-one',
+ pipeline='post',
+ project='org/project',
+ trigger='gerrit',
+ ref='master',
+ oldrev='90f173846e3af9154517b88543ffbd1691f31366',
+ newrev='notbase16')
+ self.assertEqual(r, False)
+ with testtools.ExpectedException(zuul.rpcclient.RPCFailure,
+ "Old rev must be base16 hash"):
+ r = client.enqueue_ref(
+ tenant='tenant-one',
+ pipeline='post',
+ project='org/project',
+ trigger='gerrit',
+ ref='master',
+ oldrev='notbase16',
+ newrev='90f173846e3af9154517b88543ffbd1691f31366')
+ self.assertEqual(r, False)
+
def test_client_promote(self):
"Test that the RPC client can promote a change"
self.executor_server.hold_jobs_in_build = True
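
For contrast with the negative cases in the new test above, a well-formed call passes two full 40-character hexadecimal revs; the remaining arguments are unchanged. A sketch using the same client the test constructs (the port is a placeholder; the test uses self.gearman_server.port):

import zuul.rpcclient

client = zuul.rpcclient.RPCClient('127.0.0.1', 4730)  # placeholder port
try:
    client.enqueue_ref(
        tenant='tenant-one',
        pipeline='post',
        project='org/project',
        trigger='gerrit',
        ref='master',
        oldrev='90f173846e3af9154517b88543ffbd1691f31366',
        newrev='90f173846e3af9154517b88543ffbd1691f31366')
finally:
    client.shutdown()
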
diff --git a/tests/unit/test_v3.py b/tests/unit/test_v3.py
index e36c8f6..f019ead 100755
--- a/tests/unit/test_v3.py
+++ b/tests/unit/test_v3.py
@@ -2033,6 +2033,8 @@
tenant_config_file = 'config/ansible/main.yaml'
def test_playbook(self):
+ # This test runs a bit long and needs extra time.
+ self.wait_timeout = 120
# Keep the jobdir around so we can inspect contents if an
# assert fails.
self.executor_server.keep_jobdir = True
diff --git a/tools/nodepool-integration-setup.sh b/tools/nodepool-integration-setup.sh
deleted file mode 100755
index 58c39cf..0000000
--- a/tools/nodepool-integration-setup.sh
+++ /dev/null
@@ -1,12 +0,0 @@
-#!/bin/bash -xe
-
-/usr/zuul-env/bin/zuul-cloner --workspace /tmp --cache-dir /opt/git \
- git://git.openstack.org openstack-infra/nodepool
-
-ln -s /tmp/nodepool/log $HOME/logs
-
-cd /tmp/openstack-infra/nodepool
-/usr/local/jenkins/slave_scripts/install-distro-packages.sh
-sudo pip install .
-
-bash -xe ./tools/zuul-nodepool-integration/start.sh
diff --git a/zuul/configloader.py b/zuul/configloader.py
index df6336d..d3f3236 100644
--- a/zuul/configloader.py
+++ b/zuul/configloader.py
@@ -388,6 +388,8 @@
def __init__(self, pcontext):
self.log = logging.getLogger("zuul.NodeSetParser")
self.pcontext = pcontext
+ self.schema = self.getSchema(False)
+ self.anon_schema = self.getSchema(True)
def getSchema(self, anonymous=False):
node = {vs.Required('name'): to_list(str),
@@ -409,7 +411,10 @@
return vs.Schema(nodeset)
def fromYaml(self, conf, anonymous=False):
- self.getSchema(anonymous)(conf)
+ if anonymous:
+ self.anon_schema(conf)
+ else:
+ self.schema(conf)
ns = model.NodeSet(conf.get('name'), conf.get('_source_context'))
node_names = set()
group_names = set()
@@ -507,8 +512,8 @@
'roles': to_list(role),
'required-projects': to_list(vs.Any(job_project, str)),
'vars': dict,
- 'host_vars': {str: dict},
- 'group_vars': {str: dict},
+ 'host-vars': {str: dict},
+ 'group-vars': {str: dict},
'dependencies': to_list(str),
'allowed-projects': to_list(str),
'override-branch': str,
@@ -743,14 +748,14 @@
raise Exception("Variables named 'zuul' or 'nodepool' "
"are not allowed.")
job.variables = variables
- host_variables = conf.get('host_vars', None)
+ host_variables = conf.get('host-vars', None)
if host_variables:
for host, hvars in host_variables.items():
if 'zuul' in hvars or 'nodepool' in hvars:
raise Exception("Variables named 'zuul' or 'nodepool' "
"are not allowed.")
job.host_variables = host_variables
- group_variables = conf.get('group_vars', None)
+ group_variables = conf.get('group-vars', None)
if group_variables:
for group, gvars in group_variables.items():
if 'zuul' in group_variables or 'nodepool' in gvars:
@@ -813,6 +818,7 @@
def __init__(self, pcontext):
self.log = logging.getLogger("zuul.ProjectTemplateParser")
self.pcontext = pcontext
+ self.schema = self.getSchema()
def getSchema(self):
project_template = {
@@ -840,7 +846,7 @@
def fromYaml(self, conf, validate=True):
if validate:
with configuration_exceptions('project-template', conf):
- self.getSchema()(conf)
+ self.schema(conf)
source_context = conf['_source_context']
project_template = model.ProjectConfig(conf['name'], source_context)
start_mark = conf['_start_mark']
@@ -884,6 +890,7 @@
def __init__(self, pcontext):
self.log = logging.getLogger("zuul.ProjectParser")
self.pcontext = pcontext
+ self.schema = self.getSchema()
def getSchema(self):
project = {
@@ -912,7 +919,7 @@
def fromYaml(self, conf_list):
for conf in conf_list:
with configuration_exceptions('project', conf):
- self.getSchema()(conf)
+ self.schema(conf)
with configuration_exceptions('project', conf_list[0]):
project_name = conf_list[0]['name']
@@ -1001,6 +1008,7 @@
def __init__(self, pcontext):
self.log = logging.getLogger("zuul.PipelineParser")
self.pcontext = pcontext
+ self.schema = self.getSchema()
def getDriverSchema(self, dtype):
methods = {
@@ -1063,7 +1071,7 @@
def fromYaml(self, conf):
with configuration_exceptions('pipeline', conf):
- self.getSchema()(conf)
+ self.schema(conf)
pipeline = model.Pipeline(conf['name'], self.pcontext.layout)
pipeline.description = conf.get('description')
@@ -1185,6 +1193,12 @@
self.secret_parser = SecretParser(self)
self.job_parser = JobParser(self)
self.semaphore_parser = SemaphoreParser(self)
+ self.project_template_parser = None
+ self.project_parser = None
+
+ def setPipelines(self):
+ # Call after pipelines are fixed in the layout to construct
+ # the project parser, which relies on them.
self.project_template_parser = ProjectTemplateParser(self)
self.project_parser = ProjectParser(self)
@@ -1616,6 +1630,7 @@
continue
layout.addPipeline(pcontext.pipeline_parser.fromYaml(
config_pipeline))
+ pcontext.setPipelines()
for config_nodeset in data.nodesets:
classes = self._getLoadClasses(tenant, config_nodeset)
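
Two patterns recur in the loader changes above: each parser now builds its voluptuous schema once in __init__ and reuses it in fromYaml(), and the project and project-template parsers are only constructed once pipelines exist, via setPipelines(), because they rely on the pipelines in the layout. A stripped-down sketch of the schema-caching pattern (not the real NodeSetParser schema, just its shape):

import voluptuous as vs

class NodeSetParserSketch(object):
    def __init__(self):
        # Build both schema variants up front; fromYaml() runs for every
        # nodeset on every configuration load, so rebuilding the schema
        # object each time is wasted work.
        self.schema = self.getSchema(False)
        self.anon_schema = self.getSchema(True)

    def getSchema(self, anonymous=False):
        nodeset = {'nodes': list, 'groups': list}
        if not anonymous:
            # Top-level nodesets must carry a name; anonymous ones,
            # defined inline in a job, must not.
            nodeset[vs.Required('name')] = str
        return vs.Schema(nodeset)

    def fromYaml(self, conf, anonymous=False):
        if anonymous:
            self.anon_schema(conf)
        else:
            self.schema(conf)
        return conf

parser = NodeSetParserSketch()
parser.fromYaml({'name': 'two-node', 'nodes': [], 'groups': []})
parser.fromYaml({'nodes': []}, anonymous=True)
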
diff --git a/zuul/rpclistener.py b/zuul/rpclistener.py
index f3f55f6..7c777fd 100644
--- a/zuul/rpclistener.py
+++ b/zuul/rpclistener.py
@@ -235,6 +235,22 @@
event.ref = args['ref']
event.oldrev = args['oldrev']
event.newrev = args['newrev']
+ try:
+ int(event.oldrev, 16)
+ if len(event.oldrev) != 40:
+ errors += 'Old rev must be 40 character sha1: ' \
+ '%s\n' % event.oldrev
+ except Exception:
+ errors += 'Old rev must be base16 hash: ' \
+ '%s\n' % event.oldrev
+ try:
+ int(event.newrev, 16)
+ if len(event.newrev) != 40:
+ errors += 'New rev must be 40 character sha1: ' \
+ '%s\n' % event.newrev
+ except Exception:
+ errors += 'New rev must be base16 hash: ' \
+ '%s\n' % event.newrev
if errors:
job.sendWorkException(errors.encode('utf8'))
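
The new checks reject refs whose revs are not plausible git object IDs: a rev must parse as base16 and be exactly 40 characters long, and each failure is appended to errors rather than raised. The same logic as a standalone helper (a hypothetical function, for illustration only; the listener keeps the checks inline):

def validate_rev(rev, label):
    """Return an error string if rev is not a 40-character base16 sha1."""
    try:
        int(rev, 16)
    except (TypeError, ValueError):
        # Mirrors the except branch above: a non-hex value only reports
        # the base16 error, never the length error.
        return '%s rev must be base16 hash: %s\n' % (label, rev)
    if len(rev) != 40:
        return '%s rev must be 40 character sha1: %s\n' % (label, rev)
    return ''

errors = ''
errors += validate_rev('90f173846e3af9154517b88543ffbd1691f31366', 'Old')  # ok
errors += validate_rev('10054041', 'New')    # hex but too short
errors += validate_rev('notbase16', 'Old')   # not hex at all
assert 'New rev must be 40 character sha1' in errors
assert 'Old rev must be base16 hash' in errors
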
diff --git a/zuul/scheduler.py b/zuul/scheduler.py
index 606cd04..c58bfc7 100644
--- a/zuul/scheduler.py
+++ b/zuul/scheduler.py
@@ -1153,10 +1153,15 @@
tenant = self.abide.tenants.get(tenant_name)
if not tenant:
if tenant_name not in self.unparsed_abide.known_tenants:
- return json.dumps({"message": "Unknown tenant"})
+ return json.dumps({
+ "message": "Unknown tenant",
+ "code": 404
+ })
self.log.warning("Tenant %s isn't loaded" % tenant_name)
- return json.dumps(
- {"message": "Tenant %s isn't ready" % tenant_name})
+ return json.dumps({
+ "message": "Tenant %s isn't ready" % tenant_name,
+ "code": 204
+ })
for pipeline in tenant.layout.pipelines.values():
pipelines.append(pipeline.formatStatusJSON(websocket_url))
return json.dumps(data)
diff --git a/zuul/web/__init__.py b/zuul/web/__init__.py
index 7a1af30..31eac7d 100755
--- a/zuul/web/__init__.py
+++ b/zuul/web/__init__.py
@@ -172,8 +172,8 @@
self.cache[tenant] = json.loads(job.data[0])
self.cache_time[tenant] = time.time()
payload = self.cache[tenant]
- if payload.get('message') == 'Unknown tenant':
- return web.HTTPNotFound()
+ if payload.get('code') == 404:
+ return web.HTTPNotFound(reason=payload['message'])
if result_filter:
payload = result_filter.filterPayload(payload)
resp = web.json_response(payload)
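
zuul-web now keys off the numeric code embedded in the scheduler's status payload instead of matching the message text, and forwards the message as the HTTP reason. A small sketch of that mapping as a standalone function (hypothetical helper, outside the real handler); note the "isn't ready" payload carries code 204 but is not special-cased above, so it still falls through to a normal JSON response:

import json

def status_to_http(raw):
    # Decide the HTTP outcome from the machine-readable code rather than
    # from the human-readable message string.
    payload = json.loads(raw)
    if payload.get('code') == 404:
        return 404, payload['message']
    return 200, payload

print(status_to_http(json.dumps({'message': 'Unknown tenant', 'code': 404})))
print(status_to_http(json.dumps({'pipelines': []})))
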