Merge "Remove link to modindex"
diff --git a/bindep.txt b/bindep.txt
index a2cc02e..32c750a 100644
--- a/bindep.txt
+++ b/bindep.txt
@@ -1 +1,6 @@
+# This is a cross-platform list tracking distribution packages needed by tests;
+# see http://docs.openstack.org/infra/bindep/ for additional information.
+
+mysql-client [test]
+mysql-server [test]
libjpeg-dev [test]
diff --git a/doc/source/connections.rst b/doc/source/connections.rst
index f0820a6..298100a 100644
--- a/doc/source/connections.rst
+++ b/doc/source/connections.rst
@@ -38,6 +38,9 @@
Path to SSH key to use when logging into above server.
``sshkey=/home/zuul/.ssh/id_rsa``
+**keepalive**
+ Optional: Keepalive timeout, 0 means no keepalive.
+ ``keepalive=60``
Gerrit Configuration
~~~~~~~~~~~~~~~~~~~~
@@ -77,3 +80,15 @@
Who the report should be emailed to by default.
This can be overridden by individual pipelines.
``default_to=you@example.com``
+
+SQL
+----
+
 Only one connection per database is permitted.
+
+ **driver=sql**
+
+ **dburi**
+ Database connection information in the form of a URI understood by
+    sqlalchemy, e.g. http://docs.sqlalchemy.org/en/rel_1_0/core/engines.html#database-urls
+ ``dburi=mysql://user:pass@localhost/db``
diff --git a/doc/source/reporters.rst b/doc/source/reporters.rst
index 97bed4a..b01c8d1 100644
--- a/doc/source/reporters.rst
+++ b/doc/source/reporters.rst
@@ -34,7 +34,7 @@
A simple email reporter is also available.
A :ref:`connection` that uses the smtp driver must be supplied to the
-trigger.
+reporter.
SMTP Configuration
~~~~~~~~~~~~~~~~~~
@@ -60,3 +60,42 @@
to: you@example.com
from: alternative@example.com
subject: Change {change} failed
+
+SQL
+---
+
+This reporter is used to store results in a database.
+
+A :ref:`connection` that uses the sql driver must be supplied to the
+reporter.
+
+SQL Configuration
+~~~~~~~~~~~~~~~~~
+
+zuul.conf contains the database connection and credentials. To store different
+reports in different databases you'll need to create a new connection per
+database.
+
+The sql reporter is used to store the results from individual builds rather
+than the change. As such the sql reporter does nothing on "start" or
+"merge-failure".
+
+**score**
+    A score to store for the result of the build. e.g. -1 might indicate a failed
+ build similar to the vote posted back via the gerrit reporter.
+
+For example ::
+
+ pipelines:
+ - name: post-merge
+ manager: IndependentPipelineManager
+ source: my_gerrit
+ trigger:
+ my_gerrit:
+ - event: change-merged
+ success:
+ mydb_conn:
+ score: 1
+ failure:
+ mydb_conn:
+ score: -1
diff --git a/etc/status/public_html/jquery.zuul.js b/etc/status/public_html/jquery.zuul.js
index 9df44ce..d973948 100644
--- a/etc/status/public_html/jquery.zuul.js
+++ b/etc/status/public_html/jquery.zuul.js
@@ -148,11 +148,9 @@
case 'skipped':
$status.addClass('label-info');
break;
- case 'in progress':
- case 'queued':
- case 'lost':
+ // 'in progress' 'queued' 'lost' 'aborted' ...
+ default:
$status.addClass('label-default');
- break;
}
$status.text(result);
return $status;
diff --git a/etc/zuul.conf-sample b/etc/zuul.conf-sample
index d7b8eae..9998a70 100644
--- a/etc/zuul.conf-sample
+++ b/etc/zuul.conf-sample
@@ -36,6 +36,7 @@
;baseurl=https://review.example.com/r
user=jenkins
sshkey=/home/jenkins/.ssh/id_rsa
+;keepalive=60
[connection smtp]
driver=smtp
@@ -43,3 +44,7 @@
port=25
default_from=zuul@example.com
default_to=you@example.com
+
+[connection mydatabase]
+driver=sql
+dburi=mysql+pymysql://user@localhost/zuul
diff --git a/requirements.txt b/requirements.txt
index 77ac0a5..963c899 100644
--- a/requirements.txt
+++ b/requirements.txt
@@ -4,14 +4,16 @@
Paste
WebOb>=1.2.3
paramiko>=1.8.0,<2.0.0
-GitPython>=0.3.3
+GitPython>=0.3.3,<2.1.2
ordereddict
python-daemon>=2.0.4,<2.1.0
extras
statsd>=1.0.0,<3.0
-voluptuous>=0.7
+voluptuous>=0.10.2
gear>=0.5.7,<1.0.0
apscheduler>=3.0
PrettyTable>=0.6,<0.8
babel>=1.0
six>=1.6.0
+sqlalchemy
+alembic
diff --git a/setup.cfg b/setup.cfg
index 7ddeb84..4967cd0 100644
--- a/setup.cfg
+++ b/setup.cfg
@@ -31,3 +31,7 @@
source-dir = doc/source
build-dir = doc/build
all_files = 1
+
+[extras]
+mysql_reporter=
+ PyMySQL
diff --git a/test-requirements.txt b/test-requirements.txt
index aed9998..e43b7a1 100644
--- a/test-requirements.txt
+++ b/test-requirements.txt
@@ -1,4 +1,4 @@
-hacking>=0.9.2,<0.10
+hacking>=0.12.0,!=0.13.0,<0.14 # Apache-2.0
coverage>=3.6
sphinx>=1.1.2,!=1.2.0,!=1.3b1,<1.3
@@ -11,3 +11,4 @@
testtools>=0.9.32
sphinxcontrib-programoutput
mock
+PyMySQL
diff --git a/tests/base.py b/tests/base.py
index a14b4a9..9dc412b 100755
--- a/tests/base.py
+++ b/tests/base.py
@@ -34,16 +34,20 @@
import swiftclient
import threading
import time
+import uuid
+
import git
import gear
import fixtures
+import pymysql
import statsd
import testtools
from git import GitCommandError
import zuul.connection.gerrit
import zuul.connection.smtp
+import zuul.connection.sql
import zuul.scheduler
import zuul.webapp
import zuul.rpclistener
@@ -262,6 +266,25 @@
"comment": "This is a comment"}
return event
+ def getRefUpdatedEvent(self):
+ path = os.path.join(self.upstream_root, self.project)
+ repo = git.Repo(path)
+ oldrev = repo.heads[self.branch].commit.hexsha
+
+ event = {
+ "type": "ref-updated",
+ "submitter": {
+ "name": "User Name",
+ },
+ "refUpdate": {
+ "oldRev": oldrev,
+ "newRev": self.patchsets[-1]['revision'],
+ "refName": self.branch,
+ "project": self.project,
+ }
+ }
+ return event
+
def addApproval(self, category, value, username='reviewer_john',
granted_on=None, message=''):
if not granted_on:
@@ -836,6 +859,43 @@
return endpoint, ''
+class MySQLSchemaFixture(fixtures.Fixture):
+ def setUp(self):
+ super(MySQLSchemaFixture, self).setUp()
+
+ random_bits = ''.join(random.choice(string.ascii_lowercase +
+ string.ascii_uppercase)
+ for x in range(8))
+ self.name = '%s_%s' % (random_bits, os.getpid())
+ self.passwd = uuid.uuid4().hex
+ db = pymysql.connect(host="localhost",
+ user="openstack_citest",
+ passwd="openstack_citest",
+ db="openstack_citest")
+ cur = db.cursor()
+ cur.execute("create database %s" % self.name)
+ cur.execute(
+ "grant all on %s.* to '%s'@'localhost' identified by '%s'" %
+ (self.name, self.name, self.passwd))
+ cur.execute("flush privileges")
+
+ self.dburi = 'mysql+pymysql://%s:%s@localhost/%s' % (self.name,
+ self.passwd,
+ self.name)
+ self.addDetail('dburi', testtools.content.text_content(self.dburi))
+ self.addCleanup(self.cleanup)
+
+ def cleanup(self):
+ db = pymysql.connect(host="localhost",
+ user="openstack_citest",
+ passwd="openstack_citest",
+ db="openstack_citest")
+ cur = db.cursor()
+ cur.execute("drop database %s" % self.name)
+ cur.execute("drop user '%s'@'localhost'" % self.name)
+ cur.execute("flush privileges")
+
+
class BaseTestCase(testtools.TestCase):
log = logging.getLogger("zuul.test")
@@ -1020,6 +1080,8 @@
self.addCleanup(self.shutdown)
def configure_connections(self):
+ # TODO(jhesketh): This should come from lib.connections for better
+ # coverage
# Register connections from the config
self.smtp_messages = []
@@ -1068,6 +1130,9 @@
elif con_driver == 'smtp':
self.connections[con_name] = \
zuul.connection.smtp.SMTPConnection(con_name, con_config)
+ elif con_driver == 'sql':
+ self.connections[con_name] = \
+ zuul.connection.sql.SQLConnection(con_name, con_config)
else:
raise Exception("Unknown driver, %s, for connection %s"
% (con_config['driver'], con_name))
@@ -1410,3 +1475,20 @@
pprint.pprint(self.statsd.stats)
raise Exception("Key %s not found in reported stats" % key)
+
+
+class ZuulDBTestCase(ZuulTestCase):
+ def setup_config(self, config_file='zuul-connections-same-gerrit.conf'):
+ super(ZuulDBTestCase, self).setup_config(config_file)
+ for section_name in self.config.sections():
+ con_match = re.match(r'^connection ([\'\"]?)(.*)(\1)$',
+ section_name, re.I)
+ if not con_match:
+ continue
+
+ if self.config.get(section_name, 'driver') == 'sql':
+ f = MySQLSchemaFixture()
+ self.useFixture(f)
+ if (self.config.get(section_name, 'dburi') ==
+ '$MYSQL_FIXTURE_DBURI$'):
+ self.config.set(section_name, 'dburi', f.dburi)
diff --git a/tests/fixtures/layout-cloner.yaml b/tests/fixtures/layout-cloner.yaml
index e840ed9..e8b5dde 100644
--- a/tests/fixtures/layout-cloner.yaml
+++ b/tests/fixtures/layout-cloner.yaml
@@ -1,4 +1,16 @@
pipelines:
+ - name: check
+ manager: IndependentPipelineManager
+ trigger:
+ gerrit:
+ - event: patchset-created
+ success:
+ gerrit:
+ verified: 1
+ failure:
+ gerrit:
+ verified: -1
+
- name: gate
manager: DependentPipelineManager
failure-message: Build failed. For information on how to proceed, see http://wiki.example.org/Test_Failures
@@ -18,28 +30,54 @@
gerrit:
verified: -2
+ - name: post
+ manager: IndependentPipelineManager
+ trigger:
+ gerrit:
+ - event: ref-updated
+ ref: ^(?!refs/).*$
+
projects:
+ - name: org/project
+ check:
+ - integration
+ gate:
+ - integration
- name: org/project1
+ check:
+ - integration
gate:
- - integration
+ - integration
+ post:
+ - postjob
- name: org/project2
+ check:
+ - integration
gate:
- - integration
+ - integration
- name: org/project3
+ check:
+ - integration
gate:
- - integration
+ - integration
- name: org/project4
+ check:
+ - integration
gate:
- - integration
+ - integration
- name: org/project5
+ check:
+ - integration
gate:
- - integration
+ - integration
- name: org/project6
+ check:
+ - integration
gate:
- - integration
+ - integration
diff --git a/tests/fixtures/layout-mutex-reconfiguration.yaml b/tests/fixtures/layout-mutex-reconfiguration.yaml
new file mode 100644
index 0000000..76cf1e9
--- /dev/null
+++ b/tests/fixtures/layout-mutex-reconfiguration.yaml
@@ -0,0 +1,23 @@
+pipelines:
+ - name: check
+ manager: IndependentPipelineManager
+ trigger:
+ gerrit:
+ - event: patchset-created
+ success:
+ gerrit:
+ verified: 1
+ failure:
+ gerrit:
+ verified: -1
+
+jobs:
+ - name: mutex-one
+ mutex: test-mutex
+ - name: mutex-two
+ mutex: test-mutex
+
+projects:
+ - name: org/project
+ check:
+ - project-test1
diff --git a/tests/fixtures/layout-sql-reporter.yaml b/tests/fixtures/layout-sql-reporter.yaml
new file mode 100644
index 0000000..c79a432
--- /dev/null
+++ b/tests/fixtures/layout-sql-reporter.yaml
@@ -0,0 +1,27 @@
+pipelines:
+ - name: check
+ manager: IndependentPipelineManager
+ source:
+ review_gerrit
+ trigger:
+ review_gerrit:
+ - event: patchset-created
+ success:
+ review_gerrit:
+ verified: 1
+ resultsdb:
+ score: 1
+ failure:
+ review_gerrit:
+ verified: -1
+ resultsdb:
+ score: -1
+ resultsdb_failures:
+ score: -1
+
+projects:
+ - name: org/project
+ check:
+ - project-merge:
+ - project-test1
+ - project-test2
diff --git a/tests/fixtures/zuul-connections-bad-sql.conf b/tests/fixtures/zuul-connections-bad-sql.conf
new file mode 100644
index 0000000..150643d
--- /dev/null
+++ b/tests/fixtures/zuul-connections-bad-sql.conf
@@ -0,0 +1,50 @@
+[gearman]
+server=127.0.0.1
+
+[zuul]
+layout_config=layout-connections-multiple-voters.yaml
+url_pattern=http://logs.example.com/{change.number}/{change.patchset}/{pipeline.name}/{job.name}/{build.number}
+job_name_in_report=true
+
+[merger]
+git_dir=/tmp/zuul-test/git
+git_user_email=zuul@example.com
+git_user_name=zuul
+zuul_url=http://zuul.example.com/p
+
+[swift]
+authurl=https://identity.api.example.org/v2.0/
+user=username
+key=password
+tenant_name=" "
+
+default_container=logs
+region_name=EXP
+logserver_prefix=http://logs.example.org/server.app/
+
+[connection review_gerrit]
+driver=gerrit
+server=review.example.com
+user=jenkins
+sshkey=none
+
+[connection alt_voting_gerrit]
+driver=gerrit
+server=alt_review.example.com
+user=civoter
+sshkey=none
+
+[connection outgoing_smtp]
+driver=smtp
+server=localhost
+port=25
+default_from=zuul@example.com
+default_to=you@example.com
+
+[connection resultsdb]
+driver=sql
+dburi=mysql+pymysql://bad:creds@host/db
+
+[connection resultsdb_failures]
+driver=sql
+dburi=mysql+pymysql://bad:creds@host/db
diff --git a/tests/fixtures/zuul-connections-same-gerrit.conf b/tests/fixtures/zuul-connections-same-gerrit.conf
index af31c8a..2609d30 100644
--- a/tests/fixtures/zuul-connections-same-gerrit.conf
+++ b/tests/fixtures/zuul-connections-same-gerrit.conf
@@ -26,13 +26,13 @@
driver=gerrit
server=review.example.com
user=jenkins
-sshkey=none
+sshkey=fake_id_rsa1
[connection alt_voting_gerrit]
driver=gerrit
server=review.example.com
user=civoter
-sshkey=none
+sshkey=fake_id_rsa2
[connection outgoing_smtp]
driver=smtp
@@ -40,3 +40,11 @@
port=25
default_from=zuul@example.com
default_to=you@example.com
+
+[connection resultsdb]
+driver=sql
+dburi=$MYSQL_FIXTURE_DBURI$
+
+[connection resultsdb_failures]
+driver=sql
+dburi=$MYSQL_FIXTURE_DBURI$
diff --git a/tests/fixtures/zuul.conf b/tests/fixtures/zuul.conf
index b250c6d..0956cc4 100644
--- a/tests/fixtures/zuul.conf
+++ b/tests/fixtures/zuul.conf
@@ -26,7 +26,7 @@
driver=gerrit
server=review.example.com
user=jenkins
-sshkey=none
+sshkey=fake_id_rsa_path
[connection smtp]
driver=smtp
diff --git a/tests/test_cloner.py b/tests/test_cloner.py
index e3576bd..896fcba 100644
--- a/tests/test_cloner.py
+++ b/tests/test_cloner.py
@@ -91,6 +91,7 @@
git_base_url=self.upstream_root,
projects=projects,
workspace=self.workspace_root,
+ zuul_project=build.parameters.get('ZUUL_PROJECT', None),
zuul_branch=build.parameters['ZUUL_BRANCH'],
zuul_ref=build.parameters['ZUUL_REF'],
zuul_url=self.git_root,
@@ -107,11 +108,34 @@
'be correct' % (project, number))
work = self.getWorkspaceRepos(projects)
- upstream_repo_path = os.path.join(self.upstream_root, 'org/project1')
- self.assertEquals(
+ # project1 is the zuul_project so the origin should be set to the
+ # zuul_url since that is the most up to date.
+ cache_repo_path = os.path.join(cache_root, 'org/project1')
+ self.assertNotEqual(
work['org/project1'].remotes.origin.url,
+ cache_repo_path,
+ 'workspace repo origin should not be the cache'
+ )
+ zuul_url_repo_path = os.path.join(self.git_root, 'org/project1')
+ self.assertEqual(
+ work['org/project1'].remotes.origin.url,
+ zuul_url_repo_path,
+ 'workspace repo origin should be the zuul url'
+ )
+
+ # project2 is not the zuul_project so the origin should be set
+ # to upstream since that is the best we can do
+ cache_repo_path = os.path.join(cache_root, 'org/project2')
+ self.assertNotEqual(
+ work['org/project2'].remotes.origin.url,
+ cache_repo_path,
+ 'workspace repo origin should not be the cache'
+ )
+ upstream_repo_path = os.path.join(self.upstream_root, 'org/project2')
+ self.assertEqual(
+ work['org/project2'].remotes.origin.url,
upstream_repo_path,
- 'workspace repo origin should be upstream, not cache'
+ 'workspace repo origin should be the upstream url'
)
self.worker.hold_jobs_in_build = False
@@ -149,6 +173,7 @@
git_base_url=self.upstream_root,
projects=projects,
workspace=self.workspace_root,
+ zuul_project=build.parameters.get('ZUUL_PROJECT', None),
zuul_branch=build.parameters['ZUUL_BRANCH'],
zuul_ref=build.parameters['ZUUL_REF'],
zuul_url=self.git_root,
@@ -219,6 +244,7 @@
git_base_url=self.upstream_root,
projects=projects,
workspace=self.workspace_root,
+ zuul_project=build.parameters.get('ZUUL_PROJECT', None),
zuul_branch=build.parameters['ZUUL_BRANCH'],
zuul_ref=build.parameters['ZUUL_REF'],
zuul_url=self.git_root,
@@ -333,6 +359,7 @@
git_base_url=self.upstream_root,
projects=projects,
workspace=self.workspace_root,
+ zuul_project=build.parameters.get('ZUUL_PROJECT', None),
zuul_branch=build.parameters['ZUUL_BRANCH'],
zuul_ref=build.parameters['ZUUL_REF'],
zuul_url=self.git_root,
@@ -395,6 +422,7 @@
git_base_url=self.upstream_root,
projects=projects,
workspace=self.workspace_root,
+ zuul_project=build.parameters.get('ZUUL_PROJECT', None),
zuul_branch=build.parameters['ZUUL_BRANCH'],
zuul_ref=build.parameters['ZUUL_REF'],
zuul_url=self.git_root,
@@ -481,6 +509,7 @@
git_base_url=self.upstream_root,
projects=projects,
workspace=self.workspace_root,
+ zuul_project=build.parameters.get('ZUUL_PROJECT', None),
zuul_branch=build.parameters['ZUUL_BRANCH'],
zuul_ref=build.parameters['ZUUL_REF'],
zuul_url=self.git_root,
@@ -546,6 +575,7 @@
git_base_url=self.upstream_root,
projects=projects,
workspace=self.workspace_root,
+ zuul_project=build.parameters.get('ZUUL_PROJECT', None),
zuul_branch=build.parameters.get('ZUUL_BRANCH', None),
zuul_ref=build.parameters.get('ZUUL_REF', None),
zuul_url=self.git_root,
@@ -567,56 +597,158 @@
self.worker.release()
self.waitUntilSettled()
+ def test_periodic_update(self):
+ # Test that the merger correctly updates its local repository
+ # before running a periodic job.
+
+ # Prime the merger with the current state
+ A = self.fake_gerrit.addFakeChange('org/project', 'master', 'A')
+ self.fake_gerrit.addEvent(A.getPatchsetCreatedEvent(1))
+ self.waitUntilSettled()
+
+ # Merge a different change
+ B = self.fake_gerrit.addFakeChange('org/project', 'master', 'B')
+ B.setMerged()
+
+ # Start a periodic job
+ self.worker.hold_jobs_in_build = True
+ self.launcher.negative_function_cache_ttl = 0
+ self.config.set('zuul', 'layout_config',
+ 'tests/fixtures/layout-timer.yaml')
+ self.sched.reconfigure(self.config)
+ self.registerJobs()
+
+ # The pipeline triggers every second, so we should have seen
+ # several by now.
+ time.sleep(5)
+ self.waitUntilSettled()
+
+ builds = self.builds[:]
+
+ self.worker.hold_jobs_in_build = False
+ # Stop queuing timer triggered jobs so that the assertions
+ # below don't race against more jobs being queued.
+ self.config.set('zuul', 'layout_config',
+ 'tests/fixtures/layout-no-timer.yaml')
+ self.sched.reconfigure(self.config)
+ self.registerJobs()
+ self.worker.release()
+ self.waitUntilSettled()
+
+ projects = ['org/project']
+
+ self.assertEquals(2, len(builds), "Two builds are running")
+
+ upstream = self.getUpstreamRepos(projects)
+ self.assertEqual(upstream['org/project'].commit('master').hexsha,
+ B.patchsets[0]['revision'])
+ states = [
+ {'org/project':
+ str(upstream['org/project'].commit('master')),
+ },
+ {'org/project':
+ str(upstream['org/project'].commit('master')),
+ },
+ ]
+
+ for number, build in enumerate(builds):
+ self.log.debug("Build parameters: %s", build.parameters)
+ cloner = zuul.lib.cloner.Cloner(
+ git_base_url=self.upstream_root,
+ projects=projects,
+ workspace=self.workspace_root,
+ zuul_project=build.parameters.get('ZUUL_PROJECT', None),
+ zuul_branch=build.parameters.get('ZUUL_BRANCH', None),
+ zuul_ref=build.parameters.get('ZUUL_REF', None),
+ zuul_url=self.git_root,
+ )
+ cloner.execute()
+ work = self.getWorkspaceRepos(projects)
+ state = states[number]
+
+ for project in projects:
+ self.assertEquals(state[project],
+ str(work[project].commit('HEAD')),
+ 'Project %s commit for build %s should '
+ 'be correct' % (project, number))
+
+ shutil.rmtree(self.workspace_root)
+
+ self.worker.hold_jobs_in_build = False
+ self.worker.release()
+ self.waitUntilSettled()
+
def test_post_checkout(self):
- project = "org/project"
- path = os.path.join(self.upstream_root, project)
- repo = git.Repo(path)
- repo.head.reference = repo.heads['master']
- commits = []
- for i in range(0, 3):
- commits.append(self.create_commit(project))
- newRev = commits[1]
+ self.worker.hold_jobs_in_build = True
+ project = "org/project1"
+
+ A = self.fake_gerrit.addFakeChange(project, 'master', 'A')
+ event = A.getRefUpdatedEvent()
+ A.setMerged()
+ self.fake_gerrit.addEvent(event)
+ self.waitUntilSettled()
+
+ build = self.builds[0]
+ state = {'org/project1': build.parameters['ZUUL_COMMIT']}
+
+ build.release()
+ self.waitUntilSettled()
cloner = zuul.lib.cloner.Cloner(
git_base_url=self.upstream_root,
projects=[project],
workspace=self.workspace_root,
- zuul_branch=None,
- zuul_ref='master',
+ zuul_project=build.parameters.get('ZUUL_PROJECT', None),
+ zuul_branch=build.parameters.get('ZUUL_BRANCH', None),
+ zuul_ref=build.parameters.get('ZUUL_REF', None),
+ zuul_newrev=build.parameters.get('ZUUL_NEWREV', None),
zuul_url=self.git_root,
- zuul_project=project,
- zuul_newrev=newRev,
)
cloner.execute()
- repos = self.getWorkspaceRepos([project])
- cloned_sha = repos[project].rev_parse('HEAD').hexsha
- self.assertEqual(newRev, cloned_sha)
+ work = self.getWorkspaceRepos([project])
+ self.assertEquals(state[project],
+ str(work[project].commit('HEAD')),
+ 'Project %s commit for build %s should '
+ 'be correct' % (project, 0))
+ shutil.rmtree(self.workspace_root)
def test_post_and_master_checkout(self):
- project = "org/project1"
- master_project = "org/project2"
- path = os.path.join(self.upstream_root, project)
- repo = git.Repo(path)
- repo.head.reference = repo.heads['master']
- commits = []
- for i in range(0, 3):
- commits.append(self.create_commit(project))
- newRev = commits[1]
+ self.worker.hold_jobs_in_build = True
+ projects = ["org/project1", "org/project2"]
+
+ A = self.fake_gerrit.addFakeChange(projects[0], 'master', 'A')
+ event = A.getRefUpdatedEvent()
+ A.setMerged()
+ self.fake_gerrit.addEvent(event)
+ self.waitUntilSettled()
+
+ build = self.builds[0]
+ upstream = self.getUpstreamRepos(projects)
+ state = {'org/project1':
+ build.parameters['ZUUL_COMMIT'],
+ 'org/project2':
+ str(upstream['org/project2'].commit('master')),
+ }
+
+ build.release()
+ self.waitUntilSettled()
cloner = zuul.lib.cloner.Cloner(
git_base_url=self.upstream_root,
- projects=[project, master_project],
+ projects=projects,
workspace=self.workspace_root,
- zuul_branch=None,
- zuul_ref='master',
+ zuul_project=build.parameters.get('ZUUL_PROJECT', None),
+ zuul_branch=build.parameters.get('ZUUL_BRANCH', None),
+ zuul_ref=build.parameters.get('ZUUL_REF', None),
+ zuul_newrev=build.parameters.get('ZUUL_NEWREV', None),
zuul_url=self.git_root,
- zuul_project=project,
- zuul_newrev=newRev
)
cloner.execute()
- repos = self.getWorkspaceRepos([project, master_project])
- cloned_sha = repos[project].rev_parse('HEAD').hexsha
- self.assertEqual(newRev, cloned_sha)
- self.assertEqual(
- repos[master_project].rev_parse('HEAD').hexsha,
- repos[master_project].rev_parse('master').hexsha)
+ work = self.getWorkspaceRepos(projects)
+
+ for project in projects:
+ self.assertEquals(state[project],
+ str(work[project].commit('HEAD')),
+ 'Project %s commit for build %s should '
+ 'be correct' % (project, 0))
+ shutil.rmtree(self.workspace_root)
diff --git a/tests/test_connection.py b/tests/test_connection.py
index c3458ac..f9f54f3 100644
--- a/tests/test_connection.py
+++ b/tests/test_connection.py
@@ -15,9 +15,21 @@
import logging
import testtools
-import zuul.connection.gerrit
+import sqlalchemy as sa
-from tests.base import ZuulTestCase
+import zuul.connection.gerrit
+import zuul.connection.sql
+
+from tests.base import ZuulTestCase, ZuulDBTestCase
+
+
+def _get_reporter_from_connection_name(reporters, connection_name):
+ # Reporters are placed into lists for each action they may exist in.
+    # Search through the given list for the correct reporter by its connection
+ # name
+ for r in reporters:
+ if r.connection.connection_name == connection_name:
+ return r
class TestGerritConnection(testtools.TestCase):
@@ -28,11 +40,18 @@
zuul.connection.gerrit.GerritConnection.driver_name)
-class TestConnections(ZuulTestCase):
- def setup_config(self, config_file='zuul-connections-same-gerrit.conf'):
- super(TestConnections, self).setup_config(config_file)
+class TestSQLConnection(testtools.TestCase):
+ log = logging.getLogger("zuul.test_connection")
- def test_multiple_connections(self):
+ def test_driver_name(self):
+ self.assertEqual(
+ 'sql',
+ zuul.connection.sql.SQLConnection.driver_name
+ )
+
+
+class TestConnections(ZuulDBTestCase):
+ def test_multiple_gerrit_connections(self):
"Test multiple connections to the one gerrit"
A = self.fake_review_gerrit.addFakeChange('org/project', 'master', 'A')
@@ -58,6 +77,178 @@
self.assertEqual(B.patchsets[-1]['approvals'][0]['by']['username'],
'civoter')
+ def _test_sql_tables_created(self, metadata_table=None):
+ "Test the tables for storing results are created properly"
+ buildset_table = 'zuul_buildset'
+ build_table = 'zuul_build'
+
+ insp = sa.engine.reflection.Inspector(
+ self.connections['resultsdb'].engine)
+
+ self.assertEqual(9, len(insp.get_columns(buildset_table)))
+ self.assertEqual(10, len(insp.get_columns(build_table)))
+
+ def test_sql_tables_created(self):
+ "Test the default table is created"
+ self.config.set('zuul', 'layout_config',
+ 'tests/fixtures/layout-sql-reporter.yaml')
+ self.sched.reconfigure(self.config)
+ self._test_sql_tables_created()
+
+ def _test_sql_results(self):
+ "Test results are entered into an sql table"
+ # Grab the sa tables
+ reporter = _get_reporter_from_connection_name(
+ self.sched.layout.pipelines['check'].success_actions,
+ 'resultsdb'
+ )
+
+ # Add a success result
+ A = self.fake_review_gerrit.addFakeChange('org/project', 'master', 'A')
+ self.fake_review_gerrit.addEvent(A.getPatchsetCreatedEvent(1))
+ self.waitUntilSettled()
+
+ # Add a failed result for a negative score
+ B = self.fake_review_gerrit.addFakeChange('org/project', 'master', 'B')
+ self.worker.addFailTest('project-test1', B)
+ self.fake_review_gerrit.addEvent(B.getPatchsetCreatedEvent(1))
+ self.waitUntilSettled()
+
+ conn = self.connections['resultsdb'].engine.connect()
+ result = conn.execute(
+ sa.sql.select([reporter.connection.zuul_buildset_table]))
+
+ buildsets = result.fetchall()
+ self.assertEqual(2, len(buildsets))
+ buildset0 = buildsets[0]
+ buildset1 = buildsets[1]
+
+ self.assertEqual('check', buildset0['pipeline'])
+ self.assertEqual('org/project', buildset0['project'])
+ self.assertEqual(1, buildset0['change'])
+ self.assertEqual(1, buildset0['patchset'])
+ self.assertEqual(1, buildset0['score'])
+ self.assertEqual('Build succeeded.', buildset0['message'])
+
+ buildset0_builds = conn.execute(
+ sa.sql.select([reporter.connection.zuul_build_table]).
+ where(
+ reporter.connection.zuul_build_table.c.buildset_id ==
+ buildset0['id']
+ )
+ ).fetchall()
+
+ # Check the first result, which should be the project-merge job
+ self.assertEqual('project-merge', buildset0_builds[0]['job_name'])
+ self.assertEqual("SUCCESS", buildset0_builds[0]['result'])
+ self.assertEqual('http://logs.example.com/1/1/check/project-merge/0',
+ buildset0_builds[0]['log_url'])
+
+ self.assertEqual('check', buildset1['pipeline'])
+ self.assertEqual('org/project', buildset1['project'])
+ self.assertEqual(2, buildset1['change'])
+ self.assertEqual(1, buildset1['patchset'])
+ self.assertEqual(-1, buildset1['score'])
+ self.assertEqual('Build failed.', buildset1['message'])
+
+ buildset1_builds = conn.execute(
+ sa.sql.select([reporter.connection.zuul_build_table]).
+ where(
+ reporter.connection.zuul_build_table.c.buildset_id ==
+ buildset1['id']
+ )
+ ).fetchall()
+
+ # Check the second last result, which should be the project-test1 job
+ # which failed
+ self.assertEqual('project-test1', buildset1_builds[-2]['job_name'])
+ self.assertEqual("FAILURE", buildset1_builds[-2]['result'])
+ self.assertEqual('http://logs.example.com/2/1/check/project-test1/4',
+ buildset1_builds[-2]['log_url'])
+
+ def test_sql_results(self):
+ "Test results are entered into the default sql table"
+ self.config.set('zuul', 'layout_config',
+ 'tests/fixtures/layout-sql-reporter.yaml')
+ self.sched.reconfigure(self.config)
+ self._test_sql_results()
+
+ def test_multiple_sql_connections(self):
+ "Test putting results in different databases"
+ self.config.set('zuul', 'layout_config',
+ 'tests/fixtures/layout-sql-reporter.yaml')
+ self.sched.reconfigure(self.config)
+
+ # Add a successful result
+ A = self.fake_review_gerrit.addFakeChange('org/project', 'master', 'A')
+ self.fake_review_gerrit.addEvent(A.getPatchsetCreatedEvent(1))
+ self.waitUntilSettled()
+
+ # Add a failed result
+ B = self.fake_review_gerrit.addFakeChange('org/project', 'master', 'B')
+ self.worker.addFailTest('project-test1', B)
+ self.fake_review_gerrit.addEvent(B.getPatchsetCreatedEvent(1))
+ self.waitUntilSettled()
+
+ # Grab the sa tables for resultsdb
+ reporter1 = _get_reporter_from_connection_name(
+ self.sched.layout.pipelines['check'].success_actions,
+ 'resultsdb'
+ )
+
+ conn = self.connections['resultsdb'].engine.connect()
+ buildsets_resultsdb = conn.execute(sa.sql.select(
+ [reporter1.connection.zuul_buildset_table])).fetchall()
+ # Should have been 2 buildset reported to the resultsdb (both success
+ # and failure report)
+ self.assertEqual(2, len(buildsets_resultsdb))
+
+ # The first one should have passed
+ self.assertEqual('check', buildsets_resultsdb[0]['pipeline'])
+ self.assertEqual('org/project', buildsets_resultsdb[0]['project'])
+ self.assertEqual(1, buildsets_resultsdb[0]['change'])
+ self.assertEqual(1, buildsets_resultsdb[0]['patchset'])
+ self.assertEqual(1, buildsets_resultsdb[0]['score'])
+ self.assertEqual('Build succeeded.', buildsets_resultsdb[0]['message'])
+
+ # Grab the sa tables for resultsdb_failures
+ reporter2 = _get_reporter_from_connection_name(
+ self.sched.layout.pipelines['check'].failure_actions,
+ 'resultsdb_failures'
+ )
+
+ conn = self.connections['resultsdb_failures'].engine.connect()
+ buildsets_resultsdb_failures = conn.execute(sa.sql.select(
+ [reporter2.connection.zuul_buildset_table])).fetchall()
+ # The failure db should only have 1 buildset failed
+ self.assertEqual(1, len(buildsets_resultsdb_failures))
+
+ self.assertEqual('check', buildsets_resultsdb_failures[0]['pipeline'])
+ self.assertEqual(
+ 'org/project', buildsets_resultsdb_failures[0]['project'])
+ self.assertEqual(2, buildsets_resultsdb_failures[0]['change'])
+ self.assertEqual(1, buildsets_resultsdb_failures[0]['patchset'])
+ self.assertEqual(-1, buildsets_resultsdb_failures[0]['score'])
+ self.assertEqual(
+ 'Build failed.', buildsets_resultsdb_failures[0]['message'])
+
+
+class TestConnectionsBadSQL(ZuulDBTestCase):
+ def setup_config(self, config_file='zuul-connections-bad-sql.conf'):
+ super(TestConnectionsBadSQL, self).setup_config(config_file)
+
+ def test_unable_to_connect(self):
+ "Test the SQL reporter fails gracefully when unable to connect"
+ self.config.set('zuul', 'layout_config',
+ 'tests/fixtures/layout-sql-reporter.yaml')
+ self.sched.reconfigure(self.config)
+
+ # Trigger a reporter. If no errors are raised, the reporter has been
+ # disabled correctly
+ A = self.fake_review_gerrit.addFakeChange('org/project', 'master', 'A')
+ self.fake_review_gerrit.addEvent(A.getPatchsetCreatedEvent(1))
+ self.waitUntilSettled()
+
class TestMultipleGerrits(ZuulTestCase):
def setup_config(self,
diff --git a/tests/test_reporter.py b/tests/test_reporter.py
index 8d3090a..6a179d2 100644
--- a/tests/test_reporter.py
+++ b/tests/test_reporter.py
@@ -12,18 +12,18 @@
# License for the specific language governing permissions and limitations
# under the License.
+import fixtures
import logging
import testtools
-import zuul.reporter
+import zuul.reporter.gerrit
+import zuul.reporter.smtp
+import zuul.reporter.sql
class TestSMTPReporter(testtools.TestCase):
log = logging.getLogger("zuul.test_reporter")
- def setUp(self):
- super(TestSMTPReporter, self).setUp()
-
def test_reporter_abc(self):
# We only need to instantiate a class for this
reporter = zuul.reporter.smtp.SMTPReporter({}) # noqa
@@ -35,12 +35,30 @@
class TestGerritReporter(testtools.TestCase):
log = logging.getLogger("zuul.test_reporter")
- def setUp(self):
- super(TestGerritReporter, self).setUp()
-
def test_reporter_abc(self):
# We only need to instantiate a class for this
reporter = zuul.reporter.gerrit.GerritReporter(None) # noqa
def test_reporter_name(self):
self.assertEqual('gerrit', zuul.reporter.gerrit.GerritReporter.name)
+
+
+class TestSQLReporter(testtools.TestCase):
+ log = logging.getLogger("zuul.test_reporter")
+
+ def test_reporter_abc(self):
+ # We only need to instantiate a class for this
+ # First mock out _setup_tables
+ def _fake_setup_tables(self):
+ pass
+
+ self.useFixture(fixtures.MonkeyPatch(
+ 'zuul.reporter.sql.SQLReporter._setup_tables',
+ _fake_setup_tables
+ ))
+
+ reporter = zuul.reporter.sql.SQLReporter() # noqa
+
+ def test_reporter_name(self):
+ self.assertEqual(
+ 'sql', zuul.reporter.sql.SQLReporter.name)
diff --git a/tests/test_requirements.py b/tests/test_requirements.py
index 3ae56ad..81814bf 100644
--- a/tests/test_requirements.py
+++ b/tests/test_requirements.py
@@ -245,7 +245,7 @@
self.assertEqual(len(self.history), 1)
self.assertEqual(self.history[0].name, job)
- # A +2 should allow it to be enqueued
+ # A +2 from nobody should not cause it to be enqueued
B = self.fake_gerrit.addFakeChange(project, 'master', 'B')
# A comment event that we will keep submitting to trigger
comment = B.addApproval('CRVW', 2, username='nobody')
@@ -253,6 +253,7 @@
self.waitUntilSettled()
self.assertEqual(len(self.history), 1)
+ # A +2 from jenkins should allow it to be enqueued
B.addApproval('VRFY', 2, username='jenkins')
self.fake_gerrit.addEvent(comment)
self.waitUntilSettled()
diff --git a/tests/test_scheduler.py b/tests/test_scheduler.py
index b6fa4a3..6a16b37 100755
--- a/tests/test_scheduler.py
+++ b/tests/test_scheduler.py
@@ -2394,6 +2394,63 @@
self.assertEqual(B.reported, 1)
self.assertFalse('test-mutex' in self.sched.mutex.mutexes)
+ def test_mutex_abandon(self):
+ "Test abandon with job mutexes"
+ self.config.set('zuul', 'layout_config',
+ 'tests/fixtures/layout-mutex.yaml')
+ self.sched.reconfigure(self.config)
+
+ self.worker.hold_jobs_in_build = True
+
+ A = self.fake_gerrit.addFakeChange('org/project', 'master', 'A')
+ self.assertFalse('test-mutex' in self.sched.mutex.mutexes)
+
+ self.fake_gerrit.addEvent(A.getPatchsetCreatedEvent(1))
+ self.waitUntilSettled()
+
+ self.assertTrue('test-mutex' in self.sched.mutex.mutexes)
+
+ self.fake_gerrit.addEvent(A.getChangeAbandonedEvent())
+ self.waitUntilSettled()
+
+ # The check pipeline should be empty
+ items = self.sched.layout.pipelines['check'].getAllItems()
+ self.assertEqual(len(items), 0)
+
+ # The mutex should be released
+ self.assertFalse('test-mutex' in self.sched.mutex.mutexes)
+
+ def test_mutex_reconfigure(self):
+ "Test reconfigure with job mutexes"
+ self.config.set('zuul', 'layout_config',
+ 'tests/fixtures/layout-mutex.yaml')
+ self.sched.reconfigure(self.config)
+
+ self.worker.hold_jobs_in_build = True
+
+ A = self.fake_gerrit.addFakeChange('org/project', 'master', 'A')
+ self.assertFalse('test-mutex' in self.sched.mutex.mutexes)
+
+ self.fake_gerrit.addEvent(A.getPatchsetCreatedEvent(1))
+ self.waitUntilSettled()
+
+ self.assertTrue('test-mutex' in self.sched.mutex.mutexes)
+
+ self.config.set('zuul', 'layout_config',
+ 'tests/fixtures/layout-mutex-reconfiguration.yaml')
+ self.sched.reconfigure(self.config)
+ self.waitUntilSettled()
+
+ self.worker.release('project-test1')
+ self.waitUntilSettled()
+
+ # The check pipeline should be empty
+ items = self.sched.layout.pipelines['check'].getAllItems()
+ self.assertEqual(len(items), 0)
+
+ # The mutex should be released
+ self.assertFalse('test-mutex' in self.sched.mutex.mutexes)
+
def test_node_label(self):
"Test that a job runs on a specific node label"
self.worker.registerFunction('build:node-project-test1:debian')
@@ -3020,6 +3077,49 @@
self.worker.release('.*')
self.waitUntilSettled()
+ def test_timer_sshkey(self):
+ "Test that a periodic job can setup SSH key authentication"
+ self.worker.hold_jobs_in_build = True
+ self.config.set('zuul', 'layout_config',
+ 'tests/fixtures/layout-timer.yaml')
+ self.sched.reconfigure(self.config)
+ self.registerJobs()
+
+ # The pipeline triggers every second, so we should have seen
+ # several by now.
+ time.sleep(5)
+ self.waitUntilSettled()
+
+ self.assertEqual(len(self.builds), 2)
+
+ ssh_wrapper = os.path.join(self.git_root, ".ssh_wrapper_gerrit")
+ self.assertTrue(os.path.isfile(ssh_wrapper))
+ with open(ssh_wrapper) as f:
+ ssh_wrapper_content = f.read()
+ self.assertIn("fake_id_rsa", ssh_wrapper_content)
+ # In the unit tests the Merger runs in the same process,
+ # so we see its environment variables.
+ self.assertEqual(os.environ['GIT_SSH'], ssh_wrapper)
+
+ self.worker.release('.*')
+ self.waitUntilSettled()
+ self.assertEqual(len(self.history), 2)
+
+ self.assertEqual(self.getJobFromHistory(
+ 'project-bitrot-stable-old').result, 'SUCCESS')
+ self.assertEqual(self.getJobFromHistory(
+ 'project-bitrot-stable-older').result, 'SUCCESS')
+
+ # Stop queuing timer triggered jobs and let any that may have
+ # queued through so that end of test assertions pass.
+ self.config.set('zuul', 'layout_config',
+ 'tests/fixtures/layout-no-timer.yaml')
+ self.sched.reconfigure(self.config)
+ self.registerJobs()
+ self.waitUntilSettled()
+ self.worker.release('.*')
+ self.waitUntilSettled()
+
def test_client_enqueue_change(self):
"Test that the RPC client can enqueue a change"
A = self.fake_gerrit.addFakeChange('org/project', 'master', 'A')
@@ -3803,19 +3903,23 @@
"Test cross-repo dependencies in multiple branches"
A = self.fake_gerrit.addFakeChange('org/project1', 'master', 'A')
B = self.fake_gerrit.addFakeChange('org/project2', 'master', 'B')
- C = self.fake_gerrit.addFakeChange('org/project2', 'mp', 'C')
- C.data['id'] = B.data['id']
+ C1 = self.fake_gerrit.addFakeChange('org/project2', 'mp', 'C1')
+ C2 = self.fake_gerrit.addFakeChange('org/project2', 'mp', 'C2',
+ status="ABANDONED")
+ C1.data['id'] = B.data['id']
+ C2.data['id'] = B.data['id']
+
A.addApproval('CRVW', 2)
B.addApproval('CRVW', 2)
- C.addApproval('CRVW', 2)
+ C1.addApproval('CRVW', 2)
- # A Depends-On: B+C
+ # A Depends-On: B+C1
A.data['commitMessage'] = '%s\n\nDepends-On: %s\n' % (
A.subject, B.data['id'])
self.worker.hold_jobs_in_build = True
B.addApproval('APRV', 1)
- C.addApproval('APRV', 1)
+ C1.addApproval('APRV', 1)
self.fake_gerrit.addEvent(A.addApproval('APRV', 1))
self.waitUntilSettled()
@@ -3831,10 +3935,10 @@
self.assertEqual(A.data['status'], 'MERGED')
self.assertEqual(B.data['status'], 'MERGED')
- self.assertEqual(C.data['status'], 'MERGED')
+ self.assertEqual(C1.data['status'], 'MERGED')
self.assertEqual(A.reported, 2)
self.assertEqual(B.reported, 2)
- self.assertEqual(C.reported, 2)
+ self.assertEqual(C1.reported, 2)
self.assertEqual(self.getJobFromHistory('project1-merge').changes,
'2,1 3,1 1,1')
diff --git a/tools/test-setup.sh b/tools/test-setup.sh
new file mode 100755
index 0000000..f4a0458
--- /dev/null
+++ b/tools/test-setup.sh
@@ -0,0 +1,33 @@
+#!/bin/bash -xe
+
+# This script will be run by OpenStack CI before unit tests are run,
+# it sets up the test system as needed.
+# Developers should setup their test systems in a similar way.
+
+# This setup needs to be run as a user that can run sudo.
+
+# The root password for the MySQL database; pass it in via
+# MYSQL_ROOT_PW.
+DB_ROOT_PW=${MYSQL_ROOT_PW:-insecure_slave}
+
+# This user and its password are used by the tests, if you change it,
+# your tests might fail.
+DB_USER=openstack_citest
+DB_PW=openstack_citest
+
+sudo -H mysqladmin -u root password $DB_ROOT_PW
+
+# It's best practice to remove anonymous users from the database. If
+# an anonymous user exists, then it matches first for connections and
+# other connections from that host will not work.
+sudo -H mysql -u root -p$DB_ROOT_PW -h localhost -e "
+ DELETE FROM mysql.user WHERE User='';
+ FLUSH PRIVILEGES;
+ GRANT ALL PRIVILEGES ON *.*
+ TO '$DB_USER'@'%' identified by '$DB_PW' WITH GRANT OPTION;"
+
+# Now create our database.
+mysql -u $DB_USER -p$DB_PW -h 127.0.0.1 -e "
+ SET default_storage_engine=MYISAM;
+ DROP DATABASE IF EXISTS openstack_citest;
+ CREATE DATABASE openstack_citest CHARACTER SET utf8;"
diff --git a/zuul/alembic/sql_reporter/README b/zuul/alembic/sql_reporter/README
new file mode 100644
index 0000000..98e4f9c
--- /dev/null
+++ b/zuul/alembic/sql_reporter/README
@@ -0,0 +1 @@
+Generic single-database configuration.
\ No newline at end of file
diff --git a/zuul/alembic/sql_reporter/env.py b/zuul/alembic/sql_reporter/env.py
new file mode 100644
index 0000000..56a5b7e
--- /dev/null
+++ b/zuul/alembic/sql_reporter/env.py
@@ -0,0 +1,70 @@
+from __future__ import with_statement
+from alembic import context
+from sqlalchemy import engine_from_config, pool
+# from logging.config import fileConfig
+
+# this is the Alembic Config object, which provides
+# access to the values within the .ini file in use.
+config = context.config
+
+# Interpret the config file for Python logging.
+# This line sets up loggers basically.
+# fileConfig(config.config_file_name)
+
+# add your model's MetaData object here
+# for 'autogenerate' support
+# from myapp import mymodel
+# target_metadata = mymodel.Base.metadata
+target_metadata = None
+
+# other values from the config, defined by the needs of env.py,
+# can be acquired:
+# my_important_option = config.get_main_option("my_important_option")
+# ... etc.
+
+
+def run_migrations_offline():
+ """Run migrations in 'offline' mode.
+
+ This configures the context with just a URL
+ and not an Engine, though an Engine is acceptable
+ here as well. By skipping the Engine creation
+ we don't even need a DBAPI to be available.
+
+ Calls to context.execute() here emit the given string to the
+ script output.
+
+ """
+ url = config.get_main_option("sqlalchemy.url")
+ context.configure(
+ url=url, target_metadata=target_metadata, literal_binds=True)
+
+ with context.begin_transaction():
+ context.run_migrations()
+
+
+def run_migrations_online():
+ """Run migrations in 'online' mode.
+
+ In this scenario we need to create an Engine
+ and associate a connection with the context.
+
+ """
+ connectable = engine_from_config(
+ config.get_section(config.config_ini_section),
+ prefix='sqlalchemy.',
+ poolclass=pool.NullPool)
+
+ with connectable.connect() as connection:
+ context.configure(
+ connection=connection,
+ target_metadata=target_metadata
+ )
+
+ with context.begin_transaction():
+ context.run_migrations()
+
+if context.is_offline_mode():
+ run_migrations_offline()
+else:
+ run_migrations_online()
diff --git a/zuul/alembic/sql_reporter/script.py.mako b/zuul/alembic/sql_reporter/script.py.mako
new file mode 100644
index 0000000..43c0940
--- /dev/null
+++ b/zuul/alembic/sql_reporter/script.py.mako
@@ -0,0 +1,24 @@
+"""${message}
+
+Revision ID: ${up_revision}
+Revises: ${down_revision | comma,n}
+Create Date: ${create_date}
+
+"""
+
+# revision identifiers, used by Alembic.
+revision = ${repr(up_revision)}
+down_revision = ${repr(down_revision)}
+branch_labels = ${repr(branch_labels)}
+depends_on = ${repr(depends_on)}
+
+from alembic import op
+import sqlalchemy as sa
+${imports if imports else ""}
+
+def upgrade():
+ ${upgrades if upgrades else "pass"}
+
+
+def downgrade():
+ ${downgrades if downgrades else "pass"}
diff --git a/zuul/alembic/sql_reporter/versions/4d3ebd7f06b9_set_up_initial_reporter_tables.py b/zuul/alembic/sql_reporter/versions/4d3ebd7f06b9_set_up_initial_reporter_tables.py
new file mode 100644
index 0000000..783196f
--- /dev/null
+++ b/zuul/alembic/sql_reporter/versions/4d3ebd7f06b9_set_up_initial_reporter_tables.py
@@ -0,0 +1,53 @@
+"""Set up initial reporter tables
+
+Revision ID: 4d3ebd7f06b9
+Revises:
+Create Date: 2015-12-06 15:27:38.080020
+
+"""
+
+# revision identifiers, used by Alembic.
+revision = '4d3ebd7f06b9'
+down_revision = None
+branch_labels = None
+depends_on = None
+
+from alembic import op
+import sqlalchemy as sa
+
+BUILDSET_TABLE = 'zuul_buildset'
+BUILD_TABLE = 'zuul_build'
+
+
+def upgrade():
+ op.create_table(
+ BUILDSET_TABLE,
+ sa.Column('id', sa.Integer, primary_key=True),
+ sa.Column('zuul_ref', sa.String(255)),
+ sa.Column('pipeline', sa.String(255)),
+ sa.Column('project', sa.String(255)),
+ sa.Column('change', sa.Integer, nullable=True),
+ sa.Column('patchset', sa.Integer, nullable=True),
+ sa.Column('ref', sa.String(255)),
+ sa.Column('score', sa.Integer),
+ sa.Column('message', sa.TEXT()),
+ )
+
+ op.create_table(
+ BUILD_TABLE,
+ sa.Column('id', sa.Integer, primary_key=True),
+ sa.Column('buildset_id', sa.Integer,
+ sa.ForeignKey(BUILDSET_TABLE + ".id")),
+ sa.Column('uuid', sa.String(36)),
+ sa.Column('job_name', sa.String(255)),
+ sa.Column('result', sa.String(255)),
+ sa.Column('start_time', sa.DateTime()),
+ sa.Column('end_time', sa.DateTime()),
+ sa.Column('voting', sa.Boolean),
+ sa.Column('log_url', sa.String(255)),
+ sa.Column('node_name', sa.String(255)),
+ )
+
+
+def downgrade():
+ raise Exception("Downgrades not supported")
diff --git a/zuul/alembic_reporter.ini b/zuul/alembic_reporter.ini
new file mode 100644
index 0000000..b7f787c
--- /dev/null
+++ b/zuul/alembic_reporter.ini
@@ -0,0 +1,69 @@
+# A generic, single database configuration.
+
+[alembic]
+# path to migration scripts
+# NOTE(jhesketh): We may use alembic for other db components of zuul in the
+# future. Use a sub-folder for the reporters own versions.
+script_location = alembic/sql_reporter
+
+# template used to generate migration files
+# file_template = %%(rev)s_%%(slug)s
+
+# max length of characters to apply to the
+# "slug" field
+#truncate_slug_length = 40
+
+# set to 'true' to run the environment during
+# the 'revision' command, regardless of autogenerate
+# revision_environment = false
+
+# set to 'true' to allow .pyc and .pyo files without
+# a source .py file to be detected as revisions in the
+# versions/ directory
+# sourceless = false
+
+# version location specification; this defaults
+# to alembic/versions. When using multiple version
+# directories, initial revisions must be specified with --version-path
+# version_locations = %(here)s/bar %(here)s/bat alembic/versions
+
+# the output encoding used when revision files
+# are written from script.py.mako
+# output_encoding = utf-8
+
+sqlalchemy.url = mysql+pymysql://user@localhost/database
+
+# Logging configuration
+[loggers]
+keys = root,sqlalchemy,alembic
+
+[handlers]
+keys = console
+
+[formatters]
+keys = generic
+
+[logger_root]
+level = WARN
+handlers = console
+qualname =
+
+[logger_sqlalchemy]
+level = WARN
+handlers =
+qualname = sqlalchemy.engine
+
+[logger_alembic]
+level = INFO
+handlers =
+qualname = alembic
+
+[handler_console]
+class = StreamHandler
+args = (sys.stderr,)
+level = NOTSET
+formatter = generic
+
+[formatter_generic]
+format = %(levelname)-5.5s [%(name)s] %(message)s
+datefmt = %H:%M:%S
diff --git a/zuul/connection/gerrit.py b/zuul/connection/gerrit.py
index 62891cd..f3a6859 100644
--- a/zuul/connection/gerrit.py
+++ b/zuul/connection/gerrit.py
@@ -63,7 +63,7 @@
if change:
event.project_name = change.get('project')
event.branch = change.get('branch')
- event.change_number = change.get('number')
+ event.change_number = str(change.get('number'))
event.change_url = change.get('url')
patchset = data.get('patchSet')
if patchset:
@@ -135,13 +135,14 @@
poll_timeout = 500
def __init__(self, gerrit_connection, username, hostname, port=29418,
- keyfile=None):
+ keyfile=None, keepalive=60):
threading.Thread.__init__(self)
self.username = username
self.keyfile = keyfile
self.hostname = hostname
self.port = port
self.gerrit_connection = gerrit_connection
+ self.keepalive = keepalive
self._stopped = False
def _read(self, fd):
@@ -172,6 +173,8 @@
username=self.username,
port=self.port,
key_filename=self.keyfile)
+ transport = client.get_transport()
+ transport.set_keepalive(self.keepalive)
stdin, stdout, stderr = client.exec_command("gerrit stream-events")
@@ -189,13 +192,16 @@
stdout.channel.close()
ret = stdout.channel.recv_exit_status()
self.log.debug("SSH exit status: %s" % ret)
- client.close()
if ret and ret not in [-1, 130]:
raise Exception("Gerrit error executing stream-events")
except:
self.log.exception("Exception on ssh event stream:")
time.sleep(5)
+ finally:
+ # If we don't close on exceptions to connect we can leak the
+ # connection and DoS Gerrit.
+ client.close()
def run(self):
while not self._stopped:
@@ -208,7 +214,7 @@
class GerritConnection(BaseConnection):
driver_name = 'gerrit'
- log = logging.getLogger("connection.gerrit")
+ log = logging.getLogger("zuul.GerritConnection")
def __init__(self, connection_name, connection_config):
super(GerritConnection, self).__init__(connection_name,
@@ -224,6 +230,7 @@
self.server = self.connection_config.get('server')
self.port = int(self.connection_config.get('port', 29418))
self.keyfile = self.connection_config.get('sshkey', None)
+ self.keepalive = int(self.connection_config.get('keepalive', 60))
self.watcher_thread = None
self.event_queue = None
self.client = None
@@ -349,14 +356,25 @@
return alldata
def _open(self):
- client = paramiko.SSHClient()
- client.load_system_host_keys()
- client.set_missing_host_key_policy(paramiko.WarningPolicy())
- client.connect(self.server,
- username=self.user,
- port=self.port,
- key_filename=self.keyfile)
- self.client = client
+ if self.client:
+ # Paramiko needs explicit closes, its possible we will open even
+ # with an unclosed client so explicitly close here.
+ self.client.close()
+ try:
+ client = paramiko.SSHClient()
+ client.load_system_host_keys()
+ client.set_missing_host_key_policy(paramiko.WarningPolicy())
+ client.connect(self.server,
+ username=self.user,
+ port=self.port,
+ key_filename=self.keyfile)
+ transport = client.get_transport()
+ transport.set_keepalive(self.keepalive)
+ self.client = client
+ except Exception:
+ client.close()
+ self.client = None
+ raise
def _ssh(self, command, stdin_data=None):
if not self.client:
@@ -461,7 +479,8 @@
self.user,
self.server,
self.port,
- keyfile=self.keyfile)
+ keyfile=self.keyfile,
+ keepalive=self.keepalive)
self.watcher_thread.start()
def _stop_event_connector(self):
@@ -475,5 +494,5 @@
def getSchema():
- gerrit_connection = v.Any(str, v.Schema({}, extra=True))
+ gerrit_connection = v.Any(str, v.Schema(dict))
return gerrit_connection
diff --git a/zuul/connection/smtp.py b/zuul/connection/smtp.py
index d3eccff..0224056 100644
--- a/zuul/connection/smtp.py
+++ b/zuul/connection/smtp.py
@@ -23,7 +23,7 @@
class SMTPConnection(BaseConnection):
driver_name = 'smtp'
- log = logging.getLogger("connection.smtp")
+ log = logging.getLogger("zuul.SMTPConnection")
def __init__(self, connection_name, connection_config):
@@ -59,5 +59,5 @@
def getSchema():
- smtp_connection = v.Any(str, v.Schema({}, extra=True))
+ smtp_connection = v.Any(str, v.Schema(dict))
return smtp_connection
diff --git a/zuul/connection/sql.py b/zuul/connection/sql.py
new file mode 100644
index 0000000..73429e9
--- /dev/null
+++ b/zuul/connection/sql.py
@@ -0,0 +1,104 @@
+# Copyright 2014 Rackspace Australia
+#
+# Licensed under the Apache License, Version 2.0 (the "License"); you may
+# not use this file except in compliance with the License. You may obtain
+# a copy of the License at
+#
+# http://www.apache.org/licenses/LICENSE-2.0
+#
+# Unless required by applicable law or agreed to in writing, software
+# distributed under the License is distributed on an "AS IS" BASIS, WITHOUT
+# WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. See the
+# License for the specific language governing permissions and limitations
+# under the License.
+
+import logging
+
+import alembic
+import alembic.config
+import sqlalchemy as sa
+import voluptuous as v
+
+from zuul.connection import BaseConnection
+
+BUILDSET_TABLE = 'zuul_buildset'
+BUILD_TABLE = 'zuul_build'
+
+
+class SQLConnection(BaseConnection):
+ driver_name = 'sql'
+ log = logging.getLogger("zuul.SQLConnection")
+
+ def __init__(self, connection_name, connection_config):
+
+ super(SQLConnection, self).__init__(connection_name, connection_config)
+
+ self.dburi = None
+ self.engine = None
+ self.connection = None
+ self.tables_established = False
+ try:
+ self.dburi = self.connection_config.get('dburi')
+ self.engine = sa.create_engine(self.dburi)
+ self._migrate()
+ self._setup_tables()
+ self.tables_established = True
+ except sa.exc.NoSuchModuleError:
+ self.log.exception(
+ "The required module for the dburi dialect isn't available. "
+ "SQL connection %s will be unavailable." % connection_name)
+ except sa.exc.OperationalError:
+ self.log.exception(
+ "Unable to connect to the database or establish the required "
+ "tables. Reporter %s is disabled" % self)
+
+ def _migrate(self):
+ """Perform the alembic migrations for this connection"""
+ with self.engine.begin() as conn:
+ context = alembic.migration.MigrationContext.configure(conn)
+ current_rev = context.get_current_revision()
+ self.log.debug('Current migration revision: %s' % current_rev)
+
+ config = alembic.config.Config()
+ config.set_main_option("script_location",
+ "zuul:alembic/sql_reporter")
+ config.set_main_option("sqlalchemy.url",
+ self.connection_config.get('dburi'))
+
+ alembic.command.upgrade(config, 'head')
+
+ def _setup_tables(self):
+ metadata = sa.MetaData()
+
+ self.zuul_buildset_table = sa.Table(
+ BUILDSET_TABLE, metadata,
+ sa.Column('id', sa.Integer, primary_key=True),
+ sa.Column('zuul_ref', sa.String(255)),
+ sa.Column('pipeline', sa.String(255)),
+ sa.Column('project', sa.String(255)),
+ sa.Column('change', sa.Integer, nullable=True),
+ sa.Column('patchset', sa.Integer, nullable=True),
+ sa.Column('ref', sa.String(255)),
+ sa.Column('score', sa.Integer),
+ sa.Column('message', sa.TEXT()),
+ )
+
+ self.zuul_build_table = sa.Table(
+ BUILD_TABLE, metadata,
+ sa.Column('id', sa.Integer, primary_key=True),
+ sa.Column('buildset_id', sa.Integer,
+ sa.ForeignKey(BUILDSET_TABLE + ".id")),
+ sa.Column('uuid', sa.String(36)),
+ sa.Column('job_name', sa.String(255)),
+ sa.Column('result', sa.String(255)),
+ sa.Column('start_time', sa.DateTime()),
+ sa.Column('end_time', sa.DateTime()),
+ sa.Column('voting', sa.Boolean),
+ sa.Column('log_url', sa.String(255)),
+ sa.Column('node_name', sa.String(255)),
+ )
+
+
+def getSchema():
+ sql_connection = v.Any(str, v.Schema(dict))
+ return sql_connection
diff --git a/zuul/launcher/ansiblelaunchserver.py b/zuul/launcher/ansiblelaunchserver.py
index 20e5ad5..9a65da9 100644
--- a/zuul/launcher/ansiblelaunchserver.py
+++ b/zuul/launcher/ansiblelaunchserver.py
@@ -39,7 +39,7 @@
ANSIBLE_WATCHDOG_GRACE = 5 * 60
ANSIBLE_DEFAULT_TIMEOUT = 2 * 60 * 60
ANSIBLE_DEFAULT_PRE_TIMEOUT = 10 * 60
-ANSIBLE_DEFAULT_POST_TIMEOUT = 10 * 60
+ANSIBLE_DEFAULT_POST_TIMEOUT = 30 * 60
COMMANDS = ['reconfigure', 'stop', 'pause', 'unpause', 'release', 'graceful',
@@ -131,6 +131,11 @@
def __exit__(self, etype, value, tb):
if not self.keep:
+ # Ensure directories are writeable so that files can be removed
+ # from them.
+ for root, dirs, files in os.walk(self.root):
+ for d in dirs:
+ os.chmod(os.path.join(root, d), 0o755)
shutil.rmtree(self.root)
diff --git a/zuul/layoutvalidator.py b/zuul/layoutvalidator.py
index 91e15d1..a0e550b 100644
--- a/zuul/layoutvalidator.py
+++ b/zuul/layoutvalidator.py
@@ -39,7 +39,7 @@
'email': str,
'older-than': str,
'newer-than': str,
- }, extra=True)
+ }, extra=v.ALLOW_EXTRA)
require = {'approval': toList(approval),
'open': bool,
@@ -167,6 +167,7 @@
'reporter': {
'gerrit': 'zuul.reporter.gerrit',
'smtp': 'zuul.reporter.smtp',
+ 'sql': 'zuul.reporter.sql',
},
}
standard_drivers = {
diff --git a/zuul/lib/cloner.py b/zuul/lib/cloner.py
index 197c426..6e50eda 100644
--- a/zuul/lib/cloner.py
+++ b/zuul/lib/cloner.py
@@ -46,6 +46,8 @@
self.zuul_branch = zuul_branch or ''
self.zuul_ref = zuul_ref or ''
self.zuul_url = zuul_url
+ self.zuul_project = zuul_project
+
self.project_branches = project_branches or {}
self.project_revisions = {}
@@ -77,7 +79,18 @@
def cloneUpstream(self, project, dest):
# Check for a cached git repo first
git_cache = '%s/%s' % (self.cache_dir, project)
- git_upstream = '%s/%s' % (self.git_url, project)
+
+ # Then, if we are cloning the repo for the zuul_project, then
+ # set its origin to be the zuul merger, as it is guaranteed to
+ # be correct and up to date even if mirrors haven't updated
+ # yet. Otherwise, we can not be sure about the state of the
+ # project, so our best chance to get the most current state is
+ # by setting origin to the git_url.
+ if (self.zuul_url and project == self.zuul_project):
+ git_upstream = '%s/%s' % (self.zuul_url, project)
+ else:
+ git_upstream = '%s/%s' % (self.git_url, project)
+
repo_is_cloned = os.path.exists(os.path.join(dest, '.git'))
if (self.cache_dir and
os.path.exists(git_cache) and
@@ -104,23 +117,35 @@
return repo
- def fetchFromZuul(self, repo, project, ref):
- zuul_remote = '%s/%s' % (self.zuul_url, project)
+ def fetchRef(self, repo, project, ref):
+ # If we are fetching a zuul ref, the only place to get it is
+ # from the zuul merger (and it is guaranteed to be correct).
+ # Otherwise, the only way we can be certain that the ref
+ # (which, since it is not a zuul ref, is a branch or tag) is
+ # correct is in the case that it matches zuul_project. If
+ # neither of those two conditions are met, we are most likely
+ # to get the correct state from the git_url.
+ if (ref.startswith('refs/zuul') or
+ project == self.zuul_project):
+
+ remote = '%s/%s' % (self.zuul_url, project)
+ else:
+ remote = '%s/%s' % (self.git_url, project)
try:
- repo.fetchFrom(zuul_remote, ref)
- self.log.debug("Fetched ref %s from %s", ref, project)
+ repo.fetchFrom(remote, ref)
+ self.log.debug("Fetched ref %s from %s", ref, remote)
return True
except ValueError:
- self.log.debug("Project %s in Zuul does not have ref %s",
- project, ref)
+ self.log.debug("Repo %s does not have ref %s",
+ remote, ref)
return False
except GitCommandError as error:
# Bail out if fetch fails due to infrastructure reasons
if error.stderr.startswith('fatal: unable to access'):
raise
- self.log.debug("Project %s in Zuul does not have ref %s",
- project, ref)
+ self.log.debug("Repo %s does not have ref %s",
+ remote, ref)
return False
def prepareRepo(self, project, dest):
@@ -192,7 +217,7 @@
self.log.info("Attempting to check out revision %s for "
"project %s", indicated_revision, project)
try:
- self.fetchFromZuul(repo, project, self.zuul_ref)
+ self.fetchRef(repo, project, self.zuul_ref)
commit = repo.checkout(indicated_revision)
except (ValueError, GitCommandError):
raise exceptions.RevNotFound(project, indicated_revision)
@@ -201,10 +226,10 @@
# If we have a non empty zuul_ref to use, use it. Otherwise we fall
# back to checking out the branch.
elif ((override_zuul_ref and
- self.fetchFromZuul(repo, project, override_zuul_ref)) or
+ self.fetchRef(repo, project, override_zuul_ref)) or
(fallback_zuul_ref and
fallback_zuul_ref != override_zuul_ref and
- self.fetchFromZuul(repo, project, fallback_zuul_ref))):
+ self.fetchRef(repo, project, fallback_zuul_ref))):
# Work around a bug in GitPython which can not parse FETCH_HEAD
gitcmd = git.Git(dest)
fetch_head = gitcmd.rev_parse('FETCH_HEAD')
diff --git a/zuul/lib/connections.py b/zuul/lib/connections.py
index 92ddb0f..7d47775 100644
--- a/zuul/lib/connections.py
+++ b/zuul/lib/connections.py
@@ -12,13 +12,16 @@
# License for the specific language governing permissions and limitations
# under the License.
+import logging
import re
import zuul.connection.gerrit
import zuul.connection.smtp
+import zuul.connection.sql
def configure_connections(config):
+ log = logging.getLogger("configure_connections")
# Register connections from the config
# TODO(jhesketh): import connection modules dynamically
@@ -46,6 +49,9 @@
elif con_driver == 'smtp':
connections[con_name] = \
zuul.connection.smtp.SMTPConnection(con_name, con_config)
+ elif con_driver == 'sql':
+ connections[con_name] = \
+ zuul.connection.sql.SQLConnection(con_name, con_config)
else:
raise Exception("Unknown driver, %s, for connection %s"
% (con_config['driver'], con_name))
@@ -54,13 +60,21 @@
# connection named 'gerrit' or 'smtp' respectfully
if 'gerrit' in config.sections():
- connections['gerrit'] = \
- zuul.connection.gerrit.GerritConnection(
- 'gerrit', dict(config.items('gerrit')))
+ if 'gerrit' in connections:
+ log.warning("The legacy [gerrit] section will be ignored in favour"
+ " of the [connection gerrit].")
+ else:
+ connections['gerrit'] = \
+ zuul.connection.gerrit.GerritConnection(
+ 'gerrit', dict(config.items('gerrit')))
if 'smtp' in config.sections():
- connections['smtp'] = \
- zuul.connection.smtp.SMTPConnection(
- 'smtp', dict(config.items('smtp')))
+ if 'smtp' in connections:
+ log.warning("The legacy [smtp] section will be ignored in favour"
+ " of the [connection smtp].")
+ else:
+ connections['smtp'] = \
+ zuul.connection.smtp.SMTPConnection(
+ 'smtp', dict(config.items('smtp')))
return connections
diff --git a/zuul/lib/swift.py b/zuul/lib/swift.py
index b5d3bc7..5660819 100644
--- a/zuul/lib/swift.py
+++ b/zuul/lib/swift.py
@@ -24,7 +24,7 @@
class Swift(object):
- log = logging.getLogger("zuul.lib.swift")
+ log = logging.getLogger("zuul.Swift")
def __init__(self, config):
self.config = config
diff --git a/zuul/merger/client.py b/zuul/merger/client.py
index 950c385..9e8c243 100644
--- a/zuul/merger/client.py
+++ b/zuul/merger/client.py
@@ -97,9 +97,10 @@
data = dict(items=items)
self.submitJob('merger:merge', data, build_set, precedence)
- def updateRepo(self, project, url, build_set,
+ def updateRepo(self, project, connection_name, url, build_set,
precedence=zuul.model.PRECEDENCE_NORMAL):
data = dict(project=project,
+ connection_name=connection_name,
url=url)
self.submitJob('merger:update', data, build_set, precedence)
diff --git a/zuul/merger/merger.py b/zuul/merger/merger.py
index b3cfaca..a974e9c 100644
--- a/zuul/merger/merger.py
+++ b/zuul/merger/merger.py
@@ -223,6 +223,14 @@
fd.close()
os.chmod(name, 0o755)
+ def _setGitSsh(self, connection_name):
+ wrapper_name = '.ssh_wrapper_%s' % connection_name
+ name = os.path.join(self.working_root, wrapper_name)
+ if os.path.isfile(name):
+ os.environ['GIT_SSH'] = name
+ elif 'GIT_SSH' in os.environ:
+ del os.environ['GIT_SSH']
+
def addProject(self, project, url):
repo = None
try:
@@ -242,11 +250,12 @@
" without a url" % (project,))
return self.addProject(project, url)
- def updateRepo(self, project, url):
+ def updateRepo(self, project, connection_name, url):
+ self._setGitSsh(connection_name)
repo = self.getRepo(project, url)
try:
self.log.info("Updating local repository %s", project)
- repo.update()
+ repo.reset()
except Exception:
self.log.exception("Unable to update %s", project)
@@ -279,14 +288,6 @@
return commit
- def _setGitSsh(self, connection_name):
- wrapper_name = '.ssh_wrapper_%s' % connection_name
- name = os.path.join(self.working_root, wrapper_name)
- if os.path.isfile(name):
- os.environ['GIT_SSH'] = name
- elif 'GIT_SSH' in os.environ:
- del os.environ['GIT_SSH']
-
def _mergeItem(self, item, recent):
self.log.debug("Processing refspec %s for project %s / %s ref %s" %
(item['refspec'], item['project'], item['branch'],
diff --git a/zuul/merger/server.py b/zuul/merger/server.py
index d56993c..b1921d9 100644
--- a/zuul/merger/server.py
+++ b/zuul/merger/server.py
@@ -109,7 +109,9 @@
def update(self, job):
args = json.loads(job.arguments)
- self.merger.updateRepo(args['project'], args['url'])
+ self.merger.updateRepo(args['project'],
+ args['connection_name'],
+ args['url'])
result = dict(updated=True,
zuul_url=self.zuul_url)
job.sendWorkComplete(json.dumps(result))
diff --git a/zuul/reporter/__init__.py b/zuul/reporter/__init__.py
index 0c9a8d8..cd78412 100644
--- a/zuul/reporter/__init__.py
+++ b/zuul/reporter/__init__.py
@@ -64,40 +64,43 @@
}
return format_methods[self._action]
- def _formatItemReport(self, pipeline, item):
+ def _formatItemReport(self, pipeline, item, with_jobs=True):
"""Format a report from the given items. Usually to provide results to
a reporter taking free-form text."""
- ret = self._getFormatter()(pipeline, item)
+ ret = self._getFormatter()(pipeline, item, with_jobs)
if pipeline.footer_message:
ret += '\n' + pipeline.footer_message
return ret
- def _formatItemReportStart(self, pipeline, item):
+ def _formatItemReportStart(self, pipeline, item, with_jobs=True):
msg = "Starting %s jobs." % pipeline.name
if self.sched.config.has_option('zuul', 'status_url'):
msg += "\n" + self.sched.config.get('zuul', 'status_url')
return msg
- def _formatItemReportSuccess(self, pipeline, item):
- return (pipeline.success_message + '\n\n' +
- self._formatItemReportJobs(pipeline, item))
+ def _formatItemReportSuccess(self, pipeline, item, with_jobs=True):
+ msg = pipeline.success_message
+ if with_jobs:
+ msg += '\n\n' + self._formatItemReportJobs(pipeline, item)
+ return msg
- def _formatItemReportFailure(self, pipeline, item):
+ def _formatItemReportFailure(self, pipeline, item, with_jobs=True):
if item.dequeued_needing_change:
msg = 'This change depends on a change that failed to merge.\n'
elif not pipeline.didMergerSucceed(item):
msg = pipeline.merge_failure_message
else:
- msg = (pipeline.failure_message + '\n\n' +
- self._formatItemReportJobs(pipeline, item))
+ msg = pipeline.failure_message
+ if with_jobs:
+ msg += '\n\n' + self._formatItemReportJobs(pipeline, item)
return msg
- def _formatItemReportMergeFailure(self, pipeline, item):
+ def _formatItemReportMergeFailure(self, pipeline, item, with_jobs=True):
return pipeline.merge_failure_message
- def _formatItemReportDisabled(self, pipeline, item):
+ def _formatItemReportDisabled(self, pipeline, item, with_jobs=True):
if item.current_build_set.result == 'SUCCESS':
return self._formatItemReportSuccess(pipeline, item)
elif item.current_build_set.result == 'FAILURE':
diff --git a/zuul/reporter/gerrit.py b/zuul/reporter/gerrit.py
index 1427449..a92c31b 100644
--- a/zuul/reporter/gerrit.py
+++ b/zuul/reporter/gerrit.py
@@ -23,7 +23,7 @@
"""Sends off reports to Gerrit."""
name = 'gerrit'
- log = logging.getLogger("zuul.reporter.gerrit.Reporter")
+ log = logging.getLogger("zuul.GerritReporter")
def report(self, source, pipeline, item):
"""Send a message to gerrit."""
@@ -48,5 +48,5 @@
def getSchema():
- gerrit_reporter = v.Any(str, v.Schema({}, extra=True))
+ gerrit_reporter = v.Any(str, v.Schema(dict))
return gerrit_reporter
diff --git a/zuul/reporter/smtp.py b/zuul/reporter/smtp.py
index 586b941..3935098 100644
--- a/zuul/reporter/smtp.py
+++ b/zuul/reporter/smtp.py
@@ -22,7 +22,7 @@
"""Sends off reports to emails via SMTP."""
name = 'smtp'
- log = logging.getLogger("zuul.reporter.smtp.Reporter")
+ log = logging.getLogger("zuul.SMTPReporter")
def report(self, source, pipeline, item):
"""Send the compiled report message via smtp."""
diff --git a/zuul/reporter/sql.py b/zuul/reporter/sql.py
new file mode 100644
index 0000000..b663a59
--- /dev/null
+++ b/zuul/reporter/sql.py
@@ -0,0 +1,94 @@
+# Copyright 2015 Rackspace Australia
+#
+# Licensed under the Apache License, Version 2.0 (the "License"); you may
+# not use this file except in compliance with the License. You may obtain
+# a copy of the License at
+#
+# http://www.apache.org/licenses/LICENSE-2.0
+#
+# Unless required by applicable law or agreed to in writing, software
+# distributed under the License is distributed on an "AS IS" BASIS, WITHOUT
+# WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. See the
+# License for the specific language governing permissions and limitations
+# under the License.
+
+import datetime
+import logging
+import voluptuous as v
+
+from zuul.reporter import BaseReporter
+
+
+class SQLReporter(BaseReporter):
+ """Sends off reports to a database."""
+
+ name = 'sql'
+ log = logging.getLogger("zuul.reporter.mysql.SQLReporter")
+
+ def __init__(self, reporter_config={}, sched=None, connection=None):
+ super(SQLReporter, self).__init__(
+ reporter_config, sched, connection)
+ self.result_score = reporter_config.get('score', None)
+
+ def report(self, source, pipeline, item):
+ """Create an entry into a database."""
+
+ if not self.connection.tables_established:
+ self.log.warn("SQL reporter (%s) is disabled " % self)
+ return
+
+ if self.sched.config.has_option('zuul', 'url_pattern'):
+ url_pattern = self.sched.config.get('zuul', 'url_pattern')
+ else:
+ url_pattern = None
+
+ score = self.reporter_config['score']\
+ if 'score' in self.reporter_config else 0
+
+ with self.connection.engine.begin() as conn:
+ buildset_ins = self.connection.zuul_buildset_table.insert().values(
+ zuul_ref=item.current_build_set.ref,
+ pipeline=item.pipeline.name,
+ project=item.change.project.name,
+ change=item.change.number,
+ patchset=item.change.patchset,
+ ref=item.change.refspec,
+ score=score,
+ message=self._formatItemReport(
+ pipeline, item, with_jobs=False),
+ )
+ buildset_ins_result = conn.execute(buildset_ins)
+ build_inserts = []
+
+ for job in pipeline.getJobs(item):
+ build = item.current_build_set.getBuild(job.name)
+ if not build:
+                # build hasn't begun. The sql reporter can only send back
+ # stats about builds. It doesn't understand how to store
+ # information about the change.
+ continue
+
+ (result, url) = item.formatJobResult(job, url_pattern)
+
+ build_inserts.append({
+ 'buildset_id': buildset_ins_result.inserted_primary_key,
+ 'uuid': build.uuid,
+ 'job_name': build.job.name,
+ 'result': result,
+ 'start_time': datetime.datetime.fromtimestamp(
+ build.start_time),
+ 'end_time': datetime.datetime.fromtimestamp(
+ build.end_time),
+ 'voting': build.job.voting,
+ 'log_url': url,
+ 'node_name': build.node_name,
+ })
+ conn.execute(self.connection.zuul_build_table.insert(),
+ build_inserts)
+
+
+def getSchema():
+ sql_reporter = v.Schema({
+ 'score': int,
+ })
+ return sql_reporter
diff --git a/zuul/scheduler.py b/zuul/scheduler.py
index 8c26541..931571f 100644
--- a/zuul/scheduler.py
+++ b/zuul/scheduler.py
@@ -359,6 +359,7 @@
'reporter': {
'gerrit': 'zuul.reporter.gerrit:GerritReporter',
'smtp': 'zuul.reporter.smtp:SMTPReporter',
+ 'sql': 'zuul.reporter.sql:SQLReporter',
},
}
@@ -867,6 +868,8 @@
self.log.exception(
"Exception while canceling build %s "
"for change %s" % (build, item.change))
+ finally:
+ self.mutex.release(build.build_set.item, build.job)
self.layout = layout
self.maintainConnectionCache()
for trigger in self.triggers.values():
@@ -1501,8 +1504,9 @@
else:
self.log.debug("Preparing update repo for: %s" % item.change)
url = self.pipeline.source.getGitUrl(item.change.project)
+ connection_name = self.pipeline.source.connection.connection_name
self.sched.merger.updateRepo(item.change.project.name,
- url, build_set,
+ connection_name, url, build_set,
self.pipeline.precedence)
# merge:merge has been emitted properly:
build_set.merge_state = build_set.PENDING
@@ -1541,6 +1545,8 @@
except:
self.log.exception("Exception while canceling build %s "
"for change %s" % (build, item.change))
+ finally:
+ self.sched.mutex.release(build.build_set.item, build.job)
build.result = 'CANCELED'
canceled = True
self.updateBuildDescriptions(old_build_set)
diff --git a/zuul/source/gerrit.py b/zuul/source/gerrit.py
index 463f315..fa49505 100644
--- a/zuul/source/gerrit.py
+++ b/zuul/source/gerrit.py
@@ -36,7 +36,7 @@
class GerritSource(BaseSource):
name = 'gerrit'
- log = logging.getLogger("zuul.source.Gerrit")
+ log = logging.getLogger("zuul.GerritSource")
replication_timeout = 300
replication_retry_interval = 5
@@ -295,6 +295,9 @@
# cycle, we won't detect it. By explicitly performing a
# walk of the dependency tree, we will.
detect_cycle(dep, history)
+            # This is a git commit dependency, so we only ignore it if it
+            # is already merged; even if it is "ABANDONED" we should not
+            # ignore it.
if (not dep.is_merged) and dep not in needs_changes:
needs_changes.append(dep)
@@ -315,7 +318,7 @@
# cycle, we won't detect it. By explicitly performing a
# walk of the dependency tree, we will.
detect_cycle(dep, history)
- if (not dep.is_merged) and dep not in needs_changes:
+ if dep.open and dep not in needs_changes:
needs_changes.append(dep)
change.needs_changes = needs_changes
@@ -327,7 +330,7 @@
self.log.debug("Updating %s: Getting git-needed change %s,%s" %
(change, dep_num, dep_ps))
dep = self._getChange(dep_num, dep_ps)
- if (not dep.is_merged) and dep.is_current_patchset:
+ if dep.open and dep.is_current_patchset:
needed_by_changes.append(dep)
for record in self._getNeededByFromCommit(data['id'], change):
@@ -340,7 +343,7 @@
# reference the latest patchset of its Depends-On (this
# change).
dep = self._getChange(dep_num, dep_ps, refresh=True)
- if (not dep.is_merged) and dep.is_current_patchset:
+ if dep.open and dep.is_current_patchset:
needed_by_changes.append(dep)
change.needed_by_changes = needed_by_changes
diff --git a/zuul/trigger/gerrit.py b/zuul/trigger/gerrit.py
index 8a3fe42..70c65fd 100644
--- a/zuul/trigger/gerrit.py
+++ b/zuul/trigger/gerrit.py
@@ -20,7 +20,7 @@
class GerritTrigger(BaseTrigger):
name = 'gerrit'
- log = logging.getLogger("zuul.trigger.Gerrit")
+ log = logging.getLogger("zuul.GerritTrigger")
def getEventFilters(self, trigger_conf):
def toList(item):
@@ -82,14 +82,14 @@
def getSchema():
def toList(x):
return v.Any([x], x)
- variable_dict = v.Schema({}, extra=True)
+ variable_dict = v.Schema(dict)
approval = v.Schema({'username': str,
'email-filter': str,
'email': str,
'older-than': str,
'newer-than': str,
- }, extra=True)
+ }, extra=v.ALLOW_EXTRA)
gerrit_trigger = {
v.Required('event'):
diff --git a/zuul/trigger/timer.py b/zuul/trigger/timer.py
index f81312e..f982914 100644
--- a/zuul/trigger/timer.py
+++ b/zuul/trigger/timer.py
@@ -23,7 +23,7 @@
class TimerTrigger(BaseTrigger):
name = 'timer'
- log = logging.getLogger("zuul.Timer")
+ log = logging.getLogger("zuul.TimerTrigger")
def __init__(self, trigger_config={}, sched=None, connection=None):
super(TimerTrigger, self).__init__(trigger_config, sched, connection)
diff --git a/zuul/trigger/zuultrigger.py b/zuul/trigger/zuultrigger.py
index 00b21f2..3712707 100644
--- a/zuul/trigger/zuultrigger.py
+++ b/zuul/trigger/zuultrigger.py
@@ -134,7 +134,7 @@
'email': str,
'older-than': str,
'newer-than': str,
- }, extra=True)
+ }, extra=v.ALLOW_EXTRA)
zuul_trigger = {
v.Required('event'):