Merge "Pass pngcrush on status png files"
diff --git a/doc/source/index.rst b/doc/source/index.rst
index c5beda0..fcc0d45 100644
--- a/doc/source/index.rst
+++ b/doc/source/index.rst
@@ -1,8 +1,3 @@
-.. Zuul documentation master file, created by
-   sphinx-quickstart on Fri Jun  8 14:44:26 2012.
-   You can adapt this file completely to your liking, but it should at least
-   contain the root `toctree` directive.
-
 Zuul - A Project Gating System
 ==============================
 
diff --git a/doc/source/zuul.rst b/doc/source/zuul.rst
index 408b0ac..4c5a624 100644
--- a/doc/source/zuul.rst
+++ b/doc/source/zuul.rst
@@ -114,6 +114,11 @@
   starting jobs for a change.  Used by zuul-server only.
   ``status_url=https://zuul.example.com/status``
 
+**status_expiry**
+  Zuul will cache the status.json file for this many seconds. This is an
+  optional value and ``1`` is used by default.
+  ``status_expiry=1``
+
 **url_pattern**
   If you are storing build logs external to the system that originally
   ran jobs and wish to link to those logs when Zuul makes comments on
@@ -418,34 +423,13 @@
     containing 'retrigger' somewhere in the comment text are added to a
     change.
 
-    *require-approval*
+    *require-approval* (deprecated)
     This may be used for any event.  It requires that a certain kind
     of approval be present for the current patchset of the change (the
-    approval could be added by the event in question).  It takes
-    several sub-parameters, all of which are optional and are combined
-    together so that there must be an approval matching all specified
-    requirements.
-
-      *username*
-      If present, an approval from this username is required.
-
-      *email-filter*
-      If present, an approval with this email address is required.  It
-      is treated as a regular expression as above.
-
-      *older-than*
-      If present, the approval must be older than this amount of time
-      to match.  Provide a time interval as a number with a suffix of
-      "w" (weeks), "d" (days), "h" (hours), "m" (minutes), "s"
-      (seconds).  Example ``48h`` or ``2d``.
-
-      *newer-than*
-      If present, the approval must be newer than this amount of time
-      to match.  Same format as "older-than".
-
-      Any other field is interpreted as a review category and value
-      pair.  For example ``verified: 1`` would require that the approval
-      be for a +1 vote in the "Verified" column.
+    approval could be added by the event in question).  It follows the
+    same syntax as the "approval" pipeline requirement below.  This
+    form should be considered deprecated and the pipeline requirement
+    used instead.
 
   **timer**
     This trigger will run based on a cron-style time specification.
@@ -458,6 +442,53 @@
     supported, not the symbolic names.  Example: ``0 0 * * *`` runs
     at midnight.
 
+**require**
+  If this section is present, it established pre-requisites for any
+  kind of item entering the Pipeline.  Regardless of how the item is
+  to be enqueued (via any trigger or automatic dependency resolution),
+  the conditions specified here must be met or the item will not be
+  enqueued.
+
+  **approval**
+  This requires that a certain kind of approval be present for the
+  current patchset of the change (the approval could be added by the
+  event in question).  It takes several sub-parameters, all of which
+  are optional and are combined together so that there must be an
+  approval matching all specified requirements.
+
+    *username*
+    If present, an approval from this username is required.
+
+    *email-filter*
+    If present, an approval with this email address is required.  It
+    is treated as a regular expression as above.
+
+    *older-than*
+    If present, the approval must be older than this amount of time
+    to match.  Provide a time interval as a number with a suffix of
+    "w" (weeks), "d" (days), "h" (hours), "m" (minutes), "s"
+    (seconds).  Example ``48h`` or ``2d``.
+
+    *newer-than*
+    If present, the approval must be newer than this amount of time
+    to match.  Same format as "older-than".
+
+    Any other field is interpreted as a review category and value
+    pair.  For example ``verified: 1`` would require that the approval
+    be for a +1 vote in the "Verified" column.
+
+  **open**
+  A boolean value (``true`` or ``false``) that indicates whether the change
+  must be open or closed in order to be enqueued.
+
+  **current-patchset**
+  A boolean value (``true`` or ``false``) that indicates whether the change
+  must be the current patchset in order to be enqueued.
+
+  **status**
+  A string value that corresponds with the status of the change
+  reported by the trigger.  For example, when using the Gerrit
+  trigger, status values such as ``NEW`` or ``MERGED`` may be useful.
 
 **dequeue-on-new-patchset**
   Normally, if a new patchset is uploaded to a change that is in a
@@ -644,6 +675,18 @@
   The name of the job.  This field is treated as a regular expression
   and will be applied to each job that matches.
 
+**queue-name (optional)**
+  Zuul will automatically combine projects that share a job into
+  shared change queues for dependent pipeline managers.  In order to
+  report statistics about these queues, it is convenient for them to
+  have names.  Zuul can automatically name change queues, however
+  these can grow quite long and are prone to changing as projects in
+  the queue change.  If you assign a queue-name to a job, Zuul will
+  use that as the name for the shared change queue that contains that
+  job instead of the automatically generated one.  It is an error for
+  a shared change queue to have more than one job with a queue-name if
+  they are not the same.
+
 **failure-message (optional)**
   The message that should be reported to Gerrit if the job fails.
 
diff --git a/etc/status/.gitignore b/etc/status/.gitignore
index a4b9570..8b94cad 100644
--- a/etc/status/.gitignore
+++ b/etc/status/.gitignore
@@ -1,3 +1,4 @@
 public_html/jquery.min.js
 public_html/jquery-visibility.min.js
 public_html/bootstrap
+public_html/jquery.graphite.js
diff --git a/etc/status/fetch-dependencies.sh b/etc/status/fetch-dependencies.sh
index a5dddff..7823f29 100755
--- a/etc/status/fetch-dependencies.sh
+++ b/etc/status/fetch-dependencies.sh
@@ -8,6 +8,12 @@
 echo "Fetching jquery-visibility.min.js..."
 curl --silent https://raw.github.com/mathiasbynens/jquery-visibility/master/jquery-visibility.min.js > $BASE_DIR/public_html/jquery-visibility.min.js
 
+echo "Fetching jquery.graphite.js..."
+curl -L --silent https://github.com/prestontimmons/graphitejs/archive/master.zip > jquery-graphite.zip
+unzip -q -o jquery-graphite.zip -d $BASE_DIR/public_html/
+mv $BASE_DIR/public_html/graphitejs-master/jquery.graphite.js $BASE_DIR/public_html/
+rm -R jquery-graphite.zip $BASE_DIR/public_html/graphitejs-master
+
 echo "Fetching bootstrap..."
 curl -L --silent https://github.com/twbs/bootstrap/releases/download/v3.1.1/bootstrap-3.1.1-dist.zip > bootstrap.zip
 unzip -q -o bootstrap.zip -d $BASE_DIR/public_html/
diff --git a/etc/status/public_html/app.js b/etc/status/public_html/app.js
deleted file mode 100644
index a74711e..0000000
--- a/etc/status/public_html/app.js
+++ /dev/null
@@ -1,838 +0,0 @@
-// Client script for Zuul status page
-//
-// Copyright 2012 OpenStack Foundation
-// Copyright 2013 Timo Tijhof
-// Copyright 2013 Wikimedia Foundation
-// Copyright 2014 Rackspace Australia
-//
-// Licensed under the Apache License, Version 2.0 (the "License"); you may
-// not use this file except in compliance with the License. You may obtain
-// a copy of the License at
-//
-//      http://www.apache.org/licenses/LICENSE-2.0
-//
-// Unless required by applicable law or agreed to in writing, software
-// distributed under the License is distributed on an "AS IS" BASIS, WITHOUT
-// WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. See the
-// License for the specific language governing permissions and limitations
-// under the License.
-'use strict';
-
-(function ($) {
-    var $container, $msg, $indicator, $queueInfo, $queueEventsNum,
-        $queueResultsNum, $pipelines, $jq;
-    var xhr, zuul,
-        current_filter = '',
-        demo = location.search.match(/[?&]demo=([^?&]*)/),
-        source_url = location.search.match(/[?&]source_url=([^?&]*)/),
-        source = demo ?
-            './status-' + (demo[1] || 'basic') + '.json-sample' :
-            'status.json';
-    source = source_url ? source_url[1] : source;
-
-    function set_cookie(name, value) {
-        document.cookie = name + '=' + value + '; path=/';
-    }
-
-    function read_cookie(name, default_value) {
-        var nameEQ = name + '=';
-        var ca = document.cookie.split(';');
-        for(var i=0;i < ca.length;i++) {
-            var c = ca[i];
-            while (c.charAt(0) === ' ') {
-                c = c.substring(1, c.length);
-            }
-            if (c.indexOf(nameEQ) === 0) {
-                return c.substring(nameEQ.length, c.length);
-            }
-        }
-        return default_value;
-    }
-
-
-    zuul = {
-        enabled: true,
-        collapsed_exceptions: [],
-
-        schedule: function () {
-            if (!zuul.enabled) {
-                setTimeout(zuul.schedule, 5000);
-                return;
-            }
-            zuul.update().complete(function () {
-                setTimeout(zuul.schedule, 5000);
-            });
-        },
-
-        /** @return {jQuery.Promise} */
-        update: function () {
-            // Cancel the previous update if it hasn't completed yet.
-            if (xhr) {
-                xhr.abort();
-            }
-
-            zuul.emit('update-start');
-
-            xhr = $.getJSON(source)
-                .done(function (data) {
-                    if ('message' in data) {
-                        $msg.removeClass('alert-danger').addClass('alert-info');
-                        $msg.text(data.message);
-                        $msg.show();
-                    } else {
-                        $msg.empty();
-                        $msg.hide();
-                    }
-
-                    if ('zuul_version' in data) {
-                        $('#zuul-version-span').text(data.zuul_version);
-                    }
-                    if ('last_reconfigured' in data) {
-                        var last_reconfigured =
-                            new Date(data.last_reconfigured);
-                        $('#last-reconfigured-span').text(
-                            last_reconfigured.toString());
-                    }
-
-                    $pipelines.html('');
-                    $.each(data.pipelines, function (i, pipeline) {
-                        $pipelines.append(zuul.format.pipeline(pipeline));
-                    });
-
-                    $queueEventsNum.text(
-                        data.trigger_event_queue ?
-                            data.trigger_event_queue.length : '0'
-                    );
-                    $queueResultsNum.text(
-                        data.result_event_queue ?
-                            data.result_event_queue.length : '0'
-                    );
-                })
-                .fail(function (err, jqXHR, errMsg) {
-                    $msg.text(source + ': ' + errMsg).show();
-                    $msg.removeClass('zuul-msg-wrap-off');
-                })
-                .complete(function () {
-                    xhr = undefined;
-                    zuul.emit('update-end');
-                });
-
-            return xhr;
-        },
-
-        format: {
-            job: function(job) {
-                var $job_line;
-                if (job.url !== null) {
-                    $job_line = $('<a href="' + job.url + '" />');
-                }
-                else{
-                    $job_line = $('<span />');
-                }
-                $job_line.text(job.name)
-                    .append(zuul.format.job_status(job));
-
-                if (job.voting === false) {
-                    $job_line.append(
-                        $(' <small />').text(' (non-voting)')
-                    );
-                }
-                return $job_line;
-            },
-
-            job_status: function(job) {
-                var result = job.result ? job.result.toLowerCase() : null;
-                if (result === null) {
-                    result = job.url ? 'in progress' : 'queued';
-                }
-
-                if (result === 'in progress') {
-                    return zuul.format.job_progress_bar(job.elapsed_time,
-                                                        job.remaining_time);
-                }
-                else {
-                    return zuul.format.status_label(result);
-                }
-            },
-
-            status_label: function(result) {
-                var $status = $('<span />');
-                $status.addClass('zuul-job-result label');
-
-                switch (result) {
-                    case 'success':
-                        $status.addClass('label-success');
-                        break;
-                    case 'failure':
-                        $status.addClass('label-danger');
-                        break;
-                    case 'unstable':
-                        $status.addClass('label-warning');
-                        break;
-                    case 'in progress':
-                    case 'queued':
-                    case 'lost':
-                        $status.addClass('label-default');
-                        break;
-                }
-                $status.text(result);
-                return $status;
-            },
-
-            job_progress_bar: function(elapsed_time, remaining_time) {
-                var progress_percent = 100 * (elapsed_time / (elapsed_time +
-                                                              remaining_time));
-                var $bar_inner = $('<div />')
-                    .addClass('progress-bar')
-                    .attr('role', 'progressbar')
-                    .attr('aria-valuenow', 'progressbar')
-                    .attr('aria-valuemin', progress_percent)
-                    .attr('aria-valuemin', '0')
-                    .attr('aria-valuemax', '100')
-                    .css('width', progress_percent + '%');
-
-                var $bar_outter = $('<div />')
-                    .addClass('progress zuul-job-result')
-                    .append($bar_inner);
-
-                return $bar_outter;
-            },
-
-            enqueue_time: function(ms) {
-                // Special format case for enqueue time to add style
-                var hours = 60 * 60 * 1000;
-                var now = Date.now();
-                var delta = now - ms;
-                var status = 'text-success';
-                var text = zuul.format.time(delta, true);
-                if (delta > (4 * hours)) {
-                    status = 'text-danger';
-                } else if (delta > (2 * hours)) {
-                    status = 'text-warning';
-                }
-                return '<span class="' + status + '">' + text + '</span>';
-            },
-
-            time: function(ms, words) {
-                if (typeof(words) === 'undefined') {
-                    words = false;
-                }
-                var seconds = (+ms)/1000;
-                var minutes = Math.floor(seconds/60);
-                var hours = Math.floor(minutes/60);
-                seconds = Math.floor(seconds % 60);
-                minutes = Math.floor(minutes % 60);
-                var r = '';
-                if (words) {
-                    if (hours) {
-                        r += hours;
-                        r += ' hr ';
-                    }
-                    r += minutes + ' min';
-                } else {
-                    if (hours < 10) {
-                        r += '0';
-                    }
-                    r += hours + ':';
-                    if (minutes < 10) {
-                        r += '0';
-                    }
-                    r += minutes + ':';
-                    if (seconds < 10) {
-                        r += '0';
-                    }
-                    r += seconds;
-                }
-                return r;
-            },
-
-            change_total_progress_bar: function(change) {
-                var job_percent = Math.floor(100 / change.jobs.length);
-                var $bar_outter = $('<div />')
-                    .addClass('progress zuul-change-total-result');
-
-                $.each(change.jobs, function (i, job) {
-                    var result = job.result ? job.result.toLowerCase() : null;
-                    if (result === null) {
-                        result = job.url ? 'in progress' : 'queued';
-                    }
-
-                    if (result !== 'queued') {
-                        var $bar_inner = $('<div />')
-                            .addClass('progress-bar');
-
-                        switch (result) {
-                            case 'success':
-                                $bar_inner.addClass('progress-bar-success');
-                                break;
-                            case 'lost':
-                            case 'failure':
-                                $bar_inner.addClass('progress-bar-danger');
-                                break;
-                            case 'unstable':
-                                $bar_inner.addClass('progress-bar-warning');
-                                break;
-                            case 'in progress':
-                            case 'queued':
-                                break;
-                        }
-                        $bar_inner.attr('title', job.name)
-                            .css('width', job_percent + '%');
-                        $bar_outter.append($bar_inner);
-                    }
-                });
-                return $bar_outter;
-            },
-
-            change_header: function(change) {
-                var change_id = change.id || 'NA';
-                if (change_id.length === 40) {
-                    change_id = change_id.substr(0, 7);
-                }
-
-                var $change_link = $('<small />');
-                if (change.url !== null) {
-                    $change_link.append(
-                        $('<a />').attr('href', change.url).text(change.id)
-                    );
-                }
-                else {
-                    $change_link.text(change_id);
-                }
-
-                var $change_progress_row_left = $('<div />')
-                    .addClass('col-xs-3')
-                    .append($change_link);
-                var $change_progress_row_right = $('<div />')
-                    .addClass('col-xs-9')
-                    .append(zuul.format.change_total_progress_bar(change));
-
-                var $change_progress_row = $('<div />')
-                    .addClass('row')
-                    .append($change_progress_row_left)
-                    .append($change_progress_row_right);
-
-                var $project_span = $('<span />')
-                    .addClass('change_project')
-                    .text(change.project);
-
-                var $left = $('<div />')
-                    .addClass('col-xs-8')
-                    .append($project_span, $('<br />'), $change_progress_row);
-
-                var remaining_time = zuul.format.time(
-                        change.remaining_time, true);
-                var enqueue_time = zuul.format.enqueue_time(
-                        change.enqueue_time);
-                var $remaining_time = $('<small />').addClass('time')
-                    .attr('title', 'Remaining Time').html(remaining_time);
-                var $enqueue_time = $('<small />').addClass('time')
-                    .attr('title', 'Elapsed Time').html(enqueue_time);
-
-                var $right = $('<div />')
-                    .addClass('col-xs-4 text-right')
-                    .append($remaining_time, $('<br />'), $enqueue_time);
-
-                var $header = $('<div />')
-                    .addClass('row')
-                    .append($left, $right);
-                return $header;
-            },
-
-            change_list: function(jobs) {
-                var $list = $('<ul />')
-                    .addClass('list-group zuul-patchset-body');
-
-                $.each(jobs, function (i, job) {
-                    var $item = $('<li />')
-                        .addClass('list-group-item')
-                        .addClass('zuul-change-job')
-                        .append(zuul.format.job(job));
-                    $list.append($item);
-                });
-
-                return $list;
-            },
-
-            change_panel: function (change) {
-                var $header = $('<div />')
-                    .addClass('panel-heading zuul-patchset-header')
-                    .append(zuul.format.change_header(change));
-
-                var panel_id = change.id ? change.id.replace(',', '_')
-                                         : change.project.replace('/', '_') +
-                                           '-' + change.enqueue_time;
-                var $panel = $('<div />')
-                    .attr('id', panel_id)
-                    .addClass('panel panel-default zuul-change')
-                    .append($header)
-                    .append(zuul.format.change_list(change.jobs));
-
-                $header.click(zuul.toggle_patchset);
-                return $panel;
-            },
-
-            change_status_icon: function(change) {
-                var icon_name = 'green.png';
-                var icon_title = 'Succeeding';
-
-                if (change.active !== true) {
-                    // Grey icon
-                    icon_name = 'grey.png';
-                    icon_title = 'Waiting until closer to head of queue to' +
-                        ' start jobs';
-                }
-                else if (change.failing_reasons &&
-                         change.failing_reasons.length > 0) {
-                    var reason = change.failing_reasons.join(', ');
-                    icon_title = 'Failing because ' + reason;
-                    if (reason.match(/merge conflict/)) {
-                        // Black icon
-                        icon_name = 'black.png';
-                    }
-                    else {
-                        // Red icon
-                        icon_name = 'red.png';
-                    }
-                }
-
-                var $icon = $('<img />')
-                    .attr('src', 'images/' + icon_name)
-                    .attr('title', icon_title)
-                    .css('margin-top', '-6px');
-
-                return $icon;
-            },
-
-            change_with_status_tree: function(change, change_queue) {
-                var $change_row = $('<tr />');
-
-                for (var i = 0; i < change_queue._tree_columns; i++) {
-                    var $tree_cell  = $('<td />')
-                        .css('height', '100%')
-                        .css('padding', '0 0 10px 0')
-                        .css('margin', '0')
-                        .css('width', '16px')
-                        .css('min-width', '16px')
-                        .css('overflow', 'hidden')
-                        .css('vertical-align', 'top');
-
-                    if (i < change._tree.length && change._tree[i] !== null) {
-                        $tree_cell.css('background-image',
-                                       'url(\'images/line.png\')')
-                            .css('background-repeat', 'repeat-y');
-                    }
-
-                    if (i === change._tree_index) {
-                        $tree_cell.append(
-                            zuul.format.change_status_icon(change));
-                    }
-                    if (change._tree_branches.indexOf(i) !== -1) {
-                        var $image = $('<img />')
-                            .css('vertical-align', 'baseline');
-                        if (change._tree_branches.indexOf(i) ===
-                            change._tree_branches.length - 1) {
-                            // Angle line
-                            $image.attr('src', 'images/line-angle.png');
-                        }
-                        else {
-                            // T line
-                            $image.attr('src', 'images/line-t.png');
-                        }
-                        $tree_cell.append($image);
-                    }
-                    $change_row.append($tree_cell);
-                }
-
-                var change_width = 360 - 16*change_queue._tree_columns;
-                var $change_column = $('<td />')
-                    .css('width', change_width + 'px')
-                    .addClass('zuul-change-cell')
-                    .append(zuul.format.change_panel(change));
-
-                $change_row.append($change_column);
-
-                var $change_table = $('<table />')
-                    .addClass('zuul-change-box')
-                    .css('-moz-box-sizing', 'content-box')
-                    .css('box-sizing', 'content-box')
-                    .append($change_row);
-
-                return $change_table;
-            },
-
-            pipeline: function (pipeline) {
-                var count = zuul.create_tree(pipeline);
-                var $html = $('<div />')
-                    .addClass('zuul-pipeline col-md-4')
-                    .append(
-                        $('<h3 />')
-                            .css('vertical-align', 'middle')
-                            .text(pipeline.name)
-                            .append(
-                                $('<span />')
-                                    .addClass('badge pull-right')
-                                    .css('vertical-align', 'middle')
-                                    .css('margin-top', '0.5em')
-                                    .text(count)
-                            )
-                    );
-
-                if (typeof pipeline.description === 'string') {
-                    $html.append(
-                        $('<p />').append(
-                            $('<small />').text(pipeline.description)
-                        )
-                    );
-                }
-
-                $.each(pipeline.change_queues,
-                       function (queue_i, change_queue) {
-                    $.each(change_queue.heads, function (head_i, changes) {
-                        if (pipeline.change_queues.length > 1 &&
-                            head_i === 0) {
-                            var name = change_queue.name;
-                            var short_name = name;
-                            if (short_name.length > 32) {
-                                short_name = short_name.substr(0, 32) + '...';
-                            }
-                            $html.append(
-                                $('<p />')
-                                    .text('Queue: ')
-                                    .append(
-                                        $('<abbr />')
-                                            .attr('title', name)
-                                            .text(short_name)
-                                    )
-                            );
-                        }
-
-                        $.each(changes, function (change_i, change) {
-                            var $change_box =
-                                zuul.format.change_with_status_tree(
-                                    change, change_queue);
-                            $html.append($change_box);
-                            zuul.display_patchset($change_box);
-                        });
-                    });
-                });
-                return $html;
-            },
-
-            filter_form_group: function() {
-                // Update the filter form with a clear button if required
-
-                var $label = $('<label />')
-                    .addClass('control-label')
-                    .attr('for', 'filter_string')
-                    .text('Filters')
-                    .css('padding-right', '0.5em');
-
-                var $input = $('<input />')
-                    .attr('type', 'text')
-                    .attr('id', 'filter_string')
-                    .addClass('form-control')
-                    .attr('title',
-                          'project(s), pipeline(s) or review(s) comma ' +
-                          'separated')
-                    .attr('value', current_filter);
-
-                $input.change(zuul.handle_filter_change);
-
-                var $clear_icon = $('<span />')
-                    .addClass('form-control-feedback')
-                    .addClass('glyphicon glyphicon-remove-circle')
-                    .attr('id', 'filter_form_clear_box')
-                    .attr('title', 'clear filter')
-                    .css('cursor', 'pointer');
-
-                $clear_icon.click(function() {
-                    $('#filter_string').val('').change();
-                });
-
-                if (current_filter === '') {
-                    $clear_icon.hide();
-                }
-
-                var $form_group = $('<div />')
-                    .addClass('form-group has-feedback')
-                    .append($label, $input, $clear_icon);
-                return $form_group;
-            },
-
-            expand_form_group: function() {
-                var expand_by_default = (
-                    read_cookie('zuul_expand_by_default', false) === 'true');
-
-                var $checkbox = $('<input />')
-                    .attr('type', 'checkbox')
-                    .attr('id', 'expand_by_default')
-                    .prop('checked', expand_by_default)
-                    .change(zuul.handle_expand_by_default);
-
-                var $label = $('<label />')
-                    .css('padding-left', '1em')
-                    .html('Expand by default: ')
-                    .append($checkbox);
-
-                var $form_group = $('<div />')
-                    .addClass('checkbox')
-                    .append($label);
-                return $form_group;
-            },
-
-            control_form: function() {
-                // Build the filter form filling anything from cookies
-
-                var $control_form = $('<form />')
-                    .attr('role', 'form')
-                    .addClass('form-inline')
-                    .submit(zuul.handle_filter_change);
-
-                $control_form
-                    .append(zuul.format.filter_form_group())
-                    .append(zuul.format.expand_form_group());
-
-                return $control_form;
-            },
-        },
-
-        emit: function () {
-            $jq.trigger.apply($jq, arguments);
-            return this;
-        },
-        on: function () {
-            $jq.on.apply($jq, arguments);
-            return this;
-        },
-        one: function () {
-            $jq.one.apply($jq, arguments);
-            return this;
-        },
-
-        toggle_patchset: function(e) {
-            // Toggle showing/hiding the patchset when the header is clicked
-            // Grab the patchset panel
-            var $panel = $(e.target).parents('.zuul-change');
-            var $body = $panel.children('.zuul-patchset-body');
-            $body.toggle(200);
-            var collapsed_index = zuul.collapsed_exceptions.indexOf(
-                $panel.attr('id'));
-            if (collapsed_index === -1 ) {
-                // Currently not an exception, add it to list
-                zuul.collapsed_exceptions.push($panel.attr('id'));
-            }
-            else {
-                // Currently an except, remove from exceptions
-                zuul.collapsed_exceptions.splice(collapsed_index, 1);
-            }
-        },
-
-        display_patchset: function($change_box, animate) {
-            // Determine if to show or hide the patchset and/or the results
-            // when loaded
-
-            // See if we should hide the body/results
-            var $panel = $change_box.find('.zuul-change');
-            var panel_change = $panel.attr('id');
-            var $body = $panel.children('.zuul-patchset-body');
-            var expand_by_default = $('#expand_by_default').prop('checked');
-
-            var collapsed_index = zuul.collapsed_exceptions.indexOf(panel_change);
-
-            if (expand_by_default && collapsed_index === -1 ||
-                !expand_by_default && collapsed_index !== -1) {
-                // Expand by default, or is an exception
-                $body.show(animate);
-            }
-            else {
-                $body.hide(animate);
-            }
-
-            // Check if we should hide the whole panel
-            var panel_project = $panel.find('.change_project').text()
-                .toLowerCase();
-
-
-            var panel_pipeline = $change_box.parents('.zuul-pipeline')
-                .children('h3').html().toLowerCase();
-
-            if (current_filter !== '') {
-                var show_panel = false;
-                var filter = current_filter.trim().split(/[\s,]+/);
-                $.each(filter, function(index, f_val) {
-                    if (f_val !== '') {
-                        f_val = f_val.toLowerCase();
-                        if (panel_project.indexOf(f_val) !== -1 ||
-                            panel_pipeline.indexOf(f_val) !== -1 ||
-                            panel_change.indexOf(f_val) !== -1) {
-                            show_panel = true;
-                        }
-                    }
-                });
-                if (show_panel === true) {
-                    $change_box.show(animate);
-                }
-                else {
-                    $change_box.hide(animate);
-                }
-            }
-            else {
-                $change_box.show(animate);
-            }
-        },
-
-        handle_filter_change: function() {
-            // Update the filter and save it to a cookie
-            current_filter = $('#filter_string').val();
-            set_cookie('zuul_filter_string', current_filter);
-            if (current_filter === '') {
-                $('#filter_form_clear_box').hide();
-            }
-            else {
-                $('#filter_form_clear_box').show();
-            }
-
-            $('.zuul-change-box').each(function(index, obj) {
-                var $change_box = $(obj);
-                zuul.display_patchset($change_box, 200);
-            });
-            return false;
-        },
-
-        handle_expand_by_default: function(e) {
-            // Handle toggling expand by default
-            set_cookie('zuul_expand_by_default', e.target.checked);
-            zuul.collapsed_exceptions = [];
-            $('.zuul-change-box').each(function(index, obj) {
-                var $change_box = $(obj);
-                zuul.display_patchset($change_box, 200);
-            });
-        },
-
-        create_tree: function(pipeline) {
-            var count = 0;
-            var pipeline_max_tree_columns = 1;
-            $.each(pipeline.change_queues, function(change_queue_i,
-                                                       change_queue) {
-                var tree = [];
-                var max_tree_columns = 1;
-                var changes = [];
-                var last_tree_length = 0;
-                $.each(change_queue.heads, function(head_i, head) {
-                    $.each(head, function(change_i, change) {
-                        changes[change.id] = change;
-                        change._tree_position = change_i;
-                    });
-                });
-                $.each(change_queue.heads, function(head_i, head) {
-                    $.each(head, function(change_i, change) {
-                        count += 1;
-                        var idx = tree.indexOf(change.id);
-                        if (idx > -1) {
-                            change._tree_index = idx;
-                            // remove...
-                            tree[idx] = null;
-                            while (tree[tree.length - 1] === null) {
-                                tree.pop();
-                            }
-                        } else {
-                            change._tree_index = 0;
-                        }
-                        change._tree_branches = [];
-                        change._tree = [];
-                        if (typeof(change.items_behind) === 'undefined') {
-                            change.items_behind = [];
-                        }
-                        change.items_behind.sort(function(a, b) {
-                            return (changes[b]._tree_position -
-                                    changes[a]._tree_position);
-                        });
-                        $.each(change.items_behind, function(i, id) {
-                            tree.push(id);
-                            if (tree.length>last_tree_length &&
-                                last_tree_length > 0) {
-                                change._tree_branches.push(
-                                    tree.length - 1);
-                            }
-                        });
-                        if (tree.length > max_tree_columns) {
-                            max_tree_columns = tree.length;
-                        }
-                        if (tree.length > pipeline_max_tree_columns) {
-                            pipeline_max_tree_columns = tree.length;
-                        }
-                        change._tree = tree.slice(0);  // make a copy
-                        last_tree_length = tree.length;
-                    });
-                });
-                change_queue._tree_columns = max_tree_columns;
-            });
-            pipeline._tree_columns = pipeline_max_tree_columns;
-            return count;
-        },
-    };
-
-    current_filter = read_cookie('zuul_filter_string', current_filter);
-
-    $jq = $(zuul);
-
-    $jq.on('update-start', function () {
-        $container.addClass('zuul-container-loading');
-        $indicator.addClass('zuul-spinner-on');
-    });
-
-    $jq.on('update-end', function () {
-        $container.removeClass('zuul-container-loading');
-        setTimeout(function () {
-            $indicator.removeClass('zuul-spinner-on');
-        }, 500);
-    });
-
-    $jq.one('update-end', function () {
-        // Do this asynchronous so that if the first update adds a message, it
-        // will not animate while we fade in the content. Instead it simply
-        // appears with the rest of the content.
-        setTimeout(function () {
-            // Fade in the content
-            $container.addClass('zuul-container-ready');
-        });
-    });
-
-    $(function ($) {
-        $msg = $('<div />').addClass('alert').hide();
-        $indicator = $('<button class="btn pull-right zuul-spinner">' +
-                       'updating ' +
-                       '<span class="glyphicon glyphicon-refresh"></span>' +
-                       '</button>');
-        $queueInfo = $('<p>Queue lengths: <span>0</span> events, ' +
-                       '<span>0</span> results.</p>');
-        $queueEventsNum = $queueInfo.find('span').eq(0);
-        $queueResultsNum = $queueEventsNum.next();
-
-        var $control_form = zuul.format.control_form();
-
-        $pipelines = $('<div class="row"></div>');
-        var $zuulVersion = $('<p>Zuul version: <span id="zuul-version-span">' +
-                         '</span></p>');
-        var $lastReconf = $('<p>Last reconfigured: ' +
-                        '<span id="last-reconfigured-span"></span></p>');
-
-        $container = $('#zuul-container').append($msg, $indicator,
-                                                 $queueInfo, $control_form,
-                                                 $pipelines, $zuulVersion,
-                                                 $lastReconf);
-
-        zuul.schedule();
-
-        $(document).on({
-            'show.visibility': function () {
-                zuul.enabled = true;
-                zuul.update();
-            },
-            'hide.visibility': function () {
-                zuul.enabled = false;
-            }
-        });
-    });
-}(jQuery));
diff --git a/etc/status/public_html/index.html b/etc/status/public_html/index.html
index f2ee00d..d77470b 100644
--- a/etc/status/public_html/index.html
+++ b/etc/status/public_html/index.html
@@ -21,62 +21,20 @@
     <title>Zuul Status</title>
     <link rel="stylesheet" href="bootstrap/css/bootstrap.min.css">
     <link rel="stylesheet" href="bootstrap/css/bootstrap-responsive.min.css">
-    <style>
-        .zuul-change {
-            margin-bottom: 10px;
-        }
-
-        .zuul-change-id {
-            float: right;
-        }
-
-        .zuul-job-result {
-            float: right;
-            width: 70px;
-            height: 15px;
-            margin: 0;
-        }
-
-        .zuul-change-total-result {
-            height: 10px;
-            width: 100px;
-            margin: 5px 0 0 0;
-        }
-
-        .zuul-spinner,
-        .zuul-spinner:hover {
-            opacity: 0;
-            transition: opacity 0.5s ease-out;
-            cursor: default;
-            pointer-events: none;
-        }
-
-        .zuul-spinner-on,
-        .zuul-spinner-on:hover {
-            opacity: 1;
-            transition-duration: 0.2s;
-            cursor: progress;
-        }
-
-        .zuul-change-cell {
-            padding-left: 5px;
-        }
-
-        .zuul-change-job {
-            padding: 5px 10px;
-        }
-
-    </style>
+    <link rel="stylesheet" href="styles/zuul.css" />
 </head>
 <body>
-    <div class="container">
-        <h1>Zuul Status</h1>
-        <p>Real-time status monitor of Zuul, the pipeline manager between Gerrit and Workers.</p>
 
-        <div class="zuul-container" id="zuul-container"></div>
-    </div>
+    <div id="zuul_container"></div>
+
     <script src="jquery.min.js"></script>
     <script src="jquery-visibility.min.js"></script>
-    <script src="app.js"></script>
+    <script src="jquery.graphite.js"></script>
+    <script src="jquery.zuul.js"></script>
+    <script src="zuul.app.js"></script>
+    <script>
+        zuul_build_dom(jQuery, '#zuul_container');
+        zuul_start(jQuery);
+    </script>
 </body>
 </html>
diff --git a/etc/status/public_html/jquery.zuul.js b/etc/status/public_html/jquery.zuul.js
new file mode 100644
index 0000000..9a3c1fa
--- /dev/null
+++ b/etc/status/public_html/jquery.zuul.js
@@ -0,0 +1,877 @@
+// jquery plugin for Zuul status page
+//
+// Copyright 2012 OpenStack Foundation
+// Copyright 2013 Timo Tijhof
+// Copyright 2013 Wikimedia Foundation
+// Copyright 2014 Rackspace Australia
+//
+// Licensed under the Apache License, Version 2.0 (the "License"); you may
+// not use this file except in compliance with the License. You may obtain
+// a copy of the License at
+//
+//      http://www.apache.org/licenses/LICENSE-2.0
+//
+// Unless required by applicable law or agreed to in writing, software
+// distributed under the License is distributed on an "AS IS" BASIS, WITHOUT
+// WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. See the
+// License for the specific language governing permissions and limitations
+// under the License.
+'use strict';
+
+(function ($) {
+    function set_cookie(name, value) {
+        document.cookie = name + '=' + value + '; path=/';
+    }
+
+    function read_cookie(name, default_value) {
+        var nameEQ = name + '=';
+        var ca = document.cookie.split(';');
+        for(var i=0;i < ca.length;i++) {
+            var c = ca[i];
+            while (c.charAt(0) === ' ') {
+                c = c.substring(1, c.length);
+            }
+            if (c.indexOf(nameEQ) === 0) {
+                return c.substring(nameEQ.length, c.length);
+            }
+        }
+        return default_value;
+    }
+
+    $.zuul = function(options) {
+        var options = $.extend({
+            'enabled': true,
+            'graphite_url': '',
+            'source': 'status.json',
+            'msg_id': '#zuul_msg',
+            'pipelines_id': '#zuul_pipelines',
+            'queue_events_num': '#zuul_queue_events_num',
+            'queue_results_num': '#zuul_queue_results_num',
+        }, options);
+
+        var collapsed_exceptions = [];
+        var current_filter = read_cookie('zuul_filter_string', current_filter);
+        var $jq;
+
+        var xhr,
+            zuul_graph_update_count = 0,
+            zuul_sparkline_urls = {};
+
+        function get_sparkline_url(pipeline_name) {
+            if (options.graphite_url !== '') {
+                if (!(pipeline_name in zuul_sparkline_urls)) {
+                    zuul_sparkline_urls[pipeline_name] = $.fn.graphite
+                        .geturl({
+                        url: options.graphite_url,
+                        from: "-8hours",
+                        width: 100,
+                        height: 26,
+                        margin: 0,
+                        hideLegend: true,
+                        hideAxes: true,
+                        hideGrid: true,
+                        target: [
+                            "color(stats.gauges.zuul.pipeline." + pipeline_name
+                            + ".current_changes, '6b8182')"
+                        ]
+                    });
+                }
+                return zuul_sparkline_urls[pipeline_name];
+            }
+            return false;
+        }
+
+        var format = {
+            job: function(job) {
+                var $job_line = $('<span />');
+
+                if (job.url !== null) {
+                    $job_line.append(
+                        $('<a />')
+                            .addClass('zuul-job-name')
+                            .attr('href', job.url)
+                            .text(job.name)
+                    );
+                }
+                else {
+                    $job_line.append(
+                        $('<span />')
+                            .addClass('zuul-job-name')
+                            .text(job.name)
+                    );
+                }
+
+                $job_line.append(this.job_status(job));
+
+                if (job.voting === false) {
+                    $job_line.append(
+                        $(' <small />')
+                            .addClass('zuul-non-voting-desc')
+                            .text(' (non-voting)')
+                    );
+                }
+
+                return $job_line;
+            },
+
+            job_status: function(job) {
+                var result = job.result ? job.result.toLowerCase() : null;
+                if (result === null) {
+                    result = job.url ? 'in progress' : 'queued';
+                }
+
+                if (result === 'in progress') {
+                    return this.job_progress_bar(job.elapsed_time,
+                                                        job.remaining_time);
+                }
+                else {
+                    return this.status_label(result);
+                }
+            },
+
+            status_label: function(result) {
+                var $status = $('<span />');
+                $status.addClass('zuul-job-result label');
+
+                switch (result) {
+                    case 'success':
+                        $status.addClass('label-success');
+                        break;
+                    case 'failure':
+                        $status.addClass('label-danger');
+                        break;
+                    case 'unstable':
+                        $status.addClass('label-warning');
+                        break;
+                    case 'in progress':
+                    case 'queued':
+                    case 'lost':
+                        $status.addClass('label-default');
+                        break;
+                }
+                $status.text(result);
+                return $status;
+            },
+
+            job_progress_bar: function(elapsed_time, remaining_time) {
+                var progress_percent = 100 * (elapsed_time / (elapsed_time +
+                                                              remaining_time));
+                var $bar_inner = $('<div />')
+                    .addClass('progress-bar')
+                    .attr('role', 'progressbar')
+                    .attr('aria-valuenow', 'progressbar')
+                    .attr('aria-valuemin', progress_percent)
+                    .attr('aria-valuemin', '0')
+                    .attr('aria-valuemax', '100')
+                    .css('width', progress_percent + '%');
+
+                var $bar_outter = $('<div />')
+                    .addClass('progress zuul-job-result')
+                    .append($bar_inner);
+
+                return $bar_outter;
+            },
+
+            enqueue_time: function(ms) {
+                // Special format case for enqueue time to add style
+                var hours = 60 * 60 * 1000;
+                var now = Date.now();
+                var delta = now - ms;
+                var status = 'text-success';
+                var text = this.time(delta, true);
+                if (delta > (4 * hours)) {
+                    status = 'text-danger';
+                } else if (delta > (2 * hours)) {
+                    status = 'text-warning';
+                }
+                return '<span class="' + status + '">' + text + '</span>';
+            },
+
+            time: function(ms, words) {
+                if (typeof(words) === 'undefined') {
+                    words = false;
+                }
+                var seconds = (+ms)/1000;
+                var minutes = Math.floor(seconds/60);
+                var hours = Math.floor(minutes/60);
+                seconds = Math.floor(seconds % 60);
+                minutes = Math.floor(minutes % 60);
+                var r = '';
+                if (words) {
+                    if (hours) {
+                        r += hours;
+                        r += ' hr ';
+                    }
+                    r += minutes + ' min';
+                } else {
+                    if (hours < 10) {
+                        r += '0';
+                    }
+                    r += hours + ':';
+                    if (minutes < 10) {
+                        r += '0';
+                    }
+                    r += minutes + ':';
+                    if (seconds < 10) {
+                        r += '0';
+                    }
+                    r += seconds;
+                }
+                return r;
+            },
+
+            change_total_progress_bar: function(change) {
+                var job_percent = Math.floor(100 / change.jobs.length);
+                var $bar_outter = $('<div />')
+                    .addClass('progress zuul-change-total-result');
+
+                $.each(change.jobs, function (i, job) {
+                    var result = job.result ? job.result.toLowerCase() : null;
+                    if (result === null) {
+                        result = job.url ? 'in progress' : 'queued';
+                    }
+
+                    if (result !== 'queued') {
+                        var $bar_inner = $('<div />')
+                            .addClass('progress-bar');
+
+                        switch (result) {
+                            case 'success':
+                                $bar_inner.addClass('progress-bar-success');
+                                break;
+                            case 'lost':
+                            case 'failure':
+                                $bar_inner.addClass('progress-bar-danger');
+                                break;
+                            case 'unstable':
+                                $bar_inner.addClass('progress-bar-warning');
+                                break;
+                            case 'in progress':
+                            case 'queued':
+                                break;
+                        }
+                        $bar_inner.attr('title', job.name)
+                            .css('width', job_percent + '%');
+                        $bar_outter.append($bar_inner);
+                    }
+                });
+                return $bar_outter;
+            },
+
+            change_header: function(change) {
+                var change_id = change.id || 'NA';
+                if (change_id.length === 40) {
+                    change_id = change_id.substr(0, 7);
+                }
+
+                var $change_link = $('<small />');
+                if (change.url !== null) {
+                    $change_link.append(
+                        $('<a />').attr('href', change.url).text(change.id)
+                    );
+                }
+                else {
+                    $change_link.text(change_id);
+                }
+
+                var $change_progress_row_left = $('<div />')
+                    .addClass('col-xs-3')
+                    .append($change_link);
+                var $change_progress_row_right = $('<div />')
+                    .addClass('col-xs-9')
+                    .append(this.change_total_progress_bar(change));
+
+                var $change_progress_row = $('<div />')
+                    .addClass('row')
+                    .append($change_progress_row_left)
+                    .append($change_progress_row_right);
+
+                var $project_span = $('<span />')
+                    .addClass('change_project')
+                    .text(change.project);
+
+                var $left = $('<div />')
+                    .addClass('col-xs-8')
+                    .append($project_span, $change_progress_row);
+
+                var remaining_time = this.time(
+                        change.remaining_time, true);
+                var enqueue_time = this.enqueue_time(
+                        change.enqueue_time);
+                var $remaining_time = $('<small />').addClass('time')
+                    .attr('title', 'Remaining Time').html(remaining_time);
+                var $enqueue_time = $('<small />').addClass('time')
+                    .attr('title', 'Elapsed Time').html(enqueue_time);
+
+                var $right = $('<div />')
+                    .addClass('col-xs-4 text-right')
+                    .append($remaining_time, $('<br />'), $enqueue_time);
+
+                var $header = $('<div />')
+                    .addClass('row')
+                    .append($left, $right);
+                return $header;
+            },
+
+            change_list: function(jobs) {
+                var format = this;
+                var $list = $('<ul />')
+                    .addClass('list-group zuul-patchset-body');
+
+                $.each(jobs, function (i, job) {
+                    var $item = $('<li />')
+                        .addClass('list-group-item')
+                        .addClass('zuul-change-job')
+                        .append(format.job(job));
+                    $list.append($item);
+                });
+
+                return $list;
+            },
+
+            change_panel: function (change) {
+                var $header = $('<div />')
+                    .addClass('panel-heading zuul-patchset-header')
+                    .append(this.change_header(change));
+
+                var panel_id = change.id ? change.id.replace(',', '_')
+                                         : change.project.replace('/', '_') +
+                                           '-' + change.enqueue_time;
+                var $panel = $('<div />')
+                    .attr('id', panel_id)
+                    .addClass('panel panel-default zuul-change')
+                    .append($header)
+                    .append(this.change_list(change.jobs));
+
+                $header.click(this.toggle_patchset);
+                return $panel;
+            },
+
+            change_status_icon: function(change) {
+                var icon_name = 'green.png';
+                var icon_title = 'Succeeding';
+
+                if (change.active !== true) {
+                    // Grey icon
+                    icon_name = 'grey.png';
+                    icon_title = 'Waiting until closer to head of queue to' +
+                        ' start jobs';
+                }
+                else if (change.failing_reasons &&
+                         change.failing_reasons.length > 0) {
+                    var reason = change.failing_reasons.join(', ');
+                    icon_title = 'Failing because ' + reason;
+                    if (reason.match(/merge conflict/)) {
+                        // Black icon
+                        icon_name = 'black.png';
+                    }
+                    else {
+                        // Red icon
+                        icon_name = 'red.png';
+                    }
+                }
+
+                var $icon = $('<img />')
+                    .attr('src', 'images/' + icon_name)
+                    .attr('title', icon_title)
+                    .css('margin-top', '-6px');
+
+                return $icon;
+            },
+
+            change_with_status_tree: function(change, change_queue) {
+                var $change_row = $('<tr />');
+
+                for (var i = 0; i < change_queue._tree_columns; i++) {
+                    var $tree_cell  = $('<td />')
+                        .css('height', '100%')
+                        .css('padding', '0 0 10px 0')
+                        .css('margin', '0')
+                        .css('width', '16px')
+                        .css('min-width', '16px')
+                        .css('overflow', 'hidden')
+                        .css('vertical-align', 'top');
+
+                    if (i < change._tree.length && change._tree[i] !== null) {
+                        $tree_cell.css('background-image',
+                                       'url(\'images/line.png\')')
+                            .css('background-repeat', 'repeat-y');
+                    }
+
+                    if (i === change._tree_index) {
+                        $tree_cell.append(
+                            this.change_status_icon(change));
+                    }
+                    if (change._tree_branches.indexOf(i) !== -1) {
+                        var $image = $('<img />')
+                            .css('vertical-align', 'baseline');
+                        if (change._tree_branches.indexOf(i) ===
+                            change._tree_branches.length - 1) {
+                            // Angle line
+                            $image.attr('src', 'images/line-angle.png');
+                        }
+                        else {
+                            // T line
+                            $image.attr('src', 'images/line-t.png');
+                        }
+                        $tree_cell.append($image);
+                    }
+                    $change_row.append($tree_cell);
+                }
+
+                var change_width = 360 - 16*change_queue._tree_columns;
+                var $change_column = $('<td />')
+                    .css('width', change_width + 'px')
+                    .addClass('zuul-change-cell')
+                    .append(this.change_panel(change));
+
+                $change_row.append($change_column);
+
+                var $change_table = $('<table />')
+                    .addClass('zuul-change-box')
+                    .css('-moz-box-sizing', 'content-box')
+                    .css('box-sizing', 'content-box')
+                    .append($change_row);
+
+                return $change_table;
+            },
+
+            pipeline_sparkline: function(pipeline_name) {
+                if (options.graphite_url !== '') {
+                    var $sparkline = $('<img />')
+                        .addClass('pull-right')
+                        .attr('src', get_sparkline_url(pipeline_name));
+                    return $sparkline;
+                }
+                return false;
+            },
+
+            pipeline_header: function(pipeline, count) {
+                // Format the pipeline name, sparkline and description
+                var $header_div = $('<div />')
+                    .addClass('zuul-pipeline-header');
+
+                var $heading = $('<h3 />')
+                    .css('vertical-align', 'middle')
+                    .text(pipeline.name)
+                    .append(
+                        $('<span />')
+                            .addClass('badge pull-right')
+                            .css('vertical-align', 'middle')
+                            .css('margin-top', '0.5em')
+                            .text(count)
+                    )
+                    .append(this.pipeline_sparkline(pipeline.name));
+
+                $header_div.append($heading);
+
+                if (typeof pipeline.description === 'string') {
+                    $header_div.append(
+                        $('<p />').append(
+                            $('<small />').text(pipeline.description)
+                        )
+                    );
+                }
+                return $header_div;
+            },
+
+            pipeline: function (pipeline, count) {
+                var format = this;
+                var $html = $('<div />')
+                    .addClass('zuul-pipeline col-md-4')
+                    .append(this.pipeline_header(pipeline, count));
+
+                $.each(pipeline.change_queues,
+                       function (queue_i, change_queue) {
+                    $.each(change_queue.heads, function (head_i, changes) {
+                        if (pipeline.change_queues.length > 1 &&
+                            head_i === 0) {
+                            var name = change_queue.name;
+                            var short_name = name;
+                            if (short_name.length > 32) {
+                                short_name = short_name.substr(0, 32) + '...';
+                            }
+                            $html.append(
+                                $('<p />')
+                                    .text('Queue: ')
+                                    .append(
+                                        $('<abbr />')
+                                            .attr('title', name)
+                                            .text(short_name)
+                                    )
+                            );
+                        }
+
+                        $.each(changes, function (change_i, change) {
+                            var $change_box =
+                                format.change_with_status_tree(
+                                    change, change_queue);
+                            $html.append($change_box);
+                            format.display_patchset($change_box);
+                        });
+                    });
+                });
+                return $html;
+            },
+
+            toggle_patchset: function(e) {
+                // Toggle showing/hiding the patchset when the header is
+                // clicked.
+
+                // Grab the patchset panel
+                var $panel = $(e.target).parents('.zuul-change');
+                var $body = $panel.children('.zuul-patchset-body');
+                $body.toggle(200);
+                var collapsed_index = collapsed_exceptions.indexOf(
+                    $panel.attr('id'));
+                if (collapsed_index === -1 ) {
+                    // Currently not an exception, add it to list
+                    collapsed_exceptions.push($panel.attr('id'));
+                }
+                else {
+                    // Currently an except, remove from exceptions
+                    collapsed_exceptions.splice(collapsed_index, 1);
+                }
+            },
+
+            display_patchset: function($change_box, animate) {
+                // Determine if to show or hide the patchset and/or the results
+                // when loaded
+
+                // See if we should hide the body/results
+                var $panel = $change_box.find('.zuul-change');
+                var panel_change = $panel.attr('id');
+                var $body = $panel.children('.zuul-patchset-body');
+                var expand_by_default = $('#expand_by_default')
+                    .prop('checked');
+
+                var collapsed_index = collapsed_exceptions
+                    .indexOf(panel_change);
+
+                if (expand_by_default && collapsed_index === -1 ||
+                    !expand_by_default && collapsed_index !== -1) {
+                    // Expand by default, or is an exception
+                    $body.show(animate);
+                }
+                else {
+                    $body.hide(animate);
+                }
+
+                // Check if we should hide the whole panel
+                var panel_project = $panel.find('.change_project').text()
+                    .toLowerCase();
+
+
+                var panel_pipeline = $change_box
+                    .parents('.zuul-pipeline')
+                    .find('.zuul-pipeline-header > h3')
+                    .html()
+                    .toLowerCase();
+
+                if (current_filter !== '') {
+                    var show_panel = false;
+                    var filter = current_filter.trim().split(/[\s,]+/);
+                    $.each(filter, function(index, f_val) {
+                        if (f_val !== '') {
+                            f_val = f_val.toLowerCase();
+                            if (panel_project.indexOf(f_val) !== -1 ||
+                                panel_pipeline.indexOf(f_val) !== -1 ||
+                                panel_change.indexOf(f_val) !== -1) {
+                                show_panel = true;
+                            }
+                        }
+                    });
+                    if (show_panel === true) {
+                        $change_box.show(animate);
+                    }
+                    else {
+                        $change_box.hide(animate);
+                    }
+                }
+                else {
+                    $change_box.show(animate);
+                }
+            },
+        };
+
+        var app = {
+            schedule: function (app) {
+                var app = app || this;
+                if (!options.enabled) {
+                    setTimeout(function() {app.schedule(app);}, 5000);
+                    return;
+                }
+                app.update().complete(function () {
+                    setTimeout(function() {app.schedule(app);}, 5000);
+                });
+
+                /* Only update graphs every minute */
+                if (zuul_graph_update_count > 11) {
+                    zuul_graph_update_count = 0;
+                    zuul.update_sparklines();
+                }
+            },
+
+            /** @return {jQuery.Promise} */
+            update: function () {
+                // Cancel the previous update if it hasn't completed yet.
+                if (xhr) {
+                    xhr.abort();
+                }
+
+                this.emit('update-start');
+                var app = this;
+
+                var $msg = $(options.msg_id)
+                xhr = $.getJSON(options.source)
+                    .done(function (data) {
+                        if ('message' in data) {
+                            $msg.removeClass('alert-danger')
+                                .addClass('alert-info')
+                                .text(data.message)
+                                .show();
+                        } else {
+                            $msg.empty()
+                                .hide();
+                        }
+
+                        if ('zuul_version' in data) {
+                            $('#zuul-version-span').text(data.zuul_version);
+                        }
+                        if ('last_reconfigured' in data) {
+                            var last_reconfigured =
+                                new Date(data.last_reconfigured);
+                            $('#last-reconfigured-span').text(
+                                last_reconfigured.toString());
+                        }
+
+                        var $pipelines = $(options.pipelines_id);
+                        $pipelines.html('');
+                        $.each(data.pipelines, function (i, pipeline) {
+                            var count = app.create_tree(pipeline);
+                            $pipelines.append(
+                                format.pipeline(pipeline, count));
+                        });
+
+                        $(options.queue_events_num).text(
+                            data.trigger_event_queue ?
+                                data.trigger_event_queue.length : '0'
+                        );
+                        $(options.queue_results_num).text(
+                            data.result_event_queue ?
+                                data.result_event_queue.length : '0'
+                        );
+                    })
+                    .fail(function (err, jqXHR, errMsg) {
+                        $msg.text(source + ': ' + errMsg).show();
+                        $msg.removeClass('zuul-msg-wrap-off');
+                    })
+                    .complete(function () {
+                        xhr = undefined;
+                        app.emit('update-end');
+                    });
+
+                return xhr;
+            },
+
+            update_sparklines: function() {
+                $.each(zuul_sparkline_urls, function(name, url) {
+                    var newimg = new Image();
+                    var parts = url.split('#');
+                    newimg.src = parts[0] + '#' + new Date().getTime();
+                    $(newimg).load(function (x) {
+                        zuul_sparkline_urls[name] = newimg.src;
+                    });
+                });
+            },
+
+            emit: function () {
+                $jq.trigger.apply($jq, arguments);
+                return this;
+            },
+            on: function () {
+                $jq.on.apply($jq, arguments);
+                return this;
+            },
+            one: function () {
+                $jq.one.apply($jq, arguments);
+                return this;
+            },
+
+            control_form: function() {
+                // Build the filter form filling anything from cookies
+
+                var $control_form = $('<form />')
+                    .attr('role', 'form')
+                    .addClass('form-inline')
+                    .submit(this.handle_filter_change);
+
+                $control_form
+                    .append(this.filter_form_group())
+                    .append(this.expand_form_group());
+
+                return $control_form;
+            },
+
+            filter_form_group: function() {
+                // Update the filter form with a clear button if required
+
+                var $label = $('<label />')
+                    .addClass('control-label')
+                    .attr('for', 'filter_string')
+                    .text('Filters')
+                    .css('padding-right', '0.5em');
+
+                var $input = $('<input />')
+                    .attr('type', 'text')
+                    .attr('id', 'filter_string')
+                    .addClass('form-control')
+                    .attr('title',
+                          'project(s), pipeline(s) or review(s) comma ' +
+                          'separated')
+                    .attr('value', current_filter);
+
+                $input.change(this.handle_filter_change);
+
+                var $clear_icon = $('<span />')
+                    .addClass('form-control-feedback')
+                    .addClass('glyphicon glyphicon-remove-circle')
+                    .attr('id', 'filter_form_clear_box')
+                    .attr('title', 'clear filter')
+                    .css('cursor', 'pointer');
+
+                $clear_icon.click(function() {
+                    $('#filter_string').val('').change();
+                });
+
+                if (current_filter === '') {
+                    $clear_icon.hide();
+                }
+
+                var $form_group = $('<div />')
+                    .addClass('form-group has-feedback')
+                    .append($label, $input, $clear_icon);
+                return $form_group;
+            },
+
+            expand_form_group: function() {
+                var expand_by_default = (
+                    read_cookie('zuul_expand_by_default', false) === 'true');
+
+                var $checkbox = $('<input />')
+                    .attr('type', 'checkbox')
+                    .attr('id', 'expand_by_default')
+                    .prop('checked', expand_by_default)
+                    .change(this.handle_expand_by_default);
+
+                var $label = $('<label />')
+                    .css('padding-left', '1em')
+                    .html('Expand by default: ')
+                    .append($checkbox);
+
+                var $form_group = $('<div />')
+                    .addClass('checkbox')
+                    .append($label);
+                return $form_group;
+            },
+
+            handle_filter_change: function() {
+                // Update the filter and save it to a cookie
+                current_filter = $('#filter_string').val();
+                set_cookie('zuul_filter_string', current_filter);
+                if (current_filter === '') {
+                    $('#filter_form_clear_box').hide();
+                }
+                else {
+                    $('#filter_form_clear_box').show();
+                }
+
+                $('.zuul-change-box').each(function(index, obj) {
+                    var $change_box = $(obj);
+                    format.display_patchset($change_box, 200);
+                });
+                return false;
+            },
+
+            handle_expand_by_default: function(e) {
+                // Handle toggling expand by default
+                set_cookie('zuul_expand_by_default', e.target.checked);
+                collapsed_exceptions = [];
+                $('.zuul-change-box').each(function(index, obj) {
+                    var $change_box = $(obj);
+                    format.display_patchset($change_box, 200);
+                });
+            },
+
+            create_tree: function(pipeline) {
+                var count = 0;
+                var pipeline_max_tree_columns = 1;
+                $.each(pipeline.change_queues, function(change_queue_i,
+                                                           change_queue) {
+                    var tree = [];
+                    var max_tree_columns = 1;
+                    var changes = [];
+                    var last_tree_length = 0;
+                    $.each(change_queue.heads, function(head_i, head) {
+                        $.each(head, function(change_i, change) {
+                            changes[change.id] = change;
+                            change._tree_position = change_i;
+                        });
+                    });
+                    $.each(change_queue.heads, function(head_i, head) {
+                        $.each(head, function(change_i, change) {
+                            count += 1;
+                            var idx = tree.indexOf(change.id);
+                            if (idx > -1) {
+                                change._tree_index = idx;
+                                // remove...
+                                tree[idx] = null;
+                                while (tree[tree.length - 1] === null) {
+                                    tree.pop();
+                                }
+                            } else {
+                                change._tree_index = 0;
+                            }
+                            change._tree_branches = [];
+                            change._tree = [];
+                            if (typeof(change.items_behind) === 'undefined') {
+                                change.items_behind = [];
+                            }
+                            change.items_behind.sort(function(a, b) {
+                                return (changes[b]._tree_position -
+                                        changes[a]._tree_position);
+                            });
+                            $.each(change.items_behind, function(i, id) {
+                                tree.push(id);
+                                if (tree.length>last_tree_length &&
+                                    last_tree_length > 0) {
+                                    change._tree_branches.push(
+                                        tree.length - 1);
+                                }
+                            });
+                            if (tree.length > max_tree_columns) {
+                                max_tree_columns = tree.length;
+                            }
+                            if (tree.length > pipeline_max_tree_columns) {
+                                pipeline_max_tree_columns = tree.length;
+                            }
+                            change._tree = tree.slice(0);  // make a copy
+                            last_tree_length = tree.length;
+                        });
+                    });
+                    change_queue._tree_columns = max_tree_columns;
+                });
+                pipeline._tree_columns = pipeline_max_tree_columns;
+                return count;
+            },
+        };
+
+        $jq = $(app);
+        return {
+            options: options,
+            format: format,
+            app: app,
+            jq: $jq
+        };
+    }
+}(jQuery));
diff --git a/etc/status/public_html/styles/zuul.css b/etc/status/public_html/styles/zuul.css
new file mode 100644
index 0000000..e833f4b
--- /dev/null
+++ b/etc/status/public_html/styles/zuul.css
@@ -0,0 +1,56 @@
+.zuul-change {
+    margin-bottom: 10px;
+}
+
+.zuul-change-id {
+    float: right;
+}
+
+.zuul-job-result {
+    float: right;
+    width: 70px;
+    height: 15px;
+    margin: 2px 0 0 0;
+}
+
+.zuul-change-total-result {
+    height: 10px;
+    width: 100px;
+    margin: 5px 0 0 0;
+}
+
+.zuul-spinner,
+.zuul-spinner:hover {
+    opacity: 0;
+    transition: opacity 0.5s ease-out;
+    cursor: default;
+    pointer-events: none;
+}
+
+.zuul-spinner-on,
+.zuul-spinner-on:hover {
+    opacity: 1;
+    transition-duration: 0.2s;
+    cursor: progress;
+}
+
+.zuul-change-cell {
+    padding-left: 5px;
+}
+
+.zuul-change-job {
+    padding: 2px 8px;
+}
+
+.zuul-job-name {
+    font-size: small;
+}
+
+.zuul-non-voting-desc {
+    font-size: smaller;
+}
+
+.zuul-patchset-header {
+    font-size: small;
+    padding: 8px 12px;
+}
\ No newline at end of file
diff --git a/etc/status/public_html/zuul.app.js b/etc/status/public_html/zuul.app.js
new file mode 100644
index 0000000..6f87a92
--- /dev/null
+++ b/etc/status/public_html/zuul.app.js
@@ -0,0 +1,97 @@
+// Client script for Zuul status page
+//
+// Copyright 2013 OpenStack Foundation
+// Copyright 2013 Timo Tijhof
+// Copyright 2013 Wikimedia Foundation
+// Copyright 2014 Rackspace Australia
+//
+// Licensed under the Apache License, Version 2.0 (the "License"); you may
+// not use this file except in compliance with the License. You may obtain
+// a copy of the License at
+//
+//      http://www.apache.org/licenses/LICENSE-2.0
+//
+// Unless required by applicable law or agreed to in writing, software
+// distributed under the License is distributed on an "AS IS" BASIS, WITHOUT
+// WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. See the
+// License for the specific language governing permissions and limitations
+// under the License.
+
+function zuul_build_dom($, container) {
+    // Build a default-looking DOM
+    default_layout = '<div class="container">'
+        + '<h1>Zuul Status</h1>'
+        + '<p>Real-time status monitor of Zuul, the pipeline manager between Gerrit and Workers.</p>'
+        + '<div class="zuul-container" id="zuul-container">'
+        + '<div style="display: none;" class="alert" id="zuul_msg"></div>'
+        + '<button class="btn pull-right zuul-spinner">updating <span class="glyphicon glyphicon-refresh"></span></button>'
+        + '<p>Queue lengths: <span id="zuul_queue_events_num">0</span> events, <span id="zuul_queue_results_num">0</span> results.</p>'
+        + '<div id="zuul_controls"></div>'
+        + '<div id="zuul_pipelines" class="row"></div>'
+        + '<p>Zuul version: <span id="zuul-version-span"></span></p>'
+        + '<p>Last reconfigured: <span id="last-reconfigured-span"></span></p>'
+        + '</div></div>';
+
+    $(function ($) {
+        // DOM ready
+        $container = $(container);
+        $container.html(default_layout);
+    });
+}
+
+function zuul_start($) {
+    // Start the zuul app (expects default dom)
+
+    var $container, $indicator;
+    var demo = location.search.match(/[?&]demo=([^?&]*)/),
+        source_url = location.search.match(/[?&]source_url=([^?&]*)/),
+        source = demo ? './status-' + (demo[1] || 'basic') + '.json-sample' :
+            'status.json';
+    source = source_url ? source_url[1] : source;
+
+    var zuul = $.zuul({
+        source: source,
+        //graphite_url: 'http://graphite.openstack.org/render/'
+    });
+
+    zuul.jq.on('update-start', function () {
+        $container.addClass('zuul-container-loading');
+        $indicator.addClass('zuul-spinner-on');
+    });
+
+    zuul.jq.on('update-end', function () {
+        $container.removeClass('zuul-container-loading');
+        setTimeout(function () {
+            $indicator.removeClass('zuul-spinner-on');
+        }, 500);
+    });
+
+    zuul.jq.one('update-end', function () {
+        // Do this asynchronous so that if the first update adds a
+        // message, it will not animate while we fade in the content.
+        // Instead it simply appears with the rest of the content.
+        setTimeout(function () {
+            // Fade in the content
+            $container.addClass('zuul-container-ready');
+        });
+    });
+
+    $(function ($) {
+        // DOM ready
+        $container = $('#zuul-container');
+        $indicator = $('#zuul-spinner');
+        $('#zuul_controls').append(zuul.app.control_form());
+
+        zuul.app.schedule();
+
+        $(document).on({
+            'show.visibility': function () {
+                zuul.options.enabled = true;
+                zuul.app.update();
+            },
+            'hide.visibility': function () {
+                zuul.options.enabled = false;
+            }
+        });
+    });
+}
\ No newline at end of file
diff --git a/tests/base.py b/tests/base.py
new file mode 100755
index 0000000..01b42f2
--- /dev/null
+++ b/tests/base.py
@@ -0,0 +1,1204 @@
+#!/usr/bin/env python
+
+# Copyright 2012 Hewlett-Packard Development Company, L.P.
+#
+# Licensed under the Apache License, Version 2.0 (the "License"); you may
+# not use this file except in compliance with the License. You may obtain
+# a copy of the License at
+#
+#      http://www.apache.org/licenses/LICENSE-2.0
+#
+# Unless required by applicable law or agreed to in writing, software
+# distributed under the License is distributed on an "AS IS" BASIS, WITHOUT
+# WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. See the
+# License for the specific language governing permissions and limitations
+# under the License.
+
+import ConfigParser
+import gc
+import hashlib
+import json
+import logging
+import os
+import pprint
+import Queue
+import random
+import re
+import select
+import shutil
+import socket
+import string
+import subprocess
+import swiftclient
+import threading
+import time
+import urllib2
+
+import git
+import gear
+import fixtures
+import six.moves.urllib.parse as urlparse
+import statsd
+import testtools
+
+import zuul.scheduler
+import zuul.webapp
+import zuul.rpclistener
+import zuul.launcher.gearman
+import zuul.lib.swift
+import zuul.merger.server
+import zuul.merger.client
+import zuul.reporter.gerrit
+import zuul.reporter.smtp
+import zuul.trigger.gerrit
+import zuul.trigger.timer
+
+FIXTURE_DIR = os.path.join(os.path.dirname(__file__),
+                           'fixtures')
+
+logging.basicConfig(level=logging.DEBUG,
+                    format='%(asctime)s %(name)-32s '
+                    '%(levelname)-8s %(message)s')
+
+
+def repack_repo(path):
+    cmd = ['git', '--git-dir=%s/.git' % path, 'repack', '-afd']
+    output = subprocess.Popen(cmd, close_fds=True,
+                              stdout=subprocess.PIPE,
+                              stderr=subprocess.PIPE)
+    out = output.communicate()
+    if output.returncode:
+        raise Exception("git repack returned %d" % output.returncode)
+    return out
+
+
+def random_sha1():
+    return hashlib.sha1(str(random.random())).hexdigest()
+
+
+class ChangeReference(git.Reference):
+    _common_path_default = "refs/changes"
+    _points_to_commits_only = True
+
+
+class FakeChange(object):
+    categories = {'APRV': ('Approved', -1, 1),
+                  'CRVW': ('Code-Review', -2, 2),
+                  'VRFY': ('Verified', -2, 2)}
+
+    def __init__(self, gerrit, number, project, branch, subject,
+                 status='NEW', upstream_root=None):
+        self.gerrit = gerrit
+        self.reported = 0
+        self.queried = 0
+        self.patchsets = []
+        self.number = number
+        self.project = project
+        self.branch = branch
+        self.subject = subject
+        self.latest_patchset = 0
+        self.depends_on_change = None
+        self.needed_by_changes = []
+        self.fail_merge = False
+        self.messages = []
+        self.data = {
+            'branch': branch,
+            'comments': [],
+            'commitMessage': subject,
+            'createdOn': time.time(),
+            'id': 'I' + random_sha1(),
+            'lastUpdated': time.time(),
+            'number': str(number),
+            'open': status == 'NEW',
+            'owner': {'email': 'user@example.com',
+                      'name': 'User Name',
+                      'username': 'username'},
+            'patchSets': self.patchsets,
+            'project': project,
+            'status': status,
+            'subject': subject,
+            'submitRecords': [],
+            'url': 'https://hostname/%s' % number}
+
+        self.upstream_root = upstream_root
+        self.addPatchset()
+        self.data['submitRecords'] = self.getSubmitRecords()
+        self.open = status == 'NEW'
+
+    def add_fake_change_to_repo(self, msg, fn, large):
+        path = os.path.join(self.upstream_root, self.project)
+        repo = git.Repo(path)
+        ref = ChangeReference.create(repo, '1/%s/%s' % (self.number,
+                                                        self.latest_patchset),
+                                     'refs/tags/init')
+        repo.head.reference = ref
+        repo.head.reset(index=True, working_tree=True)
+        repo.git.clean('-x', '-f', '-d')
+
+        path = os.path.join(self.upstream_root, self.project)
+        if not large:
+            fn = os.path.join(path, fn)
+            f = open(fn, 'w')
+            f.write("test %s %s %s\n" %
+                    (self.branch, self.number, self.latest_patchset))
+            f.close()
+            repo.index.add([fn])
+        else:
+            for fni in range(100):
+                fn = os.path.join(path, str(fni))
+                f = open(fn, 'w')
+                for ci in range(4096):
+                    f.write(random.choice(string.printable))
+                f.close()
+                repo.index.add([fn])
+
+        r = repo.index.commit(msg)
+        repo.head.reference = 'master'
+        repo.head.reset(index=True, working_tree=True)
+        repo.git.clean('-x', '-f', '-d')
+        repo.heads['master'].checkout()
+        return r
+
+    def addPatchset(self, files=[], large=False):
+        self.latest_patchset += 1
+        if files:
+            fn = files[0]
+        else:
+            fn = '%s-%s' % (self.branch, self.number)
+        msg = self.subject + '-' + str(self.latest_patchset)
+        c = self.add_fake_change_to_repo(msg, fn, large)
+        ps_files = [{'file': '/COMMIT_MSG',
+                     'type': 'ADDED'},
+                    {'file': 'README',
+                     'type': 'MODIFIED'}]
+        for f in files:
+            ps_files.append({'file': f, 'type': 'ADDED'})
+        d = {'approvals': [],
+             'createdOn': time.time(),
+             'files': ps_files,
+             'number': str(self.latest_patchset),
+             'ref': 'refs/changes/1/%s/%s' % (self.number,
+                                              self.latest_patchset),
+             'revision': c.hexsha,
+             'uploader': {'email': 'user@example.com',
+                          'name': 'User name',
+                          'username': 'user'}}
+        self.data['currentPatchSet'] = d
+        self.patchsets.append(d)
+        self.data['submitRecords'] = self.getSubmitRecords()
+
+    def getPatchsetCreatedEvent(self, patchset):
+        event = {"type": "patchset-created",
+                 "change": {"project": self.project,
+                            "branch": self.branch,
+                            "id": "I5459869c07352a31bfb1e7a8cac379cabfcb25af",
+                            "number": str(self.number),
+                            "subject": self.subject,
+                            "owner": {"name": "User Name"},
+                            "url": "https://hostname/3"},
+                 "patchSet": self.patchsets[patchset - 1],
+                 "uploader": {"name": "User Name"}}
+        return event
+
+    def getChangeRestoredEvent(self):
+        event = {"type": "change-restored",
+                 "change": {"project": self.project,
+                            "branch": self.branch,
+                            "id": "I5459869c07352a31bfb1e7a8cac379cabfcb25af",
+                            "number": str(self.number),
+                            "subject": self.subject,
+                            "owner": {"name": "User Name"},
+                            "url": "https://hostname/3"},
+                 "restorer": {"name": "User Name"},
+                 "reason": ""}
+        return event
+
+    def getChangeCommentEvent(self, patchset):
+        event = {"type": "comment-added",
+                 "change": {"project": self.project,
+                            "branch": self.branch,
+                            "id": "I5459869c07352a31bfb1e7a8cac379cabfcb25af",
+                            "number": str(self.number),
+                            "subject": self.subject,
+                            "owner": {"name": "User Name"},
+                            "url": "https://hostname/3"},
+                 "patchSet": self.patchsets[patchset - 1],
+                 "author": {"name": "User Name"},
+                 "approvals": [{"type": "Code-Review",
+                                "description": "Code-Review",
+                                "value": "0"}],
+                 "comment": "This is a comment"}
+        return event
+
+    def addApproval(self, category, value, username='jenkins',
+                    granted_on=None):
+        if not granted_on:
+            granted_on = time.time()
+        approval = {'description': self.categories[category][0],
+                    'type': category,
+                    'value': str(value),
+                    'by': {
+                        'username': username,
+                        'email': username + '@example.com',
+                    },
+                    'grantedOn': int(granted_on)}
+        for i, x in enumerate(self.patchsets[-1]['approvals'][:]):
+            if x['by']['username'] == username and x['type'] == category:
+                del self.patchsets[-1]['approvals'][i]
+        self.patchsets[-1]['approvals'].append(approval)
+        event = {'approvals': [approval],
+                 'author': {'email': 'user@example.com',
+                            'name': 'User Name',
+                            'username': 'username'},
+                 'change': {'branch': self.branch,
+                            'id': 'Iaa69c46accf97d0598111724a38250ae76a22c87',
+                            'number': str(self.number),
+                            'owner': {'email': 'user@example.com',
+                                      'name': 'User Name',
+                                      'username': 'username'},
+                            'project': self.project,
+                            'subject': self.subject,
+                            'topic': 'master',
+                            'url': 'https://hostname/459'},
+                 'comment': '',
+                 'patchSet': self.patchsets[-1],
+                 'type': 'comment-added'}
+        self.data['submitRecords'] = self.getSubmitRecords()
+        return json.loads(json.dumps(event))
+
+    def getSubmitRecords(self):
+        status = {}
+        for cat in self.categories.keys():
+            status[cat] = 0
+
+        for a in self.patchsets[-1]['approvals']:
+            cur = status[a['type']]
+            cat_min, cat_max = self.categories[a['type']][1:]
+            new = int(a['value'])
+            if new == cat_min:
+                cur = new
+            elif abs(new) > abs(cur):
+                cur = new
+            status[a['type']] = cur
+
+        labels = []
+        ok = True
+        for typ, cat in self.categories.items():
+            cur = status[typ]
+            cat_min, cat_max = cat[1:]
+            if cur == cat_min:
+                value = 'REJECT'
+                ok = False
+            elif cur == cat_max:
+                value = 'OK'
+            else:
+                value = 'NEED'
+                ok = False
+            labels.append({'label': cat[0], 'status': value})
+        if ok:
+            return [{'status': 'OK'}]
+        return [{'status': 'NOT_READY',
+                 'labels': labels}]
+
+    def setDependsOn(self, other, patchset):
+        self.depends_on_change = other
+        d = {'id': other.data['id'],
+             'number': other.data['number'],
+             'ref': other.patchsets[patchset - 1]['ref']
+             }
+        self.data['dependsOn'] = [d]
+
+        other.needed_by_changes.append(self)
+        needed = other.data.get('neededBy', [])
+        d = {'id': self.data['id'],
+             'number': self.data['number'],
+             'ref': self.patchsets[patchset - 1]['ref'],
+             'revision': self.patchsets[patchset - 1]['revision']
+             }
+        needed.append(d)
+        other.data['neededBy'] = needed
+
+    def query(self):
+        self.queried += 1
+        d = self.data.get('dependsOn')
+        if d:
+            d = d[0]
+            if (self.depends_on_change.patchsets[-1]['ref'] == d['ref']):
+                d['isCurrentPatchSet'] = True
+            else:
+                d['isCurrentPatchSet'] = False
+        return json.loads(json.dumps(self.data))
+
+    def setMerged(self):
+        if (self.depends_on_change and
+            self.depends_on_change.data['status'] != 'MERGED'):
+            return
+        if self.fail_merge:
+            return
+        self.data['status'] = 'MERGED'
+        self.open = False
+
+        path = os.path.join(self.upstream_root, self.project)
+        repo = git.Repo(path)
+        repo.heads[self.branch].commit = \
+            repo.commit(self.patchsets[-1]['revision'])
+
+    def setReported(self):
+        self.reported += 1
+
+
+class FakeGerrit(object):
+    def __init__(self, *args, **kw):
+        self.event_queue = Queue.Queue()
+        self.fixture_dir = os.path.join(FIXTURE_DIR, 'gerrit')
+        self.change_number = 0
+        self.changes = {}
+
+    def addFakeChange(self, project, branch, subject, status='NEW'):
+        self.change_number += 1
+        c = FakeChange(self, self.change_number, project, branch, subject,
+                       upstream_root=self.upstream_root,
+                       status=status)
+        self.changes[self.change_number] = c
+        return c
+
+    def addEvent(self, data):
+        return self.event_queue.put(data)
+
+    def getEvent(self):
+        return self.event_queue.get()
+
+    def eventDone(self):
+        self.event_queue.task_done()
+
+    def review(self, project, changeid, message, action):
+        number, ps = changeid.split(',')
+        change = self.changes[int(number)]
+        change.messages.append(message)
+        if 'submit' in action:
+            change.setMerged()
+        if message:
+            change.setReported()
+
+    def query(self, number):
+        change = self.changes.get(int(number))
+        if change:
+            return change.query()
+        return {}
+
+    def startWatching(self, *args, **kw):
+        pass
+
+
+class BuildHistory(object):
+    def __init__(self, **kw):
+        self.__dict__.update(kw)
+
+    def __repr__(self):
+        return ("<Completed build, result: %s name: %s #%s changes: %s>" %
+                (self.result, self.name, self.number, self.changes))
+
+
+class FakeURLOpener(object):
+    def __init__(self, upstream_root, fake_gerrit, url):
+        self.upstream_root = upstream_root
+        self.fake_gerrit = fake_gerrit
+        self.url = url
+
+    def read(self):
+        res = urlparse.urlparse(self.url)
+        path = res.path
+        project = '/'.join(path.split('/')[2:-2])
+        ret = '001e# service=git-upload-pack\n'
+        ret += ('000000a31270149696713ba7e06f1beb760f20d359c4abed HEAD\x00'
+                'multi_ack thin-pack side-band side-band-64k ofs-delta '
+                'shallow no-progress include-tag multi_ack_detailed no-done\n')
+        path = os.path.join(self.upstream_root, project)
+        repo = git.Repo(path)
+        for ref in repo.refs:
+            r = ref.object.hexsha + ' ' + ref.path + '\n'
+            ret += '%04x%s' % (len(r) + 4, r)
+        ret += '0000'
+        return ret
+
+
+class FakeGerritTrigger(zuul.trigger.gerrit.Gerrit):
+    name = 'gerrit'
+
+    def __init__(self, upstream_root, *args):
+        super(FakeGerritTrigger, self).__init__(*args)
+        self.upstream_root = upstream_root
+
+    def getGitUrl(self, project):
+        return os.path.join(self.upstream_root, project.name)
+
+
+class FakeStatsd(threading.Thread):
+    def __init__(self):
+        threading.Thread.__init__(self)
+        self.daemon = True
+        self.sock = socket.socket(socket.AF_INET, socket.SOCK_DGRAM)
+        self.sock.bind(('', 0))
+        self.port = self.sock.getsockname()[1]
+        self.wake_read, self.wake_write = os.pipe()
+        self.stats = []
+
+    def run(self):
+        while True:
+            poll = select.poll()
+            poll.register(self.sock, select.POLLIN)
+            poll.register(self.wake_read, select.POLLIN)
+            ret = poll.poll()
+            for (fd, event) in ret:
+                if fd == self.sock.fileno():
+                    data = self.sock.recvfrom(1024)
+                    if not data:
+                        return
+                    self.stats.append(data[0])
+                if fd == self.wake_read:
+                    return
+
+    def stop(self):
+        os.write(self.wake_write, '1\n')
+
+
+class FakeBuild(threading.Thread):
+    log = logging.getLogger("zuul.test")
+
+    def __init__(self, worker, job, number, node):
+        threading.Thread.__init__(self)
+        self.daemon = True
+        self.worker = worker
+        self.job = job
+        self.name = job.name.split(':')[1]
+        self.number = number
+        self.node = node
+        self.parameters = json.loads(job.arguments)
+        self.unique = self.parameters['ZUUL_UUID']
+        self.wait_condition = threading.Condition()
+        self.waiting = False
+        self.aborted = False
+        self.created = time.time()
+        self.description = ''
+        self.run_error = False
+
+    def release(self):
+        self.wait_condition.acquire()
+        self.wait_condition.notify()
+        self.waiting = False
+        self.log.debug("Build %s released" % self.unique)
+        self.wait_condition.release()
+
+    def isWaiting(self):
+        self.wait_condition.acquire()
+        if self.waiting:
+            ret = True
+        else:
+            ret = False
+        self.wait_condition.release()
+        return ret
+
+    def _wait(self):
+        self.wait_condition.acquire()
+        self.waiting = True
+        self.log.debug("Build %s waiting" % self.unique)
+        self.wait_condition.wait()
+        self.wait_condition.release()
+
+    def run(self):
+        data = {
+            'url': 'https://server/job/%s/%s/' % (self.name, self.number),
+            'name': self.name,
+            'number': self.number,
+            'manager': self.worker.worker_id,
+            'worker_name': 'My Worker',
+            'worker_hostname': 'localhost',
+            'worker_ips': ['127.0.0.1', '192.168.1.1'],
+            'worker_fqdn': 'zuul.example.org',
+            'worker_program': 'FakeBuilder',
+            'worker_version': 'v1.1',
+            'worker_extra': {'something': 'else'}
+        }
+
+        self.log.debug('Running build %s' % self.unique)
+
+        self.job.sendWorkData(json.dumps(data))
+        self.log.debug('Sent WorkData packet with %s' % json.dumps(data))
+        self.job.sendWorkStatus(0, 100)
+
+        if self.worker.hold_jobs_in_build:
+            self.log.debug('Holding build %s' % self.unique)
+            self._wait()
+        self.log.debug("Build %s continuing" % self.unique)
+
+        self.worker.lock.acquire()
+
+        result = 'SUCCESS'
+        if (('ZUUL_REF' in self.parameters) and
+            self.worker.shouldFailTest(self.name,
+                                       self.parameters['ZUUL_REF'])):
+            result = 'FAILURE'
+        if self.aborted:
+            result = 'ABORTED'
+
+        if self.run_error:
+            work_fail = True
+            result = 'RUN_ERROR'
+        else:
+            data['result'] = result
+            work_fail = False
+
+        changes = None
+        if 'ZUUL_CHANGE_IDS' in self.parameters:
+            changes = self.parameters['ZUUL_CHANGE_IDS']
+
+        self.worker.build_history.append(
+            BuildHistory(name=self.name, number=self.number,
+                         result=result, changes=changes, node=self.node,
+                         uuid=self.unique, description=self.description,
+                         pipeline=self.parameters['ZUUL_PIPELINE'])
+        )
+
+        self.job.sendWorkData(json.dumps(data))
+        if work_fail:
+            self.job.sendWorkFail()
+        else:
+            self.job.sendWorkComplete(json.dumps(data))
+        del self.worker.gearman_jobs[self.job.unique]
+        self.worker.running_builds.remove(self)
+        self.worker.lock.release()
+
+
+class FakeWorker(gear.Worker):
+    def __init__(self, worker_id, test):
+        super(FakeWorker, self).__init__(worker_id)
+        self.gearman_jobs = {}
+        self.build_history = []
+        self.running_builds = []
+        self.build_counter = 0
+        self.fail_tests = {}
+        self.test = test
+
+        self.hold_jobs_in_build = False
+        self.lock = threading.Lock()
+        self.__work_thread = threading.Thread(target=self.work)
+        self.__work_thread.daemon = True
+        self.__work_thread.start()
+
+    def handleJob(self, job):
+        parts = job.name.split(":")
+        cmd = parts[0]
+        name = parts[1]
+        if len(parts) > 2:
+            node = parts[2]
+        else:
+            node = None
+        if cmd == 'build':
+            self.handleBuild(job, name, node)
+        elif cmd == 'stop':
+            self.handleStop(job, name)
+        elif cmd == 'set_description':
+            self.handleSetDescription(job, name)
+
+    def handleBuild(self, job, name, node):
+        build = FakeBuild(self, job, self.build_counter, node)
+        job.build = build
+        self.gearman_jobs[job.unique] = job
+        self.build_counter += 1
+
+        self.running_builds.append(build)
+        build.start()
+
+    def handleStop(self, job, name):
+        self.log.debug("handle stop")
+        parameters = json.loads(job.arguments)
+        name = parameters['name']
+        number = parameters['number']
+        for build in self.running_builds:
+            if build.name == name and build.number == number:
+                build.aborted = True
+                build.release()
+                job.sendWorkComplete()
+                return
+        job.sendWorkFail()
+
+    def handleSetDescription(self, job, name):
+        self.log.debug("handle set description")
+        parameters = json.loads(job.arguments)
+        name = parameters['name']
+        number = parameters['number']
+        descr = parameters['html_description']
+        for build in self.running_builds:
+            if build.name == name and build.number == number:
+                build.description = descr
+                job.sendWorkComplete()
+                return
+        for build in self.build_history:
+            if build.name == name and build.number == number:
+                build.description = descr
+                job.sendWorkComplete()
+                return
+        job.sendWorkFail()
+
+    def work(self):
+        while self.running:
+            try:
+                job = self.getJob()
+            except gear.InterruptedError:
+                continue
+            try:
+                self.handleJob(job)
+            except:
+                self.log.exception("Worker exception:")
+
+    def addFailTest(self, name, change):
+        l = self.fail_tests.get(name, [])
+        l.append(change)
+        self.fail_tests[name] = l
+
+    def shouldFailTest(self, name, ref):
+        l = self.fail_tests.get(name, [])
+        for change in l:
+            if self.test.ref_has_change(ref, change):
+                return True
+        return False
+
+    def release(self, regex=None):
+        builds = self.running_builds[:]
+        self.log.debug("releasing build %s (%s)" % (regex,
+                                                    len(self.running_builds)))
+        for build in builds:
+            if not regex or re.match(regex, build.name):
+                self.log.debug("releasing build %s" %
+                               (build.parameters['ZUUL_UUID']))
+                build.release()
+            else:
+                self.log.debug("not releasing build %s" %
+                               (build.parameters['ZUUL_UUID']))
+        self.log.debug("done releasing builds %s (%s)" %
+                       (regex, len(self.running_builds)))
+
+
+class FakeGearmanServer(gear.Server):
+    def __init__(self):
+        self.hold_jobs_in_queue = False
+        super(FakeGearmanServer, self).__init__(0)
+
+    def getJobForConnection(self, connection, peek=False):
+        for queue in [self.high_queue, self.normal_queue, self.low_queue]:
+            for job in queue:
+                if not hasattr(job, 'waiting'):
+                    if job.name.startswith('build:'):
+                        job.waiting = self.hold_jobs_in_queue
+                    else:
+                        job.waiting = False
+                if job.waiting:
+                    continue
+                if job.name in connection.functions:
+                    if not peek:
+                        queue.remove(job)
+                        connection.related_jobs[job.handle] = job
+                        job.worker_connection = connection
+                    job.running = True
+                    return job
+        return None
+
+    def release(self, regex=None):
+        released = False
+        qlen = (len(self.high_queue) + len(self.normal_queue) +
+                len(self.low_queue))
+        self.log.debug("releasing queued job %s (%s)" % (regex, qlen))
+        for job in self.getQueue():
+            cmd, name = job.name.split(':')
+            if cmd != 'build':
+                continue
+            if not regex or re.match(regex, name):
+                self.log.debug("releasing queued job %s" %
+                               job.unique)
+                job.waiting = False
+                released = True
+            else:
+                self.log.debug("not releasing queued job %s" %
+                               job.unique)
+        if released:
+            self.wakeConnections()
+        qlen = (len(self.high_queue) + len(self.normal_queue) +
+                len(self.low_queue))
+        self.log.debug("done releasing queued jobs %s (%s)" % (regex, qlen))
+
+
+class FakeSMTP(object):
+    log = logging.getLogger('zuul.FakeSMTP')
+
+    def __init__(self, messages, server, port):
+        self.server = server
+        self.port = port
+        self.messages = messages
+
+    def sendmail(self, from_email, to_email, msg):
+        self.log.info("Sending email from %s, to %s, with msg %s" % (
+                      from_email, to_email, msg))
+
+        headers = msg.split('\n\n', 1)[0]
+        body = msg.split('\n\n', 1)[1]
+
+        self.messages.append(dict(
+            from_email=from_email,
+            to_email=to_email,
+            msg=msg,
+            headers=headers,
+            body=body,
+        ))
+
+        return True
+
+    def quit(self):
+        return True
+
+
+class FakeSwiftClientConnection(swiftclient.client.Connection):
+    def post_account(self, headers):
+        # Do nothing
+        pass
+
+    def get_auth(self):
+        # Returns endpoint and (unused) auth token
+        endpoint = os.path.join('https://storage.example.org', 'V1',
+                                'AUTH_account')
+        return endpoint, ''
+
+
+class ZuulTestCase(testtools.TestCase):
+    log = logging.getLogger("zuul.test")
+
+    def setUp(self):
+        super(ZuulTestCase, self).setUp()
+        test_timeout = os.environ.get('OS_TEST_TIMEOUT', 0)
+        try:
+            test_timeout = int(test_timeout)
+        except ValueError:
+            # If timeout value is invalid do not set a timeout.
+            test_timeout = 0
+        if test_timeout > 0:
+            self.useFixture(fixtures.Timeout(test_timeout, gentle=False))
+
+        if (os.environ.get('OS_STDOUT_CAPTURE') == 'True' or
+            os.environ.get('OS_STDOUT_CAPTURE') == '1'):
+            stdout = self.useFixture(fixtures.StringStream('stdout')).stream
+            self.useFixture(fixtures.MonkeyPatch('sys.stdout', stdout))
+        if (os.environ.get('OS_STDERR_CAPTURE') == 'True' or
+            os.environ.get('OS_STDERR_CAPTURE') == '1'):
+            stderr = self.useFixture(fixtures.StringStream('stderr')).stream
+            self.useFixture(fixtures.MonkeyPatch('sys.stderr', stderr))
+        if (os.environ.get('OS_LOG_CAPTURE') == 'True' or
+            os.environ.get('OS_LOG_CAPTURE') == '1'):
+            self.useFixture(fixtures.FakeLogger(
+                level=logging.DEBUG,
+                format='%(asctime)s %(name)-32s '
+                '%(levelname)-8s %(message)s'))
+        tmp_root = self.useFixture(fixtures.TempDir(
+            rootdir=os.environ.get("ZUUL_TEST_ROOT"))).path
+        self.test_root = os.path.join(tmp_root, "zuul-test")
+        self.upstream_root = os.path.join(self.test_root, "upstream")
+        self.git_root = os.path.join(self.test_root, "git")
+
+        if os.path.exists(self.test_root):
+            shutil.rmtree(self.test_root)
+        os.makedirs(self.test_root)
+        os.makedirs(self.upstream_root)
+        os.makedirs(self.git_root)
+
+        # Make per test copy of Configuration.
+        self.setup_config()
+        self.config.set('zuul', 'layout_config',
+                        os.path.join(FIXTURE_DIR, "layout.yaml"))
+        self.config.set('merger', 'git_dir', self.git_root)
+
+        # For each project in config:
+        self.init_repo("org/project")
+        self.init_repo("org/project1")
+        self.init_repo("org/project2")
+        self.init_repo("org/project3")
+        self.init_repo("org/one-job-project")
+        self.init_repo("org/nonvoting-project")
+        self.init_repo("org/templated-project")
+        self.init_repo("org/layered-project")
+        self.init_repo("org/node-project")
+        self.init_repo("org/conflict-project")
+        self.init_repo("org/noop-project")
+        self.init_repo("org/experimental-project")
+
+        self.statsd = FakeStatsd()
+        os.environ['STATSD_HOST'] = 'localhost'
+        os.environ['STATSD_PORT'] = str(self.statsd.port)
+        self.statsd.start()
+        # the statsd client object is configured in the statsd module import
+        reload(statsd)
+        reload(zuul.scheduler)
+
+        self.gearman_server = FakeGearmanServer()
+
+        self.config.set('gearman', 'port', str(self.gearman_server.port))
+
+        self.worker = FakeWorker('fake_worker', self)
+        self.worker.addServer('127.0.0.1', self.gearman_server.port)
+        self.gearman_server.worker = self.worker
+
+        self.merge_server = zuul.merger.server.MergeServer(self.config)
+        self.merge_server.start()
+
+        self.sched = zuul.scheduler.Scheduler()
+
+        self.useFixture(fixtures.MonkeyPatch('swiftclient.client.Connection',
+                                             FakeSwiftClientConnection))
+        self.swift = zuul.lib.swift.Swift(self.config)
+
+        def URLOpenerFactory(*args, **kw):
+            if isinstance(args[0], urllib2.Request):
+                return old_urlopen(*args, **kw)
+            args = [self.fake_gerrit] + list(args)
+            return FakeURLOpener(self.upstream_root, *args, **kw)
+
+        old_urlopen = urllib2.urlopen
+        urllib2.urlopen = URLOpenerFactory
+
+        self.launcher = zuul.launcher.gearman.Gearman(self.config, self.sched,
+                                                      self.swift)
+        self.merge_client = zuul.merger.client.MergeClient(
+            self.config, self.sched)
+
+        self.smtp_messages = []
+
+        def FakeSMTPFactory(*args, **kw):
+            args = [self.smtp_messages] + list(args)
+            return FakeSMTP(*args, **kw)
+
+        zuul.lib.gerrit.Gerrit = FakeGerrit
+        self.useFixture(fixtures.MonkeyPatch('smtplib.SMTP', FakeSMTPFactory))
+
+        self.gerrit = FakeGerritTrigger(
+            self.upstream_root, self.config, self.sched)
+        self.gerrit.replication_timeout = 1.5
+        self.gerrit.replication_retry_interval = 0.5
+        self.fake_gerrit = self.gerrit.gerrit
+        self.fake_gerrit.upstream_root = self.upstream_root
+
+        self.webapp = zuul.webapp.WebApp(self.sched, port=0)
+        self.rpc = zuul.rpclistener.RPCListener(self.config, self.sched)
+
+        self.sched.setLauncher(self.launcher)
+        self.sched.setMerger(self.merge_client)
+        self.sched.registerTrigger(self.gerrit)
+        self.timer = zuul.trigger.timer.Timer(self.config, self.sched)
+        self.sched.registerTrigger(self.timer)
+
+        self.sched.registerReporter(
+            zuul.reporter.gerrit.Reporter(self.gerrit))
+        self.smtp_reporter = zuul.reporter.smtp.Reporter(
+            self.config.get('smtp', 'default_from'),
+            self.config.get('smtp', 'default_to'),
+            self.config.get('smtp', 'server'))
+        self.sched.registerReporter(self.smtp_reporter)
+
+        self.sched.start()
+        self.sched.reconfigure(self.config)
+        self.sched.resume()
+        self.webapp.start()
+        self.rpc.start()
+        self.launcher.gearman.waitForServer()
+        self.registerJobs()
+        self.builds = self.worker.running_builds
+        self.history = self.worker.build_history
+
+        self.addCleanup(self.assertFinalState)
+        self.addCleanup(self.shutdown)
+
+    def setup_config(self):
+        """Per test config object. Override to set different config."""
+        self.config = ConfigParser.ConfigParser()
+        self.config.read(os.path.join(FIXTURE_DIR, "zuul.conf"))
+
+    def assertFinalState(self):
+        # Make sure that the change cache is cleared
+        self.assertEqual(len(self.gerrit._change_cache.keys()), 0)
+        # Make sure that git.Repo objects have been garbage collected.
+        repos = []
+        gc.collect()
+        for obj in gc.get_objects():
+            if isinstance(obj, git.Repo):
+                repos.append(obj)
+        self.assertEqual(len(repos), 0)
+        self.assertEmptyQueues()
+
+    def shutdown(self):
+        self.log.debug("Shutting down after tests")
+        self.launcher.stop()
+        self.merge_server.stop()
+        self.merge_server.join()
+        self.merge_client.stop()
+        self.worker.shutdown()
+        self.gerrit.stop()
+        self.timer.stop()
+        self.sched.stop()
+        self.sched.join()
+        self.statsd.stop()
+        self.statsd.join()
+        self.webapp.stop()
+        self.webapp.join()
+        self.rpc.stop()
+        self.rpc.join()
+        self.gearman_server.shutdown()
+        threads = threading.enumerate()
+        if len(threads) > 1:
+            self.log.error("More than one thread is running: %s" % threads)
+        super(ZuulTestCase, self).tearDown()
+
+    def init_repo(self, project):
+        parts = project.split('/')
+        path = os.path.join(self.upstream_root, *parts[:-1])
+        if not os.path.exists(path):
+            os.makedirs(path)
+        path = os.path.join(self.upstream_root, project)
+        repo = git.Repo.init(path)
+
+        repo.config_writer().set_value('user', 'email', 'user@example.com')
+        repo.config_writer().set_value('user', 'name', 'User Name')
+        repo.config_writer().write()
+
+        fn = os.path.join(path, 'README')
+        f = open(fn, 'w')
+        f.write("test\n")
+        f.close()
+        repo.index.add([fn])
+        repo.index.commit('initial commit')
+        master = repo.create_head('master')
+        repo.create_tag('init')
+
+        mp = repo.create_head('mp')
+        repo.head.reference = mp
+        f = open(fn, 'a')
+        f.write("test mp\n")
+        f.close()
+        repo.index.add([fn])
+        repo.index.commit('mp commit')
+
+        repo.head.reference = master
+        repo.head.reset(index=True, working_tree=True)
+        repo.git.clean('-x', '-f', '-d')
+
+    def ref_has_change(self, ref, change):
+        path = os.path.join(self.git_root, change.project)
+        repo = git.Repo(path)
+        for commit in repo.iter_commits(ref):
+            if commit.message.strip() == ('%s-1' % change.subject):
+                return True
+        return False
+
+    def job_has_changes(self, *args):
+        job = args[0]
+        commits = args[1:]
+        if isinstance(job, FakeBuild):
+            parameters = job.parameters
+        else:
+            parameters = json.loads(job.arguments)
+        project = parameters['ZUUL_PROJECT']
+        path = os.path.join(self.git_root, project)
+        repo = git.Repo(path)
+        ref = parameters['ZUUL_REF']
+        sha = parameters['ZUUL_COMMIT']
+        repo_messages = [c.message.strip() for c in repo.iter_commits(ref)]
+        repo_shas = [c.hexsha for c in repo.iter_commits(ref)]
+        commit_messages = ['%s-1' % commit.subject for commit in commits]
+        self.log.debug("Checking if job %s has changes; commit_messages %s;"
+                       " repo_messages %s; sha %s" % (job, commit_messages,
+                                                      repo_messages, sha))
+        for msg in commit_messages:
+            if msg not in repo_messages:
+                self.log.debug("  messages do not match")
+                return False
+        if repo_shas[0] != sha:
+            self.log.debug("  sha does not match")
+            return False
+        self.log.debug("  OK")
+        return True
+
+    def registerJobs(self):
+        count = 0
+        for job in self.sched.layout.jobs.keys():
+            self.worker.registerFunction('build:' + job)
+            count += 1
+        self.worker.registerFunction('stop:' + self.worker.worker_id)
+        count += 1
+
+        while len(self.gearman_server.functions) < count:
+            time.sleep(0)
+
+    def release(self, job):
+        if isinstance(job, FakeBuild):
+            job.release()
+        else:
+            job.waiting = False
+            self.log.debug("Queued job %s released" % job.unique)
+            self.gearman_server.wakeConnections()
+
+    def getParameter(self, job, name):
+        if isinstance(job, FakeBuild):
+            return job.parameters[name]
+        else:
+            parameters = json.loads(job.arguments)
+            return parameters[name]
+
+    def resetGearmanServer(self):
+        self.worker.setFunctions([])
+        while True:
+            done = True
+            for connection in self.gearman_server.active_connections:
+                if (connection.functions and
+                    connection.client_id not in ['Zuul RPC Listener',
+                                                 'Zuul Merger']):
+                    done = False
+            if done:
+                break
+            time.sleep(0)
+        self.gearman_server.functions = set()
+        self.rpc.register()
+        self.merge_server.register()
+
+    def haveAllBuildsReported(self):
+        # See if Zuul is waiting on a meta job to complete
+        if self.launcher.meta_jobs:
+            return False
+        # Find out if every build that the worker has completed has been
+        # reported back to Zuul.  If it hasn't then that means a Gearman
+        # event is still in transit and the system is not stable.
+        for build in self.worker.build_history:
+            zbuild = self.launcher.builds.get(build.uuid)
+            if not zbuild:
+                # It has already been reported
+                continue
+            # It hasn't been reported yet.
+            return False
+        # Make sure that none of the worker connections are in GRAB_WAIT
+        for connection in self.worker.active_connections:
+            if connection.state == 'GRAB_WAIT':
+                return False
+        return True
+
+    def areAllBuildsWaiting(self):
+        ret = True
+
+        builds = self.launcher.builds.values()
+        for build in builds:
+            client_job = None
+            for conn in self.launcher.gearman.active_connections:
+                for j in conn.related_jobs.values():
+                    if j.unique == build.uuid:
+                        client_job = j
+                        break
+            if not client_job:
+                self.log.debug("%s is not known to the gearman client" %
+                               build)
+                ret = False
+                continue
+            if not client_job.handle:
+                self.log.debug("%s has no handle" % client_job)
+                ret = False
+                continue
+            server_job = self.gearman_server.jobs.get(client_job.handle)
+            if not server_job:
+                self.log.debug("%s is not known to the gearman server" %
+                               client_job)
+                ret = False
+                continue
+            if not hasattr(server_job, 'waiting'):
+                self.log.debug("%s is being enqueued" % server_job)
+                ret = False
+                continue
+            if server_job.waiting:
+                continue
+            worker_job = self.worker.gearman_jobs.get(server_job.unique)
+            if worker_job:
+                if worker_job.build.isWaiting():
+                    continue
+                else:
+                    self.log.debug("%s is running" % worker_job)
+                    ret = False
+            else:
+                self.log.debug("%s is unassigned" % server_job)
+                ret = False
+        return ret
+
+    def waitUntilSettled(self):
+        self.log.debug("Waiting until settled...")
+        start = time.time()
+        while True:
+            if time.time() - start > 10:
+                print 'queue status:',
+                print self.sched.trigger_event_queue.empty(),
+                print self.sched.result_event_queue.empty(),
+                print self.fake_gerrit.event_queue.empty(),
+                print self.areAllBuildsWaiting()
+                raise Exception("Timeout waiting for Zuul to settle")
+            # Make sure no new events show up while we're checking
+            self.worker.lock.acquire()
+            # have all build states propogated to zuul?
+            if self.haveAllBuildsReported():
+                # Join ensures that the queue is empty _and_ events have been
+                # processed
+                self.fake_gerrit.event_queue.join()
+                self.sched.trigger_event_queue.join()
+                self.sched.result_event_queue.join()
+                self.sched.run_handler_lock.acquire()
+                if (self.sched.trigger_event_queue.empty() and
+                    self.sched.result_event_queue.empty() and
+                    self.fake_gerrit.event_queue.empty() and
+                    not self.merge_client.build_sets and
+                    self.haveAllBuildsReported() and
+                    self.areAllBuildsWaiting()):
+                    self.sched.run_handler_lock.release()
+                    self.worker.lock.release()
+                    self.log.debug("...settled.")
+                    return
+                self.sched.run_handler_lock.release()
+            self.worker.lock.release()
+            self.sched.wake_event.wait(0.1)
+
+    def countJobResults(self, jobs, result):
+        jobs = filter(lambda x: x.result == result, jobs)
+        return len(jobs)
+
+    def getJobFromHistory(self, name):
+        history = self.worker.build_history
+        for job in history:
+            if job.name == name:
+                return job
+        raise Exception("Unable to find job %s in history" % name)
+
+    def assertEmptyQueues(self):
+        # Make sure there are no orphaned jobs
+        for pipeline in self.sched.layout.pipelines.values():
+            for queue in pipeline.queues:
+                if len(queue.queue) != 0:
+                    print 'pipeline %s queue %s contents %s' % (
+                        pipeline.name, queue.name, queue.queue)
+                self.assertEqual(len(queue.queue), 0)
+
+    def assertReportedStat(self, key, value=None, kind=None):
+        start = time.time()
+        while time.time() < (start + 5):
+            for stat in self.statsd.stats:
+                pprint.pprint(self.statsd.stats)
+                k, v = stat.split(':')
+                if key == k:
+                    if value is None and kind is None:
+                        return
+                    elif value:
+                        if value == v:
+                            return
+                    elif kind:
+                        if v.endswith('|' + kind):
+                            return
+            time.sleep(0.1)
+
+        pprint.pprint(self.statsd.stats)
+        raise Exception("Key %s not found in reported stats" % key)
diff --git a/tests/fixtures/layout-bad-queue.yaml b/tests/fixtures/layout-bad-queue.yaml
new file mode 100644
index 0000000..3eb2051
--- /dev/null
+++ b/tests/fixtures/layout-bad-queue.yaml
@@ -0,0 +1,74 @@
+pipelines:
+  - name: check
+    manager: IndependentPipelineManager
+    trigger:
+      gerrit:
+        - event: patchset-created
+    success:
+      gerrit:
+        verified: 1
+    failure:
+      gerrit:
+        verified: -1
+
+  - name: post
+    manager: IndependentPipelineManager
+    trigger:
+      gerrit:
+        - event: ref-updated
+          ref: ^(?!refs/).*$
+
+  - name: gate
+    manager: DependentPipelineManager
+    failure-message: Build failed.  For information on how to proceed, see http://wiki.example.org/Test_Failures
+    trigger:
+      gerrit:
+        - event: comment-added
+          approval:
+            - approved: 1
+    success:
+      gerrit:
+        verified: 2
+        submit: true
+    failure:
+      gerrit:
+        verified: -2
+    start:
+      gerrit:
+        verified: 0
+    precedence: high
+
+jobs:
+  - name: project1-project2-integration
+    queue-name: integration
+  - name: project1-test1
+    queue-name: not_integration
+
+projects:
+  - name: org/project1
+    check:
+      - project1-merge:
+        - project1-test1
+        - project1-test2
+        - project1-project2-integration
+    gate:
+      - project1-merge:
+        - project1-test1
+        - project1-test2
+        - project1-project2-integration
+    post:
+      - project1-post
+
+  - name: org/project2
+    check:
+      - project2-merge:
+        - project2-test1
+        - project2-test2
+        - project1-project2-integration
+    gate:
+      - project2-merge:
+        - project2-test1
+        - project2-test2
+        - project1-project2-integration
+    post:
+      - project2-post
diff --git a/tests/fixtures/layout-current-patchset.yaml b/tests/fixtures/layout-current-patchset.yaml
new file mode 100644
index 0000000..dc8f768
--- /dev/null
+++ b/tests/fixtures/layout-current-patchset.yaml
@@ -0,0 +1,24 @@
+includes:
+  - python-file: custom_functions.py
+
+pipelines:
+  - name: check
+    manager: IndependentPipelineManager
+    require:
+      current-patchset: True
+    trigger:
+      gerrit:
+        - event: patchset-created
+        - event: comment-added
+    success:
+      gerrit:
+        verified: 1
+    failure:
+      gerrit:
+        verified: -1
+
+projects:
+  - name: org/project
+    merge-mode: cherry-pick
+    check:
+      - project-check
diff --git a/tests/fixtures/layout-idle.yaml b/tests/fixtures/layout-idle.yaml
index e4574fa..0870788 100644
--- a/tests/fixtures/layout-idle.yaml
+++ b/tests/fixtures/layout-idle.yaml
@@ -8,5 +8,5 @@
 projects:
   - name: org/project
     periodic:
-      - project-test1
-      - project-test2
+      - project-bitrot-stable-old
+      - project-bitrot-stable-older
diff --git a/tests/fixtures/layout-no-timer.yaml b/tests/fixtures/layout-no-timer.yaml
new file mode 100644
index 0000000..9436821
--- /dev/null
+++ b/tests/fixtures/layout-no-timer.yaml
@@ -0,0 +1,14 @@
+pipelines:
+  - name: periodic
+    manager: IndependentPipelineManager
+    # Trigger is required, set it to one that is a noop
+    # during tests that check the timer trigger.
+    trigger:
+      gerrit:
+        - event: patchset-created
+
+projects:
+  - name: org/project
+    periodic:
+      - project-bitrot-stable-old
+      - project-bitrot-stable-older
diff --git a/tests/fixtures/layout-pipeline-requirements.yaml b/tests/fixtures/layout-pipeline-requirements.yaml
new file mode 100644
index 0000000..1826d88
--- /dev/null
+++ b/tests/fixtures/layout-pipeline-requirements.yaml
@@ -0,0 +1,59 @@
+includes:
+  - python-file: custom_functions.py
+
+pipelines:
+  - name: check
+    manager: IndependentPipelineManager
+    require:
+      approval:
+        - email-filter: jenkins@example.com
+          older-than: 48h
+      open: True
+    trigger:
+      gerrit:
+        - event: patchset-created
+        - event: comment-added
+    success:
+      gerrit:
+        verified: 1
+    failure:
+      gerrit:
+        verified: -1
+
+  - name: gate
+    manager: DependentPipelineManager
+    failure-message: Build failed.  For information on how to proceed, see http://wiki.example.org/Test_Failures
+    require:
+      status:
+        - NEW
+      approval:
+        - verified: 1
+          username: jenkins
+          newer-than: 48h
+    trigger:
+      gerrit:
+        - event: comment-added
+          approval:
+            - approved: 1
+        - event: comment-added
+          approval:
+            - verified: 1
+    success:
+      gerrit:
+        verified: 2
+        submit: true
+    failure:
+      gerrit:
+        verified: -2
+    start:
+      gerrit:
+        verified: 0
+    precedence: high
+
+projects:
+  - name: org/project
+    merge-mode: cherry-pick
+    check:
+      - project-check
+    gate:
+      - project-gate
diff --git a/tests/fixtures/layout-timer-smtp.yaml b/tests/fixtures/layout-timer-smtp.yaml
index ac59df4..b5a6ce0 100644
--- a/tests/fixtures/layout-timer-smtp.yaml
+++ b/tests/fixtures/layout-timer-smtp.yaml
@@ -3,7 +3,7 @@
     manager: IndependentPipelineManager
     trigger:
       timer:
-        - time: '* * * * * */10'
+        - time: '* * * * * */1'
     success:
       smtp:
         to: alternative_me@example.com
diff --git a/tests/fixtures/layout-timer.yaml b/tests/fixtures/layout-timer.yaml
index 9e0f66b..4904f87 100644
--- a/tests/fixtures/layout-timer.yaml
+++ b/tests/fixtures/layout-timer.yaml
@@ -15,7 +15,7 @@
     manager: IndependentPipelineManager
     trigger:
       timer:
-        - time: '* * * * * */10'
+        - time: '* * * * * */1'
 
 projects:
   - name: org/project
diff --git a/tests/fixtures/layout.yaml b/tests/fixtures/layout.yaml
index b02c782..cc4d34c 100644
--- a/tests/fixtures/layout.yaml
+++ b/tests/fixtures/layout.yaml
@@ -93,6 +93,16 @@
       gerrit:
         verified: 0
 
+  - name: experimental
+    manager: IndependentPipelineManager
+    trigger:
+      gerrit:
+        - event: patchset-created
+    success:
+      gerrit: {}
+    failure:
+      gerrit: {}
+
 jobs:
   - name: ^.*-merge$
     failure-message: Unable to merge change
@@ -104,6 +114,8 @@
       - '.*-requires'
   - name: node-project-test1
     parameter-function: select_debian_node
+  - name: project1-project2-integration
+    queue-name: integration
 
 project-templates:
   - name: test-one-and-two
@@ -230,3 +242,7 @@
   - name: org/noop-project
     gate:
       - noop
+
+  - name: org/experimental-project
+    experimental:
+      - experimental-project-test
diff --git a/tests/test_scheduler.py b/tests/test_scheduler.py
index 7cfea1c..6e65774 100755
--- a/tests/test_scheduler.py
+++ b/tests/test_scheduler.py
@@ -14,1175 +14,31 @@
 # License for the specific language governing permissions and limitations
 # under the License.
 
-import ConfigParser
-from cStringIO import StringIO
-import gc
-import hashlib
 import json
 import logging
 import os
-import pprint
-import Queue
-import random
 import re
-import select
 import shutil
-import socket
-import string
-import subprocess
-import swiftclient
-import threading
 import time
 import urllib
 import urllib2
 
 import git
-import gear
-import fixtures
-import six.moves.urllib.parse as urlparse
-import statsd
 import testtools
 
 import zuul.scheduler
-import zuul.webapp
-import zuul.rpclistener
 import zuul.rpcclient
-import zuul.launcher.gearman
-import zuul.lib.swift
-import zuul.merger.server
-import zuul.merger.client
 import zuul.reporter.gerrit
 import zuul.reporter.smtp
-import zuul.trigger.gerrit
-import zuul.trigger.timer
 
-FIXTURE_DIR = os.path.join(os.path.dirname(__file__),
-                           'fixtures')
-CONFIG = ConfigParser.ConfigParser()
-CONFIG.read(os.path.join(FIXTURE_DIR, "zuul.conf"))
-
-CONFIG.set('zuul', 'layout_config',
-           os.path.join(FIXTURE_DIR, "layout.yaml"))
+from tests.base import ZuulTestCase, repack_repo
 
 logging.basicConfig(level=logging.DEBUG,
                     format='%(asctime)s %(name)-32s '
                     '%(levelname)-8s %(message)s')
 
 
-def repack_repo(path):
-    cmd = ['git', '--git-dir=%s/.git' % path, 'repack', '-afd']
-    output = subprocess.Popen(cmd, close_fds=True,
-                              stdout=subprocess.PIPE,
-                              stderr=subprocess.PIPE)
-    out = output.communicate()
-    if output.returncode:
-        raise Exception("git repack returned %d" % output.returncode)
-    return out
-
-
-def random_sha1():
-    return hashlib.sha1(str(random.random())).hexdigest()
-
-
-class ChangeReference(git.Reference):
-    _common_path_default = "refs/changes"
-    _points_to_commits_only = True
-
-
-class FakeChange(object):
-    categories = {'APRV': ('Approved', -1, 1),
-                  'CRVW': ('Code-Review', -2, 2),
-                  'VRFY': ('Verified', -2, 2)}
-
-    def __init__(self, gerrit, number, project, branch, subject,
-                 status='NEW', upstream_root=None):
-        self.gerrit = gerrit
-        self.reported = 0
-        self.queried = 0
-        self.patchsets = []
-        self.number = number
-        self.project = project
-        self.branch = branch
-        self.subject = subject
-        self.latest_patchset = 0
-        self.depends_on_change = None
-        self.needed_by_changes = []
-        self.fail_merge = False
-        self.messages = []
-        self.data = {
-            'branch': branch,
-            'comments': [],
-            'commitMessage': subject,
-            'createdOn': time.time(),
-            'id': 'I' + random_sha1(),
-            'lastUpdated': time.time(),
-            'number': str(number),
-            'open': True,
-            'owner': {'email': 'user@example.com',
-                      'name': 'User Name',
-                      'username': 'username'},
-            'patchSets': self.patchsets,
-            'project': project,
-            'status': status,
-            'subject': subject,
-            'submitRecords': [],
-            'url': 'https://hostname/%s' % number}
-
-        self.upstream_root = upstream_root
-        self.addPatchset()
-        self.data['submitRecords'] = self.getSubmitRecords()
-
-    def add_fake_change_to_repo(self, msg, fn, large):
-        path = os.path.join(self.upstream_root, self.project)
-        repo = git.Repo(path)
-        ref = ChangeReference.create(repo, '1/%s/%s' % (self.number,
-                                                        self.latest_patchset),
-                                     'refs/tags/init')
-        repo.head.reference = ref
-        repo.head.reset(index=True, working_tree=True)
-        repo.git.clean('-x', '-f', '-d')
-
-        path = os.path.join(self.upstream_root, self.project)
-        if not large:
-            fn = os.path.join(path, fn)
-            f = open(fn, 'w')
-            f.write("test %s %s %s\n" %
-                    (self.branch, self.number, self.latest_patchset))
-            f.close()
-            repo.index.add([fn])
-        else:
-            for fni in range(100):
-                fn = os.path.join(path, str(fni))
-                f = open(fn, 'w')
-                for ci in range(4096):
-                    f.write(random.choice(string.printable))
-                f.close()
-                repo.index.add([fn])
-
-        r = repo.index.commit(msg)
-        repo.head.reference = 'master'
-        repo.head.reset(index=True, working_tree=True)
-        repo.git.clean('-x', '-f', '-d')
-        repo.heads['master'].checkout()
-        return r
-
-    def addPatchset(self, files=[], large=False):
-        self.latest_patchset += 1
-        if files:
-            fn = files[0]
-        else:
-            fn = '%s-%s' % (self.branch, self.number)
-        msg = self.subject + '-' + str(self.latest_patchset)
-        c = self.add_fake_change_to_repo(msg, fn, large)
-        ps_files = [{'file': '/COMMIT_MSG',
-                     'type': 'ADDED'},
-                    {'file': 'README',
-                     'type': 'MODIFIED'}]
-        for f in files:
-            ps_files.append({'file': f, 'type': 'ADDED'})
-        d = {'approvals': [],
-             'createdOn': time.time(),
-             'files': ps_files,
-             'number': str(self.latest_patchset),
-             'ref': 'refs/changes/1/%s/%s' % (self.number,
-                                              self.latest_patchset),
-             'revision': c.hexsha,
-             'uploader': {'email': 'user@example.com',
-                          'name': 'User name',
-                          'username': 'user'}}
-        self.data['currentPatchSet'] = d
-        self.patchsets.append(d)
-        self.data['submitRecords'] = self.getSubmitRecords()
-
-    def getPatchsetCreatedEvent(self, patchset):
-        event = {"type": "patchset-created",
-                 "change": {"project": self.project,
-                            "branch": self.branch,
-                            "id": "I5459869c07352a31bfb1e7a8cac379cabfcb25af",
-                            "number": str(self.number),
-                            "subject": self.subject,
-                            "owner": {"name": "User Name"},
-                            "url": "https://hostname/3"},
-                 "patchSet": self.patchsets[patchset - 1],
-                 "uploader": {"name": "User Name"}}
-        return event
-
-    def getChangeRestoredEvent(self):
-        event = {"type": "change-restored",
-                 "change": {"project": self.project,
-                            "branch": self.branch,
-                            "id": "I5459869c07352a31bfb1e7a8cac379cabfcb25af",
-                            "number": str(self.number),
-                            "subject": self.subject,
-                            "owner": {"name": "User Name"},
-                            "url": "https://hostname/3"},
-                 "restorer": {"name": "User Name"},
-                 "reason": ""}
-        return event
-
-    def addApproval(self, category, value, username='jenkins',
-                    granted_on=None):
-        if not granted_on:
-            granted_on = time.time()
-        approval = {'description': self.categories[category][0],
-                    'type': category,
-                    'value': str(value),
-                    'by': {
-                        'username': username,
-                        'email': username + '@example.com',
-                    },
-                    'grantedOn': int(granted_on)}
-        for i, x in enumerate(self.patchsets[-1]['approvals'][:]):
-            if x['by']['username'] == username and x['type'] == category:
-                del self.patchsets[-1]['approvals'][i]
-        self.patchsets[-1]['approvals'].append(approval)
-        event = {'approvals': [approval],
-                 'author': {'email': 'user@example.com',
-                            'name': 'User Name',
-                            'username': 'username'},
-                 'change': {'branch': self.branch,
-                            'id': 'Iaa69c46accf97d0598111724a38250ae76a22c87',
-                            'number': str(self.number),
-                            'owner': {'email': 'user@example.com',
-                                      'name': 'User Name',
-                                      'username': 'username'},
-                            'project': self.project,
-                            'subject': self.subject,
-                            'topic': 'master',
-                            'url': 'https://hostname/459'},
-                 'comment': '',
-                 'patchSet': self.patchsets[-1],
-                 'type': 'comment-added'}
-        self.data['submitRecords'] = self.getSubmitRecords()
-        return json.loads(json.dumps(event))
-
-    def getSubmitRecords(self):
-        status = {}
-        for cat in self.categories.keys():
-            status[cat] = 0
-
-        for a in self.patchsets[-1]['approvals']:
-            cur = status[a['type']]
-            cat_min, cat_max = self.categories[a['type']][1:]
-            new = int(a['value'])
-            if new == cat_min:
-                cur = new
-            elif abs(new) > abs(cur):
-                cur = new
-            status[a['type']] = cur
-
-        labels = []
-        ok = True
-        for typ, cat in self.categories.items():
-            cur = status[typ]
-            cat_min, cat_max = cat[1:]
-            if cur == cat_min:
-                value = 'REJECT'
-                ok = False
-            elif cur == cat_max:
-                value = 'OK'
-            else:
-                value = 'NEED'
-                ok = False
-            labels.append({'label': cat[0], 'status': value})
-        if ok:
-            return [{'status': 'OK'}]
-        return [{'status': 'NOT_READY',
-                 'labels': labels}]
-
-    def setDependsOn(self, other, patchset):
-        self.depends_on_change = other
-        d = {'id': other.data['id'],
-             'number': other.data['number'],
-             'ref': other.patchsets[patchset - 1]['ref']
-             }
-        self.data['dependsOn'] = [d]
-
-        other.needed_by_changes.append(self)
-        needed = other.data.get('neededBy', [])
-        d = {'id': self.data['id'],
-             'number': self.data['number'],
-             'ref': self.patchsets[patchset - 1]['ref'],
-             'revision': self.patchsets[patchset - 1]['revision']
-             }
-        needed.append(d)
-        other.data['neededBy'] = needed
-
-    def query(self):
-        self.queried += 1
-        d = self.data.get('dependsOn')
-        if d:
-            d = d[0]
-            if (self.depends_on_change.patchsets[-1]['ref'] == d['ref']):
-                d['isCurrentPatchSet'] = True
-            else:
-                d['isCurrentPatchSet'] = False
-        return json.loads(json.dumps(self.data))
-
-    def setMerged(self):
-        if (self.depends_on_change and
-            self.depends_on_change.data['status'] != 'MERGED'):
-            return
-        if self.fail_merge:
-            return
-        self.data['status'] = 'MERGED'
-        self.open = False
-
-        path = os.path.join(self.upstream_root, self.project)
-        repo = git.Repo(path)
-        repo.heads[self.branch].commit = \
-            repo.commit(self.patchsets[-1]['revision'])
-
-    def setReported(self):
-        self.reported += 1
-
-
-class FakeGerrit(object):
-    def __init__(self, *args, **kw):
-        self.event_queue = Queue.Queue()
-        self.fixture_dir = os.path.join(FIXTURE_DIR, 'gerrit')
-        self.change_number = 0
-        self.changes = {}
-
-    def addFakeChange(self, project, branch, subject):
-        self.change_number += 1
-        c = FakeChange(self, self.change_number, project, branch, subject,
-                       upstream_root=self.upstream_root)
-        self.changes[self.change_number] = c
-        return c
-
-    def addEvent(self, data):
-        return self.event_queue.put(data)
-
-    def getEvent(self):
-        return self.event_queue.get()
-
-    def eventDone(self):
-        self.event_queue.task_done()
-
-    def review(self, project, changeid, message, action):
-        number, ps = changeid.split(',')
-        change = self.changes[int(number)]
-        change.messages.append(message)
-        if 'submit' in action:
-            change.setMerged()
-        if message:
-            change.setReported()
-
-    def query(self, number):
-        change = self.changes.get(int(number))
-        if change:
-            return change.query()
-        return {}
-
-    def startWatching(self, *args, **kw):
-        pass
-
-
-class BuildHistory(object):
-    def __init__(self, **kw):
-        self.__dict__.update(kw)
-
-    def __repr__(self):
-        return ("<Completed build, result: %s name: %s #%s changes: %s>" %
-                (self.result, self.name, self.number, self.changes))
-
-
-class FakeURLOpener(object):
-    def __init__(self, upstream_root, fake_gerrit, url):
-        self.upstream_root = upstream_root
-        self.fake_gerrit = fake_gerrit
-        self.url = url
-
-    def read(self):
-        res = urlparse.urlparse(self.url)
-        path = res.path
-        project = '/'.join(path.split('/')[2:-2])
-        ret = '001e# service=git-upload-pack\n'
-        ret += ('000000a31270149696713ba7e06f1beb760f20d359c4abed HEAD\x00'
-                'multi_ack thin-pack side-band side-band-64k ofs-delta '
-                'shallow no-progress include-tag multi_ack_detailed no-done\n')
-        path = os.path.join(self.upstream_root, project)
-        repo = git.Repo(path)
-        for ref in repo.refs:
-            r = ref.object.hexsha + ' ' + ref.path + '\n'
-            ret += '%04x%s' % (len(r) + 4, r)
-        ret += '0000'
-        return ret
-
-
-class FakeGerritTrigger(zuul.trigger.gerrit.Gerrit):
-    name = 'gerrit'
-
-    def __init__(self, upstream_root, *args):
-        super(FakeGerritTrigger, self).__init__(*args)
-        self.upstream_root = upstream_root
-
-    def getGitUrl(self, project):
-        return os.path.join(self.upstream_root, project.name)
-
-
-class FakeStatsd(threading.Thread):
-    def __init__(self):
-        threading.Thread.__init__(self)
-        self.daemon = True
-        self.sock = socket.socket(socket.AF_INET, socket.SOCK_DGRAM)
-        self.sock.bind(('', 0))
-        self.port = self.sock.getsockname()[1]
-        self.wake_read, self.wake_write = os.pipe()
-        self.stats = []
-
-    def run(self):
-        while True:
-            poll = select.poll()
-            poll.register(self.sock, select.POLLIN)
-            poll.register(self.wake_read, select.POLLIN)
-            ret = poll.poll()
-            for (fd, event) in ret:
-                if fd == self.sock.fileno():
-                    data = self.sock.recvfrom(1024)
-                    if not data:
-                        return
-                    self.stats.append(data[0])
-                if fd == self.wake_read:
-                    return
-
-    def stop(self):
-        os.write(self.wake_write, '1\n')
-
-
-class FakeBuild(threading.Thread):
-    log = logging.getLogger("zuul.test")
-
-    def __init__(self, worker, job, number, node):
-        threading.Thread.__init__(self)
-        self.daemon = True
-        self.worker = worker
-        self.job = job
-        self.name = job.name.split(':')[1]
-        self.number = number
-        self.node = node
-        self.parameters = json.loads(job.arguments)
-        self.unique = self.parameters['ZUUL_UUID']
-        self.wait_condition = threading.Condition()
-        self.waiting = False
-        self.aborted = False
-        self.created = time.time()
-        self.description = ''
-        self.run_error = False
-
-    def release(self):
-        self.wait_condition.acquire()
-        self.wait_condition.notify()
-        self.waiting = False
-        self.log.debug("Build %s released" % self.unique)
-        self.wait_condition.release()
-
-    def isWaiting(self):
-        self.wait_condition.acquire()
-        if self.waiting:
-            ret = True
-        else:
-            ret = False
-        self.wait_condition.release()
-        return ret
-
-    def _wait(self):
-        self.wait_condition.acquire()
-        self.waiting = True
-        self.log.debug("Build %s waiting" % self.unique)
-        self.wait_condition.wait()
-        self.wait_condition.release()
-
-    def run(self):
-        data = {
-            'url': 'https://server/job/%s/%s/' % (self.name, self.number),
-            'name': self.name,
-            'number': self.number,
-            'manager': self.worker.worker_id,
-            'worker_name': 'My Worker',
-            'worker_hostname': 'localhost',
-            'worker_ips': ['127.0.0.1', '192.168.1.1'],
-            'worker_fqdn': 'zuul.example.org',
-            'worker_program': 'FakeBuilder',
-            'worker_version': 'v1.1',
-            'worker_extra': {'something': 'else'}
-        }
-
-        self.log.debug('Running build %s' % self.unique)
-
-        self.job.sendWorkData(json.dumps(data))
-        self.log.debug('Sent WorkData packet with %s' % json.dumps(data))
-        self.job.sendWorkStatus(0, 100)
-
-        if self.worker.hold_jobs_in_build:
-            self.log.debug('Holding build %s' % self.unique)
-            self._wait()
-        self.log.debug("Build %s continuing" % self.unique)
-
-        self.worker.lock.acquire()
-
-        result = 'SUCCESS'
-        if (('ZUUL_REF' in self.parameters) and
-            self.worker.shouldFailTest(self.name,
-                                       self.parameters['ZUUL_REF'])):
-            result = 'FAILURE'
-        if self.aborted:
-            result = 'ABORTED'
-
-        if self.run_error:
-            work_fail = True
-            result = 'RUN_ERROR'
-        else:
-            data['result'] = result
-            work_fail = False
-
-        changes = None
-        if 'ZUUL_CHANGE_IDS' in self.parameters:
-            changes = self.parameters['ZUUL_CHANGE_IDS']
-
-        self.worker.build_history.append(
-            BuildHistory(name=self.name, number=self.number,
-                         result=result, changes=changes, node=self.node,
-                         uuid=self.unique, description=self.description,
-                         pipeline=self.parameters['ZUUL_PIPELINE'])
-        )
-
-        self.job.sendWorkData(json.dumps(data))
-        if work_fail:
-            self.job.sendWorkFail()
-        else:
-            self.job.sendWorkComplete(json.dumps(data))
-        del self.worker.gearman_jobs[self.job.unique]
-        self.worker.running_builds.remove(self)
-        self.worker.lock.release()
-
-
-class FakeWorker(gear.Worker):
-    def __init__(self, worker_id, test):
-        super(FakeWorker, self).__init__(worker_id)
-        self.gearman_jobs = {}
-        self.build_history = []
-        self.running_builds = []
-        self.build_counter = 0
-        self.fail_tests = {}
-        self.test = test
-
-        self.hold_jobs_in_build = False
-        self.lock = threading.Lock()
-        self.__work_thread = threading.Thread(target=self.work)
-        self.__work_thread.daemon = True
-        self.__work_thread.start()
-
-    def handleJob(self, job):
-        parts = job.name.split(":")
-        cmd = parts[0]
-        name = parts[1]
-        if len(parts) > 2:
-            node = parts[2]
-        else:
-            node = None
-        if cmd == 'build':
-            self.handleBuild(job, name, node)
-        elif cmd == 'stop':
-            self.handleStop(job, name)
-        elif cmd == 'set_description':
-            self.handleSetDescription(job, name)
-
-    def handleBuild(self, job, name, node):
-        build = FakeBuild(self, job, self.build_counter, node)
-        job.build = build
-        self.gearman_jobs[job.unique] = job
-        self.build_counter += 1
-
-        self.running_builds.append(build)
-        build.start()
-
-    def handleStop(self, job, name):
-        self.log.debug("handle stop")
-        parameters = json.loads(job.arguments)
-        name = parameters['name']
-        number = parameters['number']
-        for build in self.running_builds:
-            if build.name == name and build.number == number:
-                build.aborted = True
-                build.release()
-                job.sendWorkComplete()
-                return
-        job.sendWorkFail()
-
-    def handleSetDescription(self, job, name):
-        self.log.debug("handle set description")
-        parameters = json.loads(job.arguments)
-        name = parameters['name']
-        number = parameters['number']
-        descr = parameters['html_description']
-        for build in self.running_builds:
-            if build.name == name and build.number == number:
-                build.description = descr
-                job.sendWorkComplete()
-                return
-        for build in self.build_history:
-            if build.name == name and build.number == number:
-                build.description = descr
-                job.sendWorkComplete()
-                return
-        job.sendWorkFail()
-
-    def work(self):
-        while self.running:
-            try:
-                job = self.getJob()
-            except gear.InterruptedError:
-                continue
-            try:
-                self.handleJob(job)
-            except:
-                self.log.exception("Worker exception:")
-
-    def addFailTest(self, name, change):
-        l = self.fail_tests.get(name, [])
-        l.append(change)
-        self.fail_tests[name] = l
-
-    def shouldFailTest(self, name, ref):
-        l = self.fail_tests.get(name, [])
-        for change in l:
-            if self.test.ref_has_change(ref, change):
-                return True
-        return False
-
-    def release(self, regex=None):
-        builds = self.running_builds[:]
-        self.log.debug("releasing build %s (%s)" % (regex,
-                                                    len(self.running_builds)))
-        for build in builds:
-            if not regex or re.match(regex, build.name):
-                self.log.debug("releasing build %s" %
-                               (build.parameters['ZUUL_UUID']))
-                build.release()
-            else:
-                self.log.debug("not releasing build %s" %
-                               (build.parameters['ZUUL_UUID']))
-        self.log.debug("done releasing builds %s (%s)" %
-                       (regex, len(self.running_builds)))
-
-
-class FakeGearmanServer(gear.Server):
-    def __init__(self):
-        self.hold_jobs_in_queue = False
-        super(FakeGearmanServer, self).__init__(0)
-
-    def getJobForConnection(self, connection, peek=False):
-        for queue in [self.high_queue, self.normal_queue, self.low_queue]:
-            for job in queue:
-                if not hasattr(job, 'waiting'):
-                    if job.name.startswith('build:'):
-                        job.waiting = self.hold_jobs_in_queue
-                    else:
-                        job.waiting = False
-                if job.waiting:
-                    continue
-                if job.name in connection.functions:
-                    if not peek:
-                        queue.remove(job)
-                        connection.related_jobs[job.handle] = job
-                        job.worker_connection = connection
-                    job.running = True
-                    return job
-        return None
-
-    def release(self, regex=None):
-        released = False
-        qlen = (len(self.high_queue) + len(self.normal_queue) +
-                len(self.low_queue))
-        self.log.debug("releasing queued job %s (%s)" % (regex, qlen))
-        for job in self.getQueue():
-            cmd, name = job.name.split(':')
-            if cmd != 'build':
-                continue
-            if not regex or re.match(regex, name):
-                self.log.debug("releasing queued job %s" %
-                               job.unique)
-                job.waiting = False
-                released = True
-            else:
-                self.log.debug("not releasing queued job %s" %
-                               job.unique)
-        if released:
-            self.wakeConnections()
-        qlen = (len(self.high_queue) + len(self.normal_queue) +
-                len(self.low_queue))
-        self.log.debug("done releasing queued jobs %s (%s)" % (regex, qlen))
-
-
-class FakeSMTP(object):
-    log = logging.getLogger('zuul.FakeSMTP')
-
-    def __init__(self, messages, server, port):
-        self.server = server
-        self.port = port
-        self.messages = messages
-
-    def sendmail(self, from_email, to_email, msg):
-        self.log.info("Sending email from %s, to %s, with msg %s" % (
-                      from_email, to_email, msg))
-
-        headers = msg.split('\n\n', 1)[0]
-        body = msg.split('\n\n', 1)[1]
-
-        self.messages.append(dict(
-            from_email=from_email,
-            to_email=to_email,
-            msg=msg,
-            headers=headers,
-            body=body,
-        ))
-
-        return True
-
-    def quit(self):
-        return True
-
-
-class FakeSwiftClientConnection(swiftclient.client.Connection):
-    def post_account(self, headers):
-        # Do nothing
-        pass
-
-    def get_auth(self):
-        # Returns endpoint and (unused) auth token
-        endpoint = os.path.join('https://storage.example.org', 'V1',
-                                'AUTH_account')
-        return endpoint, ''
-
-
-class TestScheduler(testtools.TestCase):
-    log = logging.getLogger("zuul.test")
-
-    def setUp(self):
-        super(TestScheduler, self).setUp()
-        test_timeout = os.environ.get('OS_TEST_TIMEOUT', 0)
-        try:
-            test_timeout = int(test_timeout)
-        except ValueError:
-            # If timeout value is invalid do not set a timeout.
-            test_timeout = 0
-        if test_timeout > 0:
-            self.useFixture(fixtures.Timeout(test_timeout, gentle=False))
-
-        if (os.environ.get('OS_STDOUT_CAPTURE') == 'True' or
-            os.environ.get('OS_STDOUT_CAPTURE') == '1'):
-            stdout = self.useFixture(fixtures.StringStream('stdout')).stream
-            self.useFixture(fixtures.MonkeyPatch('sys.stdout', stdout))
-        if (os.environ.get('OS_STDERR_CAPTURE') == 'True' or
-            os.environ.get('OS_STDERR_CAPTURE') == '1'):
-            stderr = self.useFixture(fixtures.StringStream('stderr')).stream
-            self.useFixture(fixtures.MonkeyPatch('sys.stderr', stderr))
-        if (os.environ.get('OS_LOG_CAPTURE') == 'True' or
-            os.environ.get('OS_LOG_CAPTURE') == '1'):
-            self.useFixture(fixtures.FakeLogger(
-                level=logging.DEBUG,
-                format='%(asctime)s %(name)-32s '
-                '%(levelname)-8s %(message)s'))
-        tmp_root = self.useFixture(fixtures.TempDir(
-            rootdir=os.environ.get("ZUUL_TEST_ROOT"))).path
-        self.test_root = os.path.join(tmp_root, "zuul-test")
-        self.upstream_root = os.path.join(self.test_root, "upstream")
-        self.git_root = os.path.join(self.test_root, "git")
-
-        CONFIG.set('merger', 'git_dir', self.git_root)
-        if os.path.exists(self.test_root):
-            shutil.rmtree(self.test_root)
-        os.makedirs(self.test_root)
-        os.makedirs(self.upstream_root)
-        os.makedirs(self.git_root)
-
-        # For each project in config:
-        self.init_repo("org/project")
-        self.init_repo("org/project1")
-        self.init_repo("org/project2")
-        self.init_repo("org/project3")
-        self.init_repo("org/one-job-project")
-        self.init_repo("org/nonvoting-project")
-        self.init_repo("org/templated-project")
-        self.init_repo("org/layered-project")
-        self.init_repo("org/node-project")
-        self.init_repo("org/conflict-project")
-        self.init_repo("org/noop-project")
-
-        self.statsd = FakeStatsd()
-        os.environ['STATSD_HOST'] = 'localhost'
-        os.environ['STATSD_PORT'] = str(self.statsd.port)
-        self.statsd.start()
-        # the statsd client object is configured in the statsd module import
-        reload(statsd)
-        reload(zuul.scheduler)
-
-        self.gearman_server = FakeGearmanServer()
-
-        self.config = ConfigParser.ConfigParser()
-        cfg = StringIO()
-        CONFIG.write(cfg)
-        cfg.seek(0)
-        self.config.readfp(cfg)
-        self.config.set('gearman', 'port', str(self.gearman_server.port))
-
-        self.worker = FakeWorker('fake_worker', self)
-        self.worker.addServer('127.0.0.1', self.gearman_server.port)
-        self.gearman_server.worker = self.worker
-
-        self.merge_server = zuul.merger.server.MergeServer(self.config)
-        self.merge_server.start()
-
-        self.sched = zuul.scheduler.Scheduler()
-
-        self.useFixture(fixtures.MonkeyPatch('swiftclient.client.Connection',
-                                             FakeSwiftClientConnection))
-        self.swift = zuul.lib.swift.Swift(self.config)
-
-        def URLOpenerFactory(*args, **kw):
-            args = [self.fake_gerrit] + list(args)
-            return FakeURLOpener(self.upstream_root, *args, **kw)
-
-        urllib2.urlopen = URLOpenerFactory
-
-        self.launcher = zuul.launcher.gearman.Gearman(self.config, self.sched,
-                                                      self.swift)
-        self.merge_client = zuul.merger.client.MergeClient(
-            self.config, self.sched)
-
-        self.smtp_messages = []
-
-        def FakeSMTPFactory(*args, **kw):
-            args = [self.smtp_messages] + list(args)
-            return FakeSMTP(*args, **kw)
-
-        zuul.lib.gerrit.Gerrit = FakeGerrit
-        self.useFixture(fixtures.MonkeyPatch('smtplib.SMTP', FakeSMTPFactory))
-
-        self.gerrit = FakeGerritTrigger(
-            self.upstream_root, self.config, self.sched)
-        self.gerrit.replication_timeout = 1.5
-        self.gerrit.replication_retry_interval = 0.5
-        self.fake_gerrit = self.gerrit.gerrit
-        self.fake_gerrit.upstream_root = self.upstream_root
-
-        self.webapp = zuul.webapp.WebApp(self.sched, port=0)
-        self.rpc = zuul.rpclistener.RPCListener(self.config, self.sched)
-
-        self.sched.setLauncher(self.launcher)
-        self.sched.setMerger(self.merge_client)
-        self.sched.registerTrigger(self.gerrit)
-        self.timer = zuul.trigger.timer.Timer(self.config, self.sched)
-        self.sched.registerTrigger(self.timer)
-
-        self.sched.registerReporter(
-            zuul.reporter.gerrit.Reporter(self.gerrit))
-        self.smtp_reporter = zuul.reporter.smtp.Reporter(
-            self.config.get('smtp', 'default_from'),
-            self.config.get('smtp', 'default_to'),
-            self.config.get('smtp', 'server'))
-        self.sched.registerReporter(self.smtp_reporter)
-
-        self.sched.start()
-        self.sched.reconfigure(self.config)
-        self.sched.resume()
-        self.webapp.start()
-        self.rpc.start()
-        self.launcher.gearman.waitForServer()
-        self.registerJobs()
-        self.builds = self.worker.running_builds
-        self.history = self.worker.build_history
-
-        self.addCleanup(self.assertFinalState)
-        self.addCleanup(self.shutdown)
-
-    def assertFinalState(self):
-        # Make sure that the change cache is cleared
-        self.assertEqual(len(self.gerrit._change_cache.keys()), 0)
-        # Make sure that git.Repo objects have been garbage collected.
-        repos = []
-        gc.collect()
-        for obj in gc.get_objects():
-            if isinstance(obj, git.Repo):
-                repos.append(obj)
-        self.assertEqual(len(repos), 0)
-        self.assertEmptyQueues()
-
-    def shutdown(self):
-        self.log.debug("Shutting down after tests")
-        self.launcher.stop()
-        self.merge_server.stop()
-        self.merge_server.join()
-        self.merge_client.stop()
-        self.worker.shutdown()
-        self.gerrit.stop()
-        self.timer.stop()
-        self.sched.stop()
-        self.sched.join()
-        self.statsd.stop()
-        self.statsd.join()
-        self.webapp.stop()
-        self.webapp.join()
-        self.rpc.stop()
-        self.rpc.join()
-        self.gearman_server.shutdown()
-        threads = threading.enumerate()
-        if len(threads) > 1:
-            self.log.error("More than one thread is running: %s" % threads)
-        super(TestScheduler, self).tearDown()
-
-    def init_repo(self, project):
-        parts = project.split('/')
-        path = os.path.join(self.upstream_root, *parts[:-1])
-        if not os.path.exists(path):
-            os.makedirs(path)
-        path = os.path.join(self.upstream_root, project)
-        repo = git.Repo.init(path)
-
-        repo.config_writer().set_value('user', 'email', 'user@example.com')
-        repo.config_writer().set_value('user', 'name', 'User Name')
-        repo.config_writer().write()
-
-        fn = os.path.join(path, 'README')
-        f = open(fn, 'w')
-        f.write("test\n")
-        f.close()
-        repo.index.add([fn])
-        repo.index.commit('initial commit')
-        master = repo.create_head('master')
-        repo.create_tag('init')
-
-        mp = repo.create_head('mp')
-        repo.head.reference = mp
-        f = open(fn, 'a')
-        f.write("test mp\n")
-        f.close()
-        repo.index.add([fn])
-        repo.index.commit('mp commit')
-
-        repo.head.reference = master
-        repo.head.reset(index=True, working_tree=True)
-        repo.git.clean('-x', '-f', '-d')
-
-    def ref_has_change(self, ref, change):
-        path = os.path.join(self.git_root, change.project)
-        repo = git.Repo(path)
-        for commit in repo.iter_commits(ref):
-            if commit.message.strip() == ('%s-1' % change.subject):
-                return True
-        return False
-
-    def job_has_changes(self, *args):
-        job = args[0]
-        commits = args[1:]
-        if isinstance(job, FakeBuild):
-            parameters = job.parameters
-        else:
-            parameters = json.loads(job.arguments)
-        project = parameters['ZUUL_PROJECT']
-        path = os.path.join(self.git_root, project)
-        repo = git.Repo(path)
-        ref = parameters['ZUUL_REF']
-        sha = parameters['ZUUL_COMMIT']
-        repo_messages = [c.message.strip() for c in repo.iter_commits(ref)]
-        repo_shas = [c.hexsha for c in repo.iter_commits(ref)]
-        commit_messages = ['%s-1' % commit.subject for commit in commits]
-        self.log.debug("Checking if job %s has changes; commit_messages %s;"
-                       " repo_messages %s; sha %s" % (job, commit_messages,
-                                                      repo_messages, sha))
-        for msg in commit_messages:
-            if msg not in repo_messages:
-                self.log.debug("  messages do not match")
-                return False
-        if repo_shas[0] != sha:
-            self.log.debug("  sha does not match")
-            return False
-        self.log.debug("  OK")
-        return True
-
-    def registerJobs(self):
-        count = 0
-        for job in self.sched.layout.jobs.keys():
-            self.worker.registerFunction('build:' + job)
-            count += 1
-        self.worker.registerFunction('stop:' + self.worker.worker_id)
-        count += 1
-
-        while len(self.gearman_server.functions) < count:
-            time.sleep(0)
-
-    def release(self, job):
-        if isinstance(job, FakeBuild):
-            job.release()
-        else:
-            job.waiting = False
-            self.log.debug("Queued job %s released" % job.unique)
-            self.gearman_server.wakeConnections()
-
-    def getParameter(self, job, name):
-        if isinstance(job, FakeBuild):
-            return job.parameters[name]
-        else:
-            parameters = json.loads(job.arguments)
-            return parameters[name]
-
-    def resetGearmanServer(self):
-        self.worker.setFunctions([])
-        while True:
-            done = True
-            for connection in self.gearman_server.active_connections:
-                if (connection.functions and
-                    connection.client_id not in ['Zuul RPC Listener',
-                                                 'Zuul Merger']):
-                    done = False
-            if done:
-                break
-            time.sleep(0)
-        self.gearman_server.functions = set()
-        self.rpc.register()
-        self.merge_server.register()
-
-    def haveAllBuildsReported(self):
-        # See if Zuul is waiting on a meta job to complete
-        if self.launcher.meta_jobs:
-            return False
-        # Find out if every build that the worker has completed has been
-        # reported back to Zuul.  If it hasn't then that means a Gearman
-        # event is still in transit and the system is not stable.
-        for build in self.worker.build_history:
-            zbuild = self.launcher.builds.get(build.uuid)
-            if not zbuild:
-                # It has already been reported
-                continue
-            # It hasn't been reported yet.
-            return False
-        # Make sure that none of the worker connections are in GRAB_WAIT
-        for connection in self.worker.active_connections:
-            if connection.state == 'GRAB_WAIT':
-                return False
-        return True
-
-    def areAllBuildsWaiting(self):
-        ret = True
-
-        builds = self.launcher.builds.values()
-        for build in builds:
-            client_job = None
-            for conn in self.launcher.gearman.active_connections:
-                for j in conn.related_jobs.values():
-                    if j.unique == build.uuid:
-                        client_job = j
-                        break
-            if not client_job:
-                self.log.debug("%s is not known to the gearman client" %
-                               build)
-                ret = False
-                continue
-            if not client_job.handle:
-                self.log.debug("%s has no handle" % client_job)
-                ret = False
-                continue
-            server_job = self.gearman_server.jobs.get(client_job.handle)
-            if not server_job:
-                self.log.debug("%s is not known to the gearman server" %
-                               client_job)
-                ret = False
-                continue
-            if not hasattr(server_job, 'waiting'):
-                self.log.debug("%s is being enqueued" % server_job)
-                ret = False
-                continue
-            if server_job.waiting:
-                continue
-            worker_job = self.worker.gearman_jobs.get(server_job.unique)
-            if worker_job:
-                if worker_job.build.isWaiting():
-                    continue
-                else:
-                    self.log.debug("%s is running" % worker_job)
-                    ret = False
-            else:
-                self.log.debug("%s is unassigned" % server_job)
-                ret = False
-        return ret
-
-    def waitUntilSettled(self):
-        self.log.debug("Waiting until settled...")
-        start = time.time()
-        while True:
-            if time.time() - start > 10:
-                print 'queue status:',
-                print self.sched.trigger_event_queue.empty(),
-                print self.sched.result_event_queue.empty(),
-                print self.fake_gerrit.event_queue.empty(),
-                print self.areAllBuildsWaiting()
-                raise Exception("Timeout waiting for Zuul to settle")
-            # Make sure no new events show up while we're checking
-            self.worker.lock.acquire()
-            # have all build states propogated to zuul?
-            if self.haveAllBuildsReported():
-                # Join ensures that the queue is empty _and_ events have been
-                # processed
-                self.fake_gerrit.event_queue.join()
-                self.sched.trigger_event_queue.join()
-                self.sched.result_event_queue.join()
-                self.sched.run_handler_lock.acquire()
-                if (self.sched.trigger_event_queue.empty() and
-                    self.sched.result_event_queue.empty() and
-                    self.fake_gerrit.event_queue.empty() and
-                    not self.merge_client.build_sets and
-                    self.haveAllBuildsReported() and
-                    self.areAllBuildsWaiting()):
-                    self.sched.run_handler_lock.release()
-                    self.worker.lock.release()
-                    self.log.debug("...settled.")
-                    return
-                self.sched.run_handler_lock.release()
-            self.worker.lock.release()
-            self.sched.wake_event.wait(0.1)
-
-    def countJobResults(self, jobs, result):
-        jobs = filter(lambda x: x.result == result, jobs)
-        return len(jobs)
-
-    def getJobFromHistory(self, name):
-        history = self.worker.build_history
-        for job in history:
-            if job.name == name:
-                return job
-        raise Exception("Unable to find job %s in history" % name)
-
-    def assertEmptyQueues(self):
-        # Make sure there are no orphaned jobs
-        for pipeline in self.sched.layout.pipelines.values():
-            for queue in pipeline.queues:
-                if len(queue.queue) != 0:
-                    print 'pipeline %s queue %s contents %s' % (
-                        pipeline.name, queue.name, queue.queue)
-                self.assertEqual(len(queue.queue), 0)
-
-    def assertReportedStat(self, key, value=None, kind=None):
-        start = time.time()
-        while time.time() < (start + 5):
-            for stat in self.statsd.stats:
-                pprint.pprint(self.statsd.stats)
-                k, v = stat.split(':')
-                if key == k:
-                    if value is None and kind is None:
-                        return
-                    elif value:
-                        if value == v:
-                            return
-                    elif kind:
-                        if v.endswith('|' + kind):
-                            return
-            time.sleep(0.1)
-
-        pprint.pprint(self.statsd.stats)
-        raise Exception("Key %s not found in reported stats" % key)
-
+class TestScheduler(ZuulTestCase):
     def test_jobs_launched(self):
         "Test that jobs are launched and a change is merged"
 
@@ -1777,7 +633,7 @@
         self.fake_gerrit.addEvent(B.addApproval('APRV', 1))
         self.waitUntilSettled()
 
-        self.log.debug("len %s " % self.gerrit._change_cache.keys())
+        self.log.debug("len %s" % self.gerrit._change_cache.keys())
         # there should still be changes in the cache
         self.assertNotEqual(len(self.gerrit._change_cache.keys()), 0)
 
@@ -1808,6 +664,31 @@
         self.assertTrue(trigger.canMerge(a, mgr.getSubmitAllowNeeds()))
         trigger.maintainCache([])
 
+    def test_pipeline_requirements_closed_change(self):
+        "Test that pipeline requirements for closed changes are effective"
+        self.config.set('zuul', 'layout_config',
+                        'tests/fixtures/layout-pipeline-requirements.yaml')
+        self.sched.reconfigure(self.config)
+
+        A = self.fake_gerrit.addFakeChange('org/project', 'master', 'A',
+                                           status='MERGED')
+        self.fake_gerrit.addEvent(A.addApproval('CRVW', 2))
+        self.waitUntilSettled()
+        self.assertEqual(len(self.history), 0)
+        self.assertEqual(len(self.builds), 0)
+
+        B = self.fake_gerrit.addFakeChange('org/project', 'master', 'B',
+                                           status='MERGED')
+        B.addApproval('CRVW', 2)
+        B.addApproval('VRFY', 1)
+        self.fake_gerrit.addEvent(B.addApproval('APRV', 1))
+        self.waitUntilSettled()
+        self.assertEqual(len(self.history), 0)
+        self.assertEqual(len(self.builds), 0)
+
+        for pipeline in self.sched.layout.pipelines.values():
+            pipeline.trigger.maintainCache([])
+
     def test_build_configuration(self):
         "Test that zuul merges the right commits for testing"
 
@@ -2770,13 +1651,23 @@
         self.assertEqual(D.data['status'], 'MERGED')
         self.assertEqual(D.reported, 2)
 
+    def test_pipeline_requirements_approval_check_and_gate(self):
+        "Test pipeline requirements triggers both check and gate"
+        self.config.set('zuul', 'layout_config',
+                        'tests/fixtures/layout-pipeline-requirements.yaml')
+        self.sched.reconfigure(self.config)
+        self.registerJobs()
+        self._test_required_approval_check_and_gate()
+
     def test_required_approval_check_and_gate(self):
         "Test required-approval triggers both check and gate"
         self.config.set('zuul', 'layout_config',
                         'tests/fixtures/layout-require-approval.yaml')
         self.sched.reconfigure(self.config)
         self.registerJobs()
+        self._test_required_approval_check_and_gate()
 
+    def _test_required_approval_check_and_gate(self):
         A = self.fake_gerrit.addFakeChange('org/project', 'master', 'A')
         A.addApproval('CRVW', 2)
         # Add a too-old +1
@@ -2797,12 +1688,27 @@
         self.assertEqual(len(self.history), 2)
         self.assertEqual(self.history[1].name, 'project-gate')
 
+    def test_pipeline_requirements_approval_newer(self):
+        "Test pipeline requirements newer trigger parameter"
+        self.config.set('zuul', 'layout_config',
+                        'tests/fixtures/layout-pipeline-requirements.yaml')
+        self.sched.reconfigure(self.config)
+        self.registerJobs()
+        self._test_required_approval_newer()
+
     def test_required_approval_newer(self):
         "Test required-approval newer trigger parameter"
         self.config.set('zuul', 'layout_config',
                         'tests/fixtures/layout-require-approval.yaml')
         self.sched.reconfigure(self.config)
         self.registerJobs()
+        self._test_required_approval_newer()
+
+    def _test_required_approval_newer(self):
+        self.config.set('zuul', 'layout_config',
+                        'tests/fixtures/layout-require-approval.yaml')
+        self.sched.reconfigure(self.config)
+        self.registerJobs()
 
         A = self.fake_gerrit.addFakeChange('org/project', 'master', 'A')
         A.addApproval('CRVW', 2)
@@ -2826,12 +1732,27 @@
         self.assertEqual(len(self.history), 2)
         self.assertEqual(self.history[1].name, 'project-gate')
 
+    def test_pipeline_requirements_approval_older(self):
+        "Test pipeline requirements older trigger parameter"
+        self.config.set('zuul', 'layout_config',
+                        'tests/fixtures/layout-pipeline-requirements.yaml')
+        self.sched.reconfigure(self.config)
+        self.registerJobs()
+        self._test_required_approval_older()
+
     def test_required_approval_older(self):
         "Test required-approval older trigger parameter"
         self.config.set('zuul', 'layout_config',
                         'tests/fixtures/layout-require-approval.yaml')
         self.sched.reconfigure(self.config)
         self.registerJobs()
+        self._test_required_approval_older()
+
+    def _test_required_approval_older(self):
+        self.config.set('zuul', 'layout_config',
+                        'tests/fixtures/layout-require-approval.yaml')
+        self.sched.reconfigure(self.config)
+        self.registerJobs()
 
         A = self.fake_gerrit.addFakeChange('org/project', 'master', 'A')
         crvw = A.addApproval('CRVW', 2)
@@ -2880,10 +1801,10 @@
         statsd = extras.try_import('statsd.statsd')
         statsd.incr('test-incr')
         statsd.timing('test-timing', 3)
-        statsd.gauge('test-guage', 12)
+        statsd.gauge('test-gauge', 12)
         self.assertReportedStat('test-incr', '1|c')
         self.assertReportedStat('test-timing', '3|ms')
-        self.assertReportedStat('test-guage', '12|g')
+        self.assertReportedStat('test-gauge', '12|g')
 
     def test_stuck_job_cleanup(self):
         "Test that pending jobs are cleaned up if removed from layout"
@@ -2939,7 +1860,7 @@
         sched = zuul.scheduler.Scheduler()
         sched.registerTrigger(None, 'gerrit')
         sched.registerTrigger(None, 'timer')
-        sched.testConfig(CONFIG.get('zuul', 'layout_config'))
+        sched.testConfig(self.config.get('zuul', 'layout_config'))
 
     def test_build_description(self):
         "Test that build descriptions update"
@@ -2959,6 +1880,21 @@
         self.assertTrue(re.search("project-test2.*SUCCESS", desc))
         self.assertTrue(re.search("Reported result.*SUCCESS", desc))
 
+    def test_queue_names(self):
+        "Test shared change queue names"
+        project1 = self.sched.layout.projects['org/project1']
+        project2 = self.sched.layout.projects['org/project2']
+        q1 = self.sched.layout.pipelines['gate'].getQueue(project1)
+        q2 = self.sched.layout.pipelines['gate'].getQueue(project2)
+        self.assertEqual(q1.name, 'integration')
+        self.assertEqual(q2.name, 'integration')
+
+        self.config.set('zuul', 'layout_config',
+                        'tests/fixtures/layout-bad-queue.yaml')
+        with testtools.ExpectedException(
+            Exception, "More than one name assigned to change queue"):
+            self.sched.reconfigure(self.config)
+
     def test_queue_precedence(self):
         "Test that queue precedence works"
 
@@ -2999,7 +1935,14 @@
 
         port = self.webapp.server.socket.getsockname()[1]
 
-        f = urllib.urlopen("http://localhost:%s/status.json" % port)
+        req = urllib2.Request("http://localhost:%s/status.json" % port)
+        f = urllib2.urlopen(req)
+        headers = f.info()
+        self.assertIn('Content-Length', headers)
+        self.assertIn('Content-Type', headers)
+        self.assertEqual(headers['Content-Type'],
+                         'application/json; charset=UTF-8')
+        self.assertIn('Last-Modified', headers)
         data = f.read()
 
         self.worker.hold_jobs_in_build = False
@@ -3162,25 +2105,25 @@
         self.sched.reconfigure(self.config)
         self.registerJobs()
 
-        start = time.time()
-        failed = True
-        while ((time.time() - start) < 30):
-            if len(self.builds) == 2:
-                failed = False
-                break
-            else:
-                time.sleep(1)
-
-        if failed:
-            raise Exception("Expected jobs never ran")
-
+        # The pipeline triggers every second, so we should have seen
+        # several by now.
+        time.sleep(5)
         self.waitUntilSettled()
+
+        self.assertEqual(len(self.builds), 2)
+
         port = self.webapp.server.socket.getsockname()[1]
 
         f = urllib.urlopen("http://localhost:%s/status.json" % port)
         data = f.read()
 
         self.worker.hold_jobs_in_build = False
+        # Stop queuing timer triggered jobs so that the assertions
+        # below don't race against more jobs being queued.
+        self.config.set('zuul', 'layout_config',
+                        'tests/fixtures/layout-no-timer.yaml')
+        self.sched.reconfigure(self.config)
+        self.registerJobs()
         self.worker.release()
         self.waitUntilSettled()
 
@@ -3204,29 +2147,33 @@
     def test_idle(self):
         "Test that frequent periodic jobs work"
         self.worker.hold_jobs_in_build = True
-        self.config.set('zuul', 'layout_config',
-                        'tests/fixtures/layout-idle.yaml')
-        self.sched.reconfigure(self.config)
-        self.registerJobs()
 
-        # The pipeline triggers every second, so we should have seen
-        # several by now.
-        time.sleep(5)
-        self.waitUntilSettled()
-        self.assertEqual(len(self.builds), 2)
-        self.worker.release('.*')
-        self.waitUntilSettled()
-        self.assertEqual(len(self.builds), 0)
-        self.assertEqual(len(self.history), 2)
+        for x in range(1, 3):
+            # Test that timer triggers periodic jobs even across
+            # layout config reloads.
+            # Start timer trigger
+            self.config.set('zuul', 'layout_config',
+                            'tests/fixtures/layout-idle.yaml')
+            self.sched.reconfigure(self.config)
+            self.registerJobs()
 
-        time.sleep(5)
-        self.waitUntilSettled()
-        self.assertEqual(len(self.builds), 2)
-        self.assertEqual(len(self.history), 2)
-        self.worker.release('.*')
-        self.waitUntilSettled()
-        self.assertEqual(len(self.builds), 0)
-        self.assertEqual(len(self.history), 4)
+            # The pipeline triggers every second, so we should have seen
+            # several by now.
+            time.sleep(5)
+            self.waitUntilSettled()
+
+            # Stop queuing timer triggered jobs so that the assertions
+            # below don't race against more jobs being queued.
+            self.config.set('zuul', 'layout_config',
+                            'tests/fixtures/layout-no-timer.yaml')
+            self.sched.reconfigure(self.config)
+            self.registerJobs()
+
+            self.assertEqual(len(self.builds), 2)
+            self.worker.release('.*')
+            self.waitUntilSettled()
+            self.assertEqual(len(self.builds), 0)
+            self.assertEqual(len(self.history), x * 2)
 
     def test_check_smtp_pool(self):
         self.config.set('zuul', 'layout_config',
@@ -3261,25 +2208,22 @@
 
     def test_timer_smtp(self):
         "Test that a periodic job is triggered"
+        self.worker.hold_jobs_in_build = True
         self.config.set('zuul', 'layout_config',
                         'tests/fixtures/layout-timer-smtp.yaml')
         self.sched.reconfigure(self.config)
         self.registerJobs()
 
-        start = time.time()
-        failed = True
-        while ((time.time() - start) < 30):
-            if len(self.history) == 2:
-                failed = False
-                break
-            else:
-                time.sleep(1)
-
-        if failed:
-            raise Exception("Expected jobs never ran")
-
+        # The pipeline triggers every second, so we should have seen
+        # several by now.
+        time.sleep(5)
         self.waitUntilSettled()
 
+        self.assertEqual(len(self.builds), 2)
+        self.worker.release('.*')
+        self.waitUntilSettled()
+        self.assertEqual(len(self.history), 2)
+
         self.assertEqual(self.getJobFromHistory(
             'project-bitrot-stable-old').result, 'SUCCESS')
         self.assertEqual(self.getJobFromHistory(
@@ -3298,6 +2242,15 @@
         self.assertIn('Subject: Periodic check for org/project succeeded',
                       self.smtp_messages[0]['headers'])
 
+        # Stop queuing timer triggered jobs and let any that may have
+        # queued through so that end of test assertions pass.
+        self.config.set('zuul', 'layout_config',
+                        'tests/fixtures/layout-no-timer.yaml')
+        self.sched.reconfigure(self.config)
+        self.registerJobs()
+        self.worker.release('.*')
+        self.waitUntilSettled()
+
     def test_client_enqueue(self):
         "Test that the RPC client can enqueue a change"
         A = self.fake_gerrit.addFakeChange('org/project', 'master', 'A')
@@ -3956,3 +2909,47 @@
 
         running_items = client.get_running_jobs()
         self.assertEqual(0, len(running_items))
+
+    def test_nonvoting_pipeline(self):
+        "Test that a nonvoting pipeline (experimental) can still report"
+
+        A = self.fake_gerrit.addFakeChange('org/experimental-project',
+                                           'master', 'A')
+        self.fake_gerrit.addEvent(A.getPatchsetCreatedEvent(1))
+        self.waitUntilSettled()
+        self.assertEqual(
+            self.getJobFromHistory('experimental-project-test').result,
+            'SUCCESS')
+        self.assertEqual(A.reported, 1)
+
+    def test_old_patchset_doesnt_trigger(self):
+        "Test that jobs never run against old patchsets"
+        self.config.set('zuul', 'layout_config',
+                        'tests/fixtures/layout-current-patchset.yaml')
+        self.sched.reconfigure(self.config)
+        self.registerJobs()
+        # Create two patchsets and let their tests settle out. Then
+        # comment on first patchset and check that no additional
+        # jobs are run.
+        A = self.fake_gerrit.addFakeChange('org/project', 'master', 'A')
+        # Added because the layout file really wants an approval but this
+        # doesn't match anyways.
+        self.fake_gerrit.addEvent(A.addApproval('CRVW', 1))
+        self.waitUntilSettled()
+        A.addPatchset()
+        self.fake_gerrit.addEvent(A.addApproval('CRVW', 1))
+        self.waitUntilSettled()
+
+        old_history_count = len(self.history)
+        self.assertEqual(old_history_count, 2)  # one job for each ps
+        self.fake_gerrit.addEvent(A.getChangeCommentEvent(1))
+        self.waitUntilSettled()
+
+        # Assert no new jobs ran after event for old patchset.
+        self.assertEqual(len(self.history), old_history_count)
+
+        # The last thing we did was add an event for a change then do
+        # nothing with a pipeline, so it will be in the cache;
+        # clean it up so it does not fail the test.
+        for pipeline in self.sched.layout.pipelines.values():
+            pipeline.trigger.maintainCache([])
diff --git a/tests/test_stack_dump.py b/tests/test_stack_dump.py
index cc8cf8f..824e04c 100644
--- a/tests/test_stack_dump.py
+++ b/tests/test_stack_dump.py
@@ -17,7 +17,7 @@
 import signal
 import testtools
 
-import zuul.cmd.server
+import zuul.cmd
 
 
 class TestStackDump(testtools.TestCase):
@@ -29,6 +29,6 @@
     def test_stack_dump_logs(self):
         "Test that stack dumps end up in logs."
 
-        zuul.cmd.server.stack_dump_handler(signal.SIGUSR2, None)
+        zuul.cmd.stack_dump_handler(signal.SIGUSR2, None)
         self.assertIn("Thread", self.log_fixture.output)
         self.assertIn("test_stack_dump_logs", self.log_fixture.output)
diff --git a/zuul/cmd/__init__.py b/zuul/cmd/__init__.py
index e69de29..8ac3368 100644
--- a/zuul/cmd/__init__.py
+++ b/zuul/cmd/__init__.py
@@ -0,0 +1,88 @@
+#!/usr/bin/env python
+# Copyright 2012 Hewlett-Packard Development Company, L.P.
+# Copyright 2013 OpenStack Foundation
+#
+# Licensed under the Apache License, Version 2.0 (the "License"); you may
+# not use this file except in compliance with the License. You may obtain
+# a copy of the License at
+#
+#      http://www.apache.org/licenses/LICENSE-2.0
+#
+# Unless required by applicable law or agreed to in writing, software
+# distributed under the License is distributed on an "AS IS" BASIS, WITHOUT
+# WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. See the
+# License for the specific language governing permissions and limitations
+# under the License.
+
+import ConfigParser
+import cStringIO
+import extras
+import logging
+import logging.config
+import os
+import signal
+import sys
+import traceback
+
+yappi = extras.try_import('yappi')
+
+# No zuul imports here because they pull in paramiko which must not be
+# imported until after the daemonization.
+# https://github.com/paramiko/paramiko/issues/59
+# Similar situation with gear and statsd.
+
+
+def stack_dump_handler(signum, frame):
+    signal.signal(signal.SIGUSR2, signal.SIG_IGN)
+    log_str = ""
+    for thread_id, stack_frame in sys._current_frames().items():
+        log_str += "Thread: %s\n" % thread_id
+        log_str += "".join(traceback.format_stack(stack_frame))
+    log = logging.getLogger("zuul.stack_dump")
+    log.debug(log_str)
+    if yappi:
+        if not yappi.is_running():
+            yappi.start()
+        else:
+            yappi.stop()
+            yappi_out = cStringIO.StringIO()
+            yappi.get_func_stats().print_all(out=yappi_out)
+            yappi.get_thread_stats().print_all(out=yappi_out)
+            log.debug(yappi_out.getvalue())
+            yappi_out.close()
+            yappi.clear_stats()
+    signal.signal(signal.SIGUSR2, stack_dump_handler)
+
+
+class ZuulApp(object):
+
+    def __init__(self):
+        self.args = None
+        self.config = None
+
+    def _get_version(self):
+        from zuul.version import version_info as zuul_version_info
+        return "Zuul version: %s" % zuul_version_info.release_string()
+
+    def read_config(self):
+        self.config = ConfigParser.ConfigParser()
+        if self.args.config:
+            locations = [self.args.config]
+        else:
+            locations = ['/etc/zuul/zuul.conf',
+                         '~/zuul.conf']
+        for fp in locations:
+            if os.path.exists(os.path.expanduser(fp)):
+                self.config.read(os.path.expanduser(fp))
+                return
+        raise Exception("Unable to locate config file in %s" % locations)
+
+    def setup_logging(self, section, parameter):
+        if self.config.has_option(section, parameter):
+            fp = os.path.expanduser(self.config.get(section, parameter))
+            if not os.path.exists(fp):
+                raise Exception("Unable to read logging config file at %s" %
+                                fp)
+            logging.config.fileConfig(fp)
+        else:
+            logging.basicConfig(level=logging.DEBUG)
diff --git a/zuul/cmd/client.py b/zuul/cmd/client.py
index 147fade..766a4ef 100644
--- a/zuul/cmd/client.py
+++ b/zuul/cmd/client.py
@@ -16,26 +16,20 @@
 
 import argparse
 import babel.dates
-import ConfigParser
 import datetime
 import logging
-import logging.config
-import os
 import prettytable
 import sys
 import time
 
+
 import zuul.rpcclient
+import zuul.cmd
 
 
-class Client(object):
+class Client(zuul.cmd.ZuulApp):
     log = logging.getLogger("zuul.Client")
 
-    def __init__(self):
-        self.args = None
-        self.config = None
-        self.gear_server_pid = None
-
     def parse_arguments(self):
         parser = argparse.ArgumentParser(
             description='Zuul Project Gating System Client.')
@@ -89,24 +83,8 @@
 
         self.args = parser.parse_args()
 
-    def _get_version(self):
-        from zuul.version import version_info as zuul_version_info
-        return "Zuul version: %s" % zuul_version_info.version_string()
-
-    def read_config(self):
-        self.config = ConfigParser.ConfigParser()
-        if self.args.config:
-            locations = [self.args.config]
-        else:
-            locations = ['/etc/zuul/zuul.conf',
-                         '~/zuul.conf']
-        for fp in locations:
-            if os.path.exists(os.path.expanduser(fp)):
-                self.config.read(os.path.expanduser(fp))
-                return
-        raise Exception("Unable to locate config file in %s" % locations)
-
     def setup_logging(self):
+        """Client logging does not rely on conf file"""
         if self.args.verbose:
             logging.basicConfig(level=logging.DEBUG)
 
diff --git a/zuul/cmd/merger.py b/zuul/cmd/merger.py
index edf8da9..dc3484a 100644
--- a/zuul/cmd/merger.py
+++ b/zuul/cmd/merger.py
@@ -15,7 +15,6 @@
 # under the License.
 
 import argparse
-import ConfigParser
 import daemon
 import extras
 
@@ -23,12 +22,11 @@
 # instead it depends on lockfile-0.9.1 which uses pidfile.
 pid_file_module = extras.try_imports(['daemon.pidlockfile', 'daemon.pidfile'])
 
-import logging
-import logging.config
 import os
 import sys
 import signal
-import traceback
+
+import zuul.cmd
 
 # No zuul imports here because they pull in paramiko which must not be
 # imported until after the daemonization.
@@ -36,21 +34,7 @@
 # Similar situation with gear and statsd.
 
 
-def stack_dump_handler(signum, frame):
-    signal.signal(signal.SIGUSR2, signal.SIG_IGN)
-    log_str = ""
-    for thread_id, stack_frame in sys._current_frames().items():
-        log_str += "Thread: %s\n" % thread_id
-        log_str += "".join(traceback.format_stack(stack_frame))
-    log = logging.getLogger("zuul.stack_dump")
-    log.debug(log_str)
-    signal.signal(signal.SIGUSR2, stack_dump_handler)
-
-
-class Merger(object):
-    def __init__(self):
-        self.args = None
-        self.config = None
+class Merger(zuul.cmd.ZuulApp):
 
     def parse_arguments(self):
         parser = argparse.ArgumentParser(description='Zuul merge worker.')
@@ -58,33 +42,11 @@
                             help='specify the config file')
         parser.add_argument('-d', dest='nodaemon', action='store_true',
                             help='do not run as a daemon')
-        parser.add_argument('--version', dest='version', action='store_true',
+        parser.add_argument('--version', dest='version', action='version',
+                            version=self._get_version(),
                             help='show zuul version')
         self.args = parser.parse_args()
 
-    def read_config(self):
-        self.config = ConfigParser.ConfigParser()
-        if self.args.config:
-            locations = [self.args.config]
-        else:
-            locations = ['/etc/zuul/zuul.conf',
-                         '~/zuul.conf']
-        for fp in locations:
-            if os.path.exists(os.path.expanduser(fp)):
-                self.config.read(os.path.expanduser(fp))
-                return
-        raise Exception("Unable to locate config file in %s" % locations)
-
-    def setup_logging(self, section, parameter):
-        if self.config.has_option(section, parameter):
-            fp = os.path.expanduser(self.config.get(section, parameter))
-            if not os.path.exists(fp):
-                raise Exception("Unable to read logging config file at %s" %
-                                fp)
-            logging.config.fileConfig(fp)
-        else:
-            logging.basicConfig(level=logging.DEBUG)
-
     def exit_handler(self, signum, frame):
         signal.signal(signal.SIGUSR1, signal.SIG_IGN)
         self.merger.stop()
@@ -100,7 +62,7 @@
         self.merger.start()
 
         signal.signal(signal.SIGUSR1, self.exit_handler)
-        signal.signal(signal.SIGUSR2, stack_dump_handler)
+        signal.signal(signal.SIGUSR2, zuul.cmd.stack_dump_handler)
         while True:
             try:
                 signal.pause()
@@ -113,11 +75,6 @@
     server = Merger()
     server.parse_arguments()
 
-    if server.args.version:
-        from zuul.version import version_info as zuul_version_info
-        print "Zuul version: %s" % zuul_version_info.version_string()
-        sys.exit(0)
-
     server.read_config()
 
     if server.config.has_option('zuul', 'state_dir'):
diff --git a/zuul/cmd/server.py b/zuul/cmd/server.py
index 8caa1fd..d7de85a 100755
--- a/zuul/cmd/server.py
+++ b/zuul/cmd/server.py
@@ -15,7 +15,6 @@
 # under the License.
 
 import argparse
-import ConfigParser
 import daemon
 import extras
 
@@ -24,11 +23,11 @@
 pid_file_module = extras.try_imports(['daemon.pidlockfile', 'daemon.pidfile'])
 
 import logging
-import logging.config
 import os
 import sys
 import signal
-import traceback
+
+import zuul.cmd
 
 # No zuul imports here because they pull in paramiko which must not be
 # imported until after the daemonization.
@@ -36,21 +35,9 @@
 # Similar situation with gear and statsd.
 
 
-def stack_dump_handler(signum, frame):
-    signal.signal(signal.SIGUSR2, signal.SIG_IGN)
-    log_str = ""
-    for thread_id, stack_frame in sys._current_frames().items():
-        log_str += "Thread: %s\n" % thread_id
-        log_str += "".join(traceback.format_stack(stack_frame))
-    log = logging.getLogger("zuul.stack_dump")
-    log.debug(log_str)
-    signal.signal(signal.SIGUSR2, stack_dump_handler)
-
-
-class Server(object):
+class Server(zuul.cmd.ZuulApp):
     def __init__(self):
-        self.args = None
-        self.config = None
+        super(Server, self).__init__()
         self.gear_server_pid = None
 
     def parse_arguments(self):
@@ -71,33 +58,6 @@
                             help='show zuul version')
         self.args = parser.parse_args()
 
-    def _get_version(self):
-        from zuul.version import version_info as zuul_version_info
-        return "Zuul version: %s" % zuul_version_info.version_string()
-
-    def read_config(self):
-        self.config = ConfigParser.ConfigParser()
-        if self.args.config:
-            locations = [self.args.config]
-        else:
-            locations = ['/etc/zuul/zuul.conf',
-                         '~/zuul.conf']
-        for fp in locations:
-            if os.path.exists(os.path.expanduser(fp)):
-                self.config.read(os.path.expanduser(fp))
-                return
-        raise Exception("Unable to locate config file in %s" % locations)
-
-    def setup_logging(self, section, parameter):
-        if self.config.has_option(section, parameter):
-            fp = os.path.expanduser(self.config.get(section, parameter))
-            if not os.path.exists(fp):
-                raise Exception("Unable to read logging config file at %s" %
-                                fp)
-            logging.config.fileConfig(fp)
-        else:
-            logging.basicConfig(level=logging.DEBUG)
-
     def reconfigure_handler(self, signum, frame):
         signal.signal(signal.SIGHUP, signal.SIG_IGN)
         self.read_config()
@@ -203,7 +163,11 @@
         merger = zuul.merger.client.MergeClient(self.config, self.sched)
         gerrit = zuul.trigger.gerrit.Gerrit(self.config, self.sched)
         timer = zuul.trigger.timer.Timer(self.config, self.sched)
-        webapp = zuul.webapp.WebApp(self.sched)
+        if self.config.has_option('zuul', 'status_expiry'):
+            cache_expiry = self.config.getint('zuul', 'status_expiry')
+        else:
+            cache_expiry = 1
+        webapp = zuul.webapp.WebApp(self.sched, cache_expiry=cache_expiry)
         rpc = zuul.rpclistener.RPCListener(self.config, self.sched)
         gerrit_reporter = zuul.reporter.gerrit.Reporter(gerrit)
         smtp_reporter = zuul.reporter.smtp.Reporter(
@@ -235,7 +199,7 @@
 
         signal.signal(signal.SIGHUP, self.reconfigure_handler)
         signal.signal(signal.SIGUSR1, self.exit_handler)
-        signal.signal(signal.SIGUSR2, stack_dump_handler)
+        signal.signal(signal.SIGUSR2, zuul.cmd.stack_dump_handler)
         signal.signal(signal.SIGTERM, self.term_handler)
         while True:
             try:
diff --git a/zuul/launcher/gearman.py b/zuul/launcher/gearman.py
index b0d8546..ada9c70 100644
--- a/zuul/launcher/gearman.py
+++ b/zuul/launcher/gearman.py
@@ -436,7 +436,7 @@
 
     def onWorkStatus(self, job):
         data = getJobData(job)
-        self.log.debug("Build %s update %s " % (job, data))
+        self.log.debug("Build %s update %s" % (job, data))
         build = self.builds.get(job.unique)
         if build:
             # Allow URL to be updated
diff --git a/zuul/layoutvalidator.py b/zuul/layoutvalidator.py
index 15aa687..9a448a3 100644
--- a/zuul/layoutvalidator.py
+++ b/zuul/layoutvalidator.py
@@ -68,6 +68,12 @@
                                'subject': str,
                                },
                       }
+
+    require = {'approval': toList(require_approval),
+               'open': bool,
+               'current-patchset': bool,
+               'status': toList(str)}
+
     window = v.All(int, v.Range(min=0))
     window_floor = v.All(int, v.Range(min=1))
     window_type = v.Any('linear', 'exponential')
@@ -77,6 +83,7 @@
                 v.Required('manager'): manager,
                 'precedence': precedence,
                 'description': str,
+                'require': require,
                 'success-message': str,
                 'failure-message': str,
                 'merge-failure-message': str,
@@ -108,6 +115,7 @@
              }
 
     job = {v.Required('name'): str,
+           'queue-name': str,
            'failure-message': str,
            'success-message': str,
            'failure-pattern': str,
diff --git a/zuul/merger/merger.py b/zuul/merger/merger.py
index f150771..9e1f4c1 100644
--- a/zuul/merger/merger.py
+++ b/zuul/merger/merger.py
@@ -132,14 +132,14 @@
 
     def createZuulRef(self, ref, commit='HEAD'):
         repo = self.createRepoObject()
-        self.log.debug("CreateZuulRef %s at %s " % (ref, commit))
+        self.log.debug("CreateZuulRef %s at %s" % (ref, commit))
         ref = ZuulReference.create(repo, ref, commit)
         return ref.commit
 
     def push(self, local, remote):
         repo = self.createRepoObject()
-        self.log.debug("Pushing %s:%s to %s " % (local, remote,
-                                                 self.remote_url))
+        self.log.debug("Pushing %s:%s to %s" % (local, remote,
+                                                self.remote_url))
         repo.remotes.origin.push('%s:%s' % (local, remote))
 
     def update(self):
diff --git a/zuul/model.py b/zuul/model.py
index 2f4110f..a97c541 100644
--- a/zuul/model.py
+++ b/zuul/model.py
@@ -57,6 +57,11 @@
     raise Exception("Unable to parse time value: %s" % s)
 
 
+def normalizeCategory(name):
+    name = name.lower()
+    return re.sub(' ', '-', name)
+
+
 class Pipeline(object):
     """A top-level pipeline such as check, gate, post, etc."""
     def __init__(self, name):
@@ -240,17 +245,6 @@
             items.extend(shared_queue.queue)
         return items
 
-    def formatStatusHTML(self):
-        ret = ''
-        for queue in self.queues:
-            if len(self.queues) > 1:
-                s = 'Change queue: %s' % queue.name
-                ret += s + '\n'
-                ret += '-' * len(s) + '\n'
-            for item in queue.queue:
-                ret += self.formatStatus(item, html=True)
-        return ret
-
     def formatStatusJSON(self):
         j_pipeline = dict(name=self.name,
                           description=self.description)
@@ -282,7 +276,7 @@
 
 
 class ActionReporter(object):
-    """An ActionReporter has a reporter and its configured paramaters"""
+    """An ActionReporter has a reporter and its configured parameters"""
 
     def __repr__(self):
         return '<ActionReporter %s, %s>' % (self.reporter, self.params)
@@ -314,6 +308,8 @@
                  window_decrease_type='exponential', window_decrease_factor=2):
         self.pipeline = pipeline
         self.name = ''
+        self.assigned_name = None
+        self.generated_name = None
         self.projects = []
         self._jobs = set()
         self.queue = []
@@ -334,10 +330,21 @@
     def addProject(self, project):
         if project not in self.projects:
             self.projects.append(project)
+            self._jobs |= set(self.pipeline.getJobTree(project).getJobs())
+
             names = [x.name for x in self.projects]
             names.sort()
-            self.name = ', '.join(names)
-            self._jobs |= set(self.pipeline.getJobTree(project).getJobs())
+            self.generated_name = ', '.join(names)
+
+            for job in self._jobs:
+                if job.queue_name:
+                    if (self.assigned_name and
+                        job.queue_name != self.assigned_name):
+                        raise Exception("More than one name assigned to "
+                                        "change queue: %s != %s" %
+                                        (self.assigned_name, job.queue_name))
+                    self.assigned_name = job.queue_name
+            self.name = self.assigned_name or self.generated_name
 
     def enqueueChange(self, change):
         item = QueueItem(self.pipeline, change)
@@ -431,6 +438,7 @@
     def __init__(self, name):
         # If you add attributes here, be sure to add them to the copy method.
         self.name = name
+        self.queue_name = None
         self.failure_message = None
         self.success_message = None
         self.failure_pattern = None
@@ -830,6 +838,8 @@
         self.is_merged = False
         self.failed_to_merge = False
         self.approvals = []
+        self.open = None
+        self.status = None
 
     def _id(self):
         return '%s,%s' % (self.number, self.patchset)
@@ -1009,6 +1019,9 @@
         if self.event_approvals:
             ret += ' event_approvals: %s' % ', '.join(
                 ['%s:%s' % a for a in self.event_approvals.items()])
+        if self.require_approvals:
+            ret += ' require_approvals: %s' % ', '.join(
+                ['%s' % a for a in self.require_approvals])
         if self._comment_filters:
             ret += ' comment_filters: %s' % ', '.join(self._comment_filters)
         if self._email_filters:
@@ -1022,10 +1035,6 @@
         return ret
 
     def matches(self, event, change):
-        def normalizeCategory(name):
-            name = name.lower()
-            return re.sub(' ', '-', name)
-
         # event types are ORed
         matches_type = False
         for etype in self.types:
@@ -1140,6 +1149,90 @@
         return True
 
 
+class ChangeishFilter(object):
+    def __init__(self, open=None, current_patchset=None,
+                 statuses=[], approvals=[]):
+        self.open = open
+        self.current_patchset = current_patchset
+        self.statuses = statuses
+        self.approvals = approvals
+
+        for a in self.approvals:
+            if 'older-than' in a:
+                a['older-than'] = time_to_seconds(a['older-than'])
+            if 'newer-than' in a:
+                a['newer-than'] = time_to_seconds(a['newer-than'])
+            if 'email-filter' in a:
+                a['email-filter'] = re.compile(a['email-filter'])
+
+    def __repr__(self):
+        ret = '<ChangeishFilter'
+
+        if self.open is not None:
+            ret += ' open: %s' % self.open
+        if self.current_patchset is not None:
+            ret += ' current-patchset: %s' % self.current_patchset
+        if self.statuses:
+            ret += ' statuses: %s' % ', '.join(self.statuses)
+        if self.approvals:
+            ret += ' approvals: %s' % str(self.approvals)
+        ret += '>'
+
+        return ret
+
+    def matches(self, change):
+        if self.open is not None:
+            if self.open != change.open:
+                return False
+
+        if self.current_patchset is not None:
+            if self.current_patchset != change.is_current_patchset:
+                return False
+
+        if self.statuses:
+            if change.status not in self.statuses:
+                return False
+
+        if self.approvals and not change.approvals:
+            # A change with no approvals can not match
+            return False
+
+        now = time.time()
+        for rapproval in self.approvals:
+            matches_approval = False
+            for approval in change.approvals:
+                if 'description' not in approval:
+                    continue
+                found_approval = True
+                by = approval.get('by', {})
+                for k, v in rapproval.items():
+                    if k == 'username':
+                        if (by.get('username', '') != v):
+                            found_approval = False
+                    elif k == 'email-filter':
+                        if (not v.search(by.get('email', ''))):
+                            found_approval = False
+                    elif k == 'newer-than':
+                        t = now - v
+                        if (approval['grantedOn'] < t):
+                            found_approval = False
+                    elif k == 'older-than':
+                        t = now - v
+                        if (approval['grantedOn'] >= t):
+                            found_approval = False
+                    else:
+                        if (normalizeCategory(approval['description']) != k or
+                            int(approval['value']) != v):
+                            found_approval = False
+                if found_approval:
+                    matches_approval = True
+                    break
+            if not matches_approval:
+                return False
+
+        return True
+
+
 class Layout(object):
     def __init__(self):
         self.projects = {}
diff --git a/zuul/reporter/gerrit.py b/zuul/reporter/gerrit.py
index cceacca..7c4774b 100644
--- a/zuul/reporter/gerrit.py
+++ b/zuul/reporter/gerrit.py
@@ -30,10 +30,6 @@
         """Send a message to gerrit."""
         self.log.debug("Report change %s, params %s, message: %s" %
                        (change, params, message))
-        if not params:
-            self.log.debug("Not reporting change %s: No params specified." %
-                           change)
-            return
         changeid = '%s,%s' % (change.number, change.patchset)
         change._ref_sha = self.trigger.getRefSha(change.project.name,
                                                  'refs/heads/' + change.branch)
diff --git a/zuul/scheduler.py b/zuul/scheduler.py
index f5d6629..922d815 100644
--- a/zuul/scheduler.py
+++ b/zuul/scheduler.py
@@ -29,7 +29,8 @@
 
 import layoutvalidator
 import model
-from model import ActionReporter, Pipeline, Project, ChangeQueue, EventFilter
+from model import ActionReporter, Pipeline, Project, ChangeQueue
+from model import EventFilter, ChangeishFilter
 from zuul import version as zuul_version
 
 statsd = extras.try_import('statsd.statsd')
@@ -273,6 +274,15 @@
             pipeline.setManager(manager)
             layout.pipelines[conf_pipeline['name']] = pipeline
 
+            if 'require' in conf_pipeline:
+                require = conf_pipeline['require']
+                f = ChangeishFilter(
+                    open=require.get('open'),
+                    current_patchset=require.get('current-patchset'),
+                    statuses=toList(require.get('status')),
+                    approvals=toList(require.get('approval')))
+                manager.changeish_filters.append(f)
+
             # TODO: move this into triggers (may require pluggable
             # configuration)
             if 'gerrit' in conf_pipeline['trigger']:
@@ -315,6 +325,9 @@
             job = layout.getJob(config_job['name'])
             # Be careful to only set attributes explicitly present on
             # this job, to avoid squashing attributes set by a meta-job.
+            m = config_job.get('queue-name', None)
+            if m:
+                job.queue_name = m
             m = config_job.get('failure-message', None)
             if m:
                 job.failure_message = m
@@ -838,29 +851,6 @@
             return
         pipeline.manager.onMergeCompleted(event)
 
-    def formatStatusHTML(self):
-        ret = '<html><pre>'
-        if self._pause:
-            ret += '<p><b>Queue only mode:</b> preparing to '
-            if self._exit:
-                ret += 'exit'
-            ret += ', queue length: %s' % self.trigger_event_queue.qsize()
-            ret += '</p>'
-
-        if self.last_reconfigured:
-            ret += '<p>Last reconfigured: %s</p>' % self.last_reconfigured
-
-        keys = self.layout.pipelines.keys()
-        for key in keys:
-            pipeline = self.layout.pipelines[key]
-            s = 'Pipeline: %s' % pipeline.name
-            ret += s + '\n'
-            ret += '-' * len(s) + '\n'
-            ret += pipeline.formatStatusHTML()
-            ret += '\n'
-        ret += '</pre></html>'
-        return ret
-
     def formatStatusJSON(self):
         data = {}
 
@@ -900,6 +890,7 @@
         self.sched = sched
         self.pipeline = pipeline
         self.event_filters = []
+        self.changeish_filters = []
         if self.sched.config and self.sched.config.has_option(
             'zuul', 'report_times'):
             self.report_times = self.sched.config.getboolean(
@@ -912,6 +903,9 @@
 
     def _postConfig(self, layout):
         self.log.info("Configured Pipeline Manager %s" % self.pipeline.name)
+        self.log.info("  Requirements:")
+        for f in self.changeish_filters:
+            self.log.info("    %s" % f)
         self.log.info("  Events:")
         for e in self.event_filters:
             self.log.info("    %s" % e)
@@ -1081,6 +1075,12 @@
                            change)
             return False
 
+        for f in self.changeish_filters:
+            if not f.matches(change):
+                self.log.debug("Change %s does not match pipeline "
+                               "requirements" % change)
+                return False
+
         if not self.enqueueChangesAhead(change, quiet):
             self.log.debug("Failed to enqueue changes ahead of %s" % change)
             return False
@@ -1114,7 +1114,7 @@
 
     def removeChange(self, change):
         # Remove a change from the queue, probably because it has been
-        # superceded by another change.
+        # superseded by another change.
         for item in self.pipeline.getAllItems():
             if item.change == change:
                 self.log.debug("Canceling builds behind change: %s "
@@ -1665,7 +1665,8 @@
         self.log.info("  Shared change queues:")
         for queue in new_change_queues:
             self.pipeline.addQueue(queue)
-            self.log.info("    %s" % queue)
+            self.log.info("    %s containing %s" % (
+                queue, queue.generated_name))
 
     def combineChangeQueues(self, change_queues):
         self.log.debug("Combining shared queues")
diff --git a/zuul/trigger/gerrit.py b/zuul/trigger/gerrit.py
index dc66f97..a6eedb1 100644
--- a/zuul/trigger/gerrit.py
+++ b/zuul/trigger/gerrit.py
@@ -363,6 +363,8 @@
                     change.needed_by_changes.append(dep)
 
         change.approvals = data['currentPatchSet'].get('approvals', [])
+        change.open = data['open']
+        change.status = data['status']
 
         return change
 
diff --git a/zuul/webapp.py b/zuul/webapp.py
index 6b04384..4d6115f 100644
--- a/zuul/webapp.py
+++ b/zuul/webapp.py
@@ -15,19 +15,24 @@
 
 import logging
 import threading
+import time
 from paste import httpserver
-from webob import Request
+import webob
+from webob import dec
 
 
 class WebApp(threading.Thread):
     log = logging.getLogger("zuul.WebApp")
 
-    def __init__(self, scheduler, port=8001):
+    def __init__(self, scheduler, port=8001, cache_expiry=1):
         threading.Thread.__init__(self)
         self.scheduler = scheduler
         self.port = port
+        self.cache_expiry = cache_expiry
+        self.cache_time = 0
+        self.cache = None
         self.daemon = True
-        self.server = httpserver.serve(self.app, host='0.0.0.0',
+        self.server = httpserver.serve(dec.wsgify(self.app), host='0.0.0.0',
                                        port=self.port, start_loop=False)
 
     def run(self):
@@ -36,17 +41,21 @@
     def stop(self):
         self.server.server_close()
 
-    def app(self, environ, start_response):
-        request = Request(environ)
-        if request.path == '/status.json':
+    def app(self, request):
+        if request.path != '/status.json':
+            raise webob.exc.HTTPNotFound()
+        if (not self.cache or
+            (time.time() - self.cache_time) > self.cache_expiry):
             try:
-                ret = self.scheduler.formatStatusJSON()
+                self.cache = self.scheduler.formatStatusJSON()
+                # Call time.time() again because formatting above may take
+                # longer than the cache timeout.
+                self.cache_time = time.time()
             except:
                 self.log.exception("Exception formatting status:")
                 raise
-            start_response('200 OK', [('content-type', 'application/json'),
-                                      ('Access-Control-Allow-Origin', '*')])
-            return [ret]
-        else:
-            start_response('404 Not Found', [('content-type', 'text/plain')])
-            return ['Not found.']
+        response = webob.Response(body=self.cache,
+                                  content_type='application/json')
+        response.headers['Access-Control-Allow-Origin'] = '*'
+        response.last_modified = self.cache_time
+        return response