Merge "cleaning up index.rst file"
diff --git a/etc/status/public_html/app.js b/etc/status/public_html/app.js
deleted file mode 100644
index 8616201..0000000
--- a/etc/status/public_html/app.js
+++ /dev/null
@@ -1,917 +0,0 @@
-// Client script for Zuul status page
-//
-// Copyright 2012 OpenStack Foundation
-// Copyright 2013 Timo Tijhof
-// Copyright 2013 Wikimedia Foundation
-// Copyright 2014 Rackspace Australia
-//
-// Licensed under the Apache License, Version 2.0 (the "License"); you may
-// not use this file except in compliance with the License. You may obtain
-// a copy of the License at
-//
-//      http://www.apache.org/licenses/LICENSE-2.0
-//
-// Unless required by applicable law or agreed to in writing, software
-// distributed under the License is distributed on an "AS IS" BASIS, WITHOUT
-// WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. See the
-// License for the specific language governing permissions and limitations
-// under the License.
-'use strict';
-
-(function ($) {
-    var $container, $msg, $indicator, $queueInfo, $queueEventsNum,
-        $queueResultsNum, $pipelines, $jq;
-    var xhr, zuul,
-        zuul_graph_update_count = 0,
-        zuul_sparkline_urls = {},
-        current_filter = '',
-        demo = location.search.match(/[?&]demo=([^?&]*)/),
-        source_url = location.search.match(/[?&]source_url=([^?&]*)/),
-        source = demo ?
-            './status-' + (demo[1] || 'basic') + '.json-sample' :
-            'status.json';
-    source = source_url ? source_url[1] : source;
-
-    function set_cookie(name, value) {
-        document.cookie = name + '=' + value + '; path=/';
-    }
-
-    function read_cookie(name, default_value) {
-        var nameEQ = name + '=';
-        var ca = document.cookie.split(';');
-        for(var i=0;i < ca.length;i++) {
-            var c = ca[i];
-            while (c.charAt(0) === ' ') {
-                c = c.substring(1, c.length);
-            }
-            if (c.indexOf(nameEQ) === 0) {
-                return c.substring(nameEQ.length, c.length);
-            }
-        }
-        return default_value;
-    }
-
-
-    zuul = {
-        enabled: true,
-        graphite_url: '',
-        collapsed_exceptions: [],
-
-        schedule: function () {
-            if (!zuul.enabled) {
-                setTimeout(zuul.schedule, 5000);
-                return;
-            }
-            zuul.update().complete(function () {
-                setTimeout(zuul.schedule, 5000);
-            });
-
-            /* Only update graphs every minute */
-            if (zuul_graph_update_count > 11) {
-                zuul_graph_update_count = 0;
-                zuul.update_sparklines();
-            }
-        },
-
-        /** @return {jQuery.Promise} */
-        update: function () {
-            // Cancel the previous update if it hasn't completed yet.
-            if (xhr) {
-                xhr.abort();
-            }
-
-            zuul.emit('update-start');
-
-            xhr = $.getJSON(source)
-                .done(function (data) {
-                    if ('message' in data) {
-                        $msg.removeClass('alert-danger').addClass('alert-info');
-                        $msg.text(data.message);
-                        $msg.show();
-                    } else {
-                        $msg.empty();
-                        $msg.hide();
-                    }
-
-                    if ('zuul_version' in data) {
-                        $('#zuul-version-span').text(data.zuul_version);
-                    }
-                    if ('last_reconfigured' in data) {
-                        var last_reconfigured =
-                            new Date(data.last_reconfigured);
-                        $('#last-reconfigured-span').text(
-                            last_reconfigured.toString());
-                    }
-
-                    $pipelines.html('');
-                    $.each(data.pipelines, function (i, pipeline) {
-                        $pipelines.append(zuul.format.pipeline(pipeline));
-                    });
-
-                    $queueEventsNum.text(
-                        data.trigger_event_queue ?
-                            data.trigger_event_queue.length : '0'
-                    );
-                    $queueResultsNum.text(
-                        data.result_event_queue ?
-                            data.result_event_queue.length : '0'
-                    );
-                })
-                .fail(function (err, jqXHR, errMsg) {
-                    $msg.text(source + ': ' + errMsg).show();
-                    $msg.removeClass('zuul-msg-wrap-off');
-                })
-                .complete(function () {
-                    xhr = undefined;
-                    zuul.emit('update-end');
-                });
-
-            return xhr;
-        },
-
-        update_sparklines: function() {
-            $.each(zuul_sparkline_urls, function(name, url) {
-                var newimg = new Image();
-                var parts = url.split('#');
-                newimg.src = parts[0] + '#' + new Date().getTime();
-                $(newimg).load(function (x) {
-                    zuul_sparkline_urls[name] = newimg.src;
-                });
-            });
-        },
-
-        format: {
-            job: function(job) {
-                var $job_line = $('<span />');
-
-                if (job.url !== null) {
-                    $job_line.append(
-                        $('<a />')
-                            .addClass('zuul-job-name')
-                            .attr('href', job.url)
-                            .text(job.name)
-                    );
-                }
-                else {
-                    $job_line.append(
-                        $('<span />')
-                            .addClass('zuul-job-name')
-                            .text(job.name)
-                    );
-                }
-
-                $job_line.append(zuul.format.job_status(job));
-
-                if (job.voting === false) {
-                    $job_line.append(
-                        $(' <small />')
-                            .addClass('zuul-non-voting-desc')
-                            .text(' (non-voting)')
-                    );
-                }
-
-                return $job_line;
-            },
-
-            job_status: function(job) {
-                var result = job.result ? job.result.toLowerCase() : null;
-                if (result === null) {
-                    result = job.url ? 'in progress' : 'queued';
-                }
-
-                if (result === 'in progress') {
-                    return zuul.format.job_progress_bar(job.elapsed_time,
-                                                        job.remaining_time);
-                }
-                else {
-                    return zuul.format.status_label(result);
-                }
-            },
-
-            status_label: function(result) {
-                var $status = $('<span />');
-                $status.addClass('zuul-job-result label');
-
-                switch (result) {
-                    case 'success':
-                        $status.addClass('label-success');
-                        break;
-                    case 'failure':
-                        $status.addClass('label-danger');
-                        break;
-                    case 'unstable':
-                        $status.addClass('label-warning');
-                        break;
-                    case 'in progress':
-                    case 'queued':
-                    case 'lost':
-                        $status.addClass('label-default');
-                        break;
-                }
-                $status.text(result);
-                return $status;
-            },
-
-            job_progress_bar: function(elapsed_time, remaining_time) {
-                var progress_percent = 100 * (elapsed_time / (elapsed_time +
-                                                              remaining_time));
-                var $bar_inner = $('<div />')
-                    .addClass('progress-bar')
-                    .attr('role', 'progressbar')
-                    .attr('aria-valuenow', 'progressbar')
-                    .attr('aria-valuemin', progress_percent)
-                    .attr('aria-valuemin', '0')
-                    .attr('aria-valuemax', '100')
-                    .css('width', progress_percent + '%');
-
-                var $bar_outter = $('<div />')
-                    .addClass('progress zuul-job-result')
-                    .append($bar_inner);
-
-                return $bar_outter;
-            },
-
-            enqueue_time: function(ms) {
-                // Special format case for enqueue time to add style
-                var hours = 60 * 60 * 1000;
-                var now = Date.now();
-                var delta = now - ms;
-                var status = 'text-success';
-                var text = zuul.format.time(delta, true);
-                if (delta > (4 * hours)) {
-                    status = 'text-danger';
-                } else if (delta > (2 * hours)) {
-                    status = 'text-warning';
-                }
-                return '<span class="' + status + '">' + text + '</span>';
-            },
-
-            time: function(ms, words) {
-                if (typeof(words) === 'undefined') {
-                    words = false;
-                }
-                var seconds = (+ms)/1000;
-                var minutes = Math.floor(seconds/60);
-                var hours = Math.floor(minutes/60);
-                seconds = Math.floor(seconds % 60);
-                minutes = Math.floor(minutes % 60);
-                var r = '';
-                if (words) {
-                    if (hours) {
-                        r += hours;
-                        r += ' hr ';
-                    }
-                    r += minutes + ' min';
-                } else {
-                    if (hours < 10) {
-                        r += '0';
-                    }
-                    r += hours + ':';
-                    if (minutes < 10) {
-                        r += '0';
-                    }
-                    r += minutes + ':';
-                    if (seconds < 10) {
-                        r += '0';
-                    }
-                    r += seconds;
-                }
-                return r;
-            },
-
-            change_total_progress_bar: function(change) {
-                var job_percent = Math.floor(100 / change.jobs.length);
-                var $bar_outter = $('<div />')
-                    .addClass('progress zuul-change-total-result');
-
-                $.each(change.jobs, function (i, job) {
-                    var result = job.result ? job.result.toLowerCase() : null;
-                    if (result === null) {
-                        result = job.url ? 'in progress' : 'queued';
-                    }
-
-                    if (result !== 'queued') {
-                        var $bar_inner = $('<div />')
-                            .addClass('progress-bar');
-
-                        switch (result) {
-                            case 'success':
-                                $bar_inner.addClass('progress-bar-success');
-                                break;
-                            case 'lost':
-                            case 'failure':
-                                $bar_inner.addClass('progress-bar-danger');
-                                break;
-                            case 'unstable':
-                                $bar_inner.addClass('progress-bar-warning');
-                                break;
-                            case 'in progress':
-                            case 'queued':
-                                break;
-                        }
-                        $bar_inner.attr('title', job.name)
-                            .css('width', job_percent + '%');
-                        $bar_outter.append($bar_inner);
-                    }
-                });
-                return $bar_outter;
-            },
-
-            change_header: function(change) {
-                var change_id = change.id || 'NA';
-                if (change_id.length === 40) {
-                    change_id = change_id.substr(0, 7);
-                }
-
-                var $change_link = $('<small />');
-                if (change.url !== null) {
-                    $change_link.append(
-                        $('<a />').attr('href', change.url).text(change.id)
-                    );
-                }
-                else {
-                    $change_link.text(change_id);
-                }
-
-                var $change_progress_row_left = $('<div />')
-                    .addClass('col-xs-3')
-                    .append($change_link);
-                var $change_progress_row_right = $('<div />')
-                    .addClass('col-xs-9')
-                    .append(zuul.format.change_total_progress_bar(change));
-
-                var $change_progress_row = $('<div />')
-                    .addClass('row')
-                    .append($change_progress_row_left)
-                    .append($change_progress_row_right);
-
-                var $project_span = $('<span />')
-                    .addClass('change_project')
-                    .text(change.project);
-
-                var $left = $('<div />')
-                    .addClass('col-xs-8')
-                    .append($project_span, $change_progress_row);
-
-                var remaining_time = zuul.format.time(
-                        change.remaining_time, true);
-                var enqueue_time = zuul.format.enqueue_time(
-                        change.enqueue_time);
-                var $remaining_time = $('<small />').addClass('time')
-                    .attr('title', 'Remaining Time').html(remaining_time);
-                var $enqueue_time = $('<small />').addClass('time')
-                    .attr('title', 'Elapsed Time').html(enqueue_time);
-
-                var $right = $('<div />')
-                    .addClass('col-xs-4 text-right')
-                    .append($remaining_time, $('<br />'), $enqueue_time);
-
-                var $header = $('<div />')
-                    .addClass('row')
-                    .append($left, $right);
-                return $header;
-            },
-
-            change_list: function(jobs) {
-                var $list = $('<ul />')
-                    .addClass('list-group zuul-patchset-body');
-
-                $.each(jobs, function (i, job) {
-                    var $item = $('<li />')
-                        .addClass('list-group-item')
-                        .addClass('zuul-change-job')
-                        .append(zuul.format.job(job));
-                    $list.append($item);
-                });
-
-                return $list;
-            },
-
-            change_panel: function (change) {
-                var $header = $('<div />')
-                    .addClass('panel-heading zuul-patchset-header')
-                    .append(zuul.format.change_header(change));
-
-                var panel_id = change.id ? change.id.replace(',', '_')
-                                         : change.project.replace('/', '_') +
-                                           '-' + change.enqueue_time;
-                var $panel = $('<div />')
-                    .attr('id', panel_id)
-                    .addClass('panel panel-default zuul-change')
-                    .append($header)
-                    .append(zuul.format.change_list(change.jobs));
-
-                $header.click(zuul.toggle_patchset);
-                return $panel;
-            },
-
-            change_status_icon: function(change) {
-                var icon_name = 'green.png';
-                var icon_title = 'Succeeding';
-
-                if (change.active !== true) {
-                    // Grey icon
-                    icon_name = 'grey.png';
-                    icon_title = 'Waiting until closer to head of queue to' +
-                        ' start jobs';
-                }
-                else if (change.failing_reasons &&
-                         change.failing_reasons.length > 0) {
-                    var reason = change.failing_reasons.join(', ');
-                    icon_title = 'Failing because ' + reason;
-                    if (reason.match(/merge conflict/)) {
-                        // Black icon
-                        icon_name = 'black.png';
-                    }
-                    else {
-                        // Red icon
-                        icon_name = 'red.png';
-                    }
-                }
-
-                var $icon = $('<img />')
-                    .attr('src', 'images/' + icon_name)
-                    .attr('title', icon_title)
-                    .css('margin-top', '-6px');
-
-                return $icon;
-            },
-
-            change_with_status_tree: function(change, change_queue) {
-                var $change_row = $('<tr />');
-
-                for (var i = 0; i < change_queue._tree_columns; i++) {
-                    var $tree_cell  = $('<td />')
-                        .css('height', '100%')
-                        .css('padding', '0 0 10px 0')
-                        .css('margin', '0')
-                        .css('width', '16px')
-                        .css('min-width', '16px')
-                        .css('overflow', 'hidden')
-                        .css('vertical-align', 'top');
-
-                    if (i < change._tree.length && change._tree[i] !== null) {
-                        $tree_cell.css('background-image',
-                                       'url(\'images/line.png\')')
-                            .css('background-repeat', 'repeat-y');
-                    }
-
-                    if (i === change._tree_index) {
-                        $tree_cell.append(
-                            zuul.format.change_status_icon(change));
-                    }
-                    if (change._tree_branches.indexOf(i) !== -1) {
-                        var $image = $('<img />')
-                            .css('vertical-align', 'baseline');
-                        if (change._tree_branches.indexOf(i) ===
-                            change._tree_branches.length - 1) {
-                            // Angle line
-                            $image.attr('src', 'images/line-angle.png');
-                        }
-                        else {
-                            // T line
-                            $image.attr('src', 'images/line-t.png');
-                        }
-                        $tree_cell.append($image);
-                    }
-                    $change_row.append($tree_cell);
-                }
-
-                var change_width = 360 - 16*change_queue._tree_columns;
-                var $change_column = $('<td />')
-                    .css('width', change_width + 'px')
-                    .addClass('zuul-change-cell')
-                    .append(zuul.format.change_panel(change));
-
-                $change_row.append($change_column);
-
-                var $change_table = $('<table />')
-                    .addClass('zuul-change-box')
-                    .css('-moz-box-sizing', 'content-box')
-                    .css('box-sizing', 'content-box')
-                    .append($change_row);
-
-                return $change_table;
-            },
-
-            pipeline_sparkline: function(pipeline_name) {
-                if (zuul.graphite_url !== '') {
-                    var $sparkline = $('<img />')
-                        .addClass('pull-right')
-                        .attr('src', zuul.get_sparkline_url(pipeline_name));
-                    return $sparkline;
-                }
-                return false;
-            },
-
-            pipeline_header: function(pipeline, count) {
-                // Format the pipeline name, sparkline and description
-                var $header_div = $('<div />')
-                    .addClass('zuul-pipeline-header');
-
-                var $heading = $('<h3 />')
-                    .css('vertical-align', 'middle')
-                    .text(pipeline.name)
-                    .append(
-                        $('<span />')
-                            .addClass('badge pull-right')
-                            .css('vertical-align', 'middle')
-                            .css('margin-top', '0.5em')
-                            .text(count)
-                    )
-                    .append(zuul.format.pipeline_sparkline(pipeline.name));
-
-                $header_div.append($heading);
-
-                if (typeof pipeline.description === 'string') {
-                    $header_div.append(
-                        $('<p />').append(
-                            $('<small />').text(pipeline.description)
-                        )
-                    );
-                }
-                return $header_div;
-            },
-
-            pipeline: function (pipeline) {
-                var count = zuul.create_tree(pipeline);
-                var $html = $('<div />')
-                    .addClass('zuul-pipeline col-md-4')
-                    .append(zuul.format.pipeline_header(pipeline, count));
-
-                $.each(pipeline.change_queues,
-                       function (queue_i, change_queue) {
-                    $.each(change_queue.heads, function (head_i, changes) {
-                        if (pipeline.change_queues.length > 1 &&
-                            head_i === 0) {
-                            var name = change_queue.name;
-                            var short_name = name;
-                            if (short_name.length > 32) {
-                                short_name = short_name.substr(0, 32) + '...';
-                            }
-                            $html.append(
-                                $('<p />')
-                                    .text('Queue: ')
-                                    .append(
-                                        $('<abbr />')
-                                            .attr('title', name)
-                                            .text(short_name)
-                                    )
-                            );
-                        }
-
-                        $.each(changes, function (change_i, change) {
-                            var $change_box =
-                                zuul.format.change_with_status_tree(
-                                    change, change_queue);
-                            $html.append($change_box);
-                            zuul.display_patchset($change_box);
-                        });
-                    });
-                });
-                return $html;
-            },
-
-            filter_form_group: function() {
-                // Update the filter form with a clear button if required
-
-                var $label = $('<label />')
-                    .addClass('control-label')
-                    .attr('for', 'filter_string')
-                    .text('Filters')
-                    .css('padding-right', '0.5em');
-
-                var $input = $('<input />')
-                    .attr('type', 'text')
-                    .attr('id', 'filter_string')
-                    .addClass('form-control')
-                    .attr('title',
-                          'project(s), pipeline(s) or review(s) comma ' +
-                          'separated')
-                    .attr('value', current_filter);
-
-                $input.change(zuul.handle_filter_change);
-
-                var $clear_icon = $('<span />')
-                    .addClass('form-control-feedback')
-                    .addClass('glyphicon glyphicon-remove-circle')
-                    .attr('id', 'filter_form_clear_box')
-                    .attr('title', 'clear filter')
-                    .css('cursor', 'pointer');
-
-                $clear_icon.click(function() {
-                    $('#filter_string').val('').change();
-                });
-
-                if (current_filter === '') {
-                    $clear_icon.hide();
-                }
-
-                var $form_group = $('<div />')
-                    .addClass('form-group has-feedback')
-                    .append($label, $input, $clear_icon);
-                return $form_group;
-            },
-
-            expand_form_group: function() {
-                var expand_by_default = (
-                    read_cookie('zuul_expand_by_default', false) === 'true');
-
-                var $checkbox = $('<input />')
-                    .attr('type', 'checkbox')
-                    .attr('id', 'expand_by_default')
-                    .prop('checked', expand_by_default)
-                    .change(zuul.handle_expand_by_default);
-
-                var $label = $('<label />')
-                    .css('padding-left', '1em')
-                    .html('Expand by default: ')
-                    .append($checkbox);
-
-                var $form_group = $('<div />')
-                    .addClass('checkbox')
-                    .append($label);
-                return $form_group;
-            },
-
-            control_form: function() {
-                // Build the filter form filling anything from cookies
-
-                var $control_form = $('<form />')
-                    .attr('role', 'form')
-                    .addClass('form-inline')
-                    .submit(zuul.handle_filter_change);
-
-                $control_form
-                    .append(zuul.format.filter_form_group())
-                    .append(zuul.format.expand_form_group());
-
-                return $control_form;
-            },
-        },
-
-        emit: function () {
-            $jq.trigger.apply($jq, arguments);
-            return this;
-        },
-        on: function () {
-            $jq.on.apply($jq, arguments);
-            return this;
-        },
-        one: function () {
-            $jq.one.apply($jq, arguments);
-            return this;
-        },
-
-        toggle_patchset: function(e) {
-            // Toggle showing/hiding the patchset when the header is clicked
-            // Grab the patchset panel
-            var $panel = $(e.target).parents('.zuul-change');
-            var $body = $panel.children('.zuul-patchset-body');
-            $body.toggle(200);
-            var collapsed_index = zuul.collapsed_exceptions.indexOf(
-                $panel.attr('id'));
-            if (collapsed_index === -1 ) {
-                // Currently not an exception, add it to list
-                zuul.collapsed_exceptions.push($panel.attr('id'));
-            }
-            else {
-                // Currently an except, remove from exceptions
-                zuul.collapsed_exceptions.splice(collapsed_index, 1);
-            }
-        },
-
-        display_patchset: function($change_box, animate) {
-            // Determine if to show or hide the patchset and/or the results
-            // when loaded
-
-            // See if we should hide the body/results
-            var $panel = $change_box.find('.zuul-change');
-            var panel_change = $panel.attr('id');
-            var $body = $panel.children('.zuul-patchset-body');
-            var expand_by_default = $('#expand_by_default').prop('checked');
-
-            var collapsed_index = zuul.collapsed_exceptions.indexOf(panel_change);
-
-            if (expand_by_default && collapsed_index === -1 ||
-                !expand_by_default && collapsed_index !== -1) {
-                // Expand by default, or is an exception
-                $body.show(animate);
-            }
-            else {
-                $body.hide(animate);
-            }
-
-            // Check if we should hide the whole panel
-            var panel_project = $panel.find('.change_project').text()
-                .toLowerCase();
-
-
-            var panel_pipeline = $change_box
-                .parents('.zuul-pipeline')
-                .find('.zuul-pipeline-header > h3')
-                .html()
-                .toLowerCase();
-
-            if (current_filter !== '') {
-                var show_panel = false;
-                var filter = current_filter.trim().split(/[\s,]+/);
-                $.each(filter, function(index, f_val) {
-                    if (f_val !== '') {
-                        f_val = f_val.toLowerCase();
-                        if (panel_project.indexOf(f_val) !== -1 ||
-                            panel_pipeline.indexOf(f_val) !== -1 ||
-                            panel_change.indexOf(f_val) !== -1) {
-                            show_panel = true;
-                        }
-                    }
-                });
-                if (show_panel === true) {
-                    $change_box.show(animate);
-                }
-                else {
-                    $change_box.hide(animate);
-                }
-            }
-            else {
-                $change_box.show(animate);
-            }
-        },
-
-        handle_filter_change: function() {
-            // Update the filter and save it to a cookie
-            current_filter = $('#filter_string').val();
-            set_cookie('zuul_filter_string', current_filter);
-            if (current_filter === '') {
-                $('#filter_form_clear_box').hide();
-            }
-            else {
-                $('#filter_form_clear_box').show();
-            }
-
-            $('.zuul-change-box').each(function(index, obj) {
-                var $change_box = $(obj);
-                zuul.display_patchset($change_box, 200);
-            });
-            return false;
-        },
-
-        handle_expand_by_default: function(e) {
-            // Handle toggling expand by default
-            set_cookie('zuul_expand_by_default', e.target.checked);
-            zuul.collapsed_exceptions = [];
-            $('.zuul-change-box').each(function(index, obj) {
-                var $change_box = $(obj);
-                zuul.display_patchset($change_box, 200);
-            });
-        },
-
-        create_tree: function(pipeline) {
-            var count = 0;
-            var pipeline_max_tree_columns = 1;
-            $.each(pipeline.change_queues, function(change_queue_i,
-                                                       change_queue) {
-                var tree = [];
-                var max_tree_columns = 1;
-                var changes = [];
-                var last_tree_length = 0;
-                $.each(change_queue.heads, function(head_i, head) {
-                    $.each(head, function(change_i, change) {
-                        changes[change.id] = change;
-                        change._tree_position = change_i;
-                    });
-                });
-                $.each(change_queue.heads, function(head_i, head) {
-                    $.each(head, function(change_i, change) {
-                        count += 1;
-                        var idx = tree.indexOf(change.id);
-                        if (idx > -1) {
-                            change._tree_index = idx;
-                            // remove...
-                            tree[idx] = null;
-                            while (tree[tree.length - 1] === null) {
-                                tree.pop();
-                            }
-                        } else {
-                            change._tree_index = 0;
-                        }
-                        change._tree_branches = [];
-                        change._tree = [];
-                        if (typeof(change.items_behind) === 'undefined') {
-                            change.items_behind = [];
-                        }
-                        change.items_behind.sort(function(a, b) {
-                            return (changes[b]._tree_position -
-                                    changes[a]._tree_position);
-                        });
-                        $.each(change.items_behind, function(i, id) {
-                            tree.push(id);
-                            if (tree.length>last_tree_length &&
-                                last_tree_length > 0) {
-                                change._tree_branches.push(
-                                    tree.length - 1);
-                            }
-                        });
-                        if (tree.length > max_tree_columns) {
-                            max_tree_columns = tree.length;
-                        }
-                        if (tree.length > pipeline_max_tree_columns) {
-                            pipeline_max_tree_columns = tree.length;
-                        }
-                        change._tree = tree.slice(0);  // make a copy
-                        last_tree_length = tree.length;
-                    });
-                });
-                change_queue._tree_columns = max_tree_columns;
-            });
-            pipeline._tree_columns = pipeline_max_tree_columns;
-            return count;
-        },
-
-        get_sparkline_url: function(pipeline_name) {
-            if (zuul.graphite_url !== '') {
-                if (!(pipeline_name in zuul_sparkline_urls)) {
-                    zuul_sparkline_urls[pipeline_name] = $.fn.graphite.geturl({
-                        url: zuul.graphite_url,
-                        from: "-8hours",
-                        width: 100,
-                        height: 26,
-                        margin: 0,
-                        hideLegend: true,
-                        hideAxes: true,
-                        hideGrid: true,
-                        target: [
-                            "color(stats.gauges.zuul.pipeline."+pipeline_name+".current_changes, '6b8182')"
-                        ]
-                    });
-                }
-                return zuul_sparkline_urls[pipeline_name];
-            }
-            return false;
-        },
-    };
-
-    current_filter = read_cookie('zuul_filter_string', current_filter);
-
-    $jq = $(zuul);
-
-    $jq.on('update-start', function () {
-        $container.addClass('zuul-container-loading');
-        $indicator.addClass('zuul-spinner-on');
-    });
-
-    $jq.on('update-end', function () {
-        $container.removeClass('zuul-container-loading');
-        setTimeout(function () {
-            $indicator.removeClass('zuul-spinner-on');
-        }, 500);
-    });
-
-    $jq.one('update-end', function () {
-        // Do this asynchronous so that if the first update adds a message, it
-        // will not animate while we fade in the content. Instead it simply
-        // appears with the rest of the content.
-        setTimeout(function () {
-            // Fade in the content
-            $container.addClass('zuul-container-ready');
-        });
-    });
-
-    $(function ($) {
-        $msg = $('<div />').addClass('alert').hide();
-        $indicator = $('<button class="btn pull-right zuul-spinner">' +
-                       'updating ' +
-                       '<span class="glyphicon glyphicon-refresh"></span>' +
-                       '</button>');
-        $queueInfo = $('<p>Queue lengths: <span>0</span> events, ' +
-                       '<span>0</span> results.</p>');
-        $queueEventsNum = $queueInfo.find('span').eq(0);
-        $queueResultsNum = $queueEventsNum.next();
-
-        var $control_form = zuul.format.control_form();
-
-        $pipelines = $('<div class="row"></div>');
-        var $zuulVersion = $('<p>Zuul version: <span id="zuul-version-span">' +
-                         '</span></p>');
-        var $lastReconf = $('<p>Last reconfigured: ' +
-                        '<span id="last-reconfigured-span"></span></p>');
-
-        $container = $('#zuul-container').append($msg, $indicator,
-                                                 $queueInfo, $control_form,
-                                                 $pipelines, $zuulVersion,
-                                                 $lastReconf);
-
-        //zuul.graphite_url = 'http://graphite.openstack.org/render/'
-        zuul.schedule();
-
-        $(document).on({
-            'show.visibility': function () {
-                zuul.enabled = true;
-                zuul.update();
-            },
-            'hide.visibility': function () {
-                zuul.enabled = false;
-            }
-        });
-    });
-}(jQuery));
diff --git a/etc/status/public_html/index.html b/etc/status/public_html/index.html
index bea1a79..d77470b 100644
--- a/etc/status/public_html/index.html
+++ b/etc/status/public_html/index.html
@@ -24,15 +24,17 @@
     <link rel="stylesheet" href="styles/zuul.css" />
 </head>
 <body>
-    <div class="container">
-        <h1>Zuul Status</h1>
-        <p>Real-time status monitor of Zuul, the pipeline manager between Gerrit and Workers.</p>
 
-        <div class="zuul-container" id="zuul-container"></div>
-    </div>
+    <div id="zuul_container"></div>
+
     <script src="jquery.min.js"></script>
     <script src="jquery-visibility.min.js"></script>
     <script src="jquery.graphite.js"></script>
-    <script src="app.js"></script>
+    <script src="jquery.zuul.js"></script>
+    <script src="zuul.app.js"></script>
+    <script>
+        zuul_build_dom(jQuery, '#zuul_container');
+        zuul_start(jQuery);
+    </script>
 </body>
 </html>
diff --git a/etc/status/public_html/jquery.zuul.js b/etc/status/public_html/jquery.zuul.js
new file mode 100644
index 0000000..9a3c1fa
--- /dev/null
+++ b/etc/status/public_html/jquery.zuul.js
@@ -0,0 +1,877 @@
+// jquery plugin for Zuul status page
+//
+// Copyright 2012 OpenStack Foundation
+// Copyright 2013 Timo Tijhof
+// Copyright 2013 Wikimedia Foundation
+// Copyright 2014 Rackspace Australia
+//
+// Licensed under the Apache License, Version 2.0 (the "License"); you may
+// not use this file except in compliance with the License. You may obtain
+// a copy of the License at
+//
+//      http://www.apache.org/licenses/LICENSE-2.0
+//
+// Unless required by applicable law or agreed to in writing, software
+// distributed under the License is distributed on an "AS IS" BASIS, WITHOUT
+// WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. See the
+// License for the specific language governing permissions and limitations
+// under the License.
+'use strict';
+
+(function ($) {
+    function set_cookie(name, value) {
+        document.cookie = name + '=' + value + '; path=/';
+    }
+
+    function read_cookie(name, default_value) {
+        var nameEQ = name + '=';
+        var ca = document.cookie.split(';');
+        for(var i=0;i < ca.length;i++) {
+            var c = ca[i];
+            while (c.charAt(0) === ' ') {
+                c = c.substring(1, c.length);
+            }
+            if (c.indexOf(nameEQ) === 0) {
+                return c.substring(nameEQ.length, c.length);
+            }
+        }
+        return default_value;
+    }
+
+    $.zuul = function(options) {
+        options = $.extend({
+            'enabled': true,
+            'graphite_url': '',
+            'source': 'status.json',
+            'msg_id': '#zuul_msg',
+            'pipelines_id': '#zuul_pipelines',
+            'queue_events_num': '#zuul_queue_events_num',
+            'queue_results_num': '#zuul_queue_results_num',
+        }, options);
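+
+        // Example call (illustrative; the values shown are simply the
+        // defaults listed above, so a caller only needs to override what
+        // differs on their page):
+        //
+        //     $.zuul({
+        //         'source': 'status.json',
+        //         'graphite_url': '',
+        //         'msg_id': '#zuul_msg',
+        //         'pipelines_id': '#zuul_pipelines'
+        //     });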
+
+        var collapsed_exceptions = [];
+        var current_filter = read_cookie('zuul_filter_string', '');
+        var $jq;
+
+        var xhr,
+            zuul_graph_update_count = 0,
+            zuul_sparkline_urls = {};
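+        // zuul_sparkline_urls caches the generated graphite image URL for
+        // each pipeline so it is only built once per pipeline name.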
+
+        function get_sparkline_url(pipeline_name) {
+            if (options.graphite_url !== '') {
+                if (!(pipeline_name in zuul_sparkline_urls)) {
+                    zuul_sparkline_urls[pipeline_name] = $.fn.graphite
+                        .geturl({
+                        url: options.graphite_url,
+                        from: "-8hours",
+                        width: 100,
+                        height: 26,
+                        margin: 0,
+                        hideLegend: true,
+                        hideAxes: true,
+                        hideGrid: true,
+                        target: [
+                            "color(stats.gauges.zuul.pipeline." + pipeline_name
+                            + ".current_changes, '6b8182')"
+                        ]
+                    });
+                }
+                return zuul_sparkline_urls[pipeline_name];
+            }
+            return false;
+        }
+
+        var format = {
+            job: function(job) {
+                var $job_line = $('<span />');
+
+                if (job.url !== null) {
+                    $job_line.append(
+                        $('<a />')
+                            .addClass('zuul-job-name')
+                            .attr('href', job.url)
+                            .text(job.name)
+                    );
+                }
+                else {
+                    $job_line.append(
+                        $('<span />')
+                            .addClass('zuul-job-name')
+                            .text(job.name)
+                    );
+                }
+
+                $job_line.append(this.job_status(job));
+
+                if (job.voting === false) {
+                    $job_line.append(
+                        $(' <small />')
+                            .addClass('zuul-non-voting-desc')
+                            .text(' (non-voting)')
+                    );
+                }
+
+                return $job_line;
+            },
+
+            job_status: function(job) {
+                var result = job.result ? job.result.toLowerCase() : null;
+                if (result === null) {
+                    result = job.url ? 'in progress' : 'queued';
+                }
+
+                if (result === 'in progress') {
+                    return this.job_progress_bar(job.elapsed_time,
+                                                  job.remaining_time);
+                }
+                else {
+                    return this.status_label(result);
+                }
+            },
+
+            status_label: function(result) {
+                var $status = $('<span />');
+                $status.addClass('zuul-job-result label');
+
+                switch (result) {
+                    case 'success':
+                        $status.addClass('label-success');
+                        break;
+                    case 'failure':
+                        $status.addClass('label-danger');
+                        break;
+                    case 'unstable':
+                        $status.addClass('label-warning');
+                        break;
+                    case 'in progress':
+                    case 'queued':
+                    case 'lost':
+                        $status.addClass('label-default');
+                        break;
+                }
+                $status.text(result);
+                return $status;
+            },
+
+            job_progress_bar: function(elapsed_time, remaining_time) {
+                var progress_percent = 100 * (elapsed_time / (elapsed_time +
+                                                              remaining_time));
+                var $bar_inner = $('<div />')
+                    .addClass('progress-bar')
+                    .attr('role', 'progressbar')
+                    .attr('aria-valuenow', progress_percent)
+                    .attr('aria-valuemin', '0')
+                    .attr('aria-valuemax', '100')
+                    .css('width', progress_percent + '%');
+
+                var $bar_outter = $('<div />')
+                    .addClass('progress zuul-job-result')
+                    .append($bar_inner);
+
+                return $bar_outter;
+            },
+
+            enqueue_time: function(ms) {
+                // Special format case for enqueue time to add style
+                var hours = 60 * 60 * 1000;
+                var now = Date.now();
+                var delta = now - ms;
+                var status = 'text-success';
+                var text = this.time(delta, true);
+                if (delta > (4 * hours)) {
+                    status = 'text-danger';
+                } else if (delta > (2 * hours)) {
+                    status = 'text-warning';
+                }
+                return '<span class="' + status + '">' + text + '</span>';
+            },
+
+            time: function(ms, words) {
+                if (typeof(words) === 'undefined') {
+                    words = false;
+                }
+                var seconds = (+ms)/1000;
+                var minutes = Math.floor(seconds/60);
+                var hours = Math.floor(minutes/60);
+                seconds = Math.floor(seconds % 60);
+                minutes = Math.floor(minutes % 60);
+                var r = '';
+                if (words) {
+                    if (hours) {
+                        r += hours;
+                        r += ' hr ';
+                    }
+                    r += minutes + ' min';
+                } else {
+                    if (hours < 10) {
+                        r += '0';
+                    }
+                    r += hours + ':';
+                    if (minutes < 10) {
+                        r += '0';
+                    }
+                    r += minutes + ':';
+                    if (seconds < 10) {
+                        r += '0';
+                    }
+                    r += seconds;
+                }
+                return r;
+            },
+
+            change_total_progress_bar: function(change) {
+                var job_percent = Math.floor(100 / change.jobs.length);
+                var $bar_outter = $('<div />')
+                    .addClass('progress zuul-change-total-result');
+
+                $.each(change.jobs, function (i, job) {
+                    var result = job.result ? job.result.toLowerCase() : null;
+                    if (result === null) {
+                        result = job.url ? 'in progress' : 'queued';
+                    }
+
+                    if (result !== 'queued') {
+                        var $bar_inner = $('<div />')
+                            .addClass('progress-bar');
+
+                        switch (result) {
+                            case 'success':
+                                $bar_inner.addClass('progress-bar-success');
+                                break;
+                            case 'lost':
+                            case 'failure':
+                                $bar_inner.addClass('progress-bar-danger');
+                                break;
+                            case 'unstable':
+                                $bar_inner.addClass('progress-bar-warning');
+                                break;
+                            case 'in progress':
+                            case 'queued':
+                                break;
+                        }
+                        $bar_inner.attr('title', job.name)
+                            .css('width', job_percent + '%');
+                        $bar_outter.append($bar_inner);
+                    }
+                });
+                return $bar_outter;
+            },
+
+            change_header: function(change) {
+                var change_id = change.id || 'NA';
+                if (change_id.length === 40) {
+                    change_id = change_id.substr(0, 7);
+                }
+
+                var $change_link = $('<small />');
+                if (change.url !== null) {
+                    $change_link.append(
+                        $('<a />').attr('href', change.url).text(change.id)
+                    );
+                }
+                else {
+                    $change_link.text(change_id);
+                }
+
+                var $change_progress_row_left = $('<div />')
+                    .addClass('col-xs-3')
+                    .append($change_link);
+                var $change_progress_row_right = $('<div />')
+                    .addClass('col-xs-9')
+                    .append(this.change_total_progress_bar(change));
+
+                var $change_progress_row = $('<div />')
+                    .addClass('row')
+                    .append($change_progress_row_left)
+                    .append($change_progress_row_right);
+
+                var $project_span = $('<span />')
+                    .addClass('change_project')
+                    .text(change.project);
+
+                var $left = $('<div />')
+                    .addClass('col-xs-8')
+                    .append($project_span, $change_progress_row);
+
+                var remaining_time = this.time(
+                        change.remaining_time, true);
+                var enqueue_time = this.enqueue_time(
+                        change.enqueue_time);
+                var $remaining_time = $('<small />').addClass('time')
+                    .attr('title', 'Remaining Time').html(remaining_time);
+                var $enqueue_time = $('<small />').addClass('time')
+                    .attr('title', 'Elapsed Time').html(enqueue_time);
+
+                var $right = $('<div />')
+                    .addClass('col-xs-4 text-right')
+                    .append($remaining_time, $('<br />'), $enqueue_time);
+
+                var $header = $('<div />')
+                    .addClass('row')
+                    .append($left, $right);
+                return $header;
+            },
+
+            change_list: function(jobs) {
+                var format = this;
+                var $list = $('<ul />')
+                    .addClass('list-group zuul-patchset-body');
+
+                $.each(jobs, function (i, job) {
+                    var $item = $('<li />')
+                        .addClass('list-group-item')
+                        .addClass('zuul-change-job')
+                        .append(format.job(job));
+                    $list.append($item);
+                });
+
+                return $list;
+            },
+
+            change_panel: function (change) {
+                var $header = $('<div />')
+                    .addClass('panel-heading zuul-patchset-header')
+                    .append(this.change_header(change));
+
+                var panel_id = change.id ? change.id.replace(',', '_')
+                                         : change.project.replace('/', '_') +
+                                           '-' + change.enqueue_time;
+                var $panel = $('<div />')
+                    .attr('id', panel_id)
+                    .addClass('panel panel-default zuul-change')
+                    .append($header)
+                    .append(this.change_list(change.jobs));
+
+                $header.click(this.toggle_patchset);
+                return $panel;
+            },
+
+            change_status_icon: function(change) {
+                var icon_name = 'green.png';
+                var icon_title = 'Succeeding';
+
+                if (change.active !== true) {
+                    // Grey icon
+                    icon_name = 'grey.png';
+                    icon_title = 'Waiting until closer to head of queue to' +
+                        ' start jobs';
+                }
+                else if (change.failing_reasons &&
+                         change.failing_reasons.length > 0) {
+                    var reason = change.failing_reasons.join(', ');
+                    icon_title = 'Failing because ' + reason;
+                    if (reason.match(/merge conflict/)) {
+                        // Black icon
+                        icon_name = 'black.png';
+                    }
+                    else {
+                        // Red icon
+                        icon_name = 'red.png';
+                    }
+                }
+
+                var $icon = $('<img />')
+                    .attr('src', 'images/' + icon_name)
+                    .attr('title', icon_title)
+                    .css('margin-top', '-6px');
+
+                return $icon;
+            },
+
+            change_with_status_tree: function(change, change_queue) {
+                var $change_row = $('<tr />');
+
+                for (var i = 0; i < change_queue._tree_columns; i++) {
+                    var $tree_cell  = $('<td />')
+                        .css('height', '100%')
+                        .css('padding', '0 0 10px 0')
+                        .css('margin', '0')
+                        .css('width', '16px')
+                        .css('min-width', '16px')
+                        .css('overflow', 'hidden')
+                        .css('vertical-align', 'top');
+
+                    if (i < change._tree.length && change._tree[i] !== null) {
+                        $tree_cell.css('background-image',
+                                       'url(\'images/line.png\')')
+                            .css('background-repeat', 'repeat-y');
+                    }
+
+                    if (i === change._tree_index) {
+                        $tree_cell.append(
+                            this.change_status_icon(change));
+                    }
+                    if (change._tree_branches.indexOf(i) !== -1) {
+                        var $image = $('<img />')
+                            .css('vertical-align', 'baseline');
+                        if (change._tree_branches.indexOf(i) ===
+                            change._tree_branches.length - 1) {
+                            // Angle line
+                            $image.attr('src', 'images/line-angle.png');
+                        }
+                        else {
+                            // T line
+                            $image.attr('src', 'images/line-t.png');
+                        }
+                        $tree_cell.append($image);
+                    }
+                    $change_row.append($tree_cell);
+                }
+
+                var change_width = 360 - 16*change_queue._tree_columns;
+                var $change_column = $('<td />')
+                    .css('width', change_width + 'px')
+                    .addClass('zuul-change-cell')
+                    .append(this.change_panel(change));
+
+                $change_row.append($change_column);
+
+                var $change_table = $('<table />')
+                    .addClass('zuul-change-box')
+                    .css('-moz-box-sizing', 'content-box')
+                    .css('box-sizing', 'content-box')
+                    .append($change_row);
+
+                return $change_table;
+            },
+
+            pipeline_sparkline: function(pipeline_name) {
+                if (options.graphite_url !== '') {
+                    var $sparkline = $('<img />')
+                        .addClass('pull-right')
+                        .attr('src', get_sparkline_url(pipeline_name));
+                    return $sparkline;
+                }
+                return false;
+            },
+
+            pipeline_header: function(pipeline, count) {
+                // Format the pipeline name, sparkline and description
+                var $header_div = $('<div />')
+                    .addClass('zuul-pipeline-header');
+
+                var $heading = $('<h3 />')
+                    .css('vertical-align', 'middle')
+                    .text(pipeline.name)
+                    .append(
+                        $('<span />')
+                            .addClass('badge pull-right')
+                            .css('vertical-align', 'middle')
+                            .css('margin-top', '0.5em')
+                            .text(count)
+                    )
+                    .append(this.pipeline_sparkline(pipeline.name));
+
+                $header_div.append($heading);
+
+                if (typeof pipeline.description === 'string') {
+                    $header_div.append(
+                        $('<p />').append(
+                            $('<small />').text(pipeline.description)
+                        )
+                    );
+                }
+                return $header_div;
+            },
+
+            pipeline: function (pipeline, count) {
+                var format = this;
+                var $html = $('<div />')
+                    .addClass('zuul-pipeline col-md-4')
+                    .append(this.pipeline_header(pipeline, count));
+
+                $.each(pipeline.change_queues,
+                       function (queue_i, change_queue) {
+                    $.each(change_queue.heads, function (head_i, changes) {
+                        if (pipeline.change_queues.length > 1 &&
+                            head_i === 0) {
+                            var name = change_queue.name;
+                            var short_name = name;
+                            if (short_name.length > 32) {
+                                short_name = short_name.substr(0, 32) + '...';
+                            }
+                            $html.append(
+                                $('<p />')
+                                    .text('Queue: ')
+                                    .append(
+                                        $('<abbr />')
+                                            .attr('title', name)
+                                            .text(short_name)
+                                    )
+                            );
+                        }
+
+                        $.each(changes, function (change_i, change) {
+                            var $change_box =
+                                format.change_with_status_tree(
+                                    change, change_queue);
+                            $html.append($change_box);
+                            format.display_patchset($change_box);
+                        });
+                    });
+                });
+                return $html;
+            },
+
+            toggle_patchset: function(e) {
+                // Toggle showing/hiding the patchset when the header is
+                // clicked.
+
+                // Grab the patchset panel
+                var $panel = $(e.target).parents('.zuul-change');
+                var $body = $panel.children('.zuul-patchset-body');
+                $body.toggle(200);
+                var collapsed_index = collapsed_exceptions.indexOf(
+                    $panel.attr('id'));
+                if (collapsed_index === -1 ) {
+                    // Currently not an exception, add it to list
+                    collapsed_exceptions.push($panel.attr('id'));
+                }
+                else {
+                    // Currently an exception; remove it from the list
+                    collapsed_exceptions.splice(collapsed_index, 1);
+                }
+            },
+
+            display_patchset: function($change_box, animate) {
+                // Determine whether to show or hide the patchset and/or the
+                // results when loaded
+
+                // See if we should hide the body/results
+                var $panel = $change_box.find('.zuul-change');
+                var panel_change = $panel.attr('id');
+                var $body = $panel.children('.zuul-patchset-body');
+                var expand_by_default = $('#expand_by_default')
+                    .prop('checked');
+
+                var collapsed_index = collapsed_exceptions
+                    .indexOf(panel_change);
+
+                if (expand_by_default && collapsed_index === -1 ||
+                    !expand_by_default && collapsed_index !== -1) {
+                    // Expand by default, or is an exception
+                    $body.show(animate);
+                }
+                else {
+                    $body.hide(animate);
+                }
+
+                // Check if we should hide the whole panel
+                var panel_project = $panel.find('.change_project').text()
+                    .toLowerCase();
+
+                var panel_pipeline = $change_box
+                    .parents('.zuul-pipeline')
+                    .find('.zuul-pipeline-header > h3')
+                    .html()
+                    .toLowerCase();
+
+                if (current_filter !== '') {
+                    var show_panel = false;
+                    var filter = current_filter.trim().split(/[\s,]+/);
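+                    // e.g. a filter of "nova, check stable" splits into
+                    // ['nova', 'check', 'stable']; a panel matching any of
+                    // these terms stays visible.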
+                    $.each(filter, function(index, f_val) {
+                        if (f_val !== '') {
+                            f_val = f_val.toLowerCase();
+                            if (panel_project.indexOf(f_val) !== -1 ||
+                                panel_pipeline.indexOf(f_val) !== -1 ||
+                                panel_change.indexOf(f_val) !== -1) {
+                                show_panel = true;
+                            }
+                        }
+                    });
+                    if (show_panel === true) {
+                        $change_box.show(animate);
+                    }
+                    else {
+                        $change_box.hide(animate);
+                    }
+                }
+                else {
+                    $change_box.show(animate);
+                }
+            },
+        };
+
+        var app = {
+            schedule: function (app) {
+                app = app || this;
+                if (!options.enabled) {
+                    setTimeout(function() {app.schedule(app);}, 5000);
+                    return;
+                }
+                app.update().complete(function () {
+                    setTimeout(function() {app.schedule(app);}, 5000);
+                });
+
+                /* Only update graphs every minute */
+                if (zuul_graph_update_count > 11) {
+                    zuul_graph_update_count = 0;
+                    app.update_sparklines();
+                }
+            },
+
+            /** @return {jQuery.Promise} */
+            update: function () {
+                // Cancel the previous update if it hasn't completed yet.
+                if (xhr) {
+                    xhr.abort();
+                }
+
+                this.emit('update-start');
+                var app = this;
+
+                var $msg = $(options.msg_id);
+                xhr = $.getJSON(options.source)
+                    .done(function (data) {
+                        if ('message' in data) {
+                            $msg.removeClass('alert-danger')
+                                .addClass('alert-info')
+                                .text(data.message)
+                                .show();
+                        } else {
+                            $msg.empty()
+                                .hide();
+                        }
+
+                        if ('zuul_version' in data) {
+                            $('#zuul-version-span').text(data.zuul_version);
+                        }
+                        if ('last_reconfigured' in data) {
+                            var last_reconfigured =
+                                new Date(data.last_reconfigured);
+                            $('#last-reconfigured-span').text(
+                                last_reconfigured.toString());
+                        }
+
+                        var $pipelines = $(options.pipelines_id);
+                        $pipelines.html('');
+                        $.each(data.pipelines, function (i, pipeline) {
+                            var count = app.create_tree(pipeline);
+                            $pipelines.append(
+                                format.pipeline(pipeline, count));
+                        });
+
+                        $(options.queue_events_num).text(
+                            data.trigger_event_queue ?
+                                data.trigger_event_queue.length : '0'
+                        );
+                        $(options.queue_results_num).text(
+                            data.result_event_queue ?
+                                data.result_event_queue.length : '0'
+                        );
+                    })
+                    .fail(function (err, jqXHR, errMsg) {
+                        $msg.text(options.source + ': ' + errMsg).show();
+                        $msg.removeClass('zuul-msg-wrap-off');
+                    })
+                    .complete(function () {
+                        xhr = undefined;
+                        app.emit('update-end');
+                    });
+
+                return xhr;
+            },
+
+            update_sparklines: function() {
+                $.each(zuul_sparkline_urls, function(name, url) {
+                    var newimg = new Image();
+                    var parts = url.split('#');
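+                    // Appending a fresh fragment forces the browser to
+                    // re-request the image rather than serve it from cache.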
+                    newimg.src = parts[0] + '#' + new Date().getTime();
+                    $(newimg).load(function (x) {
+                        zuul_sparkline_urls[name] = newimg.src;
+                    });
+                });
+            },
+
+            emit: function () {
+                $jq.trigger.apply($jq, arguments);
+                return this;
+            },
+            on: function () {
+                $jq.on.apply($jq, arguments);
+                return this;
+            },
+            one: function () {
+                $jq.one.apply($jq, arguments);
+                return this;
+            },
+
+            control_form: function() {
+                // Build the filter form, filling in any values saved in cookies
+
+                var $control_form = $('<form />')
+                    .attr('role', 'form')
+                    .addClass('form-inline')
+                    .submit(this.handle_filter_change);
+
+                $control_form
+                    .append(this.filter_form_group())
+                    .append(this.expand_form_group());
+
+                return $control_form;
+            },
+
+            filter_form_group: function() {
+                // Update the filter form with a clear button if required
+
+                var $label = $('<label />')
+                    .addClass('control-label')
+                    .attr('for', 'filter_string')
+                    .text('Filters')
+                    .css('padding-right', '0.5em');
+
+                var $input = $('<input />')
+                    .attr('type', 'text')
+                    .attr('id', 'filter_string')
+                    .addClass('form-control')
+                    .attr('title',
+                          'project(s), pipeline(s) or review(s) comma ' +
+                          'separated')
+                    .attr('value', current_filter);
+
+                $input.change(this.handle_filter_change);
+
+                var $clear_icon = $('<span />')
+                    .addClass('form-control-feedback')
+                    .addClass('glyphicon glyphicon-remove-circle')
+                    .attr('id', 'filter_form_clear_box')
+                    .attr('title', 'clear filter')
+                    .css('cursor', 'pointer');
+
+                $clear_icon.click(function() {
+                    $('#filter_string').val('').change();
+                });
+
+                if (current_filter === '') {
+                    $clear_icon.hide();
+                }
+
+                var $form_group = $('<div />')
+                    .addClass('form-group has-feedback')
+                    .append($label, $input, $clear_icon);
+                return $form_group;
+            },
+
+            expand_form_group: function() {
+                var expand_by_default = (
+                    read_cookie('zuul_expand_by_default', false) === 'true');
+
+                var $checkbox = $('<input />')
+                    .attr('type', 'checkbox')
+                    .attr('id', 'expand_by_default')
+                    .prop('checked', expand_by_default)
+                    .change(this.handle_expand_by_default);
+
+                var $label = $('<label />')
+                    .css('padding-left', '1em')
+                    .html('Expand by default: ')
+                    .append($checkbox);
+
+                var $form_group = $('<div />')
+                    .addClass('checkbox')
+                    .append($label);
+                return $form_group;
+            },
+
+            handle_filter_change: function() {
+                // Update the filter and save it to a cookie
+                current_filter = $('#filter_string').val();
+                set_cookie('zuul_filter_string', current_filter);
+                if (current_filter === '') {
+                    $('#filter_form_clear_box').hide();
+                }
+                else {
+                    $('#filter_form_clear_box').show();
+                }
+
+                $('.zuul-change-box').each(function(index, obj) {
+                    var $change_box = $(obj);
+                    format.display_patchset($change_box, 200);
+                });
+                return false;
+            },
+
+            handle_expand_by_default: function(e) {
+                // Handle toggling expand by default
+                set_cookie('zuul_expand_by_default', e.target.checked);
+                collapsed_exceptions = [];
+                $('.zuul-change-box').each(function(index, obj) {
+                    var $change_box = $(obj);
+                    format.display_patchset($change_box, 200);
+                });
+            },
+
+            create_tree: function(pipeline) {
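+                // Walk every change queue and annotate each change with the
+                // column layout used to draw its dependency tree
+                // (_tree, _tree_index, _tree_branches); returns the total
+                // number of changes in the pipeline.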
+                var count = 0;
+                var pipeline_max_tree_columns = 1;
+                $.each(pipeline.change_queues, function(change_queue_i,
+                                                           change_queue) {
+                    var tree = [];
+                    var max_tree_columns = 1;
+                    var changes = [];
+                    var last_tree_length = 0;
+                    $.each(change_queue.heads, function(head_i, head) {
+                        $.each(head, function(change_i, change) {
+                            changes[change.id] = change;
+                            change._tree_position = change_i;
+                        });
+                    });
+                    $.each(change_queue.heads, function(head_i, head) {
+                        $.each(head, function(change_i, change) {
+                            count += 1;
+                            var idx = tree.indexOf(change.id);
+                            if (idx > -1) {
+                                change._tree_index = idx;
+                                // remove this change's slot and trim
+                                // trailing empty columns
+                                tree[idx] = null;
+                                while (tree[tree.length - 1] === null) {
+                                    tree.pop();
+                                }
+                            } else {
+                                change._tree_index = 0;
+                            }
+                            change._tree_branches = [];
+                            change._tree = [];
+                            if (typeof(change.items_behind) === 'undefined') {
+                                change.items_behind = [];
+                            }
+                            change.items_behind.sort(function(a, b) {
+                                return (changes[b]._tree_position -
+                                        changes[a]._tree_position);
+                            });
+                            $.each(change.items_behind, function(i, id) {
+                                tree.push(id);
+                                if (tree.length>last_tree_length &&
+                                    last_tree_length > 0) {
+                                    change._tree_branches.push(
+                                        tree.length - 1);
+                                }
+                            });
+                            if (tree.length > max_tree_columns) {
+                                max_tree_columns = tree.length;
+                            }
+                            if (tree.length > pipeline_max_tree_columns) {
+                                pipeline_max_tree_columns = tree.length;
+                            }
+                            change._tree = tree.slice(0);  // make a copy
+                            last_tree_length = tree.length;
+                        });
+                    });
+                    change_queue._tree_columns = max_tree_columns;
+                });
+                pipeline._tree_columns = pipeline_max_tree_columns;
+                return count;
+            },
+        };
+
+        $jq = $(app);
+        return {
+            options: options,
+            format: format,
+            app: app,
+            jq: $jq
+        };
+    }
+}(jQuery));
diff --git a/etc/status/public_html/zuul.app.js b/etc/status/public_html/zuul.app.js
new file mode 100644
index 0000000..6f87a92
--- /dev/null
+++ b/etc/status/public_html/zuul.app.js
@@ -0,0 +1,97 @@
+// Client script for Zuul status page
+//
+// Copyright 2013 OpenStack Foundation
+// Copyright 2013 Timo Tijhof
+// Copyright 2013 Wikimedia Foundation
+// Copyright 2014 Rackspace Australia
+//
+// Licensed under the Apache License, Version 2.0 (the "License"); you may
+// not use this file except in compliance with the License. You may obtain
+// a copy of the License at
+//
+//      http://www.apache.org/licenses/LICENSE-2.0
+//
+// Unless required by applicable law or agreed to in writing, software
+// distributed under the License is distributed on an "AS IS" BASIS, WITHOUT
+// WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. See the
+// License for the specific language governing permissions and limitations
+// under the License.
+
+function zuul_build_dom($, container) {
+    // Build a default-looking DOM
+    var default_layout = '<div class="container">'
+        + '<h1>Zuul Status</h1>'
+        + '<p>Real-time status monitor of Zuul, the pipeline manager between Gerrit and Workers.</p>'
+        + '<div class="zuul-container" id="zuul-container">'
+        + '<div style="display: none;" class="alert" id="zuul_msg"></div>'
+        + '<button id="zuul-spinner" class="btn pull-right zuul-spinner">updating <span class="glyphicon glyphicon-refresh"></span></button>'
+        + '<p>Queue lengths: <span id="zuul_queue_events_num">0</span> events, <span id="zuul_queue_results_num">0</span> results.</p>'
+        + '<div id="zuul_controls"></div>'
+        + '<div id="zuul_pipelines" class="row"></div>'
+        + '<p>Zuul version: <span id="zuul-version-span"></span></p>'
+        + '<p>Last reconfigured: <span id="last-reconfigured-span"></span></p>'
+        + '</div></div>';
+
+    $(function ($) {
+        // DOM ready
+        var $container = $(container);
+        $container.html(default_layout);
+    });
+}
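+
+// Illustrative usage (the container selector is site-specific):
+//     zuul_build_dom(jQuery, '#zuul_container');
+//     zuul_start(jQuery);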
+
+function zuul_start($) {
+    // Start the zuul app (expects the default DOM)
+
+    var $container, $indicator;
+    var demo = location.search.match(/[?&]demo=([^?&]*)/),
+        source_url = location.search.match(/[?&]source_url=([^?&]*)/),
+        source = demo ? './status-' + (demo[1] || 'basic') + '.json-sample' :
+            'status.json';
+    source = source_url ? source_url[1] : source;
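+    // e.g. "?demo=openstack" loads "./status-openstack.json-sample", while
+    // "?source_url=<url>" points the page at an arbitrary status feed.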
+
+    var zuul = $.zuul({
+        source: source,
+        //graphite_url: 'http://graphite.openstack.org/render/'
+    });
+
+    zuul.jq.on('update-start', function () {
+        $container.addClass('zuul-container-loading');
+        $indicator.addClass('zuul-spinner-on');
+    });
+
+    zuul.jq.on('update-end', function () {
+        $container.removeClass('zuul-container-loading');
+        setTimeout(function () {
+            $indicator.removeClass('zuul-spinner-on');
+        }, 500);
+    });
+
+    zuul.jq.one('update-end', function () {
+        // Do this asynchronously so that if the first update adds a
+        // message, it will not animate while we fade in the content.
+        // Instead it simply appears with the rest of the content.
+        setTimeout(function () {
+            // Fade in the content
+            $container.addClass('zuul-container-ready');
+        });
+    });
+
+    $(function ($) {
+        // DOM ready
+        $container = $('#zuul-container');
+        $indicator = $('#zuul-spinner');
+        $('#zuul_controls').append(zuul.app.control_form());
+
+        zuul.app.schedule();
+
+        $(document).on({
+            'show.visibility': function () {
+                zuul.options.enabled = true;
+                zuul.app.update();
+            },
+            'hide.visibility': function () {
+                zuul.options.enabled = false;
+            }
+        });
+    });
+}
\ No newline at end of file
diff --git a/tests/base.py b/tests/base.py
new file mode 100755
index 0000000..01b42f2
--- /dev/null
+++ b/tests/base.py
@@ -0,0 +1,1204 @@
+#!/usr/bin/env python
+
+# Copyright 2012 Hewlett-Packard Development Company, L.P.
+#
+# Licensed under the Apache License, Version 2.0 (the "License"); you may
+# not use this file except in compliance with the License. You may obtain
+# a copy of the License at
+#
+#      http://www.apache.org/licenses/LICENSE-2.0
+#
+# Unless required by applicable law or agreed to in writing, software
+# distributed under the License is distributed on an "AS IS" BASIS, WITHOUT
+# WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. See the
+# License for the specific language governing permissions and limitations
+# under the License.
+
+import ConfigParser
+import gc
+import hashlib
+import json
+import logging
+import os
+import pprint
+import Queue
+import random
+import re
+import select
+import shutil
+import socket
+import string
+import subprocess
+import swiftclient
+import threading
+import time
+import urllib2
+
+import git
+import gear
+import fixtures
+import six.moves.urllib.parse as urlparse
+import statsd
+import testtools
+
+import zuul.scheduler
+import zuul.webapp
+import zuul.rpclistener
+import zuul.launcher.gearman
+import zuul.lib.swift
+import zuul.merger.server
+import zuul.merger.client
+import zuul.reporter.gerrit
+import zuul.reporter.smtp
+import zuul.trigger.gerrit
+import zuul.trigger.timer
+
+FIXTURE_DIR = os.path.join(os.path.dirname(__file__),
+                           'fixtures')
+
+logging.basicConfig(level=logging.DEBUG,
+                    format='%(asctime)s %(name)-32s '
+                    '%(levelname)-8s %(message)s')
+
+
+def repack_repo(path):
+    cmd = ['git', '--git-dir=%s/.git' % path, 'repack', '-afd']
+    output = subprocess.Popen(cmd, close_fds=True,
+                              stdout=subprocess.PIPE,
+                              stderr=subprocess.PIPE)
+    out = output.communicate()
+    if output.returncode:
+        raise Exception("git repack returned %d" % output.returncode)
+    return out
+
+
+def random_sha1():
+    return hashlib.sha1(str(random.random())).hexdigest()
+
+
+class ChangeReference(git.Reference):
+    _common_path_default = "refs/changes"
+    _points_to_commits_only = True
+
+
+class FakeChange(object):
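+    # Review categories map to (description, minimum value, maximum value),
+    # mirroring the value range of the corresponding Gerrit label.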
+    categories = {'APRV': ('Approved', -1, 1),
+                  'CRVW': ('Code-Review', -2, 2),
+                  'VRFY': ('Verified', -2, 2)}
+
+    def __init__(self, gerrit, number, project, branch, subject,
+                 status='NEW', upstream_root=None):
+        self.gerrit = gerrit
+        self.reported = 0
+        self.queried = 0
+        self.patchsets = []
+        self.number = number
+        self.project = project
+        self.branch = branch
+        self.subject = subject
+        self.latest_patchset = 0
+        self.depends_on_change = None
+        self.needed_by_changes = []
+        self.fail_merge = False
+        self.messages = []
+        self.data = {
+            'branch': branch,
+            'comments': [],
+            'commitMessage': subject,
+            'createdOn': time.time(),
+            'id': 'I' + random_sha1(),
+            'lastUpdated': time.time(),
+            'number': str(number),
+            'open': status == 'NEW',
+            'owner': {'email': 'user@example.com',
+                      'name': 'User Name',
+                      'username': 'username'},
+            'patchSets': self.patchsets,
+            'project': project,
+            'status': status,
+            'subject': subject,
+            'submitRecords': [],
+            'url': 'https://hostname/%s' % number}
+
+        self.upstream_root = upstream_root
+        self.addPatchset()
+        self.data['submitRecords'] = self.getSubmitRecords()
+        self.open = status == 'NEW'
+
+    def add_fake_change_to_repo(self, msg, fn, large):
+        path = os.path.join(self.upstream_root, self.project)
+        repo = git.Repo(path)
+        ref = ChangeReference.create(repo, '1/%s/%s' % (self.number,
+                                                        self.latest_patchset),
+                                     'refs/tags/init')
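+        # The reference path mimics Gerrit's refs/changes/<shard>/<change>/
+        # <patchset> layout, using a fixed '1' shard for simplicity.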
+        repo.head.reference = ref
+        repo.head.reset(index=True, working_tree=True)
+        repo.git.clean('-x', '-f', '-d')
+
+        path = os.path.join(self.upstream_root, self.project)
+        if not large:
+            fn = os.path.join(path, fn)
+            f = open(fn, 'w')
+            f.write("test %s %s %s\n" %
+                    (self.branch, self.number, self.latest_patchset))
+            f.close()
+            repo.index.add([fn])
+        else:
+            for fni in range(100):
+                fn = os.path.join(path, str(fni))
+                f = open(fn, 'w')
+                for ci in range(4096):
+                    f.write(random.choice(string.printable))
+                f.close()
+                repo.index.add([fn])
+
+        r = repo.index.commit(msg)
+        repo.head.reference = 'master'
+        repo.head.reset(index=True, working_tree=True)
+        repo.git.clean('-x', '-f', '-d')
+        repo.heads['master'].checkout()
+        return r
+
+    def addPatchset(self, files=None, large=False):
+        files = files or []
+        self.latest_patchset += 1
+        if files:
+            fn = files[0]
+        else:
+            fn = '%s-%s' % (self.branch, self.number)
+        msg = self.subject + '-' + str(self.latest_patchset)
+        c = self.add_fake_change_to_repo(msg, fn, large)
+        ps_files = [{'file': '/COMMIT_MSG',
+                     'type': 'ADDED'},
+                    {'file': 'README',
+                     'type': 'MODIFIED'}]
+        for f in files:
+            ps_files.append({'file': f, 'type': 'ADDED'})
+        d = {'approvals': [],
+             'createdOn': time.time(),
+             'files': ps_files,
+             'number': str(self.latest_patchset),
+             'ref': 'refs/changes/1/%s/%s' % (self.number,
+                                              self.latest_patchset),
+             'revision': c.hexsha,
+             'uploader': {'email': 'user@example.com',
+                          'name': 'User name',
+                          'username': 'user'}}
+        self.data['currentPatchSet'] = d
+        self.patchsets.append(d)
+        self.data['submitRecords'] = self.getSubmitRecords()
+
+    def getPatchsetCreatedEvent(self, patchset):
+        event = {"type": "patchset-created",
+                 "change": {"project": self.project,
+                            "branch": self.branch,
+                            "id": "I5459869c07352a31bfb1e7a8cac379cabfcb25af",
+                            "number": str(self.number),
+                            "subject": self.subject,
+                            "owner": {"name": "User Name"},
+                            "url": "https://hostname/3"},
+                 "patchSet": self.patchsets[patchset - 1],
+                 "uploader": {"name": "User Name"}}
+        return event
+
+    def getChangeRestoredEvent(self):
+        event = {"type": "change-restored",
+                 "change": {"project": self.project,
+                            "branch": self.branch,
+                            "id": "I5459869c07352a31bfb1e7a8cac379cabfcb25af",
+                            "number": str(self.number),
+                            "subject": self.subject,
+                            "owner": {"name": "User Name"},
+                            "url": "https://hostname/3"},
+                 "restorer": {"name": "User Name"},
+                 "reason": ""}
+        return event
+
+    def getChangeCommentEvent(self, patchset):
+        event = {"type": "comment-added",
+                 "change": {"project": self.project,
+                            "branch": self.branch,
+                            "id": "I5459869c07352a31bfb1e7a8cac379cabfcb25af",
+                            "number": str(self.number),
+                            "subject": self.subject,
+                            "owner": {"name": "User Name"},
+                            "url": "https://hostname/3"},
+                 "patchSet": self.patchsets[patchset - 1],
+                 "author": {"name": "User Name"},
+                 "approvals": [{"type": "Code-Review",
+                                "description": "Code-Review",
+                                "value": "0"}],
+                 "comment": "This is a comment"}
+        return event
+
+    def addApproval(self, category, value, username='jenkins',
+                    granted_on=None):
+        if not granted_on:
+            granted_on = time.time()
+        approval = {'description': self.categories[category][0],
+                    'type': category,
+                    'value': str(value),
+                    'by': {
+                        'username': username,
+                        'email': username + '@example.com',
+                    },
+                    'grantedOn': int(granted_on)}
+        for i, x in enumerate(self.patchsets[-1]['approvals'][:]):
+            if x['by']['username'] == username and x['type'] == category:
+                del self.patchsets[-1]['approvals'][i]
+        self.patchsets[-1]['approvals'].append(approval)
+        event = {'approvals': [approval],
+                 'author': {'email': 'user@example.com',
+                            'name': 'User Name',
+                            'username': 'username'},
+                 'change': {'branch': self.branch,
+                            'id': 'Iaa69c46accf97d0598111724a38250ae76a22c87',
+                            'number': str(self.number),
+                            'owner': {'email': 'user@example.com',
+                                      'name': 'User Name',
+                                      'username': 'username'},
+                            'project': self.project,
+                            'subject': self.subject,
+                            'topic': 'master',
+                            'url': 'https://hostname/459'},
+                 'comment': '',
+                 'patchSet': self.patchsets[-1],
+                 'type': 'comment-added'}
+        self.data['submitRecords'] = self.getSubmitRecords()
+        return json.loads(json.dumps(event))
+
+    def getSubmitRecords(self):
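+        # Build a Gerrit-style submit record: a category is OK once its
+        # effective vote reaches the maximum, REJECT if any vote hits the
+        # minimum (a veto sticks), and NEED otherwise; the change reports
+        # 'OK' only when every category is OK.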
+        status = {}
+        for cat in self.categories.keys():
+            status[cat] = 0
+
+        for a in self.patchsets[-1]['approvals']:
+            cur = status[a['type']]
+            cat_min, cat_max = self.categories[a['type']][1:]
+            new = int(a['value'])
+            if new == cat_min:
+                cur = new
+            elif abs(new) > abs(cur):
+                cur = new
+            status[a['type']] = cur
+
+        labels = []
+        ok = True
+        for typ, cat in self.categories.items():
+            cur = status[typ]
+            cat_min, cat_max = cat[1:]
+            if cur == cat_min:
+                value = 'REJECT'
+                ok = False
+            elif cur == cat_max:
+                value = 'OK'
+            else:
+                value = 'NEED'
+                ok = False
+            labels.append({'label': cat[0], 'status': value})
+        if ok:
+            return [{'status': 'OK'}]
+        return [{'status': 'NOT_READY',
+                 'labels': labels}]
+
+    def setDependsOn(self, other, patchset):
+        self.depends_on_change = other
+        d = {'id': other.data['id'],
+             'number': other.data['number'],
+             'ref': other.patchsets[patchset - 1]['ref']
+             }
+        self.data['dependsOn'] = [d]
+
+        other.needed_by_changes.append(self)
+        needed = other.data.get('neededBy', [])
+        d = {'id': self.data['id'],
+             'number': self.data['number'],
+             'ref': self.patchsets[patchset - 1]['ref'],
+             'revision': self.patchsets[patchset - 1]['revision']
+             }
+        needed.append(d)
+        other.data['neededBy'] = needed
+
+    def query(self):
+        self.queried += 1
+        d = self.data.get('dependsOn')
+        if d:
+            d = d[0]
+            if (self.depends_on_change.patchsets[-1]['ref'] == d['ref']):
+                d['isCurrentPatchSet'] = True
+            else:
+                d['isCurrentPatchSet'] = False
+        return json.loads(json.dumps(self.data))
+
+    def setMerged(self):
+        if (self.depends_on_change and
+            self.depends_on_change.data['status'] != 'MERGED'):
+            return
+        if self.fail_merge:
+            return
+        self.data['status'] = 'MERGED'
+        self.open = False
+
+        path = os.path.join(self.upstream_root, self.project)
+        repo = git.Repo(path)
+        repo.heads[self.branch].commit = \
+            repo.commit(self.patchsets[-1]['revision'])
+
+    def setReported(self):
+        self.reported += 1
+
+
+class FakeGerrit(object):
+    def __init__(self, *args, **kw):
+        self.event_queue = Queue.Queue()
+        self.fixture_dir = os.path.join(FIXTURE_DIR, 'gerrit')
+        self.change_number = 0
+        self.changes = {}
+
+    def addFakeChange(self, project, branch, subject, status='NEW'):
+        self.change_number += 1
+        c = FakeChange(self, self.change_number, project, branch, subject,
+                       upstream_root=self.upstream_root,
+                       status=status)
+        self.changes[self.change_number] = c
+        return c
+
+    def addEvent(self, data):
+        return self.event_queue.put(data)
+
+    def getEvent(self):
+        return self.event_queue.get()
+
+    def eventDone(self):
+        self.event_queue.task_done()
+
+    def review(self, project, changeid, message, action):
+        number, ps = changeid.split(',')
+        change = self.changes[int(number)]
+        change.messages.append(message)
+        if 'submit' in action:
+            change.setMerged()
+        if message:
+            change.setReported()
+
+    def query(self, number):
+        change = self.changes.get(int(number))
+        if change:
+            return change.query()
+        return {}
+
+    def startWatching(self, *args, **kw):
+        pass
+
+
+class BuildHistory(object):
+    def __init__(self, **kw):
+        self.__dict__.update(kw)
+
+    def __repr__(self):
+        return ("<Completed build, result: %s name: %s #%s changes: %s>" %
+                (self.result, self.name, self.number, self.changes))
+
+
+class FakeURLOpener(object):
+    def __init__(self, upstream_root, fake_gerrit, url):
+        self.upstream_root = upstream_root
+        self.fake_gerrit = fake_gerrit
+        self.url = url
+
+    def read(self):
+        res = urlparse.urlparse(self.url)
+        path = res.path
+        project = '/'.join(path.split('/')[2:-2])
+        ret = '001e# service=git-upload-pack\n'
+        ret += ('000000a31270149696713ba7e06f1beb760f20d359c4abed HEAD\x00'
+                'multi_ack thin-pack side-band side-band-64k ofs-delta '
+                'shallow no-progress include-tag multi_ack_detailed no-done\n')
+        path = os.path.join(self.upstream_root, project)
+        repo = git.Repo(path)
+        for ref in repo.refs:
+            r = ref.object.hexsha + ' ' + ref.path + '\n'
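+            # git pkt-line framing: a 4-hex-digit length (which includes the
+            # four prefix characters themselves) followed by the payload.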
+            ret += '%04x%s' % (len(r) + 4, r)
+        ret += '0000'
+        return ret
+
+
+class FakeGerritTrigger(zuul.trigger.gerrit.Gerrit):
+    name = 'gerrit'
+
+    def __init__(self, upstream_root, *args):
+        super(FakeGerritTrigger, self).__init__(*args)
+        self.upstream_root = upstream_root
+
+    def getGitUrl(self, project):
+        return os.path.join(self.upstream_root, project.name)
+
+
+class FakeStatsd(threading.Thread):
+    def __init__(self):
+        threading.Thread.__init__(self)
+        self.daemon = True
+        self.sock = socket.socket(socket.AF_INET, socket.SOCK_DGRAM)
+        self.sock.bind(('', 0))
+        self.port = self.sock.getsockname()[1]
+        self.wake_read, self.wake_write = os.pipe()
+        self.stats = []
+
+    def run(self):
+        while True:
+            poll = select.poll()
+            poll.register(self.sock, select.POLLIN)
+            poll.register(self.wake_read, select.POLLIN)
+            ret = poll.poll()
+            for (fd, event) in ret:
+                if fd == self.sock.fileno():
+                    data = self.sock.recvfrom(1024)
+                    if not data:
+                        return
+                    self.stats.append(data[0])
+                if fd == self.wake_read:
+                    return
+
+    def stop(self):
+        os.write(self.wake_write, '1\n')
+
+
+class FakeBuild(threading.Thread):
+    log = logging.getLogger("zuul.test")
+
+    def __init__(self, worker, job, number, node):
+        threading.Thread.__init__(self)
+        self.daemon = True
+        self.worker = worker
+        self.job = job
+        self.name = job.name.split(':')[1]
+        self.number = number
+        self.node = node
+        self.parameters = json.loads(job.arguments)
+        self.unique = self.parameters['ZUUL_UUID']
+        self.wait_condition = threading.Condition()
+        self.waiting = False
+        self.aborted = False
+        self.created = time.time()
+        self.description = ''
+        self.run_error = False
+
+    def release(self):
+        self.wait_condition.acquire()
+        self.wait_condition.notify()
+        self.waiting = False
+        self.log.debug("Build %s released" % self.unique)
+        self.wait_condition.release()
+
+    def isWaiting(self):
+        self.wait_condition.acquire()
+        if self.waiting:
+            ret = True
+        else:
+            ret = False
+        self.wait_condition.release()
+        return ret
+
+    def _wait(self):
+        self.wait_condition.acquire()
+        self.waiting = True
+        self.log.debug("Build %s waiting" % self.unique)
+        self.wait_condition.wait()
+        self.wait_condition.release()
+
+    def run(self):
+        data = {
+            'url': 'https://server/job/%s/%s/' % (self.name, self.number),
+            'name': self.name,
+            'number': self.number,
+            'manager': self.worker.worker_id,
+            'worker_name': 'My Worker',
+            'worker_hostname': 'localhost',
+            'worker_ips': ['127.0.0.1', '192.168.1.1'],
+            'worker_fqdn': 'zuul.example.org',
+            'worker_program': 'FakeBuilder',
+            'worker_version': 'v1.1',
+            'worker_extra': {'something': 'else'}
+        }
+
+        self.log.debug('Running build %s' % self.unique)
+
+        self.job.sendWorkData(json.dumps(data))
+        self.log.debug('Sent WorkData packet with %s' % json.dumps(data))
+        self.job.sendWorkStatus(0, 100)
+
+        if self.worker.hold_jobs_in_build:
+            self.log.debug('Holding build %s' % self.unique)
+            self._wait()
+        self.log.debug("Build %s continuing" % self.unique)
+
+        self.worker.lock.acquire()
+
+        result = 'SUCCESS'
+        if (('ZUUL_REF' in self.parameters) and
+            self.worker.shouldFailTest(self.name,
+                                       self.parameters['ZUUL_REF'])):
+            result = 'FAILURE'
+        if self.aborted:
+            result = 'ABORTED'
+
+        if self.run_error:
+            work_fail = True
+            result = 'RUN_ERROR'
+        else:
+            data['result'] = result
+            work_fail = False
+
+        changes = None
+        if 'ZUUL_CHANGE_IDS' in self.parameters:
+            changes = self.parameters['ZUUL_CHANGE_IDS']
+
+        self.worker.build_history.append(
+            BuildHistory(name=self.name, number=self.number,
+                         result=result, changes=changes, node=self.node,
+                         uuid=self.unique, description=self.description,
+                         pipeline=self.parameters['ZUUL_PIPELINE'])
+        )
+
+        self.job.sendWorkData(json.dumps(data))
+        if work_fail:
+            self.job.sendWorkFail()
+        else:
+            self.job.sendWorkComplete(json.dumps(data))
+        del self.worker.gearman_jobs[self.job.unique]
+        self.worker.running_builds.remove(self)
+        self.worker.lock.release()
+
+
+class FakeWorker(gear.Worker):
+    def __init__(self, worker_id, test):
+        super(FakeWorker, self).__init__(worker_id)
+        self.gearman_jobs = {}
+        self.build_history = []
+        self.running_builds = []
+        self.build_counter = 0
+        self.fail_tests = {}
+        self.test = test
+
+        self.hold_jobs_in_build = False
+        self.lock = threading.Lock()
+        self.__work_thread = threading.Thread(target=self.work)
+        self.__work_thread.daemon = True
+        self.__work_thread.start()
+
+    def handleJob(self, job):
+        parts = job.name.split(":")
+        cmd = parts[0]
+        name = parts[1]
+        if len(parts) > 2:
+            node = parts[2]
+        else:
+            node = None
+        if cmd == 'build':
+            self.handleBuild(job, name, node)
+        elif cmd == 'stop':
+            self.handleStop(job, name)
+        elif cmd == 'set_description':
+            self.handleSetDescription(job, name)
+
+    def handleBuild(self, job, name, node):
+        build = FakeBuild(self, job, self.build_counter, node)
+        job.build = build
+        self.gearman_jobs[job.unique] = job
+        self.build_counter += 1
+
+        self.running_builds.append(build)
+        build.start()
+
+    def handleStop(self, job, name):
+        self.log.debug("handle stop")
+        parameters = json.loads(job.arguments)
+        name = parameters['name']
+        number = parameters['number']
+        for build in self.running_builds:
+            if build.name == name and build.number == number:
+                build.aborted = True
+                build.release()
+                job.sendWorkComplete()
+                return
+        job.sendWorkFail()
+
+    def handleSetDescription(self, job, name):
+        self.log.debug("handle set description")
+        parameters = json.loads(job.arguments)
+        name = parameters['name']
+        number = parameters['number']
+        descr = parameters['html_description']
+        for build in self.running_builds:
+            if build.name == name and build.number == number:
+                build.description = descr
+                job.sendWorkComplete()
+                return
+        for build in self.build_history:
+            if build.name == name and build.number == number:
+                build.description = descr
+                job.sendWorkComplete()
+                return
+        job.sendWorkFail()
+
+    def work(self):
+        while self.running:
+            try:
+                job = self.getJob()
+            except gear.InterruptedError:
+                continue
+            try:
+                self.handleJob(job)
+            except Exception:
+                self.log.exception("Worker exception:")
+
+    def addFailTest(self, name, change):
+        l = self.fail_tests.get(name, [])
+        l.append(change)
+        self.fail_tests[name] = l
+
+    def shouldFailTest(self, name, ref):
+        l = self.fail_tests.get(name, [])
+        for change in l:
+            if self.test.ref_has_change(ref, change):
+                return True
+        return False
+
+    def release(self, regex=None):
+        builds = self.running_builds[:]
+        self.log.debug("releasing build %s (%s)" % (regex,
+                                                    len(self.running_builds)))
+        for build in builds:
+            if not regex or re.match(regex, build.name):
+                self.log.debug("releasing build %s" %
+                               (build.parameters['ZUUL_UUID']))
+                build.release()
+            else:
+                self.log.debug("not releasing build %s" %
+                               (build.parameters['ZUUL_UUID']))
+        self.log.debug("done releasing builds %s (%s)" %
+                       (regex, len(self.running_builds)))
+
+
+class FakeGearmanServer(gear.Server):
+    def __init__(self):
+        self.hold_jobs_in_queue = False
+        super(FakeGearmanServer, self).__init__(0)
+
+    def getJobForConnection(self, connection, peek=False):
+        for queue in [self.high_queue, self.normal_queue, self.low_queue]:
+            for job in queue:
+                if not hasattr(job, 'waiting'):
+                    if job.name.startswith('build:'):
+                        job.waiting = self.hold_jobs_in_queue
+                    else:
+                        job.waiting = False
+                if job.waiting:
+                    continue
+                if job.name in connection.functions:
+                    if not peek:
+                        queue.remove(job)
+                        connection.related_jobs[job.handle] = job
+                        job.worker_connection = connection
+                    job.running = True
+                    return job
+        return None
+
+    def release(self, regex=None):
+        released = False
+        qlen = (len(self.high_queue) + len(self.normal_queue) +
+                len(self.low_queue))
+        self.log.debug("releasing queued job %s (%s)" % (regex, qlen))
+        for job in self.getQueue():
+            cmd, name = job.name.split(':')
+            if cmd != 'build':
+                continue
+            if not regex or re.match(regex, name):
+                self.log.debug("releasing queued job %s" %
+                               job.unique)
+                job.waiting = False
+                released = True
+            else:
+                self.log.debug("not releasing queued job %s" %
+                               job.unique)
+        if released:
+            self.wakeConnections()
+        qlen = (len(self.high_queue) + len(self.normal_queue) +
+                len(self.low_queue))
+        self.log.debug("done releasing queued jobs %s (%s)" % (regex, qlen))
+
+
+class FakeSMTP(object):
+    log = logging.getLogger('zuul.FakeSMTP')
+
+    def __init__(self, messages, server, port):
+        self.server = server
+        self.port = port
+        self.messages = messages
+
+    def sendmail(self, from_email, to_email, msg):
+        self.log.info("Sending email from %s, to %s, with msg %s" % (
+                      from_email, to_email, msg))
+
+        headers = msg.split('\n\n', 1)[0]
+        body = msg.split('\n\n', 1)[1]
+
+        self.messages.append(dict(
+            from_email=from_email,
+            to_email=to_email,
+            msg=msg,
+            headers=headers,
+            body=body,
+        ))
+
+        return True
+
+    def quit(self):
+        return True
+
+
+class FakeSwiftClientConnection(swiftclient.client.Connection):
+    def post_account(self, headers):
+        # Do nothing
+        pass
+
+    def get_auth(self):
+        # Returns endpoint and (unused) auth token
+        endpoint = os.path.join('https://storage.example.org', 'V1',
+                                'AUTH_account')
+        return endpoint, ''
+
+
+class ZuulTestCase(testtools.TestCase):
+    log = logging.getLogger("zuul.test")
+
+    def setUp(self):
+        super(ZuulTestCase, self).setUp()
+        test_timeout = os.environ.get('OS_TEST_TIMEOUT', 0)
+        try:
+            test_timeout = int(test_timeout)
+        except ValueError:
+            # If timeout value is invalid do not set a timeout.
+            test_timeout = 0
+        if test_timeout > 0:
+            self.useFixture(fixtures.Timeout(test_timeout, gentle=False))
+
+        if (os.environ.get('OS_STDOUT_CAPTURE') == 'True' or
+            os.environ.get('OS_STDOUT_CAPTURE') == '1'):
+            stdout = self.useFixture(fixtures.StringStream('stdout')).stream
+            self.useFixture(fixtures.MonkeyPatch('sys.stdout', stdout))
+        if (os.environ.get('OS_STDERR_CAPTURE') == 'True' or
+            os.environ.get('OS_STDERR_CAPTURE') == '1'):
+            stderr = self.useFixture(fixtures.StringStream('stderr')).stream
+            self.useFixture(fixtures.MonkeyPatch('sys.stderr', stderr))
+        if (os.environ.get('OS_LOG_CAPTURE') == 'True' or
+            os.environ.get('OS_LOG_CAPTURE') == '1'):
+            self.useFixture(fixtures.FakeLogger(
+                level=logging.DEBUG,
+                format='%(asctime)s %(name)-32s '
+                '%(levelname)-8s %(message)s'))
+        tmp_root = self.useFixture(fixtures.TempDir(
+            rootdir=os.environ.get("ZUUL_TEST_ROOT"))).path
+        self.test_root = os.path.join(tmp_root, "zuul-test")
+        self.upstream_root = os.path.join(self.test_root, "upstream")
+        self.git_root = os.path.join(self.test_root, "git")
+
+        if os.path.exists(self.test_root):
+            shutil.rmtree(self.test_root)
+        os.makedirs(self.test_root)
+        os.makedirs(self.upstream_root)
+        os.makedirs(self.git_root)
+
+        # Make a per-test copy of the configuration.
+        self.setup_config()
+        self.config.set('zuul', 'layout_config',
+                        os.path.join(FIXTURE_DIR, "layout.yaml"))
+        self.config.set('merger', 'git_dir', self.git_root)
+
+        # For each project in config:
+        self.init_repo("org/project")
+        self.init_repo("org/project1")
+        self.init_repo("org/project2")
+        self.init_repo("org/project3")
+        self.init_repo("org/one-job-project")
+        self.init_repo("org/nonvoting-project")
+        self.init_repo("org/templated-project")
+        self.init_repo("org/layered-project")
+        self.init_repo("org/node-project")
+        self.init_repo("org/conflict-project")
+        self.init_repo("org/noop-project")
+        self.init_repo("org/experimental-project")
+
+        self.statsd = FakeStatsd()
+        os.environ['STATSD_HOST'] = 'localhost'
+        os.environ['STATSD_PORT'] = str(self.statsd.port)
+        self.statsd.start()
+        # the statsd client object is configured in the statsd module import
+        reload(statsd)
+        reload(zuul.scheduler)
+
+        self.gearman_server = FakeGearmanServer()
+
+        self.config.set('gearman', 'port', str(self.gearman_server.port))
+
+        self.worker = FakeWorker('fake_worker', self)
+        self.worker.addServer('127.0.0.1', self.gearman_server.port)
+        self.gearman_server.worker = self.worker
+
+        self.merge_server = zuul.merger.server.MergeServer(self.config)
+        self.merge_server.start()
+
+        self.sched = zuul.scheduler.Scheduler()
+
+        self.useFixture(fixtures.MonkeyPatch('swiftclient.client.Connection',
+                                             FakeSwiftClientConnection))
+        self.swift = zuul.lib.swift.Swift(self.config)
+
+        def URLOpenerFactory(*args, **kw):
+            if isinstance(args[0], urllib2.Request):
+                return old_urlopen(*args, **kw)
+            args = [self.fake_gerrit] + list(args)
+            return FakeURLOpener(self.upstream_root, *args, **kw)
+
+        old_urlopen = urllib2.urlopen
+        urllib2.urlopen = URLOpenerFactory
+
+        self.launcher = zuul.launcher.gearman.Gearman(self.config, self.sched,
+                                                      self.swift)
+        self.merge_client = zuul.merger.client.MergeClient(
+            self.config, self.sched)
+
+        self.smtp_messages = []
+
+        def FakeSMTPFactory(*args, **kw):
+            args = [self.smtp_messages] + list(args)
+            return FakeSMTP(*args, **kw)
+
+        zuul.lib.gerrit.Gerrit = FakeGerrit
+        self.useFixture(fixtures.MonkeyPatch('smtplib.SMTP', FakeSMTPFactory))
+
+        self.gerrit = FakeGerritTrigger(
+            self.upstream_root, self.config, self.sched)
+        self.gerrit.replication_timeout = 1.5
+        self.gerrit.replication_retry_interval = 0.5
+        self.fake_gerrit = self.gerrit.gerrit
+        self.fake_gerrit.upstream_root = self.upstream_root
+
+        self.webapp = zuul.webapp.WebApp(self.sched, port=0)
+        self.rpc = zuul.rpclistener.RPCListener(self.config, self.sched)
+
+        self.sched.setLauncher(self.launcher)
+        self.sched.setMerger(self.merge_client)
+        self.sched.registerTrigger(self.gerrit)
+        self.timer = zuul.trigger.timer.Timer(self.config, self.sched)
+        self.sched.registerTrigger(self.timer)
+
+        self.sched.registerReporter(
+            zuul.reporter.gerrit.Reporter(self.gerrit))
+        self.smtp_reporter = zuul.reporter.smtp.Reporter(
+            self.config.get('smtp', 'default_from'),
+            self.config.get('smtp', 'default_to'),
+            self.config.get('smtp', 'server'))
+        self.sched.registerReporter(self.smtp_reporter)
+
+        self.sched.start()
+        self.sched.reconfigure(self.config)
+        self.sched.resume()
+        self.webapp.start()
+        self.rpc.start()
+        self.launcher.gearman.waitForServer()
+        self.registerJobs()
+        self.builds = self.worker.running_builds
+        self.history = self.worker.build_history
+
+        self.addCleanup(self.assertFinalState)
+        self.addCleanup(self.shutdown)
+
+    def setup_config(self):
+        """Per test config object. Override to set different config."""
+        self.config = ConfigParser.ConfigParser()
+        self.config.read(os.path.join(FIXTURE_DIR, "zuul.conf"))
+
+    def assertFinalState(self):
+        # Make sure that the change cache is cleared
+        self.assertEqual(len(self.gerrit._change_cache.keys()), 0)
+        # Make sure that git.Repo objects have been garbage collected.
+        repos = []
+        gc.collect()
+        for obj in gc.get_objects():
+            if isinstance(obj, git.Repo):
+                repos.append(obj)
+        self.assertEqual(len(repos), 0)
+        self.assertEmptyQueues()
+
+    def shutdown(self):
+        self.log.debug("Shutting down after tests")
+        self.launcher.stop()
+        self.merge_server.stop()
+        self.merge_server.join()
+        self.merge_client.stop()
+        self.worker.shutdown()
+        self.gerrit.stop()
+        self.timer.stop()
+        self.sched.stop()
+        self.sched.join()
+        self.statsd.stop()
+        self.statsd.join()
+        self.webapp.stop()
+        self.webapp.join()
+        self.rpc.stop()
+        self.rpc.join()
+        self.gearman_server.shutdown()
+        threads = threading.enumerate()
+        if len(threads) > 1:
+            self.log.error("More than one thread is running: %s" % threads)
+        super(ZuulTestCase, self).tearDown()
+
+    def init_repo(self, project):
+        parts = project.split('/')
+        path = os.path.join(self.upstream_root, *parts[:-1])
+        if not os.path.exists(path):
+            os.makedirs(path)
+        path = os.path.join(self.upstream_root, project)
+        repo = git.Repo.init(path)
+
+        repo.config_writer().set_value('user', 'email', 'user@example.com')
+        repo.config_writer().set_value('user', 'name', 'User Name')
+        repo.config_writer().write()
+
+        fn = os.path.join(path, 'README')
+        f = open(fn, 'w')
+        f.write("test\n")
+        f.close()
+        repo.index.add([fn])
+        repo.index.commit('initial commit')
+        master = repo.create_head('master')
+        repo.create_tag('init')
+
+        mp = repo.create_head('mp')
+        repo.head.reference = mp
+        f = open(fn, 'a')
+        f.write("test mp\n")
+        f.close()
+        repo.index.add([fn])
+        repo.index.commit('mp commit')
+
+        repo.head.reference = master
+        repo.head.reset(index=True, working_tree=True)
+        repo.git.clean('-x', '-f', '-d')
+
+    def ref_has_change(self, ref, change):
+        path = os.path.join(self.git_root, change.project)
+        repo = git.Repo(path)
+        for commit in repo.iter_commits(ref):
+            if commit.message.strip() == ('%s-1' % change.subject):
+                return True
+        return False
+
+    def job_has_changes(self, *args):
+        job = args[0]
+        commits = args[1:]
+        if isinstance(job, FakeBuild):
+            parameters = job.parameters
+        else:
+            parameters = json.loads(job.arguments)
+        project = parameters['ZUUL_PROJECT']
+        path = os.path.join(self.git_root, project)
+        repo = git.Repo(path)
+        ref = parameters['ZUUL_REF']
+        sha = parameters['ZUUL_COMMIT']
+        repo_messages = [c.message.strip() for c in repo.iter_commits(ref)]
+        repo_shas = [c.hexsha for c in repo.iter_commits(ref)]
+        commit_messages = ['%s-1' % commit.subject for commit in commits]
+        self.log.debug("Checking if job %s has changes; commit_messages %s;"
+                       " repo_messages %s; sha %s" % (job, commit_messages,
+                                                      repo_messages, sha))
+        for msg in commit_messages:
+            if msg not in repo_messages:
+                self.log.debug("  messages do not match")
+                return False
+        if repo_shas[0] != sha:
+            self.log.debug("  sha does not match")
+            return False
+        self.log.debug("  OK")
+        return True
+
+    def registerJobs(self):
+        count = 0
+        for job in self.sched.layout.jobs.keys():
+            self.worker.registerFunction('build:' + job)
+            count += 1
+        self.worker.registerFunction('stop:' + self.worker.worker_id)
+        count += 1
+
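+        # Registration happens asynchronously over the worker
+        # connection, so busy-wait until the fake Gearman server has
+        # seen every function before returning.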
+        while len(self.gearman_server.functions) < count:
+            time.sleep(0)
+
+    def release(self, job):
+        if isinstance(job, FakeBuild):
+            job.release()
+        else:
+            job.waiting = False
+            self.log.debug("Queued job %s released" % job.unique)
+            self.gearman_server.wakeConnections()
+
+    def getParameter(self, job, name):
+        if isinstance(job, FakeBuild):
+            return job.parameters[name]
+        else:
+            parameters = json.loads(job.arguments)
+            return parameters[name]
+
+    def resetGearmanServer(self):
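+        # Drop the fake worker's registered functions, wait until no
+        # connection other than the RPC listener and the merger still
+        # advertises any, then re-register those two services.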
+        self.worker.setFunctions([])
+        while True:
+            done = True
+            for connection in self.gearman_server.active_connections:
+                if (connection.functions and
+                    connection.client_id not in ['Zuul RPC Listener',
+                                                 'Zuul Merger']):
+                    done = False
+            if done:
+                break
+            time.sleep(0)
+        self.gearman_server.functions = set()
+        self.rpc.register()
+        self.merge_server.register()
+
+    def haveAllBuildsReported(self):
+        # See if Zuul is waiting on a meta job to complete
+        if self.launcher.meta_jobs:
+            return False
+        # Find out if every build that the worker has completed has been
+        # reported back to Zuul.  If it hasn't then that means a Gearman
+        # event is still in transit and the system is not stable.
+        for build in self.worker.build_history:
+            zbuild = self.launcher.builds.get(build.uuid)
+            if not zbuild:
+                # It has already been reported
+                continue
+            # It hasn't been reported yet.
+            return False
+        # Make sure that none of the worker connections are in GRAB_WAIT
+        for connection in self.worker.active_connections:
+            if connection.state == 'GRAB_WAIT':
+                return False
+        return True
+
+    def areAllBuildsWaiting(self):
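+        # A build only counts as waiting if it can be traced from the
+        # launcher through the gearman client and server to a job that
+        # is either still held in the server queue or assigned to a
+        # worker build that is holding; anything else is still in flight.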
+        ret = True
+
+        builds = self.launcher.builds.values()
+        for build in builds:
+            client_job = None
+            for conn in self.launcher.gearman.active_connections:
+                for j in conn.related_jobs.values():
+                    if j.unique == build.uuid:
+                        client_job = j
+                        break
+            if not client_job:
+                self.log.debug("%s is not known to the gearman client" %
+                               build)
+                ret = False
+                continue
+            if not client_job.handle:
+                self.log.debug("%s has no handle" % client_job)
+                ret = False
+                continue
+            server_job = self.gearman_server.jobs.get(client_job.handle)
+            if not server_job:
+                self.log.debug("%s is not known to the gearman server" %
+                               client_job)
+                ret = False
+                continue
+            if not hasattr(server_job, 'waiting'):
+                self.log.debug("%s is being enqueued" % server_job)
+                ret = False
+                continue
+            if server_job.waiting:
+                continue
+            worker_job = self.worker.gearman_jobs.get(server_job.unique)
+            if worker_job:
+                if worker_job.build.isWaiting():
+                    continue
+                else:
+                    self.log.debug("%s is running" % worker_job)
+                    ret = False
+            else:
+                self.log.debug("%s is unassigned" % server_job)
+                ret = False
+        return ret
+
+    def waitUntilSettled(self):
+        self.log.debug("Waiting until settled...")
+        start = time.time()
+        while True:
+            if time.time() - start > 10:
+                print 'queue status:',
+                print self.sched.trigger_event_queue.empty(),
+                print self.sched.result_event_queue.empty(),
+                print self.fake_gerrit.event_queue.empty(),
+                print self.areAllBuildsWaiting()
+                raise Exception("Timeout waiting for Zuul to settle")
+            # Make sure no new events show up while we're checking
+            self.worker.lock.acquire()
+            # have all build states propagated to zuul?
+            if self.haveAllBuildsReported():
+                # Join ensures that the queue is empty _and_ events have been
+                # processed
+                self.fake_gerrit.event_queue.join()
+                self.sched.trigger_event_queue.join()
+                self.sched.result_event_queue.join()
+                self.sched.run_handler_lock.acquire()
+                if (self.sched.trigger_event_queue.empty() and
+                    self.sched.result_event_queue.empty() and
+                    self.fake_gerrit.event_queue.empty() and
+                    not self.merge_client.build_sets and
+                    self.haveAllBuildsReported() and
+                    self.areAllBuildsWaiting()):
+                    self.sched.run_handler_lock.release()
+                    self.worker.lock.release()
+                    self.log.debug("...settled.")
+                    return
+                self.sched.run_handler_lock.release()
+            self.worker.lock.release()
+            self.sched.wake_event.wait(0.1)
+
+    def countJobResults(self, jobs, result):
+        jobs = filter(lambda x: x.result == result, jobs)
+        return len(jobs)
+
+    def getJobFromHistory(self, name):
+        history = self.worker.build_history
+        for job in history:
+            if job.name == name:
+                return job
+        raise Exception("Unable to find job %s in history" % name)
+
+    def assertEmptyQueues(self):
+        # Make sure there are no orphaned jobs
+        for pipeline in self.sched.layout.pipelines.values():
+            for queue in pipeline.queues:
+                if len(queue.queue) != 0:
+                    print 'pipeline %s queue %s contents %s' % (
+                        pipeline.name, queue.name, queue.queue)
+                self.assertEqual(len(queue.queue), 0)
+
+    def assertReportedStat(self, key, value=None, kind=None):
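+        # Stats arrive over UDP, so poll the fake statsd for up to five
+        # seconds before deciding the key was never reported.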
+        start = time.time()
+        while time.time() < (start + 5):
+            for stat in self.statsd.stats:
+                pprint.pprint(self.statsd.stats)
+                k, v = stat.split(':')
+                if key == k:
+                    if value is None and kind is None:
+                        return
+                    elif value:
+                        if value == v:
+                            return
+                    elif kind:
+                        if v.endswith('|' + kind):
+                            return
+            time.sleep(0.1)
+
+        pprint.pprint(self.statsd.stats)
+        raise Exception("Key %s not found in reported stats" % key)
diff --git a/tests/fixtures/layout-idle.yaml b/tests/fixtures/layout-idle.yaml
index e4574fa..0870788 100644
--- a/tests/fixtures/layout-idle.yaml
+++ b/tests/fixtures/layout-idle.yaml
@@ -8,5 +8,5 @@
 projects:
   - name: org/project
     periodic:
-      - project-test1
-      - project-test2
+      - project-bitrot-stable-old
+      - project-bitrot-stable-older
diff --git a/tests/fixtures/layout-no-timer.yaml b/tests/fixtures/layout-no-timer.yaml
new file mode 100644
index 0000000..9436821
--- /dev/null
+++ b/tests/fixtures/layout-no-timer.yaml
@@ -0,0 +1,14 @@
+pipelines:
+  - name: periodic
+    manager: IndependentPipelineManager
+    # A trigger is required; set it to one that is a noop
+    # during tests that check the timer trigger.
+    trigger:
+      gerrit:
+        - event: patchset-created
+
+projects:
+  - name: org/project
+    periodic:
+      - project-bitrot-stable-old
+      - project-bitrot-stable-older
diff --git a/tests/fixtures/layout-timer-smtp.yaml b/tests/fixtures/layout-timer-smtp.yaml
index ac59df4..b5a6ce0 100644
--- a/tests/fixtures/layout-timer-smtp.yaml
+++ b/tests/fixtures/layout-timer-smtp.yaml
@@ -3,7 +3,7 @@
     manager: IndependentPipelineManager
     trigger:
       timer:
-        - time: '* * * * * */10'
+        - time: '* * * * * */1'
     success:
       smtp:
         to: alternative_me@example.com
diff --git a/tests/fixtures/layout-timer.yaml b/tests/fixtures/layout-timer.yaml
index 9e0f66b..4904f87 100644
--- a/tests/fixtures/layout-timer.yaml
+++ b/tests/fixtures/layout-timer.yaml
@@ -15,7 +15,7 @@
     manager: IndependentPipelineManager
     trigger:
       timer:
-        - time: '* * * * * */10'
+        - time: '* * * * * */1'
 
 projects:
   - name: org/project
diff --git a/tests/test_scheduler.py b/tests/test_scheduler.py
index d489ff1..f698d59 100755
--- a/tests/test_scheduler.py
+++ b/tests/test_scheduler.py
@@ -14,1199 +14,33 @@
 # License for the specific language governing permissions and limitations
 # under the License.
 
-import ConfigParser
 from cStringIO import StringIO
-import gc
 import gzip
-import hashlib
 import json
 import logging
 import os
-import pprint
-import Queue
-import random
 import re
-import select
 import shutil
-import socket
-import string
-import subprocess
-import swiftclient
-import threading
 import time
 import urllib
 import urllib2
 
 import git
-import gear
-import fixtures
-import six.moves.urllib.parse as urlparse
-import statsd
 import testtools
 
 import zuul.scheduler
-import zuul.webapp
-import zuul.rpclistener
 import zuul.rpcclient
-import zuul.launcher.gearman
-import zuul.lib.swift
-import zuul.merger.server
-import zuul.merger.client
 import zuul.reporter.gerrit
 import zuul.reporter.smtp
-import zuul.trigger.gerrit
-import zuul.trigger.timer
 
-FIXTURE_DIR = os.path.join(os.path.dirname(__file__),
-                           'fixtures')
-CONFIG = ConfigParser.ConfigParser()
-CONFIG.read(os.path.join(FIXTURE_DIR, "zuul.conf"))
-
-CONFIG.set('zuul', 'layout_config',
-           os.path.join(FIXTURE_DIR, "layout.yaml"))
+from tests.base import ZuulTestCase, repack_repo
 
 logging.basicConfig(level=logging.DEBUG,
                     format='%(asctime)s %(name)-32s '
                     '%(levelname)-8s %(message)s')
 
 
-def repack_repo(path):
-    cmd = ['git', '--git-dir=%s/.git' % path, 'repack', '-afd']
-    output = subprocess.Popen(cmd, close_fds=True,
-                              stdout=subprocess.PIPE,
-                              stderr=subprocess.PIPE)
-    out = output.communicate()
-    if output.returncode:
-        raise Exception("git repack returned %d" % output.returncode)
-    return out
-
-
-def random_sha1():
-    return hashlib.sha1(str(random.random())).hexdigest()
-
-
-class ChangeReference(git.Reference):
-    _common_path_default = "refs/changes"
-    _points_to_commits_only = True
-
-
-class FakeChange(object):
-    categories = {'APRV': ('Approved', -1, 1),
-                  'CRVW': ('Code-Review', -2, 2),
-                  'VRFY': ('Verified', -2, 2)}
-
-    def __init__(self, gerrit, number, project, branch, subject,
-                 status='NEW', upstream_root=None):
-        self.gerrit = gerrit
-        self.reported = 0
-        self.queried = 0
-        self.patchsets = []
-        self.number = number
-        self.project = project
-        self.branch = branch
-        self.subject = subject
-        self.latest_patchset = 0
-        self.depends_on_change = None
-        self.needed_by_changes = []
-        self.fail_merge = False
-        self.messages = []
-        self.data = {
-            'branch': branch,
-            'comments': [],
-            'commitMessage': subject,
-            'createdOn': time.time(),
-            'id': 'I' + random_sha1(),
-            'lastUpdated': time.time(),
-            'number': str(number),
-            'open': status == 'NEW',
-            'owner': {'email': 'user@example.com',
-                      'name': 'User Name',
-                      'username': 'username'},
-            'patchSets': self.patchsets,
-            'project': project,
-            'status': status,
-            'subject': subject,
-            'submitRecords': [],
-            'url': 'https://hostname/%s' % number}
-
-        self.upstream_root = upstream_root
-        self.addPatchset()
-        self.data['submitRecords'] = self.getSubmitRecords()
-        self.open = True
-
-    def add_fake_change_to_repo(self, msg, fn, large):
-        path = os.path.join(self.upstream_root, self.project)
-        repo = git.Repo(path)
-        ref = ChangeReference.create(repo, '1/%s/%s' % (self.number,
-                                                        self.latest_patchset),
-                                     'refs/tags/init')
-        repo.head.reference = ref
-        repo.head.reset(index=True, working_tree=True)
-        repo.git.clean('-x', '-f', '-d')
-
-        path = os.path.join(self.upstream_root, self.project)
-        if not large:
-            fn = os.path.join(path, fn)
-            f = open(fn, 'w')
-            f.write("test %s %s %s\n" %
-                    (self.branch, self.number, self.latest_patchset))
-            f.close()
-            repo.index.add([fn])
-        else:
-            for fni in range(100):
-                fn = os.path.join(path, str(fni))
-                f = open(fn, 'w')
-                for ci in range(4096):
-                    f.write(random.choice(string.printable))
-                f.close()
-                repo.index.add([fn])
-
-        r = repo.index.commit(msg)
-        repo.head.reference = 'master'
-        repo.head.reset(index=True, working_tree=True)
-        repo.git.clean('-x', '-f', '-d')
-        repo.heads['master'].checkout()
-        return r
-
-    def addPatchset(self, files=[], large=False):
-        self.latest_patchset += 1
-        if files:
-            fn = files[0]
-        else:
-            fn = '%s-%s' % (self.branch, self.number)
-        msg = self.subject + '-' + str(self.latest_patchset)
-        c = self.add_fake_change_to_repo(msg, fn, large)
-        ps_files = [{'file': '/COMMIT_MSG',
-                     'type': 'ADDED'},
-                    {'file': 'README',
-                     'type': 'MODIFIED'}]
-        for f in files:
-            ps_files.append({'file': f, 'type': 'ADDED'})
-        d = {'approvals': [],
-             'createdOn': time.time(),
-             'files': ps_files,
-             'number': str(self.latest_patchset),
-             'ref': 'refs/changes/1/%s/%s' % (self.number,
-                                              self.latest_patchset),
-             'revision': c.hexsha,
-             'uploader': {'email': 'user@example.com',
-                          'name': 'User name',
-                          'username': 'user'}}
-        self.data['currentPatchSet'] = d
-        self.patchsets.append(d)
-        self.data['submitRecords'] = self.getSubmitRecords()
-
-    def getPatchsetCreatedEvent(self, patchset):
-        event = {"type": "patchset-created",
-                 "change": {"project": self.project,
-                            "branch": self.branch,
-                            "id": "I5459869c07352a31bfb1e7a8cac379cabfcb25af",
-                            "number": str(self.number),
-                            "subject": self.subject,
-                            "owner": {"name": "User Name"},
-                            "url": "https://hostname/3"},
-                 "patchSet": self.patchsets[patchset - 1],
-                 "uploader": {"name": "User Name"}}
-        return event
-
-    def getChangeRestoredEvent(self):
-        event = {"type": "change-restored",
-                 "change": {"project": self.project,
-                            "branch": self.branch,
-                            "id": "I5459869c07352a31bfb1e7a8cac379cabfcb25af",
-                            "number": str(self.number),
-                            "subject": self.subject,
-                            "owner": {"name": "User Name"},
-                            "url": "https://hostname/3"},
-                 "restorer": {"name": "User Name"},
-                 "reason": ""}
-        return event
-
-    def getChangeCommentEvent(self, patchset):
-        event = {"type": "comment-added",
-                 "change": {"project": self.project,
-                            "branch": self.branch,
-                            "id": "I5459869c07352a31bfb1e7a8cac379cabfcb25af",
-                            "number": str(self.number),
-                            "subject": self.subject,
-                            "owner": {"name": "User Name"},
-                            "url": "https://hostname/3"},
-                 "patchSet": self.patchsets[patchset - 1],
-                 "author": {"name": "User Name"},
-                 "approvals": [{"type": "Code-Review",
-                                "description": "Code-Review",
-                                "value": "0"}],
-                 "comment": "This is a comment"}
-        return event
-
-    def addApproval(self, category, value, username='jenkins',
-                    granted_on=None):
-        if not granted_on:
-            granted_on = time.time()
-        approval = {'description': self.categories[category][0],
-                    'type': category,
-                    'value': str(value),
-                    'by': {
-                        'username': username,
-                        'email': username + '@example.com',
-                    },
-                    'grantedOn': int(granted_on)}
-        for i, x in enumerate(self.patchsets[-1]['approvals'][:]):
-            if x['by']['username'] == username and x['type'] == category:
-                del self.patchsets[-1]['approvals'][i]
-        self.patchsets[-1]['approvals'].append(approval)
-        event = {'approvals': [approval],
-                 'author': {'email': 'user@example.com',
-                            'name': 'User Name',
-                            'username': 'username'},
-                 'change': {'branch': self.branch,
-                            'id': 'Iaa69c46accf97d0598111724a38250ae76a22c87',
-                            'number': str(self.number),
-                            'owner': {'email': 'user@example.com',
-                                      'name': 'User Name',
-                                      'username': 'username'},
-                            'project': self.project,
-                            'subject': self.subject,
-                            'topic': 'master',
-                            'url': 'https://hostname/459'},
-                 'comment': '',
-                 'patchSet': self.patchsets[-1],
-                 'type': 'comment-added'}
-        self.data['submitRecords'] = self.getSubmitRecords()
-        return json.loads(json.dumps(event))
-
-    def getSubmitRecords(self):
-        status = {}
-        for cat in self.categories.keys():
-            status[cat] = 0
-
-        for a in self.patchsets[-1]['approvals']:
-            cur = status[a['type']]
-            cat_min, cat_max = self.categories[a['type']][1:]
-            new = int(a['value'])
-            if new == cat_min:
-                cur = new
-            elif abs(new) > abs(cur):
-                cur = new
-            status[a['type']] = cur
-
-        labels = []
-        ok = True
-        for typ, cat in self.categories.items():
-            cur = status[typ]
-            cat_min, cat_max = cat[1:]
-            if cur == cat_min:
-                value = 'REJECT'
-                ok = False
-            elif cur == cat_max:
-                value = 'OK'
-            else:
-                value = 'NEED'
-                ok = False
-            labels.append({'label': cat[0], 'status': value})
-        if ok:
-            return [{'status': 'OK'}]
-        return [{'status': 'NOT_READY',
-                 'labels': labels}]
-
-    def setDependsOn(self, other, patchset):
-        self.depends_on_change = other
-        d = {'id': other.data['id'],
-             'number': other.data['number'],
-             'ref': other.patchsets[patchset - 1]['ref']
-             }
-        self.data['dependsOn'] = [d]
-
-        other.needed_by_changes.append(self)
-        needed = other.data.get('neededBy', [])
-        d = {'id': self.data['id'],
-             'number': self.data['number'],
-             'ref': self.patchsets[patchset - 1]['ref'],
-             'revision': self.patchsets[patchset - 1]['revision']
-             }
-        needed.append(d)
-        other.data['neededBy'] = needed
-
-    def query(self):
-        self.queried += 1
-        d = self.data.get('dependsOn')
-        if d:
-            d = d[0]
-            if (self.depends_on_change.patchsets[-1]['ref'] == d['ref']):
-                d['isCurrentPatchSet'] = True
-            else:
-                d['isCurrentPatchSet'] = False
-        return json.loads(json.dumps(self.data))
-
-    def setMerged(self):
-        if (self.depends_on_change and
-            self.depends_on_change.data['status'] != 'MERGED'):
-            return
-        if self.fail_merge:
-            return
-        self.data['status'] = 'MERGED'
-        self.open = False
-
-        path = os.path.join(self.upstream_root, self.project)
-        repo = git.Repo(path)
-        repo.heads[self.branch].commit = \
-            repo.commit(self.patchsets[-1]['revision'])
-
-    def setReported(self):
-        self.reported += 1
-
-
-class FakeGerrit(object):
-    def __init__(self, *args, **kw):
-        self.event_queue = Queue.Queue()
-        self.fixture_dir = os.path.join(FIXTURE_DIR, 'gerrit')
-        self.change_number = 0
-        self.changes = {}
-
-    def addFakeChange(self, project, branch, subject, status='NEW'):
-        self.change_number += 1
-        c = FakeChange(self, self.change_number, project, branch, subject,
-                       upstream_root=self.upstream_root,
-                       status=status)
-        self.changes[self.change_number] = c
-        return c
-
-    def addEvent(self, data):
-        return self.event_queue.put(data)
-
-    def getEvent(self):
-        return self.event_queue.get()
-
-    def eventDone(self):
-        self.event_queue.task_done()
-
-    def review(self, project, changeid, message, action):
-        number, ps = changeid.split(',')
-        change = self.changes[int(number)]
-        change.messages.append(message)
-        if 'submit' in action:
-            change.setMerged()
-        if message:
-            change.setReported()
-
-    def query(self, number):
-        change = self.changes.get(int(number))
-        if change:
-            return change.query()
-        return {}
-
-    def startWatching(self, *args, **kw):
-        pass
-
-
-class BuildHistory(object):
-    def __init__(self, **kw):
-        self.__dict__.update(kw)
-
-    def __repr__(self):
-        return ("<Completed build, result: %s name: %s #%s changes: %s>" %
-                (self.result, self.name, self.number, self.changes))
-
-
-class FakeURLOpener(object):
-    def __init__(self, upstream_root, fake_gerrit, url):
-        self.upstream_root = upstream_root
-        self.fake_gerrit = fake_gerrit
-        self.url = url
-
-    def read(self):
-        res = urlparse.urlparse(self.url)
-        path = res.path
-        project = '/'.join(path.split('/')[2:-2])
-        ret = '001e# service=git-upload-pack\n'
-        ret += ('000000a31270149696713ba7e06f1beb760f20d359c4abed HEAD\x00'
-                'multi_ack thin-pack side-band side-band-64k ofs-delta '
-                'shallow no-progress include-tag multi_ack_detailed no-done\n')
-        path = os.path.join(self.upstream_root, project)
-        repo = git.Repo(path)
-        for ref in repo.refs:
-            r = ref.object.hexsha + ' ' + ref.path + '\n'
-            ret += '%04x%s' % (len(r) + 4, r)
-        ret += '0000'
-        return ret
-
-
-class FakeGerritTrigger(zuul.trigger.gerrit.Gerrit):
-    name = 'gerrit'
-
-    def __init__(self, upstream_root, *args):
-        super(FakeGerritTrigger, self).__init__(*args)
-        self.upstream_root = upstream_root
-
-    def getGitUrl(self, project):
-        return os.path.join(self.upstream_root, project.name)
-
-
-class FakeStatsd(threading.Thread):
-    def __init__(self):
-        threading.Thread.__init__(self)
-        self.daemon = True
-        self.sock = socket.socket(socket.AF_INET, socket.SOCK_DGRAM)
-        self.sock.bind(('', 0))
-        self.port = self.sock.getsockname()[1]
-        self.wake_read, self.wake_write = os.pipe()
-        self.stats = []
-
-    def run(self):
-        while True:
-            poll = select.poll()
-            poll.register(self.sock, select.POLLIN)
-            poll.register(self.wake_read, select.POLLIN)
-            ret = poll.poll()
-            for (fd, event) in ret:
-                if fd == self.sock.fileno():
-                    data = self.sock.recvfrom(1024)
-                    if not data:
-                        return
-                    self.stats.append(data[0])
-                if fd == self.wake_read:
-                    return
-
-    def stop(self):
-        os.write(self.wake_write, '1\n')
-
-
-class FakeBuild(threading.Thread):
-    log = logging.getLogger("zuul.test")
-
-    def __init__(self, worker, job, number, node):
-        threading.Thread.__init__(self)
-        self.daemon = True
-        self.worker = worker
-        self.job = job
-        self.name = job.name.split(':')[1]
-        self.number = number
-        self.node = node
-        self.parameters = json.loads(job.arguments)
-        self.unique = self.parameters['ZUUL_UUID']
-        self.wait_condition = threading.Condition()
-        self.waiting = False
-        self.aborted = False
-        self.created = time.time()
-        self.description = ''
-        self.run_error = False
-
-    def release(self):
-        self.wait_condition.acquire()
-        self.wait_condition.notify()
-        self.waiting = False
-        self.log.debug("Build %s released" % self.unique)
-        self.wait_condition.release()
-
-    def isWaiting(self):
-        self.wait_condition.acquire()
-        if self.waiting:
-            ret = True
-        else:
-            ret = False
-        self.wait_condition.release()
-        return ret
-
-    def _wait(self):
-        self.wait_condition.acquire()
-        self.waiting = True
-        self.log.debug("Build %s waiting" % self.unique)
-        self.wait_condition.wait()
-        self.wait_condition.release()
-
-    def run(self):
-        data = {
-            'url': 'https://server/job/%s/%s/' % (self.name, self.number),
-            'name': self.name,
-            'number': self.number,
-            'manager': self.worker.worker_id,
-            'worker_name': 'My Worker',
-            'worker_hostname': 'localhost',
-            'worker_ips': ['127.0.0.1', '192.168.1.1'],
-            'worker_fqdn': 'zuul.example.org',
-            'worker_program': 'FakeBuilder',
-            'worker_version': 'v1.1',
-            'worker_extra': {'something': 'else'}
-        }
-
-        self.log.debug('Running build %s' % self.unique)
-
-        self.job.sendWorkData(json.dumps(data))
-        self.log.debug('Sent WorkData packet with %s' % json.dumps(data))
-        self.job.sendWorkStatus(0, 100)
-
-        if self.worker.hold_jobs_in_build:
-            self.log.debug('Holding build %s' % self.unique)
-            self._wait()
-        self.log.debug("Build %s continuing" % self.unique)
-
-        self.worker.lock.acquire()
-
-        result = 'SUCCESS'
-        if (('ZUUL_REF' in self.parameters) and
-            self.worker.shouldFailTest(self.name,
-                                       self.parameters['ZUUL_REF'])):
-            result = 'FAILURE'
-        if self.aborted:
-            result = 'ABORTED'
-
-        if self.run_error:
-            work_fail = True
-            result = 'RUN_ERROR'
-        else:
-            data['result'] = result
-            work_fail = False
-
-        changes = None
-        if 'ZUUL_CHANGE_IDS' in self.parameters:
-            changes = self.parameters['ZUUL_CHANGE_IDS']
-
-        self.worker.build_history.append(
-            BuildHistory(name=self.name, number=self.number,
-                         result=result, changes=changes, node=self.node,
-                         uuid=self.unique, description=self.description,
-                         pipeline=self.parameters['ZUUL_PIPELINE'])
-        )
-
-        self.job.sendWorkData(json.dumps(data))
-        if work_fail:
-            self.job.sendWorkFail()
-        else:
-            self.job.sendWorkComplete(json.dumps(data))
-        del self.worker.gearman_jobs[self.job.unique]
-        self.worker.running_builds.remove(self)
-        self.worker.lock.release()
-
-
-class FakeWorker(gear.Worker):
-    def __init__(self, worker_id, test):
-        super(FakeWorker, self).__init__(worker_id)
-        self.gearman_jobs = {}
-        self.build_history = []
-        self.running_builds = []
-        self.build_counter = 0
-        self.fail_tests = {}
-        self.test = test
-
-        self.hold_jobs_in_build = False
-        self.lock = threading.Lock()
-        self.__work_thread = threading.Thread(target=self.work)
-        self.__work_thread.daemon = True
-        self.__work_thread.start()
-
-    def handleJob(self, job):
-        parts = job.name.split(":")
-        cmd = parts[0]
-        name = parts[1]
-        if len(parts) > 2:
-            node = parts[2]
-        else:
-            node = None
-        if cmd == 'build':
-            self.handleBuild(job, name, node)
-        elif cmd == 'stop':
-            self.handleStop(job, name)
-        elif cmd == 'set_description':
-            self.handleSetDescription(job, name)
-
-    def handleBuild(self, job, name, node):
-        build = FakeBuild(self, job, self.build_counter, node)
-        job.build = build
-        self.gearman_jobs[job.unique] = job
-        self.build_counter += 1
-
-        self.running_builds.append(build)
-        build.start()
-
-    def handleStop(self, job, name):
-        self.log.debug("handle stop")
-        parameters = json.loads(job.arguments)
-        name = parameters['name']
-        number = parameters['number']
-        for build in self.running_builds:
-            if build.name == name and build.number == number:
-                build.aborted = True
-                build.release()
-                job.sendWorkComplete()
-                return
-        job.sendWorkFail()
-
-    def handleSetDescription(self, job, name):
-        self.log.debug("handle set description")
-        parameters = json.loads(job.arguments)
-        name = parameters['name']
-        number = parameters['number']
-        descr = parameters['html_description']
-        for build in self.running_builds:
-            if build.name == name and build.number == number:
-                build.description = descr
-                job.sendWorkComplete()
-                return
-        for build in self.build_history:
-            if build.name == name and build.number == number:
-                build.description = descr
-                job.sendWorkComplete()
-                return
-        job.sendWorkFail()
-
-    def work(self):
-        while self.running:
-            try:
-                job = self.getJob()
-            except gear.InterruptedError:
-                continue
-            try:
-                self.handleJob(job)
-            except:
-                self.log.exception("Worker exception:")
-
-    def addFailTest(self, name, change):
-        l = self.fail_tests.get(name, [])
-        l.append(change)
-        self.fail_tests[name] = l
-
-    def shouldFailTest(self, name, ref):
-        l = self.fail_tests.get(name, [])
-        for change in l:
-            if self.test.ref_has_change(ref, change):
-                return True
-        return False
-
-    def release(self, regex=None):
-        builds = self.running_builds[:]
-        self.log.debug("releasing build %s (%s)" % (regex,
-                                                    len(self.running_builds)))
-        for build in builds:
-            if not regex or re.match(regex, build.name):
-                self.log.debug("releasing build %s" %
-                               (build.parameters['ZUUL_UUID']))
-                build.release()
-            else:
-                self.log.debug("not releasing build %s" %
-                               (build.parameters['ZUUL_UUID']))
-        self.log.debug("done releasing builds %s (%s)" %
-                       (regex, len(self.running_builds)))
-
-
-class FakeGearmanServer(gear.Server):
-    def __init__(self):
-        self.hold_jobs_in_queue = False
-        super(FakeGearmanServer, self).__init__(0)
-
-    def getJobForConnection(self, connection, peek=False):
-        for queue in [self.high_queue, self.normal_queue, self.low_queue]:
-            for job in queue:
-                if not hasattr(job, 'waiting'):
-                    if job.name.startswith('build:'):
-                        job.waiting = self.hold_jobs_in_queue
-                    else:
-                        job.waiting = False
-                if job.waiting:
-                    continue
-                if job.name in connection.functions:
-                    if not peek:
-                        queue.remove(job)
-                        connection.related_jobs[job.handle] = job
-                        job.worker_connection = connection
-                    job.running = True
-                    return job
-        return None
-
-    def release(self, regex=None):
-        released = False
-        qlen = (len(self.high_queue) + len(self.normal_queue) +
-                len(self.low_queue))
-        self.log.debug("releasing queued job %s (%s)" % (regex, qlen))
-        for job in self.getQueue():
-            cmd, name = job.name.split(':')
-            if cmd != 'build':
-                continue
-            if not regex or re.match(regex, name):
-                self.log.debug("releasing queued job %s" %
-                               job.unique)
-                job.waiting = False
-                released = True
-            else:
-                self.log.debug("not releasing queued job %s" %
-                               job.unique)
-        if released:
-            self.wakeConnections()
-        qlen = (len(self.high_queue) + len(self.normal_queue) +
-                len(self.low_queue))
-        self.log.debug("done releasing queued jobs %s (%s)" % (regex, qlen))
-
-
-class FakeSMTP(object):
-    log = logging.getLogger('zuul.FakeSMTP')
-
-    def __init__(self, messages, server, port):
-        self.server = server
-        self.port = port
-        self.messages = messages
-
-    def sendmail(self, from_email, to_email, msg):
-        self.log.info("Sending email from %s, to %s, with msg %s" % (
-                      from_email, to_email, msg))
-
-        headers = msg.split('\n\n', 1)[0]
-        body = msg.split('\n\n', 1)[1]
-
-        self.messages.append(dict(
-            from_email=from_email,
-            to_email=to_email,
-            msg=msg,
-            headers=headers,
-            body=body,
-        ))
-
-        return True
-
-    def quit(self):
-        return True
-
-
-class FakeSwiftClientConnection(swiftclient.client.Connection):
-    def post_account(self, headers):
-        # Do nothing
-        pass
-
-    def get_auth(self):
-        # Returns endpoint and (unused) auth token
-        endpoint = os.path.join('https://storage.example.org', 'V1',
-                                'AUTH_account')
-        return endpoint, ''
-
-
-class TestScheduler(testtools.TestCase):
-    log = logging.getLogger("zuul.test")
-
-    def setUp(self):
-        super(TestScheduler, self).setUp()
-        test_timeout = os.environ.get('OS_TEST_TIMEOUT', 0)
-        try:
-            test_timeout = int(test_timeout)
-        except ValueError:
-            # If timeout value is invalid do not set a timeout.
-            test_timeout = 0
-        if test_timeout > 0:
-            self.useFixture(fixtures.Timeout(test_timeout, gentle=False))
-
-        if (os.environ.get('OS_STDOUT_CAPTURE') == 'True' or
-            os.environ.get('OS_STDOUT_CAPTURE') == '1'):
-            stdout = self.useFixture(fixtures.StringStream('stdout')).stream
-            self.useFixture(fixtures.MonkeyPatch('sys.stdout', stdout))
-        if (os.environ.get('OS_STDERR_CAPTURE') == 'True' or
-            os.environ.get('OS_STDERR_CAPTURE') == '1'):
-            stderr = self.useFixture(fixtures.StringStream('stderr')).stream
-            self.useFixture(fixtures.MonkeyPatch('sys.stderr', stderr))
-        if (os.environ.get('OS_LOG_CAPTURE') == 'True' or
-            os.environ.get('OS_LOG_CAPTURE') == '1'):
-            self.useFixture(fixtures.FakeLogger(
-                level=logging.DEBUG,
-                format='%(asctime)s %(name)-32s '
-                '%(levelname)-8s %(message)s'))
-        tmp_root = self.useFixture(fixtures.TempDir(
-            rootdir=os.environ.get("ZUUL_TEST_ROOT"))).path
-        self.test_root = os.path.join(tmp_root, "zuul-test")
-        self.upstream_root = os.path.join(self.test_root, "upstream")
-        self.git_root = os.path.join(self.test_root, "git")
-
-        CONFIG.set('merger', 'git_dir', self.git_root)
-        if os.path.exists(self.test_root):
-            shutil.rmtree(self.test_root)
-        os.makedirs(self.test_root)
-        os.makedirs(self.upstream_root)
-        os.makedirs(self.git_root)
-
-        # For each project in config:
-        self.init_repo("org/project")
-        self.init_repo("org/project1")
-        self.init_repo("org/project2")
-        self.init_repo("org/project3")
-        self.init_repo("org/one-job-project")
-        self.init_repo("org/nonvoting-project")
-        self.init_repo("org/templated-project")
-        self.init_repo("org/layered-project")
-        self.init_repo("org/node-project")
-        self.init_repo("org/conflict-project")
-        self.init_repo("org/noop-project")
-        self.init_repo("org/experimental-project")
-
-        self.statsd = FakeStatsd()
-        os.environ['STATSD_HOST'] = 'localhost'
-        os.environ['STATSD_PORT'] = str(self.statsd.port)
-        self.statsd.start()
-        # the statsd client object is configured in the statsd module import
-        reload(statsd)
-        reload(zuul.scheduler)
-
-        self.gearman_server = FakeGearmanServer()
-
-        self.config = ConfigParser.ConfigParser()
-        cfg = StringIO()
-        CONFIG.write(cfg)
-        cfg.seek(0)
-        self.config.readfp(cfg)
-        self.config.set('gearman', 'port', str(self.gearman_server.port))
-
-        self.worker = FakeWorker('fake_worker', self)
-        self.worker.addServer('127.0.0.1', self.gearman_server.port)
-        self.gearman_server.worker = self.worker
-
-        self.merge_server = zuul.merger.server.MergeServer(self.config)
-        self.merge_server.start()
-
-        self.sched = zuul.scheduler.Scheduler()
-
-        self.useFixture(fixtures.MonkeyPatch('swiftclient.client.Connection',
-                                             FakeSwiftClientConnection))
-        self.swift = zuul.lib.swift.Swift(self.config)
-
-        def URLOpenerFactory(*args, **kw):
-            if isinstance(args[0], urllib2.Request):
-                return old_urlopen(*args, **kw)
-            args = [self.fake_gerrit] + list(args)
-            return FakeURLOpener(self.upstream_root, *args, **kw)
-
-        old_urlopen = urllib2.urlopen
-        urllib2.urlopen = URLOpenerFactory
-
-        self.launcher = zuul.launcher.gearman.Gearman(self.config, self.sched,
-                                                      self.swift)
-        self.merge_client = zuul.merger.client.MergeClient(
-            self.config, self.sched)
-
-        self.smtp_messages = []
-
-        def FakeSMTPFactory(*args, **kw):
-            args = [self.smtp_messages] + list(args)
-            return FakeSMTP(*args, **kw)
-
-        zuul.lib.gerrit.Gerrit = FakeGerrit
-        self.useFixture(fixtures.MonkeyPatch('smtplib.SMTP', FakeSMTPFactory))
-
-        self.gerrit = FakeGerritTrigger(
-            self.upstream_root, self.config, self.sched)
-        self.gerrit.replication_timeout = 1.5
-        self.gerrit.replication_retry_interval = 0.5
-        self.fake_gerrit = self.gerrit.gerrit
-        self.fake_gerrit.upstream_root = self.upstream_root
-
-        self.webapp = zuul.webapp.WebApp(self.sched, port=0)
-        self.rpc = zuul.rpclistener.RPCListener(self.config, self.sched)
-
-        self.sched.setLauncher(self.launcher)
-        self.sched.setMerger(self.merge_client)
-        self.sched.registerTrigger(self.gerrit)
-        self.timer = zuul.trigger.timer.Timer(self.config, self.sched)
-        self.sched.registerTrigger(self.timer)
-
-        self.sched.registerReporter(
-            zuul.reporter.gerrit.Reporter(self.gerrit))
-        self.smtp_reporter = zuul.reporter.smtp.Reporter(
-            self.config.get('smtp', 'default_from'),
-            self.config.get('smtp', 'default_to'),
-            self.config.get('smtp', 'server'))
-        self.sched.registerReporter(self.smtp_reporter)
-
-        self.sched.start()
-        self.sched.reconfigure(self.config)
-        self.sched.resume()
-        self.webapp.start()
-        self.rpc.start()
-        self.launcher.gearman.waitForServer()
-        self.registerJobs()
-        self.builds = self.worker.running_builds
-        self.history = self.worker.build_history
-
-        self.addCleanup(self.assertFinalState)
-        self.addCleanup(self.shutdown)
-
-    def assertFinalState(self):
-        # Make sure that the change cache is cleared
-        self.assertEqual(len(self.gerrit._change_cache.keys()), 0)
-        # Make sure that git.Repo objects have been garbage collected.
-        repos = []
-        gc.collect()
-        for obj in gc.get_objects():
-            if isinstance(obj, git.Repo):
-                repos.append(obj)
-        self.assertEqual(len(repos), 0)
-        self.assertEmptyQueues()
-
-    def shutdown(self):
-        self.log.debug("Shutting down after tests")
-        self.launcher.stop()
-        self.merge_server.stop()
-        self.merge_server.join()
-        self.merge_client.stop()
-        self.worker.shutdown()
-        self.gerrit.stop()
-        self.timer.stop()
-        self.sched.stop()
-        self.sched.join()
-        self.statsd.stop()
-        self.statsd.join()
-        self.webapp.stop()
-        self.webapp.join()
-        self.rpc.stop()
-        self.rpc.join()
-        self.gearman_server.shutdown()
-        threads = threading.enumerate()
-        if len(threads) > 1:
-            self.log.error("More than one thread is running: %s" % threads)
-        super(TestScheduler, self).tearDown()
-
-    def init_repo(self, project):
-        parts = project.split('/')
-        path = os.path.join(self.upstream_root, *parts[:-1])
-        if not os.path.exists(path):
-            os.makedirs(path)
-        path = os.path.join(self.upstream_root, project)
-        repo = git.Repo.init(path)
-
-        repo.config_writer().set_value('user', 'email', 'user@example.com')
-        repo.config_writer().set_value('user', 'name', 'User Name')
-        repo.config_writer().write()
-
-        fn = os.path.join(path, 'README')
-        f = open(fn, 'w')
-        f.write("test\n")
-        f.close()
-        repo.index.add([fn])
-        repo.index.commit('initial commit')
-        master = repo.create_head('master')
-        repo.create_tag('init')
-
-        mp = repo.create_head('mp')
-        repo.head.reference = mp
-        f = open(fn, 'a')
-        f.write("test mp\n")
-        f.close()
-        repo.index.add([fn])
-        repo.index.commit('mp commit')
-
-        repo.head.reference = master
-        repo.head.reset(index=True, working_tree=True)
-        repo.git.clean('-x', '-f', '-d')
-
-    def ref_has_change(self, ref, change):
-        path = os.path.join(self.git_root, change.project)
-        repo = git.Repo(path)
-        for commit in repo.iter_commits(ref):
-            if commit.message.strip() == ('%s-1' % change.subject):
-                return True
-        return False
-
-    def job_has_changes(self, *args):
-        job = args[0]
-        commits = args[1:]
-        if isinstance(job, FakeBuild):
-            parameters = job.parameters
-        else:
-            parameters = json.loads(job.arguments)
-        project = parameters['ZUUL_PROJECT']
-        path = os.path.join(self.git_root, project)
-        repo = git.Repo(path)
-        ref = parameters['ZUUL_REF']
-        sha = parameters['ZUUL_COMMIT']
-        repo_messages = [c.message.strip() for c in repo.iter_commits(ref)]
-        repo_shas = [c.hexsha for c in repo.iter_commits(ref)]
-        commit_messages = ['%s-1' % commit.subject for commit in commits]
-        self.log.debug("Checking if job %s has changes; commit_messages %s;"
-                       " repo_messages %s; sha %s" % (job, commit_messages,
-                                                      repo_messages, sha))
-        for msg in commit_messages:
-            if msg not in repo_messages:
-                self.log.debug("  messages do not match")
-                return False
-        if repo_shas[0] != sha:
-            self.log.debug("  sha does not match")
-            return False
-        self.log.debug("  OK")
-        return True
-
-    def registerJobs(self):
-        count = 0
-        for job in self.sched.layout.jobs.keys():
-            self.worker.registerFunction('build:' + job)
-            count += 1
-        self.worker.registerFunction('stop:' + self.worker.worker_id)
-        count += 1
-
-        while len(self.gearman_server.functions) < count:
-            time.sleep(0)
-
-    def release(self, job):
-        if isinstance(job, FakeBuild):
-            job.release()
-        else:
-            job.waiting = False
-            self.log.debug("Queued job %s released" % job.unique)
-            self.gearman_server.wakeConnections()
-
-    def getParameter(self, job, name):
-        if isinstance(job, FakeBuild):
-            return job.parameters[name]
-        else:
-            parameters = json.loads(job.arguments)
-            return parameters[name]
-
-    def resetGearmanServer(self):
-        self.worker.setFunctions([])
-        while True:
-            done = True
-            for connection in self.gearman_server.active_connections:
-                if (connection.functions and
-                    connection.client_id not in ['Zuul RPC Listener',
-                                                 'Zuul Merger']):
-                    done = False
-            if done:
-                break
-            time.sleep(0)
-        self.gearman_server.functions = set()
-        self.rpc.register()
-        self.merge_server.register()
-
-    def haveAllBuildsReported(self):
-        # See if Zuul is waiting on a meta job to complete
-        if self.launcher.meta_jobs:
-            return False
-        # Find out if every build that the worker has completed has been
-        # reported back to Zuul.  If it hasn't then that means a Gearman
-        # event is still in transit and the system is not stable.
-        for build in self.worker.build_history:
-            zbuild = self.launcher.builds.get(build.uuid)
-            if not zbuild:
-                # It has already been reported
-                continue
-            # It hasn't been reported yet.
-            return False
-        # Make sure that none of the worker connections are in GRAB_WAIT
-        for connection in self.worker.active_connections:
-            if connection.state == 'GRAB_WAIT':
-                return False
-        return True
-
-    def areAllBuildsWaiting(self):
-        ret = True
-
-        builds = self.launcher.builds.values()
-        for build in builds:
-            client_job = None
-            for conn in self.launcher.gearman.active_connections:
-                for j in conn.related_jobs.values():
-                    if j.unique == build.uuid:
-                        client_job = j
-                        break
-            if not client_job:
-                self.log.debug("%s is not known to the gearman client" %
-                               build)
-                ret = False
-                continue
-            if not client_job.handle:
-                self.log.debug("%s has no handle" % client_job)
-                ret = False
-                continue
-            server_job = self.gearman_server.jobs.get(client_job.handle)
-            if not server_job:
-                self.log.debug("%s is not known to the gearman server" %
-                               client_job)
-                ret = False
-                continue
-            if not hasattr(server_job, 'waiting'):
-                self.log.debug("%s is being enqueued" % server_job)
-                ret = False
-                continue
-            if server_job.waiting:
-                continue
-            worker_job = self.worker.gearman_jobs.get(server_job.unique)
-            if worker_job:
-                if worker_job.build.isWaiting():
-                    continue
-                else:
-                    self.log.debug("%s is running" % worker_job)
-                    ret = False
-            else:
-                self.log.debug("%s is unassigned" % server_job)
-                ret = False
-        return ret
-
-    def waitUntilSettled(self):
-        self.log.debug("Waiting until settled...")
-        start = time.time()
-        while True:
-            if time.time() - start > 10:
-                print 'queue status:',
-                print self.sched.trigger_event_queue.empty(),
-                print self.sched.result_event_queue.empty(),
-                print self.fake_gerrit.event_queue.empty(),
-                print self.areAllBuildsWaiting()
-                raise Exception("Timeout waiting for Zuul to settle")
-            # Make sure no new events show up while we're checking
-            self.worker.lock.acquire()
-            # have all build states propagated to zuul?
-            if self.haveAllBuildsReported():
-                # Join ensures that the queue is empty _and_ events have been
-                # processed
-                self.fake_gerrit.event_queue.join()
-                self.sched.trigger_event_queue.join()
-                self.sched.result_event_queue.join()
-                self.sched.run_handler_lock.acquire()
-                if (self.sched.trigger_event_queue.empty() and
-                    self.sched.result_event_queue.empty() and
-                    self.fake_gerrit.event_queue.empty() and
-                    not self.merge_client.build_sets and
-                    self.haveAllBuildsReported() and
-                    self.areAllBuildsWaiting()):
-                    self.sched.run_handler_lock.release()
-                    self.worker.lock.release()
-                    self.log.debug("...settled.")
-                    return
-                self.sched.run_handler_lock.release()
-            self.worker.lock.release()
-            self.sched.wake_event.wait(0.1)
-
-    def countJobResults(self, jobs, result):
-        jobs = filter(lambda x: x.result == result, jobs)
-        return len(jobs)
-
-    def getJobFromHistory(self, name):
-        history = self.worker.build_history
-        for job in history:
-            if job.name == name:
-                return job
-        raise Exception("Unable to find job %s in history" % name)
-
-    def assertEmptyQueues(self):
-        # Make sure there are no orphaned jobs
-        for pipeline in self.sched.layout.pipelines.values():
-            for queue in pipeline.queues:
-                if len(queue.queue) != 0:
-                    print 'pipeline %s queue %s contents %s' % (
-                        pipeline.name, queue.name, queue.queue)
-                self.assertEqual(len(queue.queue), 0)
-
-    def assertReportedStat(self, key, value=None, kind=None):
-        start = time.time()
-        while time.time() < (start + 5):
-            for stat in self.statsd.stats:
-                pprint.pprint(self.statsd.stats)
-                k, v = stat.split(':')
-                if key == k:
-                    if value is None and kind is None:
-                        return
-                    elif value:
-                        if value == v:
-                            return
-                    elif kind:
-                        if v.endswith('|' + kind):
-                            return
-            time.sleep(0.1)
-
-        pprint.pprint(self.statsd.stats)
-        raise Exception("Key %s not found in reported stats" % key)
-
+class TestScheduler(ZuulTestCase):
     def test_jobs_launched(self):
         "Test that jobs are launched and a change is merged"
 
@@ -3028,7 +1862,7 @@
         sched = zuul.scheduler.Scheduler()
         sched.registerTrigger(None, 'gerrit')
         sched.registerTrigger(None, 'timer')
-        sched.testConfig(CONFIG.get('zuul', 'layout_config'))
+        sched.testConfig(self.config.get('zuul', 'layout_config'))
 
     def test_build_description(self):
         "Test that build descriptions update"
@@ -3275,25 +2109,25 @@
         self.sched.reconfigure(self.config)
         self.registerJobs()
 
-        start = time.time()
-        failed = True
-        while ((time.time() - start) < 30):
-            if len(self.builds) == 2:
-                failed = False
-                break
-            else:
-                time.sleep(1)
-
-        if failed:
-            raise Exception("Expected jobs never ran")
-
+        # The pipeline triggers every second, so we should have seen
+        # several by now.
+        time.sleep(5)
         self.waitUntilSettled()
+
+        self.assertEqual(len(self.builds), 2)
+
         port = self.webapp.server.socket.getsockname()[1]
 
         f = urllib.urlopen("http://localhost:%s/status.json" % port)
         data = f.read()
 
         self.worker.hold_jobs_in_build = False
+        # Stop queuing timer-triggered jobs so that the assertions
+        # below don't race against more jobs being queued.
+        self.config.set('zuul', 'layout_config',
+                        'tests/fixtures/layout-no-timer.yaml')
+        self.sched.reconfigure(self.config)
+        self.registerJobs()
         self.worker.release()
         self.waitUntilSettled()
 
@@ -3317,29 +2151,33 @@
     def test_idle(self):
         "Test that frequent periodic jobs work"
         self.worker.hold_jobs_in_build = True
-        self.config.set('zuul', 'layout_config',
-                        'tests/fixtures/layout-idle.yaml')
-        self.sched.reconfigure(self.config)
-        self.registerJobs()
 
-        # The pipeline triggers every second, so we should have seen
-        # several by now.
-        time.sleep(5)
-        self.waitUntilSettled()
-        self.assertEqual(len(self.builds), 2)
-        self.worker.release('.*')
-        self.waitUntilSettled()
-        self.assertEqual(len(self.builds), 0)
-        self.assertEqual(len(self.history), 2)
+        for x in range(1, 3):
+            # Test that the timer triggers periodic jobs even across
+            # layout config reloads.
+            # Start the timer trigger.
+            self.config.set('zuul', 'layout_config',
+                            'tests/fixtures/layout-idle.yaml')
+            self.sched.reconfigure(self.config)
+            self.registerJobs()
 
-        time.sleep(5)
-        self.waitUntilSettled()
-        self.assertEqual(len(self.builds), 2)
-        self.assertEqual(len(self.history), 2)
-        self.worker.release('.*')
-        self.waitUntilSettled()
-        self.assertEqual(len(self.builds), 0)
-        self.assertEqual(len(self.history), 4)
+            # The pipeline triggers every second, so we should have seen
+            # several by now.
+            time.sleep(5)
+            self.waitUntilSettled()
+
+            # Stop queuing timer-triggered jobs so that the assertions
+            # below don't race against more jobs being queued.
+            self.config.set('zuul', 'layout_config',
+                            'tests/fixtures/layout-no-timer.yaml')
+            self.sched.reconfigure(self.config)
+            self.registerJobs()
+
+            self.assertEqual(len(self.builds), 2)
+            self.worker.release('.*')
+            self.waitUntilSettled()
+            self.assertEqual(len(self.builds), 0)
+            self.assertEqual(len(self.history), x * 2)
 
     def test_check_smtp_pool(self):
         self.config.set('zuul', 'layout_config',
@@ -3374,25 +2212,22 @@
 
     def test_timer_smtp(self):
         "Test that a periodic job is triggered"
+        self.worker.hold_jobs_in_build = True
         self.config.set('zuul', 'layout_config',
                         'tests/fixtures/layout-timer-smtp.yaml')
         self.sched.reconfigure(self.config)
         self.registerJobs()
 
-        start = time.time()
-        failed = True
-        while ((time.time() - start) < 30):
-            if len(self.history) == 2:
-                failed = False
-                break
-            else:
-                time.sleep(1)
-
-        if failed:
-            raise Exception("Expected jobs never ran")
-
+        # The pipeline triggers every second, so we should have seen
+        # several by now.
+        time.sleep(5)
         self.waitUntilSettled()
 
+        self.assertEqual(len(self.builds), 2)
+        self.worker.release('.*')
+        self.waitUntilSettled()
+        self.assertEqual(len(self.history), 2)
+
         self.assertEqual(self.getJobFromHistory(
             'project-bitrot-stable-old').result, 'SUCCESS')
         self.assertEqual(self.getJobFromHistory(
@@ -3411,6 +2246,15 @@
         self.assertIn('Subject: Periodic check for org/project succeeded',
                       self.smtp_messages[0]['headers'])
 
+        # Stop queuing timer-triggered jobs and let any already-queued
+        # jobs run through so that the end-of-test assertions pass.
+        self.config.set('zuul', 'layout_config',
+                        'tests/fixtures/layout-no-timer.yaml')
+        self.sched.reconfigure(self.config)
+        self.registerJobs()
+        self.worker.release('.*')
+        self.waitUntilSettled()
+
     def test_client_enqueue(self):
         "Test that the RPC client can enqueue a change"
         A = self.fake_gerrit.addFakeChange('org/project', 'master', 'A')