Merge "Fix zuul command default function" into feature/zuulv3
diff --git a/doc/source/admin/components.rst b/doc/source/admin/components.rst
index a0de922..cc9d181 100644
--- a/doc/source/admin/components.rst
+++ b/doc/source/admin/components.rst
@@ -385,6 +385,10 @@
 
      port=9000
 
+**websocket_url**
+  Base URL on which the websocket service is exposed, if different than the
+  base URL of the web app.
+
 Operation
 ~~~~~~~~~
 
diff --git a/doc/source/user/config.rst b/doc/source/user/config.rst
index 76f73c3..4a9a99e 100644
--- a/doc/source/user/config.rst
+++ b/doc/source/user/config.rst
@@ -34,13 +34,12 @@
 
 When Zuul starts, it examines all of the git repositories which are
 specified by the system administrator in :ref:`tenant-config` and searches
-for files in the root of each repository.  In the case of a
-*config-project*, Zuul looks for a file named `zuul.yaml`.  In the
-case of an *untrusted-project*, Zuul looks first for `zuul.yaml` and
-if that is not found, `.zuul.yaml` (with a leading dot).  In the case
-of an *untrusted-project*, the configuration from every branch is
-included, however, in the case of a *config-project*, only the
-`master` branch is examined.
+for files in the root of each repository. Zuul looks first for a file named
+`zuul.yaml` or a directory named `zuul.d`, and if they are not found,
+`.zuul.yaml` or `.zuul.d` (with a leading dot). In the case of an
+*untrusted-project*, the configuration from every branch is included,
+however, in the case of a *config-project*, only the `master` branch is
+examined.
 
 When a change is proposed to one of these files in an
 *untrusted-project*, the configuration proposed in the change is
@@ -64,6 +63,16 @@
 YAML-formatted and are structured as a series of items, each of which
 is described below.
 
+In the case of a `zuul.d` directory, Zuul recurses into the directory and
+extends the configuration using all of the .yaml files, in sorted path order.
+For example, to keep job variants in a separate file, they need to be loaded
+after the main entries, e.g. by using number prefixes in file names:
+
+* zuul.d/pipelines.yaml
+* zuul.d/projects.yaml
+* zuul.d/01_jobs.yaml
+* zuul.d/02_jobs-variants.yaml
+
 .. _pipeline:
 
 Pipeline
diff --git a/doc/source/user/jobs.rst b/doc/source/user/jobs.rst
index 78121bc..c2c376e 100644
--- a/doc/source/user/jobs.rst
+++ b/doc/source/user/jobs.rst
@@ -70,8 +70,41 @@
 
 .. TODO: link to base job documentation and/or document src (and logs?) directory
 
+Variables
+---------
+
+Any variables specified in the job definition are available as Ansible
+host variables.  They are added to the `vars` section of the inventory
+file under the `all` hosts group, so they are available to all hosts.
+Simply refer to them by the name specified in the job's `vars`
+section.
+
+Secrets
+~~~~~~~
+
+Secrets also appear as variables available to Ansible.  Unlike job
+variables, these are not added to the inventory file (so that the
+inventory file may be kept for debugging purposes without revealing
+secrets).  But they are still available to Ansible as normal
+variables.  Because secrets are groups of variables, they will appear
+as a dictionary structure in templates, with the dictionary itself
+being the name of the secret, and its members the individual items in
+the secret.  For example, a secret defined as::
+
+  - secret:
+      name: credentials
+      data:
+        username: foo
+        password: bar
+
+Might be used in a template as::
+
+  {{ credentials.username }} {{ credentials.password }}
+
+.. TODO: xref job vars
+
 Zuul Variables
---------------
+~~~~~~~~~~~~~~
 
 Zuul supplies not only the variables specified by the job definition
 to Ansible, but also some variables from the executor itself.  They
diff --git a/tests/base.py b/tests/base.py
index 1cc9999..fb94638 100755
--- a/tests/base.py
+++ b/tests/base.py
@@ -1308,7 +1308,7 @@
         self.running_builds.append(build)
         self.job_builds[job.unique] = build
         args = json.loads(job.arguments)
-        args['vars']['zuul']['_test'] = dict(test_root=self._test_root)
+        args['zuul']['_test'] = dict(test_root=self._test_root)
         job.arguments = json.dumps(args)
         self.job_workers[job.unique] = RecordingAnsibleJob(self, job)
         self.job_workers[job.unique].run()
diff --git a/tests/fixtures/config/conflict-config/git/common-config/.zuul.d/jobs.yaml b/tests/fixtures/config/conflict-config/git/common-config/.zuul.d/jobs.yaml
new file mode 100644
index 0000000..20056ee
--- /dev/null
+++ b/tests/fixtures/config/conflict-config/git/common-config/.zuul.d/jobs.yaml
@@ -0,0 +1,2 @@
+- job:
+    name: trusted-.zuul.d-jobs
diff --git a/tests/fixtures/config/conflict-config/git/common-config/.zuul.yaml b/tests/fixtures/config/conflict-config/git/common-config/.zuul.yaml
new file mode 100644
index 0000000..da2bc1e
--- /dev/null
+++ b/tests/fixtures/config/conflict-config/git/common-config/.zuul.yaml
@@ -0,0 +1,2 @@
+- job:
+    name: trusted-.zuul.yaml-job
diff --git a/tests/fixtures/config/conflict-config/git/common-config/zuul.d/jobs.yaml b/tests/fixtures/config/conflict-config/git/common-config/zuul.d/jobs.yaml
new file mode 100644
index 0000000..5a92f43
--- /dev/null
+++ b/tests/fixtures/config/conflict-config/git/common-config/zuul.d/jobs.yaml
@@ -0,0 +1,2 @@
+- job:
+    name: trusted-zuul.d-jobs
diff --git a/tests/fixtures/config/conflict-config/git/common-config/zuul.yaml b/tests/fixtures/config/conflict-config/git/common-config/zuul.yaml
new file mode 100644
index 0000000..792fc8f
--- /dev/null
+++ b/tests/fixtures/config/conflict-config/git/common-config/zuul.yaml
@@ -0,0 +1,2 @@
+- job:
+    name: trusted-zuul.yaml-job
diff --git a/tests/fixtures/config/conflict-config/git/org_project/.zuul.yaml b/tests/fixtures/config/conflict-config/git/org_project/.zuul.yaml
new file mode 100644
index 0000000..dc1ff45
--- /dev/null
+++ b/tests/fixtures/config/conflict-config/git/org_project/.zuul.yaml
@@ -0,0 +1,2 @@
+- job:
+    name: untrusted-.zuul.yaml-job
diff --git a/tests/fixtures/config/conflict-config/git/org_project/zuul.yaml b/tests/fixtures/config/conflict-config/git/org_project/zuul.yaml
new file mode 100644
index 0000000..cc63564
--- /dev/null
+++ b/tests/fixtures/config/conflict-config/git/org_project/zuul.yaml
@@ -0,0 +1,2 @@
+- job:
+    name: untrusted-zuul.yaml-job
diff --git a/tests/fixtures/config/conflict-config/main.yaml b/tests/fixtures/config/conflict-config/main.yaml
new file mode 100644
index 0000000..208e274
--- /dev/null
+++ b/tests/fixtures/config/conflict-config/main.yaml
@@ -0,0 +1,8 @@
+- tenant:
+    name: tenant-one
+    source:
+      gerrit:
+        config-projects:
+          - common-config
+        untrusted-projects:
+          - org/project
diff --git a/tests/fixtures/config/in-repo/git/common-config/zuul.yaml b/tests/fixtures/config/in-repo/git/common-config/zuul.yaml
index 1fdaf2e..fce086e 100644
--- a/tests/fixtures/config/in-repo/git/common-config/zuul.yaml
+++ b/tests/fixtures/config/in-repo/git/common-config/zuul.yaml
@@ -32,6 +32,26 @@
         verified: 0
     precedence: high
 
+- pipeline:
+    name: gate
+    manager: dependent
+    trigger:
+      gerrit:
+        - event: comment-added
+          approval:
+            - code-review: 2
+    success:
+      gerrit:
+        verified: 2
+        submit: true
+    failure:
+      gerrit:
+        verified: -2
+    start:
+      gerrit:
+        verified: 0
+    precedence: high
+
 - job:
     name: common-config-test
 
diff --git a/tests/unit/test_configloader.py b/tests/unit/test_configloader.py
index 573ccbf..d08c6a1 100644
--- a/tests/unit/test_configloader.py
+++ b/tests/unit/test_configloader.py
@@ -12,6 +12,8 @@
 # License for the specific language governing permissions and limitations
 # under the License.
 
+import fixtures
+import logging
 import textwrap
 
 from tests.base import ZuulTestCase
@@ -245,3 +247,39 @@
         # project1-project2-integration test removed, only want project-test1
         self.assertHistory([
             dict(name='project-test1', result='SUCCESS', changes='1,1')])
+
+    def test_config_path_conflict(self):
+        def add_file(project, path):
+            new_file = textwrap.dedent(
+                """
+                - job:
+                    name: test-job
+                """
+            )
+            file_dict = {path: new_file}
+            A = self.fake_gerrit.addFakeChange(project, 'master', 'A',
+                                               files=file_dict)
+            self.fake_gerrit.addEvent(A.getPatchsetCreatedEvent(1))
+            self.waitUntilSettled()
+
+        log_fixture = self.useFixture(
+            fixtures.FakeLogger(level=logging.WARNING))
+
+        log_fixture._output.truncate(0)
+        add_file("common-config", "zuul.yaml")
+        self.assertIn("Multiple configuration", log_fixture.output)
+
+        log_fixture._output.truncate(0)
+        add_file("org/project1", ".zuul.yaml")
+        self.assertIn("Multiple configuration", log_fixture.output)
+
+
+class TestConfigConflict(ZuulTestCase):
+    tenant_config_file = 'config/conflict-config/main.yaml'
+
+    def test_conflict_config(self):
+        tenant = self.sched.abide.tenants.get('tenant-one')
+        jobs = sorted(tenant.layout.jobs.keys())
+        self.assertEquals(
+            ['noop', 'trusted-zuul.yaml-job', 'untrusted-zuul.yaml-job'],
+            jobs)
diff --git a/tests/unit/test_connection.py b/tests/unit/test_connection.py
index 8d9d127..c9e2e91 100644
--- a/tests/unit/test_connection.py
+++ b/tests/unit/test_connection.py
@@ -69,7 +69,7 @@
         insp = sa.engine.reflection.Inspector(
             self.connections.connections['resultsdb'].engine)
 
-        self.assertEqual(9, len(insp.get_columns(buildset_table)))
+        self.assertEqual(10, len(insp.get_columns(buildset_table)))
         self.assertEqual(10, len(insp.get_columns(build_table)))
 
     def test_sql_results(self):
@@ -108,6 +108,7 @@
         self.assertEqual(1, buildset0['patchset'])
         self.assertEqual(1, buildset0['score'])
         self.assertEqual('Build succeeded.', buildset0['message'])
+        self.assertEqual('tenant-one', buildset0['tenant'])
 
         buildset0_builds = conn.execute(
             sa.sql.select([reporter.connection.zuul_build_table]).
diff --git a/tests/unit/test_github_driver.py b/tests/unit/test_github_driver.py
index f360866..0cfe3da 100644
--- a/tests/unit/test_github_driver.py
+++ b/tests/unit/test_github_driver.py
@@ -46,7 +46,7 @@
                          self.getJobFromHistory('project-test2').result)
 
         job = self.getJobFromHistory('project-test2')
-        zuulvars = job.parameters['vars']['zuul']
+        zuulvars = job.parameters['zuul']
         self.assertEqual(A.number, zuulvars['change'])
         self.assertEqual(A.head_sha, zuulvars['patchset'])
         self.assertEqual(1, len(A.comments))
diff --git a/tests/unit/test_scheduler.py b/tests/unit/test_scheduler.py
index 61bf9f8..d9cf839 100755
--- a/tests/unit/test_scheduler.py
+++ b/tests/unit/test_scheduler.py
@@ -2290,10 +2290,14 @@
         self.assertEqual('project-merge', status_jobs[0]['name'])
         # TODO(mordred) pull uuids from self.builds
         self.assertEqual(
+            'static/stream.html?uuid={uuid}&logfile=console.log'.format(
+                uuid=status_jobs[0]['uuid']),
+            status_jobs[0]['url'])
+        self.assertEqual(
             'finger://{hostname}/{uuid}'.format(
                 hostname=self.executor_server.hostname,
                 uuid=status_jobs[0]['uuid']),
-            status_jobs[0]['url'])
+            status_jobs[0]['finger_url'])
         # TOOD(mordred) configure a success-url on the base job
         self.assertEqual(
             'finger://{hostname}/{uuid}'.format(
@@ -2302,10 +2306,14 @@
             status_jobs[0]['report_url'])
         self.assertEqual('project-test1', status_jobs[1]['name'])
         self.assertEqual(
+            'static/stream.html?uuid={uuid}&logfile=console.log'.format(
+                uuid=status_jobs[1]['uuid']),
+            status_jobs[1]['url'])
+        self.assertEqual(
             'finger://{hostname}/{uuid}'.format(
                 hostname=self.executor_server.hostname,
                 uuid=status_jobs[1]['uuid']),
-            status_jobs[1]['url'])
+            status_jobs[1]['finger_url'])
         self.assertEqual(
             'finger://{hostname}/{uuid}'.format(
                 hostname=self.executor_server.hostname,
@@ -2314,10 +2322,14 @@
 
         self.assertEqual('project-test2', status_jobs[2]['name'])
         self.assertEqual(
+            'static/stream.html?uuid={uuid}&logfile=console.log'.format(
+                uuid=status_jobs[2]['uuid']),
+            status_jobs[2]['url'])
+        self.assertEqual(
             'finger://{hostname}/{uuid}'.format(
                 hostname=self.executor_server.hostname,
                 uuid=status_jobs[2]['uuid']),
-            status_jobs[2]['url'])
+            status_jobs[2]['finger_url'])
         self.assertEqual(
             'finger://{hostname}/{uuid}'.format(
                 hostname=self.executor_server.hostname,
@@ -2757,7 +2769,7 @@
 
         for build in self.history:
             self.assertEqual(results.get(build.uuid, ''),
-                             build.parameters['vars']['zuul'].get('tags'))
+                             build.parameters['zuul'].get('tags'))
 
     def test_timer(self):
         "Test that a periodic job is triggered"
@@ -3607,10 +3619,13 @@
                 self.assertEqual('gate', job['pipeline'])
                 self.assertEqual(False, job['retry'])
                 self.assertEqual(
+                    'static/stream.html?uuid={uuid}&logfile=console.log'
+                    .format(uuid=job['uuid']), job['url'])
+                self.assertEqual(
                     'finger://{hostname}/{uuid}'.format(
                         hostname=self.executor_server.hostname,
                         uuid=job['uuid']),
-                    job['url'])
+                    job['finger_url'])
                 self.assertEqual(2, len(job['worker']))
                 self.assertEqual(False, job['canceled'])
                 self.assertEqual(True, job['voting'])
diff --git a/tests/unit/test_v3.py b/tests/unit/test_v3.py
index 3477e8f..734c45c 100644
--- a/tests/unit/test_v3.py
+++ b/tests/unit/test_v3.py
@@ -199,6 +199,52 @@
         self.executor_server.release()
         self.waitUntilSettled()
 
+    def test_dynamic_dependent_pipeline(self):
+        # Test dynamically adding a project to a
+        # dependent pipeline for the first time
+        self.executor_server.hold_jobs_in_build = True
+
+        tenant = self.sched.abide.tenants.get('tenant-one')
+        gate_pipeline = tenant.layout.pipelines['gate']
+
+        in_repo_conf = textwrap.dedent(
+            """
+            - job:
+                name: project-test2
+
+            - project:
+                name: org/project
+                gate:
+                  jobs:
+                    - project-test2
+            """)
+
+        in_repo_playbook = textwrap.dedent(
+            """
+            - hosts: all
+              tasks: []
+            """)
+
+        file_dict = {'.zuul.yaml': in_repo_conf,
+                     'playbooks/project-test2.yaml': in_repo_playbook}
+        A = self.fake_gerrit.addFakeChange('org/project', 'master', 'A',
+                                           files=file_dict)
+        A.addApproval('approved', 1)
+        self.fake_gerrit.addEvent(A.addApproval('code-review', 2))
+        self.waitUntilSettled()
+
+        items = gate_pipeline.getAllItems()
+        self.assertEqual(items[0].change.number, '1')
+        self.assertEqual(items[0].change.patchset, '1')
+        self.assertTrue(items[0].live)
+
+        self.executor_server.hold_jobs_in_build = False
+        self.executor_server.release()
+        self.waitUntilSettled()
+
+        # Make sure the dynamic queue got cleaned up
+        self.assertEqual(gate_pipeline.queues, [])
+
     def test_in_repo_branch(self):
         in_repo_conf = textwrap.dedent(
             """
@@ -751,6 +797,30 @@
             dict(name='project-test', result='SUCCESS', changes='1,1'),
         ])
 
+    def test_role_error(self):
+        conf = textwrap.dedent(
+            """
+            - job:
+                name: project-test
+                roles:
+                  - zuul: common-config
+
+            - project:
+                name: org/project
+                check:
+                  jobs:
+                    - project-test
+            """)
+
+        file_dict = {'.zuul.yaml': conf}
+        A = self.fake_gerrit.addFakeChange('org/project', 'master', 'A',
+                                           files=file_dict)
+        self.fake_gerrit.addEvent(A.getPatchsetCreatedEvent(1))
+        self.waitUntilSettled()
+        self.assertIn(
+            '- project-test project-test : ERROR Unable to find role',
+            A.messages[-1])
+
 
 class TestShadow(ZuulTestCase):
     tenant_config_file = 'config/shadow/main.yaml'
diff --git a/zuul/ansible/callback/zuul_json.py b/zuul/ansible/callback/zuul_json.py
new file mode 100644
index 0000000..017c27e
--- /dev/null
+++ b/zuul/ansible/callback/zuul_json.py
@@ -0,0 +1,138 @@
+# (c) 2016, Matt Martz <matt@sivel.net>
+# (c) 2017, Red Hat, Inc.
+#
+# Ansible is free software: you can redistribute it and/or modify
+# it under the terms of the GNU General Public License as published by
+# the Free Software Foundation, either version 3 of the License, or
+# (at your option) any later version.
+#
+# Ansible is distributed in the hope that it will be useful,
+# but WITHOUT ANY WARRANTY; without even the implied warranty of
+# MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE.  See the
+# GNU General Public License for more details.
+#
+# You should have received a copy of the GNU General Public License
+# along with Ansible.  If not, see <http://www.gnu.org/licenses/>.
+
+# Copy of github.com/ansible/ansible/lib/ansible/plugins/callback/json.py
+# We need to run as a secondary callback, not a stdout callback, and we need
+# to control the output file location via a zuul environment variable,
+# similar to how we do in zuul_stream.
+# Subclassing wreaks havoc on the module loader and namespaces
+from __future__ import (absolute_import, division, print_function)
+__metaclass__ = type
+
+import json
+import os
+
+from ansible.plugins.callback import CallbackBase
+try:
+    # It's here in 2.4
+    from ansible.vars import strip_internal_keys
+except ImportError:
+    # It's here in 2.3
+    from ansible.vars.manager import strip_internal_keys
+
+
+class CallbackModule(CallbackBase):
+    CALLBACK_VERSION = 2.0
+    # aggregate means we can be loaded and not be the stdout plugin
+    CALLBACK_TYPE = 'aggregate'
+    CALLBACK_NAME = 'zuul_json'
+
+    def __init__(self, display=None):
+        super(CallbackModule, self).__init__(display)
+        self.results = []
+        self.output_path = os.path.splitext(
+            os.environ['ZUUL_JOB_OUTPUT_FILE'])[0] + '.json'
+        # For now, just read in the old file and write it all out again
+        # This may well not scale from a memory perspective, but let's see
+        # how it goes.
+        if os.path.exists(self.output_path):
+            self.results = json.load(open(self.output_path, 'r'))
+
+    def _get_playbook_name(self, work_dir):
+
+        playbook = self._playbook_name
+        if work_dir and playbook.startswith(work_dir):
+            playbook = playbook.replace(work_dir.rstrip('/') + '/', '')
+            # Lop off the first two path elements - ansible/pre_playbook_0
+            for prefix in ('pre', 'playbook', 'post'):
+                full_prefix = 'ansible/{prefix}_'.format(prefix=prefix)
+                if playbook.startswith(full_prefix):
+                    playbook = playbook.split(os.path.sep, 2)[2]
+        return playbook
+
+    def _new_play(self, play, phase, index, work_dir):
+        return {
+            'play': {
+                'name': play.name,
+                'id': str(play._uuid),
+                'phase': phase,
+                'index': index,
+                'playbook': self._get_playbook_name(work_dir),
+            },
+            'tasks': []
+        }
+
+    def _new_task(self, task):
+        return {
+            'task': {
+                'name': task.name,
+                'id': str(task._uuid)
+            },
+            'hosts': {}
+        }
+
+    def v2_playbook_on_start(self, playbook):
+        self._playbook_name = os.path.splitext(playbook._file_name)[0]
+
+    def v2_playbook_on_play_start(self, play):
+        # Get the hostvars from just one host - the vars we're looking for will
+        # be identical on all of them
+        hostvars = next(iter(play._variable_manager._hostvars.values()))
+        phase = hostvars.get('zuul_execution_phase')
+        index = hostvars.get('zuul_execution_phase_index')
+        # TODO(mordred) For now, protect this to make it not absurdly strange
+        # to run local tests with the callback plugin enabled. Remove once we
+        # have a "run playbook like zuul runs playbook" tool.
+        work_dir = None
+        if 'zuul' in hostvars and 'executor' in hostvars['zuul']:
+            # imply work_dir from src_root
+            work_dir = os.path.dirname(
+                hostvars['zuul']['executor']['src_root'])
+        self.results.append(self._new_play(play, phase, index, work_dir))
+
+    def v2_playbook_on_task_start(self, task, is_conditional):
+        self.results[-1]['tasks'].append(self._new_task(task))
+
+    def v2_runner_on_ok(self, result, **kwargs):
+        host = result._host
+        if result._result.get('_ansible_no_log', False):
+            self.results[-1]['tasks'][-1]['hosts'][host.name] = dict(
+                censored="the output has been hidden due to the fact that"
+                         " 'no_log: true' was specified for this result")
+        else:
+            clean_result = strip_internal_keys(result._result)
+            self.results[-1]['tasks'][-1]['hosts'][host.name] = clean_result
+
+    def v2_playbook_on_stats(self, stats):
+        """Display info about playbook statistics"""
+        hosts = sorted(stats.processed.keys())
+
+        summary = {}
+        for h in hosts:
+            s = stats.summarize(h)
+            summary[h] = s
+
+        output = {
+            'plays': self.results,
+            'stats': summary
+        }
+
+        json.dump(output, open(self.output_path, 'w'),
+                  indent=4, sort_keys=True, separators=(',', ': '))
+
+    v2_runner_on_failed = v2_runner_on_ok
+    v2_runner_on_unreachable = v2_runner_on_ok
+    v2_runner_on_skipped = v2_runner_on_ok
diff --git a/zuul/ansible/callback/zuul_stream.py b/zuul/ansible/callback/zuul_stream.py
index cc979f2..e9f969a 100644
--- a/zuul/ansible/callback/zuul_stream.py
+++ b/zuul/ansible/callback/zuul_stream.py
@@ -312,8 +312,7 @@
             else:
                 self._log_message(
                     result=result,
-                    status=status,
-                    result_dict=result_dict)
+                    status=status)
         elif 'results' in result_dict:
             for res in result_dict['results']:
                 self._log_message(
@@ -342,7 +341,6 @@
             self._log_message(
                 result=result,
                 msg="Item: {item}".format(item=result_dict['item']),
-                result_dict=result_dict,
                 status=status)
         else:
             self._log_message(
diff --git a/zuul/configloader.py b/zuul/configloader.py
index f8e2d15..6dc3274 100644
--- a/zuul/configloader.py
+++ b/zuul/configloader.py
@@ -1213,7 +1213,7 @@
                                    (job, job.files))
             loaded = False
             files = sorted(job.files.keys())
-            for conf_root in ['zuul.yaml', '.zuul.yaml', 'zuul.d', '.zuul.d']:
+            for conf_root in ['zuul.yaml', 'zuul.d', '.zuul.yaml', '.zuul.d']:
                 for fn in files:
                     fn_root = fn.split('/')[0]
                     if fn_root != conf_root or not job.files.get(fn):
@@ -1416,8 +1416,7 @@
                     fns1.append(fn)
                 if fn.startswith(".zuul.d/"):
                     fns2.append(fn)
-
-            fns = ['zuul.yaml', '.zuul.yaml'] + sorted(fns1) + sorted(fns2)
+            fns = ["zuul.yaml"] + sorted(fns1) + [".zuul.yaml"] + sorted(fns2)
             incdata = None
             loaded = None
             for fn in fns:
diff --git a/zuul/driver/github/githubconnection.py b/zuul/driver/github/githubconnection.py
index 1a9e37b..a4a4c12 100644
--- a/zuul/driver/github/githubconnection.py
+++ b/zuul/driver/github/githubconnection.py
@@ -824,6 +824,9 @@
         # Create a repo object
         repository = github.repository(owner, proj)
 
+        if not repository:
+            return 'none'
+
         # Build up a URL
         url = repository._build_url('collaborators', login, 'permission',
                                     base_url=repository._api)
diff --git a/zuul/driver/sql/alembic_reporter/versions/f86c9871ee67_add_tenant_column.py b/zuul/driver/sql/alembic_reporter/versions/f86c9871ee67_add_tenant_column.py
new file mode 100644
index 0000000..7728bd4
--- /dev/null
+++ b/zuul/driver/sql/alembic_reporter/versions/f86c9871ee67_add_tenant_column.py
@@ -0,0 +1,38 @@
+# Copyright 2017 Red Hat, Inc.
+#
+# Licensed under the Apache License, Version 2.0 (the "License"); you may
+# not use this file except in compliance with the License. You may obtain
+# a copy of the License at
+#
+#      http://www.apache.org/licenses/LICENSE-2.0
+#
+# Unless required by applicable law or agreed to in writing, software
+# distributed under the License is distributed on an "AS IS" BASIS, WITHOUT
+# WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. See the
+# License for the specific language governing permissions and limitations
+# under the License.
+
+"""Add tenant column
+
+Revision ID: f86c9871ee67
+Revises: 20126015a87d
+Create Date: 2017-07-17 05:47:48.189767
+
+"""
+
+# revision identifiers, used by Alembic.
+revision = 'f86c9871ee67'
+down_revision = '20126015a87d'
+branch_labels = None
+depends_on = None
+
+from alembic import op
+import sqlalchemy as sa
+
+
+def upgrade():
+    op.add_column('zuul_buildset', sa.Column('tenant', sa.String(255)))
+
+
+def downgrade():
+    op.drop_column('zuul_buildset', 'tenant')
diff --git a/zuul/driver/sql/sqlconnection.py b/zuul/driver/sql/sqlconnection.py
index e478d33..0e3f0dd 100644
--- a/zuul/driver/sql/sqlconnection.py
+++ b/zuul/driver/sql/sqlconnection.py
@@ -85,6 +85,7 @@
             sa.Column('ref', sa.String(255)),
             sa.Column('score', sa.Integer, nullable=True),
             sa.Column('message', sa.TEXT()),
+            sa.Column('tenant', sa.String(255)),
         )
 
         zuul_build_table = sa.Table(
diff --git a/zuul/driver/sql/sqlreporter.py b/zuul/driver/sql/sqlreporter.py
index 5f93ce8..214b667 100644
--- a/zuul/driver/sql/sqlreporter.py
+++ b/zuul/driver/sql/sqlreporter.py
@@ -52,6 +52,7 @@
                 score=self.result_score,
                 message=self._formatItemReport(
                     item, with_jobs=False),
+                tenant=item.pipeline.layout.tenant.name,
             )
             buildset_ins_result = conn.execute(buildset_ins)
             build_inserts = []
diff --git a/zuul/executor/client.py b/zuul/executor/client.py
index 2a205bf..f764778 100644
--- a/zuul/executor/client.py
+++ b/zuul/executor/client.py
@@ -253,10 +253,12 @@
         params['nodes'] = nodes
         params['groups'] = [group.toDict() for group in nodeset.getGroups()]
         params['vars'] = copy.deepcopy(job.variables)
+        params['secrets'] = {}
         if job.auth:
             for secret in job.auth.secrets:
-                params['vars'][secret.name] = copy.deepcopy(secret.secret_data)
-        params['vars']['zuul'] = zuul_params
+                secret_data = copy.deepcopy(secret.secret_data)
+                params['secrets'][secret.name] = secret_data
+        params['zuul'] = zuul_params
         projects = set()
 
         def make_project_dict(project, override_branch=None):
@@ -376,6 +378,7 @@
             build.node_name = data.get('node_name')
             if result is None:
                 result = data.get('result')
+                build.error_detail = data.get('error_detail')
             if result is None:
                 if (build.build_set.getTries(build.job.name) >=
                     build.job.attempts):
diff --git a/zuul/executor/server.py b/zuul/executor/server.py
index 824a47a..f291dce 100644
--- a/zuul/executor/server.py
+++ b/zuul/executor/server.py
@@ -41,6 +41,16 @@
 DEFAULT_FINGER_PORT = 79
 
 
+class ExecutorError(Exception):
+    """A non-transient run-time executor error
+
+    This class represents error conditions detected by the executor
+    when preparing to run a job which we know are consistently fatal.
+    Zuul should not reschedule the build in these cases.
+    """
+    pass
+
+
 class Watchdog(object):
     def __init__(self, timeout, function, args):
         self.timeout = timeout
@@ -115,8 +125,8 @@
             subprocess.check_output(['ssh-add', key_path], env=env,
                                     stderr=subprocess.PIPE)
         except subprocess.CalledProcessError as e:
-            self.log.error('ssh-add failed. stdout: %s, stderr: %s',
-                           e.output, e.stderr)
+            self.log.exception('ssh-add failed. stdout: %s, stderr: %s',
+                               e.output, e.stderr)
             raise
         self.log.info('Added SSH Key {}'.format(key_path))
 
@@ -215,6 +225,8 @@
             pass
         self.known_hosts = os.path.join(ssh_dir, 'known_hosts')
         self.inventory = os.path.join(self.ansible_root, 'inventory.yaml')
+        self.secrets = os.path.join(self.ansible_root, 'secrets.yaml')
+        self.has_secrets = False
         self.playbooks = []  # The list of candidate playbooks
         self.playbook = None  # A pointer to the candidate we have chosen
         self.pre_playbooks = []
@@ -744,6 +756,11 @@
                                  self.executor_server.keep_jobdir,
                                  str(self.job.unique))
             self._execute()
+        except ExecutorError as e:
+            result_data = json.dumps(dict(result='ERROR',
+                                          error_detail=e.args[0]))
+            self.log.debug("Sending result: %s" % (result_data,))
+            self.job.sendWorkComplete(result_data)
         except Exception:
             self.log.exception("Exception while executing job")
             self.job.sendWorkException(traceback.format_exc())
@@ -767,7 +784,7 @@
     def _execute(self):
         args = json.loads(self.job.arguments)
         self.log.debug("Beginning job %s for ref %s" %
-                       (self.job.name, args['vars']['zuul']['ref']))
+                       (self.job.name, args['zuul']['ref']))
         self.log.debug("Args: %s" % (self.job.arguments,))
         self.log.debug("Job root: %s" % (self.jobdir.root,))
         tasks = []
@@ -913,8 +930,9 @@
                           project_name, project_default_branch)
             repo.checkoutLocalBranch(project_default_branch)
         else:
-            raise Exception("Project %s does not have the default branch %s" %
-                            (project_name, project_default_branch))
+            raise ExecutorError("Project %s does not have the "
+                                "default branch %s" %
+                                (project_name, project_default_branch))
 
     def runPlaybooks(self, args):
         result = None
@@ -1005,9 +1023,9 @@
         '''
         for entry in os.listdir(path):
             if os.path.isdir(entry) and entry.endswith('_plugins'):
-                raise Exception(
-                    "Ansible plugin dir %s found adjacent to playbook %s in"
-                    " non-trusted repo." % (entry, path))
+                raise ExecutorError(
+                    "Ansible plugin dir %s found adjacent to playbook %s in "
+                    "non-trusted repo." % (entry, path))
 
     def findPlaybook(self, path, required=False, trusted=False):
         for ext in ['.yaml', '.yml']:
@@ -1018,7 +1036,7 @@
                     self._blockPluginDirs(playbook_dir)
                 return fn
         if required:
-            raise Exception("Unable to find playbook %s" % path)
+            raise ExecutorError("Unable to find playbook %s" % path)
         return None
 
     def preparePlaybooks(self, args):
@@ -1036,7 +1054,7 @@
                 break
 
         if self.jobdir.playbook is None:
-            raise Exception("No valid playbook found")
+            raise ExecutorError("No valid playbook found")
 
         for playbook in args['post_playbooks']:
             jobdir_playbook = self.jobdir.addPostPlaybook()
@@ -1124,7 +1142,7 @@
                         self._blockPluginDirs(os.path.join(d, entry))
             return d
         # It is neither a bare role, nor a collection of roles
-        raise Exception("Unable to find role in %s" % (path,))
+        raise ExecutorError("Unable to find role in %s" % (path,))
 
     def prepareZuulRole(self, jobdir_playbook, role, args, root):
         self.log.debug("Prepare zuul role for %s" % (role,))
@@ -1162,7 +1180,7 @@
         link = os.path.join(root, name)
         link = os.path.realpath(link)
         if not link.startswith(os.path.realpath(root)):
-            raise Exception("Invalid role name %s", name)
+            raise ExecutorError("Invalid role name %s", name)
         os.symlink(path, link)
 
         role_path = self.findRole(link, trusted=jobdir_playbook.trusted)
@@ -1172,9 +1190,12 @@
         jobdir_playbook.roles_path.append(role_path)
 
     def prepareAnsibleFiles(self, args):
-        all_vars = dict(args['vars'])
+        all_vars = args['vars'].copy()
         # TODO(mordred) Hack to work around running things with python3
         all_vars['ansible_python_interpreter'] = '/usr/bin/python2'
+        if 'zuul' in all_vars:
+            raise Exception("Defining vars named 'zuul' is not allowed")
+        all_vars['zuul'] = args['zuul'].copy()
         all_vars['zuul']['executor'] = dict(
             hostname=self.executor_server.hostname,
             src_root=self.jobdir.src_root,
@@ -1193,6 +1214,15 @@
                 for key in node['host_keys']:
                     known_hosts.write('%s\n' % key)
 
+        secrets = args['secrets'].copy()
+        if secrets:
+            if 'zuul' in secrets:
+                raise Exception("Defining secrets named 'zuul' is not allowed")
+            with open(self.jobdir.secrets, 'w') as secrets_yaml:
+                secrets_yaml.write(
+                    yaml.safe_dump(secrets, default_flow_style=False))
+            self.jobdir.has_secrets = True
+
     def writeAnsibleConfig(self, jobdir_playbook):
         trusted = jobdir_playbook.trusted
 
@@ -1211,6 +1241,7 @@
             config.write('callback_plugins = %s\n'
                          % self.executor_server.callback_dir)
             config.write('stdout_callback = zuul_stream\n')
+            config.write('callback_whitelist = zuul_json\n')
             # bump the timeout because busy nodes may take more than
             # 10s to respond
             config.write('timeout = 30\n')
@@ -1353,6 +1384,8 @@
             # TODO(mordred) If/when we rework use of logger in ansible-playbook
             # we'll want to change how this works to use that as well. For now,
             # this is what we need to do.
+            # TODO(mordred) We probably want to put this into the json output
+            # as well.
             with open(self.jobdir.job_output_file, 'a') as job_output:
                 job_output.write("{now} | ANSIBLE PARSE ERROR\n".format(
                     now=datetime.datetime.now()))
@@ -1373,6 +1406,8 @@
             verbose = '-v'
 
         cmd = ['ansible-playbook', verbose, playbook.path]
+        if self.jobdir.has_secrets:
+            cmd.extend(['-e', '@' + self.jobdir.secrets])
 
         if success is not None:
             cmd.extend(['-e', 'success=%s' % str(bool(success))])
diff --git a/zuul/manager/dependent.py b/zuul/manager/dependent.py
index ada3491..411894e 100644
--- a/zuul/manager/dependent.py
+++ b/zuul/manager/dependent.py
@@ -14,6 +14,7 @@
 
 from zuul import model
 from zuul.manager import PipelineManager, StaticChangeQueueContextManager
+from zuul.manager import DynamicChangeQueueContextManager
 
 
 class DependentPipelineManager(PipelineManager):
@@ -75,8 +76,17 @@
     def getChangeQueue(self, change, existing=None):
         if existing:
             return StaticChangeQueueContextManager(existing)
-        return StaticChangeQueueContextManager(
-            self.pipeline.getQueue(change.project))
+        queue = self.pipeline.getQueue(change.project)
+        if queue:
+            return StaticChangeQueueContextManager(queue)
+        else:
+            # There is no existing queue for this change. Create a
+            # dynamic one for this one change's use
+            change_queue = model.ChangeQueue(self.pipeline, dynamic=True)
+            change_queue.addProject(change.project)
+            self.pipeline.addQueue(change_queue)
+            self.log.debug("Dynamically created queue %s", change_queue)
+            return DynamicChangeQueueContextManager(change_queue)
 
     def isChangeReadyToBeEnqueued(self, change):
         source = change.project.source
@@ -201,3 +211,11 @@
         if failing_items:
             return failing_items
         return None
+
+    def dequeueItem(self, item):
+        super(DependentPipelineManager, self).dequeueItem(item)
+        # If this was a dynamic queue from a speculative change,
+        # remove the queue (if empty)
+        if item.queue.dynamic:
+            if not item.queue.queue:
+                self.pipeline.removeQueue(item.queue)
diff --git a/zuul/model.py b/zuul/model.py
index b8f694e..ef67828 100644
--- a/zuul/model.py
+++ b/zuul/model.py
@@ -164,7 +164,7 @@
             items.extend(shared_queue.queue)
         return items
 
-    def formatStatusJSON(self):
+    def formatStatusJSON(self, websocket_url=None):
         j_pipeline = dict(name=self.name,
                           description=self.description)
         j_queues = []
@@ -181,7 +181,7 @@
                     if j_changes:
                         j_queue['heads'].append(j_changes)
                     j_changes = []
-                j_changes.append(e.formatJSON())
+                j_changes.append(e.formatJSON(websocket_url))
                 if (len(j_changes) > 1 and
                         (j_changes[-2]['remaining_time'] is not None) and
                         (j_changes[-1]['remaining_time'] is not None)):
@@ -209,11 +209,14 @@
     be processed. If a Change succeeds, the Window is increased by
     `window_increase_factor`. If a Change fails, the Window is decreased by
     `window_decrease_factor`.
+
+    A ChangeQueue may be a dynamically created queue, which may be removed
+    from a DependentPipelineManager once empty.
     """
     def __init__(self, pipeline, window=0, window_floor=1,
                  window_increase_type='linear', window_increase_factor=1,
                  window_decrease_type='exponential', window_decrease_factor=2,
-                 name=None):
+                 name=None, dynamic=False):
         self.pipeline = pipeline
         if name:
             self.name = name
@@ -228,6 +231,7 @@
         self.window_increase_factor = window_increase_factor
         self.window_decrease_type = window_decrease_type
         self.window_decrease_factor = window_decrease_factor
+        self.dynamic = dynamic
 
     def __repr__(self):
         return '<ChangeQueue %s: %s>' % (self.pipeline.name, self.name)
@@ -1094,6 +1098,7 @@
         self.url = None
         self.result = None
         self.result_data = {}
+        self.error_detail = None
         self.build_set = None
         self.execute_time = time.time()
         self.start_time = None
@@ -1114,6 +1119,7 @@
     def getSafeAttributes(self):
         return Attributes(uuid=self.uuid,
                           result=self.result,
+                          error_detail=self.error_detail,
                           result_data=self.result_data)
 
 
@@ -1667,7 +1673,7 @@
             url = default_url or build.url or job.name
         return (result, url)
 
-    def formatJSON(self):
+    def formatJSON(self, websocket_url=None):
         ret = {}
         ret['active'] = self.active
         ret['live'] = self.live
@@ -1704,11 +1710,20 @@
             remaining = None
             result = None
             build_url = None
+            finger_url = None
             report_url = None
             worker = None
             if build:
                 result = build.result
-                build_url = build.url
+                finger_url = build.url
+                # TODO(tobiash): add support for custom web root
+                urlformat = 'static/stream.html?' \
+                            'uuid={build.uuid}&' \
+                            'logfile=console.log'
+                if websocket_url:
+                    urlformat += '&websocket_url={websocket_url}'
+                build_url = urlformat.format(
+                    build=build, websocket_url=websocket_url)
                 (unused, report_url) = self.formatJobResult(job)
                 if build.start_time:
                     if build.end_time:
@@ -1734,6 +1749,7 @@
                 'elapsed_time': elapsed,
                 'remaining_time': remaining,
                 'url': build_url,
+                'finger_url': finger_url,
                 'report_url': report_url,
                 'result': result,
                 'voting': job.voting,
diff --git a/zuul/reporter/__init__.py b/zuul/reporter/__init__.py
index 95b9208..49181a7 100644
--- a/zuul/reporter/__init__.py
+++ b/zuul/reporter/__init__.py
@@ -138,7 +138,11 @@
                     elapsed = ' in %ds' % (s)
             else:
                 elapsed = ''
+            if build.error_detail:
+                error = ' ' + build.error_detail
+            else:
+                error = ''
             name = job.name + ' '
-            ret += '- %s%s : %s%s%s\n' % (name, url, result, elapsed,
-                                          voting)
+            ret += '- %s%s : %s%s%s%s\n' % (name, url, result, error,
+                                            elapsed, voting)
         return ret
diff --git a/zuul/scheduler.py b/zuul/scheduler.py
index e5e7f87..2217b0b 100644
--- a/zuul/scheduler.py
+++ b/zuul/scheduler.py
@@ -889,6 +889,7 @@
         data = {}
 
         data['zuul_version'] = self.zuul_version
+        websocket_url = get_default(self.config, 'web', 'websocket_url', None)
 
         if self._pause:
             ret = '<p><b>Queue only mode:</b> preparing to '
@@ -912,5 +913,5 @@
         data['pipelines'] = pipelines
         tenant = self.abide.tenants.get(tenant_name)
         for pipeline in tenant.layout.pipelines.values():
-            pipelines.append(pipeline.formatStatusJSON())
+            pipelines.append(pipeline.formatStatusJSON(websocket_url))
         return json.dumps(data)
diff --git a/zuul/web.py b/zuul/web/__init__.py
similarity index 93%
rename from zuul/web.py
rename to zuul/web/__init__.py
index ab16e11..faf22b5 100644
--- a/zuul/web.py
+++ b/zuul/web/__init__.py
@@ -18,6 +18,7 @@
 import asyncio
 import json
 import logging
+import os
 import uvloop
 
 import aiohttp
@@ -25,6 +26,8 @@
 
 import zuul.rpcclient
 
+STATIC_DIR = os.path.join(os.path.dirname(__file__), 'static')
+
 
 class LogStreamingHandler(object):
     log = logging.getLogger("zuul.web.LogStreamingHandler")
@@ -39,11 +42,11 @@
         self.ssl_ca = ssl_ca
 
     def _getPortLocation(self, job_uuid):
-        '''
+        """
         Query Gearman for the executor running the given job.
 
         :param str job_uuid: The job UUID we want to stream.
-        '''
+        """
         # TODO: Fetch the entire list of uuid/file/server/ports once and
         #       share that, and fetch a new list on cache misses perhaps?
         # TODO: Avoid recreating a client for each request.
@@ -55,14 +58,14 @@
         return ret
 
     async def _fingerClient(self, ws, server, port, job_uuid):
-        '''
+        """
         Create a client to connect to the finger streamer and pull results.
 
         :param aiohttp.web.WebSocketResponse ws: The websocket response object.
         :param str server: The executor server running the job.
         :param str port: The executor server port.
         :param str job_uuid: The job UUID to stream.
-        '''
+        """
         self.log.debug("Connecting to finger server %s:%s", server, port)
         reader, writer = await asyncio.open_connection(host=server, port=port,
                                                        loop=self.event_loop)
@@ -82,12 +85,12 @@
                 return
 
     async def _streamLog(self, ws, request):
-        '''
+        """
         Stream the log for the requested job back to the client.
 
         :param aiohttp.web.WebSocketResponse ws: The websocket response object.
         :param dict request: The client request parameters.
-        '''
+        """
         for key in ('uuid', 'logfile'):
             if key not in request:
                 return (4000, "'{key}' missing from request payload".format(
@@ -112,11 +115,11 @@
         return (1000, "No more data")
 
     async def processRequest(self, request):
-        '''
+        """
         Handle a client websocket request for log streaming.
 
         :param aiohttp.web.Request request: The client request.
-        '''
+        """
         try:
             ws = web.WebSocketResponse()
             await ws.prepare(request)
@@ -161,6 +164,8 @@
         self.ssl_key = ssl_key
         self.ssl_cert = ssl_cert
         self.ssl_ca = ssl_ca
+        self.event_loop = None
+        self.term = None
 
     async def _handleWebsocket(self, request):
         handler = LogStreamingHandler(self.event_loop,
@@ -169,7 +174,7 @@
         return await handler.processRequest(request)
 
     def run(self, loop=None):
-        '''
+        """
         Run the websocket daemon.
 
         Because this method can be the target of a new thread, we need to
@@ -178,9 +183,9 @@
         :param loop: The event loop to use. If not supplied, the default main
             thread event loop is used. This should be supplied if ZuulWeb
             is run within a separate (non-main) thread.
-        '''
+        """
         routes = [
-            ('GET', '/console-stream', self._handleWebsocket)
+            ('GET', '/console-stream', self._handleWebsocket),
         ]
 
         self.log.debug("ZuulWeb starting")
@@ -195,6 +200,7 @@
         app = web.Application()
         for method, path, handler in routes:
             app.router.add_route(method, path, handler)
+        app.router.add_static('/static', STATIC_DIR)
         handler = app.make_handler(loop=self.event_loop)
 
         # create the server
@@ -224,7 +230,8 @@
             loop.close()
 
     def stop(self):
-        self.event_loop.call_soon_threadsafe(self.term.set_result, True)
+        if self.event_loop and self.term:
+            self.event_loop.call_soon_threadsafe(self.term.set_result, True)
 
 
 if __name__ == "__main__":
diff --git a/zuul/web/static/stream.html b/zuul/web/static/stream.html
new file mode 100644
index 0000000..dbeb66b
--- /dev/null
+++ b/zuul/web/static/stream.html
@@ -0,0 +1,114 @@
+<!DOCTYPE HTML PUBLIC "-//W3C//DTD HTML 4.01//EN"
+   "http://www.w3.org/TR/html4/strict.dtd">
+<html>
+  <head>
+      <style type="text/css">
+
+        body {
+          font-family: monospace;
+          background-color: black;
+          color: lightgrey;
+        }
+
+        #overlay {
+            position: fixed;
+            top: 5px;
+            right: 5px;
+            background-color: darkgrey;
+            color: black;
+        }
+
+        pre {
+            white-space: pre;
+            margin: 0px 10px;
+        }
+      </style>
+
+    <script type="text/javascript">
+
+      function escapeLog(text) {
+          var pattern = /[<>&"']/g;
+
+          return text.replace(pattern, function(match) {
+              return '&#' + match.charCodeAt(0) + ';';
+          });
+      }
+
+      window.onload = function() {
+
+          pageUpdateInMS = 250;
+          var receiveBuffer = "";
+          var websocket_url = null
+
+          setInterval(function() {
+              console.log("autoScroll");
+              if (receiveBuffer != "") {
+                  document.getElementById('pagecontent').innerHTML += receiveBuffer;
+                  receiveBuffer = "";
+                  if (document.getElementById('autoscroll').checked) {
+                      window.scrollTo(0, document.body.scrollHeight);
+                  }
+              }
+          }, pageUpdateInMS);
+
+          var url = new URL(window.location);
+
+          var params = {
+              uuid: url.searchParams.get('uuid')
+          }
+          document.getElementById('pagetitle').innerHTML = params['uuid'];
+          if (url.searchParams.has('logfile')) {
+              params['logfile'] = url.searchParams.get('logfile');
+              var logfile_suffix = "(" + params['logfile'] + ")";
+              document.getElementById('pagetitle').innerHTML += logfile_suffix;
+          }
+          if (url.searchParams.has('websocket_url')) {
+              params['websocket_url'] = url.searchParams.get('websocket_url');
+          } else {
+              // Websocket doesn't accept relative urls so construct an
+              // absolute one.
+              var protocol = '';
+              if (url['protocol'] == 'https:') {
+                  protocol = 'wss://';
+              } else {
+                  protocol = 'ws://';
+              }
+              path = url['pathname'].replace(/static\/.*$/g, '') + 'console-stream';
+              params['websocket_url'] = protocol + url['host'] + path;
+          }
+          var ws = new WebSocket(params['websocket_url']);
+
+          ws.onmessage = function(event) {
+              console.log("onmessage");
+              receiveBuffer = receiveBuffer + escapeLog(event.data);
+          };
+
+          ws.onopen = function(event) {
+              console.log("onopen");
+              ws.send(JSON.stringify(params));
+          };
+
+          ws.onclose = function(event) {
+              console.log("onclose");
+              receiveBuffer = receiveBuffer + "\n--- END OF STREAM ---\n";
+          };
+
+      };
+
+    </script>
+
+    <title id="pagetitle"></title>
+  </head>
+
+  <body>
+
+    <div id="overlay">
+      <form>
+        <input type="checkbox" id="autoscroll" checked> autoscroll
+      </form>
+    </div>
+
+    <pre id="pagecontent"></pre>
+
+  </body>
+</html>