Diffstat (limited to 'src/VBox/ValidationKit/docs/AutomaticTestingRevamp.html')
 src/VBox/ValidationKit/docs/AutomaticTestingRevamp.html | 1357 +++++++++++++++
 1 file changed, 1357 insertions(+), 0 deletions(-)
diff --git a/src/VBox/ValidationKit/docs/AutomaticTestingRevamp.html b/src/VBox/ValidationKit/docs/AutomaticTestingRevamp.html
new file mode 100644
index 00000000..be262a09
--- /dev/null
+++ b/src/VBox/ValidationKit/docs/AutomaticTestingRevamp.html
@@ -0,0 +1,1357 @@
+<?xml version="1.0" encoding="utf-8" ?>
+<!DOCTYPE html PUBLIC "-//W3C//DTD XHTML 1.0 Transitional//EN" "http://www.w3.org/TR/xhtml1/DTD/xhtml1-transitional.dtd">
+<html xmlns="http://www.w3.org/1999/xhtml" xml:lang="en" lang="en">
+<head>
+<meta http-equiv="Content-Type" content="text/html; charset=utf-8" />
+<meta name="generator" content="Docutils 0.12: http://docutils.sourceforge.net/" />
+<title></title>
+<style type="text/css">
+
+/*
+:Author: David Goodger (goodger@python.org)
+:Id: $Id: AutomaticTestingRevamp.html $
+:Copyright: This stylesheet has been placed in the public domain.
+
+Default cascading style sheet for the HTML output of Docutils.
+
+See http://docutils.sf.net/docs/howto/html-stylesheets.html for how to
+customize this style sheet.
+*/
+
+/* used to remove borders from tables and images */
+.borderless, table.borderless td, table.borderless th {
+ border: 0 }
+
+table.borderless td, table.borderless th {
+ /* Override padding for "table.docutils td" with "! important".
+ The right padding separates the table cells. */
+ padding: 0 0.5em 0 0 ! important }
+
+.first {
+ /* Override more specific margin styles with "! important". */
+ margin-top: 0 ! important }
+
+.last, .with-subtitle {
+ margin-bottom: 0 ! important }
+
+.hidden {
+ display: none }
+
+a.toc-backref {
+ text-decoration: none ;
+ color: black }
+
+blockquote.epigraph {
+ margin: 2em 5em ; }
+
+dl.docutils dd {
+ margin-bottom: 0.5em }
+
+object[type="image/svg+xml"], object[type="application/x-shockwave-flash"] {
+ overflow: hidden;
+}
+
+/* Uncomment (and remove this text!) to get bold-faced definition list terms
+dl.docutils dt {
+ font-weight: bold }
+*/
+
+div.abstract {
+ margin: 2em 5em }
+
+div.abstract p.topic-title {
+ font-weight: bold ;
+ text-align: center }
+
+div.admonition, div.attention, div.caution, div.danger, div.error,
+div.hint, div.important, div.note, div.tip, div.warning {
+ margin: 2em ;
+ border: medium outset ;
+ padding: 1em }
+
+div.admonition p.admonition-title, div.hint p.admonition-title,
+div.important p.admonition-title, div.note p.admonition-title,
+div.tip p.admonition-title {
+ font-weight: bold ;
+ font-family: sans-serif }
+
+div.attention p.admonition-title, div.caution p.admonition-title,
+div.danger p.admonition-title, div.error p.admonition-title,
+div.warning p.admonition-title, .code .error {
+ color: red ;
+ font-weight: bold ;
+ font-family: sans-serif }
+
+/* Uncomment (and remove this text!) to get reduced vertical space in
+ compound paragraphs.
+div.compound .compound-first, div.compound .compound-middle {
+ margin-bottom: 0.5em }
+
+div.compound .compound-last, div.compound .compound-middle {
+ margin-top: 0.5em }
+*/
+
+div.dedication {
+ margin: 2em 5em ;
+ text-align: center ;
+ font-style: italic }
+
+div.dedication p.topic-title {
+ font-weight: bold ;
+ font-style: normal }
+
+div.figure {
+ margin-left: 2em ;
+ margin-right: 2em }
+
+div.footer, div.header {
+ clear: both;
+ font-size: smaller }
+
+div.line-block {
+ display: block ;
+ margin-top: 1em ;
+ margin-bottom: 1em }
+
+div.line-block div.line-block {
+ margin-top: 0 ;
+ margin-bottom: 0 ;
+ margin-left: 1.5em }
+
+div.sidebar {
+ margin: 0 0 0.5em 1em ;
+ border: medium outset ;
+ padding: 1em ;
+ background-color: #ffffee ;
+ width: 40% ;
+ float: right ;
+ clear: right }
+
+div.sidebar p.rubric {
+ font-family: sans-serif ;
+ font-size: medium }
+
+div.system-messages {
+ margin: 5em }
+
+div.system-messages h1 {
+ color: red }
+
+div.system-message {
+ border: medium outset ;
+ padding: 1em }
+
+div.system-message p.system-message-title {
+ color: red ;
+ font-weight: bold }
+
+div.topic {
+ margin: 2em }
+
+h1.section-subtitle, h2.section-subtitle, h3.section-subtitle,
+h4.section-subtitle, h5.section-subtitle, h6.section-subtitle {
+ margin-top: 0.4em }
+
+h1.title {
+ text-align: center }
+
+h2.subtitle {
+ text-align: center }
+
+hr.docutils {
+ width: 75% }
+
+img.align-left, .figure.align-left, object.align-left {
+ clear: left ;
+ float: left ;
+ margin-right: 1em }
+
+img.align-right, .figure.align-right, object.align-right {
+ clear: right ;
+ float: right ;
+ margin-left: 1em }
+
+img.align-center, .figure.align-center, object.align-center {
+ display: block;
+ margin-left: auto;
+ margin-right: auto;
+}
+
+.align-left {
+ text-align: left }
+
+.align-center {
+ clear: both ;
+ text-align: center }
+
+.align-right {
+ text-align: right }
+
+/* reset inner alignment in figures */
+div.align-right {
+ text-align: inherit }
+
+/* div.align-center * { */
+/* text-align: left } */
+
+ol.simple, ul.simple {
+ margin-bottom: 1em }
+
+ol.arabic {
+ list-style: decimal }
+
+ol.loweralpha {
+ list-style: lower-alpha }
+
+ol.upperalpha {
+ list-style: upper-alpha }
+
+ol.lowerroman {
+ list-style: lower-roman }
+
+ol.upperroman {
+ list-style: upper-roman }
+
+p.attribution {
+ text-align: right ;
+ margin-left: 50% }
+
+p.caption {
+ font-style: italic }
+
+p.credits {
+ font-style: italic ;
+ font-size: smaller }
+
+p.label {
+ white-space: nowrap }
+
+p.rubric {
+ font-weight: bold ;
+ font-size: larger ;
+ color: maroon ;
+ text-align: center }
+
+p.sidebar-title {
+ font-family: sans-serif ;
+ font-weight: bold ;
+ font-size: larger }
+
+p.sidebar-subtitle {
+ font-family: sans-serif ;
+ font-weight: bold }
+
+p.topic-title {
+ font-weight: bold }
+
+pre.address {
+ margin-bottom: 0 ;
+ margin-top: 0 ;
+ font: inherit }
+
+pre.literal-block, pre.doctest-block, pre.math, pre.code {
+ margin-left: 2em ;
+ margin-right: 2em }
+
+pre.code .ln { color: grey; } /* line numbers */
+pre.code, code { background-color: #eeeeee }
+pre.code .comment, code .comment { color: #5C6576 }
+pre.code .keyword, code .keyword { color: #3B0D06; font-weight: bold }
+pre.code .literal.string, code .literal.string { color: #0C5404 }
+pre.code .name.builtin, code .name.builtin { color: #352B84 }
+pre.code .deleted, code .deleted { background-color: #DEB0A1}
+pre.code .inserted, code .inserted { background-color: #A3D289}
+
+span.classifier {
+ font-family: sans-serif ;
+ font-style: oblique }
+
+span.classifier-delimiter {
+ font-family: sans-serif ;
+ font-weight: bold }
+
+span.interpreted {
+ font-family: sans-serif }
+
+span.option {
+ white-space: nowrap }
+
+span.pre {
+ white-space: pre }
+
+span.problematic {
+ color: red }
+
+span.section-subtitle {
+ /* font-size relative to parent (h1..h6 element) */
+ font-size: 80% }
+
+table.citation {
+ border-left: solid 1px gray;
+ margin-left: 1px }
+
+table.docinfo {
+ margin: 2em 4em }
+
+table.docutils {
+ margin-top: 0.5em ;
+ margin-bottom: 0.5em }
+
+table.footnote {
+ border-left: solid 1px black;
+ margin-left: 1px }
+
+table.docutils td, table.docutils th,
+table.docinfo td, table.docinfo th {
+ padding-left: 0.5em ;
+ padding-right: 0.5em ;
+ vertical-align: top }
+
+table.docutils th.field-name, table.docinfo th.docinfo-name {
+ font-weight: bold ;
+ text-align: left ;
+ white-space: nowrap ;
+ padding-left: 0 }
+
+/* "booktabs" style (no vertical lines) */
+table.docutils.booktabs {
+ border: 0px;
+ border-top: 2px solid;
+ border-bottom: 2px solid;
+ border-collapse: collapse;
+}
+table.docutils.booktabs * {
+ border: 0px;
+}
+table.docutils.booktabs th {
+ border-bottom: thin solid;
+ text-align: left;
+}
+
+h1 tt.docutils, h2 tt.docutils, h3 tt.docutils,
+h4 tt.docutils, h5 tt.docutils, h6 tt.docutils {
+ font-size: 100% }
+
+ul.auto-toc {
+ list-style-type: none }
+
+</style>
+</head>
+<body>
+<div class="document">
+
+
+<div class="section" id="revamp-of-automatic-virtualbox-testing">
+<h1>Revamp of Automatic VirtualBox Testing</h1>
+<div class="section" id="introduction">
+<h2>Introduction</h2>
+<p>This is the design document for a revamped automatic testing framework.
+The revamp aims at replacing the current tinderbox based testing by a new
+system that is written from scratch.</p>
+<p>The old system is not easy to work with and was never meant to be used for
+managing tests; after all, it is just a simple build manager tailored for
+continuous building. Modifying the existing tinderbox system to do what
+we want would require fundamental changes that would render it useless as
+a build manager; it would therefore end up as a fork. The amount of work
+required would probably be about the same as writing a new system from
+scratch. Other considerations, such as the license of the tinderbox
+system (MPL) and the language it is realized in (Perl), are also in favor of
+doing it from scratch.</p>
+<p>The language envisioned for the new automatic testing framework is Python. This
+is for several reasons:</p>
+<blockquote>
+<ul class="simple">
+<li>The VirtualBox API has Python bindings.</li>
+<li>Python is used quite a bit inside Sun (dunno about Oracle).</li>
+<li>Works relatively well with Apache for the server side bits.</li>
+<li>It is more difficult to produce write-only code in Python (alias the
+we-don't-like-perl argument).</li>
+<li>You don't need to compile stuff.</li>
+</ul>
+</blockquote>
+<p>Note that the author of this document has no special training as a test
+engineer and may therefore be using the wrong terms here and there. The
+primary focus is to express what we need to do in order to improve
+testing.</p>
+<p>This document is written in reStructuredText (rst) which just happens to
+be used by Python, the primary language for this revamp. For more
+information on reStructuredText: <a class="reference external" href="http://docutils.sourceforge.net/rst.html">http://docutils.sourceforge.net/rst.html</a></p>
+</div>
+</div>
+<div class="section" id="definitions-glossary">
+<h1>Definitions / Glossary</h1>
+<dl class="docutils">
+<dt>sub-test driver</dt>
+<dd>A set of test cases that can be used by more than one test driver. Could
+also be called a test unit, in the Pascal sense of unit, if it wasn't so
+easily confused with 'unit test'.</dd>
+<dt>test</dt>
+<dd>This is somewhat ambiguous and this document tries to avoid using it where
+possible. When used it normally refers to doing testing by executing one or
+more testcases.</dd>
+<dt>test case</dt>
+<dd>A set of inputs, test programs and expected results. It validates system
+requirements and generates a pass or fail status. A basic unit of testing.
+Note that we use the term in a rather broad sense.</dd>
+<dt>test driver</dt>
+<dd>A program/script used to execute a test. Also known as a test harness.
+Generally abbreviated 'td'. It can have sub-test drivers.</dd>
+<dt>test manager</dt>
+<dd>Software managing the automatic testing. This is a web application that runs
+on a dedicated server (tindertux).</dd>
+<dt>test set</dt>
+<dd>The output of testing activity. Logs, results, ++. Our usage of this should
+probably be renamed to 'test run'.</dd>
+<dt>test group</dt>
+<dd>A collection of related test cases.</dd>
+<dt>testbox</dt>
+<dd>A computer that does testing.</dd>
+<dt>testbox script</dt>
+<dd>Script executing orders from the test manager on a testbox. Started
+automatically upon bootup.</dd>
+<dt>testing</dt>
+<dd>todo</dd>
+<dt>TODO: Check that we've got all this right and make them more exact</dt>
+<dd>where possible.</dd>
+</dl>
+<p>See also <a class="reference external" href="http://encyclopedia2.thefreedictionary.com/testing%20types">http://encyclopedia2.thefreedictionary.com/testing%20types</a>
+and <a class="reference external" href="http://www.aptest.com/glossary.html">http://www.aptest.com/glossary.html</a> .</p>
+</div>
+<div class="section" id="objectives">
+<h1>Objectives</h1>
+<blockquote>
+<ul class="simple">
+<li>A scalable test manager (&gt;200 testboxes).</li>
+<li>Optimize the web user interface (WUI) for typical workflows and analysis.</li>
+<li>Efficient and flexible test configuration.</li>
+<li>Import test results from other test systems (logo testing, VDI, ++).</li>
+<li>Easy to add lots of new test scripts.</li>
+<li>Run tests locally without a manager.</li>
+<li>Revamp a bit at a time.</li>
+</ul>
+</blockquote>
+</div>
+<div class="section" id="the-testbox-side">
+<h1>The Testbox Side</h1>
+<p>Each testbox has a unique name corresponding to its DNS zone entry. When booted
+a testbox script is started automatically. This script will query the test
+manager for orders and execute them. The core order downloads and executes a
+test driver with parameters (configuration) from the server. The test driver
+does all the necessary work for executing the test. In a typical VirtualBox
+test this means picking a build, installing it, configuring VMs, running the
+test VMs, collecting the results, submitting them to the server, and finally
+cleaning up afterwards.</p>
+<p>The testbox environment which the test drivers are executed in will have a
+number of environment variables for determining location of the source images
+and other test data, scratch space, test set id, server URL, and so on and so
+forth.</p>
+<p>On startup, the testbox script will look for crash dumps and similar on
+systems where this is possible. If any sign of a crash is found, it will
+put any dumps and reports in the upload directory and inform the test
+manager before reporting for duty. In order to generate the proper file
+names and report the crash in the right test set as well as prevent
+reporting crashes unrelated to automatic testing, the testbox script will
+keep information (test set id, ++) in a separate scratch directory
+(${TESTBOX_PATH_SCRATCH}/../testbox) and make sure it is synced to the
+disk (both files and directories).</p>
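+<p>As a loose illustration (file names and layout invented for the example), the
+testbox script could persist this information roughly like so:</p>
+<pre class="literal-block">
+# Sketch: persist the current test set info so a crash can be attributed to
+# the right test set after an unexpected reboot.  POSIX-only directory fsync.
+import json
+import os
+
+def write_testbox_state(scratch_path, test_set_id, test_manager_url):
+    state_dir = os.path.abspath(os.path.join(scratch_path, '..', 'testbox'))
+    if not os.path.isdir(state_dir):
+        os.makedirs(state_dir)
+    state_file = os.path.join(state_dir, 'state.json')
+    tmp_file = state_file + '.tmp'
+    with open(tmp_file, 'w') as handle:
+        json.dump({'idTestSet': test_set_id, 'sTmUrl': test_manager_url}, handle)
+        handle.flush()
+        os.fsync(handle.fileno())              # sync the file content ...
+    os.rename(tmp_file, state_file)
+    dir_fd = os.open(state_dir, os.O_RDONLY)   # ... and the directory entry.
+    try:
+        os.fsync(dir_fd)
+    finally:
+        os.close(dir_fd)
+</pre>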
+<p>After checking for crashes, the testbox script will clean up any previous test
+which might be around. This involves first invoking the test driver in cleanup
+mode and then wiping the scratch space.</p>
+<p>When reporting for duty the script will submit information about the host: OS
+name, OS version, OS bitness, CPU vendor, total number of cores, VT-x support,
+AMD-V support, amount of memory, amount of scratch space, and anything else that
+can be found useful for scheduling tests or filtering test configurations.</p>
+<div class="section" id="testbox-script-orders">
+<h2>Testbox Script Orders</h2>
+<p>The orders are kept in a queue on the server and the testbox script will fetch
+them one by one. Orders that cannot be executed at the moment will be masked in
+the query from the testbox.</p>
+<dl class="docutils">
+<dt>Execute Test Driver</dt>
+<dd>Downloads and executes the specified test driver with the given
+configuration (arguments). Only one test driver can be executed at a time.
+The server can specify more than one ZIP file to be downloaded and unpacked
+before executing the test driver. The testbox script may cache these ZIP
+files using HTTP time stamping.</dd>
+<dt>Abort Test Driver</dt>
+<dd>Aborts the current test driver. This will drop a hint to the driver and give
+it 60 seconds to shut down the normal way. If that fails, the testbox script
+will kill the driver processes (SIGKILL or equivalent), invoke the
+testdriver in cleanup mode, and finally wipe the scratch area. Should either
+of the last two steps fail in some way, the testbox will be rebooted.</dd>
+<dt>Idle</dt>
+<dd>Ask again in X seconds, where X is specified by the server.</dd>
+<dt>Reboot</dt>
+<dd>Reboot the testbox. If a test driver is currently running, an attempt at
+aborting it (Abort Test Driver) will be made first.</dd>
+<dt>Update</dt>
+<dd>Updates the testbox script. The order includes a server relative path to the
+new testbox script. This can only be executed when no test driver is
+currently being executed.</dd>
+</dl>
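+<p>A rough sketch of the fetch-and-dispatch loop this implies for the testbox
+script; the order names and handler methods are illustrative only, not the
+actual wire protocol:</p>
+<pre class="literal-block">
+# Illustrative only: order names and handler methods are made up.
+import time
+
+def testbox_main_loop(connection, handlers):
+    """connection.ask_for_work() is assumed to return an order dict from the TM."""
+    while True:
+        order = connection.ask_for_work()
+        name = order.get('name', 'idle')
+        if name == 'exec':
+            handlers.execute_test_driver(order['zips'], order['args'])
+        elif name == 'abort':
+            handlers.abort_test_driver(grace_seconds=60)
+        elif name == 'reboot':
+            handlers.abort_test_driver(grace_seconds=60)
+            handlers.reboot()
+        elif name == 'update':
+            handlers.update_testbox_script(order['path'])
+        else:   # 'idle' - ask again after the server specified delay.
+            time.sleep(order.get('delay', 30))
+</pre>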
+</div>
+<div class="section" id="testbox-environment-variables">
+<h2>Testbox Environment: Variables</h2>
+<dl class="docutils">
+<dt>COMSPEC</dt>
+<dd>This will be set to C:\Windows\System32\cmd.exe on Windows.</dd>
+<dt>PATH</dt>
+<dd>This will contain the kBuild binary directory for the host platform.</dd>
+<dt>SHELL</dt>
+<dd>This will be set to point to kmk_ash(.exe) on all platforms.</dd>
+<dt>TESTBOX_NAME</dt>
+<dd>The testbox name.
+This is not required by the local reporter.</dd>
+<dt>TESTBOX_PATH_BUILDS</dt>
+<dd>The absolute path to where the build repository can be found. This should be
+a read only mount when possible.</dd>
+<dt>TESTBOX_PATH_RESOURCES</dt>
+<dd>The absolute path to where static test resources like ISOs and VDIs can be
+found. The test drivers know the layout of this. This should be a read only
+mount when possible.</dd>
+<dt>TESTBOX_PATH_SCRATCH</dt>
+<dd>The absolute path to the scratch space. This is the current directory when
+starting the test driver. It will be wiped automatically after executing the
+test.
+(Envisioned as ${TESTBOX_PATH_SCRIPTS}/../scratch and that
+${TESTBOX_PATH_SCRATCH}/ will be automatically wiped by the testbox script.)</dd>
+<dt>TESTBOX_PATH_SCRIPTS</dt>
+<dd>The absolute path to the test driver and the other files that were unzipped
+together with it. This is also where the test-driver-abort file will be put.
+(Envisioned as ${TESTBOX_PATH_SCRATCH}/../driver, see above.)</dd>
+<dt>TESTBOX_PATH_UPLOAD</dt>
+<dd>The absolute path to the upload directory for the testbox. This is for
+putting VOBs, PNGs, core dumps, crash dumps, and such on. The files should be
+bzipped or zipped if they aren't compressed already. The names should contain
+the testbox and test set ID.</dd>
+<dt>TESTBOX_REPORTER</dt>
+<dd>The name of the test reporter back end. If not present, it will default to
+the local reporter.</dd>
+<dt>TESTBOX_TEST_SET_ID</dt>
+<dd>The test set ID if we're running.
+This is not required by the local reporter.</dd>
+<dt>TESTBOX_MANAGER_URL</dt>
+<dd>The URL to the test manager.
+This is not required by the local reporter.</dd>
+<dt>TESTBOX_XYZ</dt>
+<dd>There will probably be some more of these.</dd>
+</dl>
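+<p>For illustration, a test driver could pick these up along the following lines;
+the defaults for the local (manager-less) case are invented here:</p>
+<pre class="literal-block">
+# Sketch: resolve the testbox environment with local-run fallbacks.
+import os
+
+def get_testbox_env():
+    scripts = os.environ.get('TESTBOX_PATH_SCRIPTS', os.path.dirname(__file__))
+    return {
+        'name':      os.environ.get('TESTBOX_NAME', 'local'),
+        'scripts':   scripts,
+        'scratch':   os.environ.get('TESTBOX_PATH_SCRATCH',
+                                    os.path.join(scripts, '..', 'scratch')),
+        'builds':    os.environ.get('TESTBOX_PATH_BUILDS', ''),
+        'resources': os.environ.get('TESTBOX_PATH_RESOURCES', ''),
+        'upload':    os.environ.get('TESTBOX_PATH_UPLOAD', ''),
+        'reporter':  os.environ.get('TESTBOX_REPORTER', 'local'),
+        'testsetid': os.environ.get('TESTBOX_TEST_SET_ID'),
+        'tm_url':    os.environ.get('TESTBOX_MANAGER_URL'),
+    }
+</pre>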
+</div>
+<div class="section" id="testbox-environment-core-utilities">
+<h2>Testbox Environment: Core Utilities</h2>
+<p>The testbox will not provide the typical unix /bin and /usr/bin utilities. In
+other words, cygwin will not be used on Windows!</p>
+<p>The testbox will provide the unixy utilities that ship with kBuild and possibly
+some additional ones from tools/*.*/bin in the VirtualBox tree (wget, unzip,
+zip, and so on). The test drivers will avoid invoking any of these utilities
+directly and instead rely on generic utility methods in the test driver
+framework. That way we can more easily reimplement the functionality of the
+core utilities and drop the dependency on them. It also allows us to quickly
+work around platform specific oddities and bugs.</p>
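+<p>A small sketch of what such generic utility methods might look like when
+implemented on top of the Python standard library instead of external tools
+(the function names are made up):</p>
+<pre class="literal-block">
+# Sketch: helpers so test drivers never shell out to cp/rm/unzip directly.
+import os
+import shutil
+import zipfile
+
+def copy_file(src, dst):
+    shutil.copyfile(src, dst)
+
+def wipe_directory(path):
+    """Remove the contents of a directory but keep the directory itself."""
+    for entry in os.listdir(path):
+        full = os.path.join(path, entry)
+        if os.path.isdir(full) and not os.path.islink(full):
+            shutil.rmtree(full, ignore_errors=True)
+        else:
+            os.remove(full)
+
+def unzip_file(zip_path, dst_dir):
+    with zipfile.ZipFile(zip_path) as archive:
+        archive.extractall(dst_dir)
+</pre>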
+</div>
+<div class="section" id="test-drivers">
+<h2>Test Drivers</h2>
+<p>The test drivers are programs that will do the actual testing. In addition to
+running under the testbox script, they can be executed in the VirtualBox development
+environment. This is important for bug analysis and for simplifying local
+testing by the developers before committing changes. It also means the test
+drivers can be developed locally in the VirtualBox development environment.</p>
+<p>The main difference between executing a driver under the testbox script and
+running it manually is that there is no test manager in the latter case. The
+test result reporter will not talk to the server, but report things to a local
+log file and/or standard out/err. When invoked manually, all the necessary
+arguments will need to be specified by hand of course - it should be possible
+to extract them from a test set as well.</p>
+<p>For the early implementation stages, an implementation of the reporter interface
+that talks to the tinderbox based test manager will be needed. This will be
+dropped later on when a new test manager is ready.</p>
+<p>As hinted at in other sections, there will be a common framework
+(libraries/packages/classes) for taking care of the tedious bits that every
+test driver needs to do. Sharing code is essential to easing test driver
+development as well as reducing their complexity. The framework will contain:</p>
+<blockquote>
+<ul>
+<li><p class="first">A generic way of submitting output. This will be a generic interface with
+multiple implementation, the TESTBOX_REPORTER environment variable
+will decide which of them to use. The interface will have very specific
+methods to allow the reporter to do a best possible job in reporting the
+results to the test manager.</p>
+</li>
+<li><dl class="first docutils">
+<dt>Helpers for typical tasks, like:</dt>
+<dd><ul class="first last simple">
+<li>Copying files.</li>
+<li>Deleting files, directory trees and scratch space.</li>
+<li>Unzipping files.</li>
+<li>Creating ISOs.</li>
+<li>And such things.</li>
+</ul>
+</dd>
+</dl>
+</li>
+<li><p class="first">Helpers for installing and uninstalling VirtualBox.</p>
+</li>
+<li><p class="first">Helpers for defining VMs. (The VBox API where available.)</p>
+</li>
+<li><p class="first">Helpers for controlling VMs. (The VBox API where available.)</p>
+</li>
+</ul>
+</blockquote>
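+<p>A minimal sketch of the reporter selection mentioned in the first bullet; the
+class and method names are assumptions, not the actual interface:</p>
+<pre class="literal-block">
+# Sketch: reporter back end chosen via TESTBOX_REPORTER.
+import os
+import sys
+
+class LocalReporter(object):
+    """Writes results to standard output instead of talking to the test manager."""
+    def test_start(self, name):
+        sys.stdout.write('test started: %s\n' % (name,))
+    def test_done(self, name, passed):
+        sys.stdout.write('test done: %s (%s)\n' % (name, 'PASS' if passed else 'FAIL'))
+
+class RemoteReporter(object):
+    """Would submit results to the test manager; omitted in this sketch."""
+    def test_start(self, name):
+        pass
+    def test_done(self, name, passed):
+        pass
+
+def create_reporter():
+    if os.environ.get('TESTBOX_REPORTER', 'local') == 'remote':
+        return RemoteReporter()
+    return LocalReporter()
+</pre>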
+<p>The VirtualBox bits will be separate from the more generic ones, simply because
+this is cleaner and it will allow us to reuse the system for testing other products.</p>
+<p>The framework will be packaged in a separate zip file from the test driver so we
+don't waste time and space downloading the same common code.</p>
+<p>The test driver will poll for the file
+${TESTBOX_PATH_SCRIPTS}/test-driver-abort and abort all testing when it sees it.</p>
+<p>The test driver can be invoked in three modes: execute, help and cleanup. The
+default is execute mode, the help mode shows a configuration summary, and the cleanup
+mode is for cleaning up after a reboot or an aborted run. The latter is done by the
+testbox script on startup and after an abort - the driver is expected to clean up
+by itself after a normal run.</p>
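+<p>As an illustration, the mode handling and the abort-file polling described
+above might boil down to something like this (the driver object and its
+methods are hypothetical):</p>
+<pre class="literal-block">
+# Sketch: driver entry point with execute/help/cleanup modes and abort check.
+import os
+import sys
+
+def abort_requested():
+    scripts = os.environ.get('TESTBOX_PATH_SCRIPTS', '.')
+    return os.path.exists(os.path.join(scripts, 'test-driver-abort'))
+
+def main(argv, driver):
+    """driver is assumed to provide execute(), show_help() and cleanup()."""
+    mode = 'execute'
+    if len(argv) == 2:
+        mode = argv[1]
+    if mode == 'help':
+        return driver.show_help()
+    if mode == 'cleanup':
+        return driver.cleanup()
+    # Execute mode; the driver is expected to call check_abort() between steps.
+    return driver.execute(check_abort=abort_requested)
+</pre>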
+</div>
+</div>
+<div class="section" id="the-server-side">
+<h1>The Server Side</h1>
+<p>The server side will be implemented using a webserver (apache), a database
+(postgres) and cgi scripts (Python). In addition a cron job (Python) running
+once a minute will generate static html for frequently used pages and maybe
+execute some other tasks for driving the testing forwards. The order queries
+from the testbox scripts are the primary driving force in the system. Together,
+these components make up the test manager.</p>
+<p>The test manager can be split up into three rough parts:</p>
+<blockquote>
+<ul class="simple">
+<li>Configuration (of tests, testgroups and testboxes).</li>
+<li>Execution (of tests, collecting and organizing the output).</li>
+<li>Analysis (of test output, mostly about presentation).</li>
+</ul>
+</blockquote>
+</div>
+<div class="section" id="test-manager-requirements">
+<h1>Test Manager: Requirements</h1>
+<p>List of requirements:</p>
+<blockquote>
+<ul>
+<li><p class="first">Two level testing - L1 quick smoke tests and L2 longer tests performed on
+builds passing L1. (Klaus (IIRC) meant this could be realized using
+test dependencies.)</p>
+</li>
+<li><p class="first">Black listing builds (by revision or similar) known to be bad.</p>
+</li>
+<li><p class="first">Distinguish between build types so we can do a portion of the testing with
+strict builds.</p>
+</li>
+<li><p class="first">Easy to re-configure build source for testing different branch or for
+testing a release candidate. (Directory based is fine.)</p>
+</li>
+<li><p class="first">Useful to be able to partition testboxes (run specific builds on some
+boxes, let an engineer have a few boxes for a while).</p>
+</li>
+<li><p class="first">Interation with ILOM/...: reset systems.</p>
+</li>
+<li><p class="first">Be able to suspend testing on selected testboxes when doing maintance
+(where automatically resuming testing on reboot is undesired) or similar
+activity.</p>
+</li>
+<li><p class="first">Abort testing on seleced testboxes.</p>
+</li>
+<li><p class="first">Scheduling of tests requiring more than one testbox.</p>
+</li>
+<li><p class="first">Scheduling of tests that cannot be executing concurrently on several
+machines because of some global resource like an iSCSI target.</p>
+</li>
+<li><p class="first">Jump the scheduling queue. Scheduling of specified test the next time a
+testbox is available (optionally specifying which testbox to schedule it
+on).</p>
+</li>
+<li><dl class="first docutils">
+<dt>Configure tests with variable configuration to get better coverage. Two modes:</dt>
+<dd><ul class="first last simple">
+<li>TM generates the permutations based on one or more sets of test script arguments.</li>
+<li>Each configuration permutation is specified manually.</li>
+</ul>
+</dd>
+</dl>
+</li>
+<li><p class="first">Test specification needs to be flexible (select tests, disable test, test
+scheduling (run certain tests nightly), ... ).</p>
+</li>
+<li><p class="first">Test scheduling by hour+weekday and by priority.</p>
+</li>
+<li><p class="first">Test dependencies (test A depends on test B being successful).</p>
+</li>
+<li><p class="first">Historize all configuration data, in particular test configs (permutations
+included) and testboxes.</p>
+</li>
+<li><p class="first">Test sets has at a minimum a build reference, a testbox reference and a
+primary log associated with it.</p>
+</li>
+<li><dl class="first docutils">
+<dt>Test sets store further results as a recursive collection of:</dt>
+<dd><ul class="first last simple">
+<li>hierarchical subtest name (slash sep)</li>
+<li>test parameters / config</li>
+<li>bool fail/succ</li>
+<li>attributes (typed?)</li>
+<li>test time</li>
+<li>e.g. throughput</li>
+<li>subresults</li>
+<li>log</li>
+<li>screenshots, video,...</li>
+</ul>
+</dd>
+</dl>
+</li>
+<li><p class="first">The test sets database structure needs to designed such that data mining
+can be done in an efficient manner.</p>
+</li>
+<li><p class="first">Presentation/analysis: graphs!, categorize bugs, columns reorganizing
+grouped by test (hierarchical), overviews, result for last day.</p>
+</li>
+</ul>
+</blockquote>
+</div>
+<div class="section" id="test-manager-configuration">
+<h1>Test Manager: Configuration</h1>
+<div class="section" id="testboxes">
+<h2>Testboxes</h2>
+<p>Configuration of testboxes doesn't involve much work normally. A testbox
+is added manually to the test manager by entering the DNS entry and/or IP
+address (the test manager resolves the missing one when necessary) as well as
+the system UUID (when obtainable - should be displayed by the testbox script
+installer). Queries from unregistered testboxes will be declined as a kind of
+security measure; the incident should be logged in the webserver log if
+possible. In later dealings with the client, the system UUID will be the key
+identifier. It's permissible for the IP address to change when the testbox
+isn't online, but not while testing (just imagine live migration tests and
+network tests). Ideally, the testboxes should not change IP address.</p>
+<p>The testbox edit function must allow changing the name and system UUID.</p>
+<p>One further idea for the testbox configuration is indicating what they are
+capable of to filter out tests and test configurations that won't work on that
+testbox. To exemplify this, take the ACP2 installation test. If the test
+manager does not make sure the testbox has VT-x or AMD-V capabilities, the test
+is surely going to fail. Other testbox capabilities would be total number of
+CPU cores, memory size, scratch space. These testbox capabilities should be
+collected automatically on bootup by the testbox script together with OS name,
+OS version and OS bitness.</p>
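+<p>A trivial sketch of such capability based filtering; the requirement and
+capability field names are only illustrative:</p>
+<pre class="literal-block">
+# Sketch: filter out test configurations the testbox cannot run.
+def testbox_can_run(testbox, requirements):
+    if requirements.get('fNeedsHwVirt') and not (testbox['fVTx'] or testbox['fAmdV']):
+        return False
+    if testbox['cCpus'] &lt; requirements.get('cMinCpus', 1):
+        return False
+    if testbox['cMbMemory'] &lt; requirements.get('cMbMinMemory', 0):
+        return False
+    if testbox['cMbScratch'] &lt; requirements.get('cMbMinScratch', 0):
+        return False
+    return True
+</pre>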
+<p>A final thought, instead of outright declining all requests from new testboxes,
+we could record the unregistered testboxes with IP, UUID, name, OS info and
+capabilities but mark them as inactive. The test operator can then activate
+them on an activation page or edit the testbox or something.</p>
+</div>
+<div class="section" id="testcases">
+<h2>Testcases</h2>
+<p>We use the term testcase for a test.</p>
+</div>
+<div class="section" id="testgroups">
+<h2>Testgroups</h2>
+<p>Testcases are organized into groups. A testcase can be a member of more than one
+group. The testcase gets a priority assigned to it in connection with the
+group membership.</p>
+<p>Testgroups are picked up by a testbox partition (aka scheduling group); a
+priority, a scheduling time restriction and dependencies on other test groups are
+associated with the assignment. A testgroup can be used by several testbox
+partitions.</p>
+<p>(This used to be called 'testsuites' but was renamed to avoid confusion with
+the VBox Test Suite.)</p>
+</div>
+<div class="section" id="scheduling">
+<h2>Scheduling</h2>
+<p>The initial scheduler will be modelled after what we're already doing in the
+tinderbox driven testing. It's best described as a best effort continuous
+integration scheduler. Meaning, it will always use the latest build suitable
+for a testcase. It will schedule on a testcase level, using the combined
+priority of the testcase in the test group and the test group within the testbox
+partition, trying to spread the test case argument variations out accordingly
+over the whole scheduling queue. Which argument variation to start with is
+not defined (random would be best).</p>
+<p>Later, we may add other schedulers as needed.</p>
+</div>
+</div>
+<div class="section" id="the-test-manager-database">
+<h1>The Test Manager Database</h1>
+<p>First a general warning:</p>
+<blockquote>
+The guys working on this design are not database experts, web
+programming experts or similar, rather we are low level guys
+whose main job is x86 &amp; AMD64 virtualization. So, please don't
+be too hard on us. :-)</blockquote>
+<p>A logical table layout can be found in TestManagerDatabaseMap.png (created by
+Oracle SQL Data Modeler, stored in TestManagerDatabase.dmd). The physical
+database layout can be found in the TestManagerDatabaseInit.pgsql PostgreSQL
+script. The script is commented.</p>
+<div class="section" id="data-history">
+<h2>Data History</h2>
+<p>We need to somehow track configuration changes over time. We also need to
+be able to query the exact configuration a test set was run with so we can
+understand and make better use of the results.</p>
+<p>There are different techniques for achieving this, one is tuple-versioning
+( <a class="reference external" href="http://en.wikipedia.org/wiki/Tuple-versioning">http://en.wikipedia.org/wiki/Tuple-versioning</a> ), another is log trigger
+( <a class="reference external" href="http://en.wikipedia.org/wiki/Log_trigger">http://en.wikipedia.org/wiki/Log_trigger</a> ). We use tuple-versioning in
+this database, with 'effective' as start date field name and 'expire' as
+the end (exclusive).</p>
+<p>Tuple-versioning has a shortcoming with regard to keys, both primary and foreign.
+The primary key of a table employing tuple-versioning is really
+'id' + 'valid_period', where the latter is expressed using two fields
+([effective...expire-1]). Only, how do you tell the database engine that
+it should not allow overlapping valid_periods? Useful suggestions are
+welcomed. :-)</p>
+<p>Foreign key references to a table using tuple-versioning run into
+trouble because of the time axis and because, to our knowledge, foreign keys
+must reference exactly one row in the other table. When time is involved,
+what we wish to tell the database is that at any given time, there actually
+is exactly one row we want to match in the other table, only we've no idea
+how to express this. So, many foreign keys are not expressed in the SQL of
+this database.</p>
+<p>In some cases, we extend the tuple-versioning with a generation ID so that
+normal foreign key referencing can be used. We only use this for recording
+(references in testset) and scheduling (schedqueue), as using it more widely
+would force updates (gen_id changes) to propagate into all related tables.</p>
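+<p>To illustrate the tuple-versioning scheme, a historizing update could look
+roughly like this (a sketch against a DB-API connection; the column list is
+trimmed to fields mentioned in this document):</p>
+<pre class="literal-block">
+# Sketch: historize a row in a tuple-versioned table.
+def historize_testbox(conn, id_testbox, new_name):
+    """conn is a DB-API connection (e.g. psycopg2) to the test manager database."""
+    cursor = conn.cursor()
+    # Close the validity period of the currently effective row ...
+    cursor.execute("UPDATE TestBoxes"
+                   "   SET tsExpire = CURRENT_TIMESTAMP"
+                   " WHERE idTestBox = %s"
+                   "   AND tsExpire = 'infinity'::timestamp;", (id_testbox,))
+    # ... and insert its replacement, effective from now until 'infinity'.
+    cursor.execute("INSERT INTO TestBoxes (idTestBox, tsEffective, tsExpire, sName)"
+                   " VALUES (%s, CURRENT_TIMESTAMP, 'infinity'::timestamp, %s);",
+                   (id_testbox, new_name))
+    conn.commit()
+</pre>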
+<dl class="docutils">
+<dt>See also:</dt>
+<dd><ul class="first last simple">
+<li><a class="reference external" href="http://en.wikipedia.org/wiki/Slowly_changing_dimension">http://en.wikipedia.org/wiki/Slowly_changing_dimension</a></li>
+<li><a class="reference external" href="http://en.wikipedia.org/wiki/Change_data_capture">http://en.wikipedia.org/wiki/Change_data_capture</a></li>
+<li><a class="reference external" href="http://en.wikipedia.org/wiki/Temporal_database">http://en.wikipedia.org/wiki/Temporal_database</a></li>
+</ul>
+</dd>
+</dl>
+</div>
+</div>
+<div class="section" id="test-manager-execution">
+<h1>Test Manager: Execution</h1>
+</div>
+<div class="section" id="test-manager-scenarios">
+<h1>Test Manager: Scenarios</h1>
+<div class="section" id="testbox-signs-on-at-bootup">
+<h2>#1 - Testbox Signs On (At Bootup)</h2>
+<dl class="docutils">
+<dt>The testbox supplies a number of inputs when reporting for duty:</dt>
+<dd><ul class="first last simple">
+<li>IP address.</li>
+<li>System UUID.</li>
+<li>OS name.</li>
+<li>OS version.</li>
+<li>CPU architecture.</li>
+<li>CPU count (= threads).</li>
+<li>CPU VT-x/AMD-V capability.</li>
+<li>CPU nested paging capability.</li>
+<li>Chipset I/O MMU capability.</li>
+<li>Memory size.</li>
+<li>Scratch space size (for testing).</li>
+<li>Testbox Script revision.</li>
+</ul>
+</dd>
+<dt>Results:</dt>
+<dd><ul class="first last simple">
+<li>ACK or NACK.</li>
+<li>Testbox ID and name on ACK.</li>
+</ul>
+</dd>
+</dl>
+<p>After receiving an ACK the testbox will ask for work to do, i.e. continue with
+scenario #2. In the NACK case, it will sleep for 60 seconds and try again.</p>
+<p>Actions:</p>
+<ol class="arabic">
+<li><p class="first">Validate the testbox by looking the UUID up in the TestBoxes table.
+If not found, NACK the request. SQL:</p>
+<pre class="literal-block">
+SELECT idTestBox, sName
+FROM TestBoxes
+WHERE uuidSystem = :sUuid
+ AND tsExpire = 'infinity'::timestamp;
+</pre>
+</li>
+<li><p class="first">Check if any of the information by testbox script has changed. The two
+sizes are normalized first, memory size rounded to nearest 4 MB and scratch
+space is rounded down to nearest 64 MB. If anything changed, insert a new
+row in the testbox table and historize the current one, i.e. set
+OLD.tsExpire to NEW.tsEffective and get a new value for NEW.idGenTestBox.</p>
+</li>
+<li><dl class="first docutils">
+<dt>Check with TestBoxStatuses:</dt>
+<dd><ol class="first last loweralpha simple">
+<li>If there is already a row for the testbox in it, clean it up: change it
+to the 'idle' state and deal with any open testset as described in
+scenario #9.</li>
+<li>If there is no row, add one with 'idle' state.</li>
+</ol>
+</dd>
+</dl>
+</li>
+<li><p class="first">ACK the request and pass back the idTestBox.</p>
+</li>
+</ol>
+<dl class="docutils">
+<dt>Note! Testbox.enabled is not checked here, that is only relevant when it asks</dt>
+<dd>for a new task (scenario #2 and #5).</dd>
+<dt>Note! Should the testbox script detect changes in any of the inputs, it should</dt>
+<dd>redo the sign in.</dd>
+<dt>Note! In scenario #8, the box will not sign on until it has done the reboot and</dt>
+<dd>cleanup reporting!</dd>
+</dl>
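+<p>A small sketch of the normalization and change detection in step 2; the field
+names in the comparison are invented for the example:</p>
+<pre class="literal-block">
+# Sketch: normalize the sizes reported by the testbox script (step 2 above).
+def normalize_testbox_sizes(mb_memory, mb_scratch):
+    # Memory size is rounded to the nearest 4 MB ...
+    mb_memory = ((mb_memory + 2) // 4) * 4
+    # ... while scratch space is rounded down to the nearest 64 MB.
+    mb_scratch = (mb_scratch // 64) * 64
+    return mb_memory, mb_scratch
+
+def testbox_info_changed(old_row, new_info):
+    """Compare the stored row with the (normalized) sign-on information."""
+    keys = ('sOs', 'sOsVersion', 'sCpuVendor', 'cCpus', 'fVTx', 'fAmdV',
+            'cMbMemory', 'cMbScratch')
+    return any(old_row.get(key) != new_info.get(key) for key in keys)
+</pre>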
+</div>
+<div class="section" id="testbox-asks-for-work-to-do">
+<h2>#2 - Testbox Asks For Work To Do</h2>
+<dl class="docutils">
+<dt>Inputs:</dt>
+<dd><ul class="first last simple">
+<li>The testbox is supplying its IP indirectly.</li>
+<li>The testbox should supply its UUID and ID directly.</li>
+</ul>
+</dd>
+<dt>Results:</dt>
+<dd><ul class="first last simple">
+<li>IDLE, WAIT, EXEC, REBOOT, UPGRADE, UPGRADE-AND-REBOOT, SPECIAL or DEAD.</li>
+</ul>
+</dd>
+</dl>
+<p>Actions:</p>
+<ol class="arabic">
+<li><p class="first">Validate the ID and IP by selecting the currently valid testbox row:</p>
+<pre class="literal-block">
+SELECT idGenTestBox, fEnabled, idSchedGroup, enmPendingCmd
+FROM TestBoxes
+WHERE id = :id
+ AND uuidSystem = :sUuid
+ AND ip = :ip
+ AND tsExpire = 'infinity'::timestamp;
+</pre>
+<p>If NOT found return DEAD to the testbox client (it will go back to sign on
+mode and retry every 60 seconds or so - see scenario #1).</p>
+<dl class="docutils">
+<dt>Note! The WUI will do all necessary clean-ups when deleting a testbox, so</dt>
+<dd><p class="first last">contrary to the initial plans, we don't need to do anything more for
+the DEAD status.</p>
+</dd>
+</dl>
+</li>
+<li><p class="first">Check with TestBoxStatuses (maybe joined with query from 1).</p>
+<p>If enmState is 'gang-gathering': Goto scenario #6 on timeout or pending
+'abort' or 'reboot' command. Otherwise, tell the testbox to WAIT [done].</p>
+<p>If enmState is 'gang-testing': The gang has been gathered and execution
+has been triggered. Goto 5.</p>
+<p>If enmState is not 'idle', change it to 'idle'.</p>
+<p>If idTestSet is not NULL, call scenario #9 to clean it up.</p>
+<p>If there is a pending abort command, remove it.</p>
+<p>If there is a pending command and the old state doesn't indicate that it was
+being executed, GOTO scenario #3.</p>
+<dl class="docutils">
+<dt>Note! There should be a TestBoxStatuses row after executing scenario #1,</dt>
+<dd><p class="first last">however should none be found for some funky reason, returning DEAD
+will fix the problem (see above)</p>
+</dd>
+</dl>
+</li>
+<li><p class="first">If the testbox was marked as disabled, respond with an IDLE command to the
+testbox [done]. (Note! Must do this after the TestBoxStatuses maintenance from
+point 2, or abandoned tests won't be cleaned up after a testbox is disabled.)</p>
+</li>
+<li><p class="first">Consider testcases in the scheduling queue, pick the first one which the
+testbox can execute. There is a concurrency issue here, so we put an
+exclusive lock on the SchedQueues table while considering its content.</p>
+<p>The cursor we open looks something like this:</p>
+<pre class="literal-block">
+SELECT idItem, idGenTestCaseArgs,
+ idTestSetGangLeader, cMissingGangMembers
+FROM SchedQueues
+WHERE idSchedGroup = :idSchedGroup
+ AND ( bmHourlySchedule is NULL
+ OR get_bit(bmHourlySchedule, :iHourOfWeek) = 1 ) --&lt; does this work?
+ORDER BY ASC idItem;
+</pre>
+</li>
+</ol>
+<blockquote>
+<p>If no rows are returned (this can happen because no testgroups are
+associated with this scheduling group, the scheduling group is disabled,
+or because the queue is being regenerated), we will tell the testbox to
+IDLE [done].</p>
+<dl class="docutils">
+<dt>For each returned row we will:</dt>
+<dd><ol class="first last loweralpha">
+<li><p class="first">Check testcase/group dependencies.</p>
+</li>
+<li><p class="first">Select a build (and default testsuite) satisfying the dependencies.</p>
+</li>
+<li><p class="first">Check the testcase requirements with that build in mind.</p>
+</li>
+<li><p class="first">If idTestSetGangLeader is NULL, try allocate the necessary resources.</p>
+</li>
+<li><p class="first">If it didn't check out, fetch the next row and redo from (a).</p>
+</li>
+<li><p class="first">Tentatively create a new test set row.</p>
+</li>
+<li><dl class="first docutils">
+<dt>If not gang scheduling:</dt>
+<dd><ul class="first last simple">
+<li>Next state: 'testing'</li>
+</ul>
+</dd>
+<dt>ElIf we're the last gang participant:</dt>
+<dd><ul class="first last simple">
+<li>Set idTestSetGangLeader to NULL.</li>
+<li>Set cMissingGangMembers to 0.</li>
+<li>Next state: 'gang-testing'</li>
+</ul>
+</dd>
+<dt>ElIf we're the first gang member:</dt>
+<dd><ul class="first last simple">
+<li>Set cMissingGangMembers to TestCaseArgs.cGangMembers - 1.</li>
+<li>Set idTestSetGangLeader to our idTestSet.</li>
+<li>Next state: 'gang-gathering'</li>
+</ul>
+</dd>
+<dt>Else:</dt>
+<dd><ul class="first last simple">
+<li>Decrement cMissingGangMembers.</li>
+<li>Next state: 'gang-gathering'</li>
+</ul>
+</dd>
+<dt>If we're not gang scheduling OR cMissingGangMembers is 0:</dt>
+<dd><p class="first last">Move the scheduler queue entry to the end of the queue.</p>
+</dd>
+</dl>
+<p>Update our TestBoxStatuses row with the new state and test set.
+COMMIT;</p>
+</li>
+</ol>
+</dd>
+</dl>
+</blockquote>
+<ol class="arabic" start="5">
+<li><dl class="first docutils">
+<dt>If state is 'testing' or 'gang-testing':</dt>
+<dd><p class="first">EXEC reponse.</p>
+<p class="last">The EXEC response for a gang scheduled testcase includes a number of
+extra arguments so that the script knows the position of the testbox
+it is running on and of the other members. This means the that the
+TestSet.iGangMemberNo is passed using --gang-member-no and the IP
+addresses of the all gang members using --gang-ipv4-&lt;memb-no&gt; &lt;ip&gt;.</p>
+</dd>
+<dt>Else (state is 'gang-gathering'):</dt>
+<dd><p class="first last">WAIT</p>
+</dd>
+</dl>
+</li>
+</ol>
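+<p>The per-row checks in step 4 could be structured roughly like this; every
+helper is a placeholder for the corresponding check described above:</p>
+<pre class="literal-block">
+# Sketch of the row-by-row consideration in step 4 (helpers are placeholders).
+def pick_queue_entry(queue_rows, testbox, checks):
+    """Returns (row, build) for the first schedulable entry, or None."""
+    for row in queue_rows:
+        if not checks.dependencies_met(row):
+            continue
+        build = checks.find_suitable_build(row)
+        if build is None:
+            continue
+        if not checks.testcase_requirements_met(row, build, testbox):
+            continue
+        if row['idTestSetGangLeader'] is None:
+            if not checks.allocate_global_resources(row, testbox):
+                continue
+        return row, build
+    return None
+</pre>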
+</div>
+<div class="section" id="pending-command-when-testbox-asks-for-work">
+<h2>#3 - Pending Command When Testbox Asks For Work</h2>
+<p>This is a subfunction of scenario #2 and #5.</p>
+<p>As seen in scenario #2, pending 'abort' commands are sent to /dev/null
+when the testbox is found not to be executing a test. This includes when it reports
+that the test has completed (no need to abort a completed test and waste a lot
+of effort when standing at the finish line).</p>
+<p>The other commands, though, are passed back to the testbox. The testbox
+script will respond with an ACK or NACK as it sees fit. If NACKed, the
+pending command will be removed (pending_cmd set to none) and that's it.
+If ACKed, the state of the testbox will change to that appropriate for the
+command and the pending_cmd set to none. Should the testbox script fail to
+respond, the command will be repeated the next time it asks for work.</p>
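+<p>A compressed sketch of that command hand-off on the test manager side; the
+state names and helper methods below are assumptions:</p>
+<pre class="literal-block">
+# Sketch: hand a pending command to the testbox and update TestBoxStatuses.
+CMD_TO_STATE = {
+    'reboot':             'rebooting',
+    'upgrade':            'upgrading',
+    'upgrade-and-reboot': 'upgrading-and-rebooting',
+}
+
+def handle_command_response(db, id_testbox, command, acked):
+    if acked:
+        # The testbox accepted the command; reflect that in its state.
+        db.set_testbox_state(id_testbox, CMD_TO_STATE.get(command, 'idle'))
+    # ACKed or NACKed, the pending command is now consumed.
+    db.set_pending_command(id_testbox, None)
+</pre>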
+</div>
+<div class="section" id="testbox-uploads-results-during-test">
+<h2>#4 - Testbox Uploads Results During Test</h2>
+<p>TODO</p>
+</div>
+<div class="section" id="testbox-completes-test-and-asks-for-work">
+<h2>#5 - Testbox Completes Test and Asks For Work</h2>
+<p>This is very similar to scenario #2.</p>
+<p>TODO</p>
+</div>
+<div class="section" id="gang-gathering-timeout">
+<h2>#6 - Gang Gathering Timeout</h2>
+<p>This is a subfunction of scenario #2.</p>
+<p>When gathering a gang of testboxes for a testcase, we do not want to wait
+forever and have testboxes doing nothing for hours while waiting for partners.
+So, the gathering has a reasonable timeout (imagine something like 20-30 mins).</p>
+<p>Also, we need some way of dealing with 'abort' and 'reboot' commands being
+issued while waiting. The easy way out is to pretend it's a timeout.</p>
+<p>When changing the status to 'gang-timeout' we have to be careful. First of all,
+we need to exclusively lock the SchedQueues and TestBoxStatuses (in that order)
+and re-query our status. If it has changed, redo the checks in scenario #2 point 2.</p>
+<p>If we still want to timeout/abort, change the state from 'gang-gathering' to
+'gang-gathering-timedout' on all the gang members that have gathered so far.
+Then reset the scheduling queue record and move it to the end of the queue.</p>
+<p>When acting on 'gang-timeout' the TM will fail the testset in a manner similar
+to scenario #9. No need to repeat that.</p>
+</div>
+<div class="section" id="gang-cleanup">
+<h2>#7 - Gang Cleanup</h2>
+<p>When a testbox completes a gang scheduled test, we will have to serialize
+resource cleanup (both globally and on testboxes) as they stop. More details
+can be found in the documentation of 'gang-cleanup'.</p>
+<p>So, the transition from 'gang-testing' is always to 'gang-cleanup'. When we
+can safely leave 'gang-cleanup' is decided by the query:</p>
+<pre class="literal-block">
+SELECT COUNT(*)
+FROM TestBoxStatuses,
+ TestSets
+WHERE TestSets.idTestSetGangLeader = :idTestSetGangLeader
+ AND TestSets.idTestBox = TestBoxStatuses.idTestBox
+ AND TestBoxStatuses.enmState = 'gang-running'::TestBoxState_T;
+</pre>
+<p>As long as there are testboxes still running, we stay in the 'gang-cleanup'
+state. Once there are none, we continue closing the testset and such.</p>
+</div>
+<div class="section" id="testbox-reports-a-crash-during-test-execution">
+<h2>#8 - Testbox Reports A Crash During Test Execution</h2>
+<p>TODO</p>
+</div>
+<div class="section" id="cleaning-up-abandoned-testcase">
+<h2>#9 - Cleaning Up Abandoned Testcase</h2>
+<p>This is a subfunction of scenario #1 and #2. The actions taken are the same in
+both situations. The precondition for taking this path is that the row in the
+TestBoxStatuses table is referring to a testset (i.e. idTestSet is not NULL).</p>
+<p>Actions:</p>
+<ol class="arabic">
+<li><dl class="first docutils">
+<dt>If the testset is incomplete, we need to complete it:</dt>
+<dd><ol class="first last loweralpha simple">
+<li>Add a message to the root TestResults row, creating one if necessary,
+that explains that the test was abandoned. This is done
+by inserting/finding the string into/in TestResultStrTab and adding
+a row to TestResultMsgs with idStrMsg set to that string id and
+enmLevel set to 'failure'.</li>
+<li>Mark the testset as failed.</li>
+</ol>
+</dd>
+</dl>
+</li>
+<li><p class="first">Free any global resources referenced by the test set. This is done by
+deleting all rows in GlobalResourceStatuses matching the testbox id.</p>
+</li>
+<li><p class="first">Set the idTestSet to NULL in the TestBoxStatuses row.</p>
+</li>
+</ol>
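+<p>Put together, the three actions could look something like this; the SQL stays
+close to the scenarios above, while the status column in TestSets and the
+pre-looked-up IDs are assumptions:</p>
+<pre class="literal-block">
+# Sketch: clean up an abandoned test set (scenario #9).
+def cleanup_abandoned_testset(cursor, id_testbox, id_testset,
+                              id_root_result, id_str_msg):
+    """id_root_result / id_str_msg are assumed to be found or created by the caller."""
+    # 1. Complete the test set: add an 'abandoned' failure message, mark it failed.
+    cursor.execute("INSERT INTO TestResultMsgs (idTestResult, idStrMsg, enmLevel)"
+                   " VALUES (%s, %s, 'failure');", (id_root_result, id_str_msg))
+    cursor.execute("UPDATE TestSets SET enmStatus = 'failure' WHERE idTestSet = %s;",
+                   (id_testset,))
+    # 2. Free any global resources held by this testbox.
+    cursor.execute("DELETE FROM GlobalResourceStatuses WHERE idTestBox = %s;",
+                   (id_testbox,))
+    # 3. Detach the test set from the testbox status row.
+    cursor.execute("UPDATE TestBoxStatuses SET idTestSet = NULL WHERE idTestBox = %s;",
+                   (id_testbox,))
+</pre>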
+</div>
+<div class="section" id="cleaning-up-a-disabled-dead-testbox">
+<h2>#10 - Cleaning Up a Disabled/Dead TestBox</h2>
+<p>The UI needs to be able to clean up the remains of a testbox which for some
+reason is out of action. Normal cleaning up of abandoned testcases requires
+that the testbox signs on or asks for work, but if the testbox is dead or
+in some way indisposed, it won't be doing any of that. So, the testbox
+sheriff needs to have a way of cleaning up after it.</p>
+<p>It's basically a manual scenario #9 but with some safeguards, like checking
+that the box hasn't been active for the last 1-2 mins (max idle/wait time * 2).</p>
+<dl class="docutils">
+<dt>Note! When disabling a box that is still executing the testbox script, this</dt>
+<dd>cleanup isn't necessary as it will happen automatically. Also, it's
+probably desirable that the testbox finishes whatever it is doing first
+before going dormant.</dd>
+</dl>
+</div>
+</div>
+<div class="section" id="test-manager-analysis">
+<h1>Test Manager: Analysis</h1>
+<p>One of the testbox sheriff's tasks is to try to figure out the reason why something
+failed. The test manager will provide facilities for doing so from very early
+in its implementation.</p>
+<p>We need to work out some useful status reports for the early implementation.
+Later there will be more advanced analysis tools, where for instance we can
+create graphs from selected test result values or test execution times.</p>
+</div>
+<div class="section" id="implementation-plan">
+<h1>Implementation Plan</h1>
+<p>This has changed for various reasons. The current plan is to implement the
+infrastructure (TM &amp; testbox script) first and do a small deployment with the
+2-5 test drivers in the Testsuite as a basis. Once the bugs are worked out, we
+will convert the rest of the tests and start adding new ones.</p>
+<p>We just need to finally get this done, no point in doing it piecemeal by now!</p>
+<div class="section" id="test-manager-implementation-sub-tasks">
+<h2>Test Manager Implementation Sub-Tasks</h2>
+<p>The implementation of the test manager and adjusting/completing of the testbox
+script and the test drivers are tasks which can be done by more than one
+person. Splitting up the TM implementation into smaller tasks should allow
+parallel development of different tasks and get us working code sooner.</p>
+</div>
+<div class="section" id="milestone-1">
+<h2>Milestone #1</h2>
+<p>The goal is to get the fundamental test manager engine implemented, debugged
+and working. With the exception of testboxes, the configuration will be done
+via SQL inserts.</p>
+<p>Tasks in somewhat prioritized order:</p>
+<blockquote>
+<ul class="simple">
+<li>Kick off test manager. It will live in testmanager/. Salvage as much as
+possible from att/testserv. Create basic source and file layout.</li>
+<li>Adjust the testbox script, part one. There currently is a testbox script
+in att/testbox, this shall be moved up into testboxscript/. The script
+needs to be adjusted according to the specification laid down earlier
+in this document. Installers or installation scripts for all relevant
+host OSes are required. Left for part two is result reporting beyond the
+primary log. This task must be 100% feature complete, on all host OSes,
+there is no room for FIXME, XXX or &#64;todo here.</li>
+<li>Implement the schedule queue generator.</li>
+<li>Implement the testbox dispatcher in TM. Support all the testbox script
+responses implemented above, including upgrading the testbox script.</li>
+<li>Implement simple testbox management page.</li>
+<li>Implement some basic activity and result reports so that we can see
+what's going on.</li>
+<li>Create a testmanager / testbox test setup. This lives in selftest/.<ol class="arabic">
+<li>Set up something that runs, no fiddly bits. Debug till it works.</li>
+<li>Create a setup that tests testgroup dependencies, i.e. real tests
+depending on smoke tests.</li>
+<li>Create a setup that exercises testcase dependency.</li>
+<li>Create a setup that exercises global resource allocation.</li>
+<li>Create a setup that exercises gang scheduling.</li>
+</ol>
+</li>
+<li>Check that all features work.</li>
+</ul>
+</blockquote>
+</div>
+<div class="section" id="milestone-2">
+<h2>Milestone #2</h2>
+<p>The goal is getting to VBox testing.</p>
+<p>Tasks in somewhat prioritized order:</p>
+<blockquote>
+<ul class="simple">
+<li>Implement full result reporting in the testbox script and testbox driver.
+A testbox script specific reporter needs to be implemented for the
+testdriver framework. The testbox script needs to forward the results to
+the test manager, or alternatively the testdriver reporter can talk
+directly to the TM.</li>
+<li>Implement the test manager side of the test result reporting.</li>
+<li>Extend the selftest with some setup that report all kinds of test
+results.</li>
+<li>Implement script/whatever feeding builds to the test manager from the
+tinderboxes.</li>
+<li>The toplevel test driver is a VBox thing that must be derived from the
+base TestDriver class or maybe the VBox one. It should move from
+toptestdriver to testdriver and be renamed to vboxtltd or smth.</li>
+<li>Create a vbox testdriver that boots the t-xppro VM once and that's it.</li>
+<li>Create a selftest setup which tests booting t-xppro taking builds from
+the tinderbox.</li>
+</ul>
+</blockquote>
+</div>
+<div class="section" id="milestone-3">
+<h2>Milestone #3</h2>
+<p>The goal for this milestone is configuration and converting current testcases;
+the result will be a minimal test deployment (4-5 new testboxes).</p>
+<p>Tasks in somewhat prioritized order:</p>
+<blockquote>
+<ul class="simple">
+<li>Implement testcase configuration.</li>
+<li>Implement testgroup configuration.</li>
+<li>Implement build source configuration.</li>
+<li>Implement scheduling group configuration.</li>
+<li>Implement global resource configuration.</li>
+<li>Re-visit the testbox configuration.</li>
+<li>Black listing of builds.</li>
+<li>Implement simple failure analysis and reporting.</li>
+<li>Implement the initial smoke tests modelled on the current smoke tests.</li>
+<li>Implement installation tests for Windows guests.</li>
+<li>Implement installation tests for Linux guests.</li>
+<li>Implement installation tests for Solaris guests.</li>
+<li>Implement installation tests for OS/2 guests.</li>
+<li>Set up a small test deployment.</li>
+</ul>
+</blockquote>
+</div>
+<div class="section" id="further-work">
+<h2>Further work</h2>
+<p>After milestone #3 has been reached and issues found by the other team members
+have been addressed, we will probably go for full deployment.</p>
+<p>Beyond this point we will need to improve reporting and analysis. There may be
+configuration aspects needing reporting as well.</p>
+<p>Once deployed, a golden rule will be that all new features shall have test
+coverage. Preferably, implemented by someone else and prior to the feature
+implementation.</p>
+</div>
+</div>
+<div class="section" id="discussion-logs">
+<h1>Discussion Logs</h1>
+<div class="section" id="various-discussions-with-michal-and-or-klaus">
+<h2>2009-07-21,22,23 Various Discussions with Michal and/or Klaus</h2>
+<ul class="simple">
+<li>Scheduling of tests requiring more than one testbox.</li>
+<li>Scheduling of tests that cannot be executed concurrently on several machines
+because of some global resource like an iSCSI target.</li>
+<li>Manually create the test config permutations instead of having the test
+manager create all possible ones and wasting time.</li>
+<li>Distinguish between build types so we can run smoke tests on strict builds as
+well as release ones.</li>
+</ul>
+</div>
+<div class="section" id="brief-discussion-with-michal">
+<h2>2009-07-20 Brief Discussion with Michal</h2>
+<ul class="simple">
+<li>Installer for the testbox script to make bringing up a new testbox even
+smoother.</li>
+</ul>
+</div>
+<div class="section" id="raw-input">
+<h2>2009-07-16 Raw Input</h2>
+<ul>
+<li><dl class="first docutils">
+<dt>test set. recursive collection of:</dt>
+<dd><ul class="first last simple">
+<li>hierarchical subtest name (slash sep)</li>
+<li>test parameters / config</li>
+<li>bool fail/succ</li>
+<li>attributes (typed?)</li>
+<li>test time</li>
+<li>e.g. throughput</li>
+<li>subresults</li>
+<li>log</li>
+<li>screenshots,....</li>
+</ul>
+</dd>
+</dl>
+</li>
+<li><p class="first">client package (zip) dl from server (maybe client caching)</p>
+</li>
+<li><dl class="first docutils">
+<dt>thoughts on bits to do at once.</dt>
+<dd><ul class="first last simple">
+<li>We <em>really</em> need the basic bits ASAP.</li>
+<li>client -&gt; support for test driver</li>
+<li>server -&gt; controls configs</li>
+<li>cleanup on both sides</li>
+</ul>
+</dd>
+</dl>
+</li>
+</ul>
+</div>
+<div class="section" id="id1">
+<h2>2009-07-15 Raw Input</h2>
+<ul class="simple">
+<li>testing should start automatically</li>
+<li>switching to branch too tedious</li>
+<li>useful to be able to partition testboxes (run specific builds on some boxes, let an engineer have a few boxes for a while).</li>
+<li>test specification needs to be more flexible (select tests, disable test, test scheduling (run certain tests nightly), ... )</li>
+<li>testcase dependencies (blacklisting builds, run smoketests on box A before long tests on box B, ...)</li>
+<li>more testing flexibility, more tests than just install/smoke. For instance unit tests, benchmarks, ...</li>
+<li>presentation/analysis: graphs!, categorize bugs, columns reorganizing grouped by test (hierarchical), overviews, result for last day.</li>
+<li>testcase specification, variables (e.g. I/O-APIC, SMP, HWVIRT, SATA...) as sub-tests</li>
+<li>interaction with ILOM/...: reset systems</li>
+<li>Changes needs LDAP authentication</li>
+<li>historize all configuration w/ name</li>
+<li>ability to run testcase locally (provided the VDI/ISO/whatever extra requirements can be met).</li>
+</ul>
+<hr class="docutils" />
+<table class="docutils footnote" frame="void" id="id2" rules="none">
+<colgroup><col class="label" /><col /></colgroup>
+<tbody valign="top">
+<tr><td class="label">[1]</td><td>no such footnote</td></tr>
+</tbody>
+</table>
+<hr class="docutils" />
+<table class="docutils field-list" frame="void" rules="none">
+<col class="field-name" />
+<col class="field-body" />
+<tbody valign="top">
+<tr class="field"><th class="field-name">Status:</th><td class="field-body">$Id: AutomaticTestingRevamp.html $</td>
+</tr>
+<tr class="field"><th class="field-name">Copyright:</th><td class="field-body">Copyright (C) 2010-2017 Oracle Corporation.</td>
+</tr>
+</tbody>
+</table>
+</div>
+</div>
+</div>
+</body>
+</html>