Index: utils/testrunner/testrunner.dart |
=================================================================== |
--- utils/testrunner/testrunner.dart (revision 0) |
+++ utils/testrunner/testrunner.dart (revision 0) |
@@ -0,0 +1,300 @@ |
+//#!/usr/bin/env dart |
+// Copyright (c) 2012, the Dart project authors. Please see the AUTHORS file |
+// for details. All rights reserved. Use of this source code is governed by a |
+// BSD-style license that can be found in the LICENSE file. |
+ |
+/** |
+ * testrunner is a program to run Dart unit tests. Unlike ~/tools/test.dart, |
+ * this program is intended to let third parties run unit tests in |
+ * a batched fashion. As such, it adds some features and removes others. Some |
+ * of the removed features are: |
+ * |
+ * - No support for test.status files. The assumption is that tests are |
+ * expected to pass. |
+ * - A restricted set of runtimes. The assumption here is that the Dart |
+ * libraries deal with platform dependencies, and so the primary |
+ *     SKUs that a user of this app would be concerned with are |
+ *     Dart-native versus compiled, and client (browser) versus server. To |
+ * support these, four runtimes are allowed: 'drt-dart' and 'drt-js' (for |
+ * client native and client-compiled, respectively), and 'vm' and 'd8' |
+ * (for server-side native and compiled, respectively). |
+ * - No sharding of test processes. |
+ * |
+ * On the other hand, a number of features have been added: |
+ * |
+ * - The ability to filter tests by group or name. |
+ * - The ability to run tests in isolates. |
+ * - The ability to customize the format of the test result messages. |
+ * - The ability to list the tests available. |
+ * |
+ * By default, testrunner will run all tests in the current directory. |
+ * With a -R option, it will recurse into subdirectories. |
+ * Directories can also be specified on the command line; if |
+ * any are specified they will override the use of the current directory. |
+ * All files that match the --test-file-pat pattern will be included; by |
+ * default these are files whose names end in _test.dart. |
+ * |
+ * Options can be specified on the command line, via a configuration |
+ * file (--config), or via a test.config file in the test directory, |
+ * in decreasing order of priority. Some example invocations are shown |
+ * just below this comment block. |
+ * |
+ * TODO(gram) - Layout tests. The plan here will be to look for a file |
+ * with a .layout extension that corresponds to the .dart file and contains |
+ * multiple layouts, one for each test. Each test will be run in its own |
+ * instance of DRT and the result compared with the expected layout. |
+ * |
+ */ |
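+// Example invocations (illustrative only; the exact flag names, |
+// abbreviations and defaults are defined in options.dart, sourced below): |
+// |
+//   dart testrunner.dart                     # run all *_test.dart in '.' |
+//   dart testrunner.dart -R some/dir         # recurse into subdirectories |
+//   dart testrunner.dart --runtime=drt-js --checked some/dir |
+//                                            # compile and run under DRT |
+//   dart testrunner.dart --list-files        # only list the test files |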
+#library('testrunner'); |
+#import('dart:io'); |
+#import('dart:isolate'); |
+#import('dart:math'); |
+#import('../../pkg/args/args.dart'); |
+ |
+#source('dart_wrap_task.dart'); |
+#source('delete_task.dart'); |
+#source('html_wrap_task.dart'); |
+#source('meta.dart'); |
+#source('options.dart'); |
+#source('pipeline_runner.dart'); |
+#source('pipeline_task.dart'); |
+#source('run_process_task.dart'); |
+#source('utils.dart'); |
+ |
+// The set of [PipelineRunner]s to execute. |
+List tasks; |
+ArgResults configuration; |
+// The maximum number of pipelines that can run concurrently. |
+int maxTasks; |
+// The number of pipelines currently running. |
+int numTasks; |
+// The index of the next pipeline runner to execute. |
+int nextTask; |
+// Whether to capture all output from a test or just the result. |
+bool verbose; |
+// The timeout to use on running processes. |
+int timeout; |
+// The stream to use for high-value messages, like test results. |
+OutputStream outStream; |
+// The stream to use for low-value messages, like verbose output. |
+OutputStream logStream; |
+ |
+// The user can specify output streams on the command line, using 'none', |
+// 'stdout', 'stderr', or a file path; [getStream] will take such a name |
+// and return an appropriate [OutputStream]. |
+OutputStream getStream(String name) { |
+ if (name == 'none') { |
+ return null; |
+ } |
+ if (name == 'stdout') { |
+ return stdout; |
+ } |
+ if (name == 'stderr') { |
+ return stderr; |
+ } |
+ return new File(name).openOutputStream(FileMode.WRITE); |
+} |
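+// Note that getStream('none') returns null, so the callers below |
+// (processTests and writelog) check for null before writing. |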
+ |
+/** |
+ * Generate a templated list of commands that should be executed for each test |
+ * file. Each command is an instance of a [PipelineTask]. |
+ * The commands can make use of a number of metatokens that will be |
+ * expanded before execution (see the [Meta] class for details). |
+ */ |
+List getPipelineTemplate(String runtime, bool checkedMode, bool keepTests) { |
+ var pipeline = new List(); |
+ var pathSep = Platform.pathSeparator; |
+ var tempDirPath = configuration['tempdir']; |
+ Directory tempDir = new Directory(tempDirPath); |
+ |
+ if (!tempDir.existsSync()) { |
+ tempDir.createSync(); |
+ } |
+ var mustCleanupJavascript = false; |
+ var mustCleanupHtml = false; |
+ |
+ // Templates for the generated files that are used to run the wrapped test. |
+ var tempDartFile = '$tempDirPath$pathSep${Meta.filenameNoExtension}.dart'; |
+ var tempJsFile = '$tempDirPath$pathSep${Meta.filenameNoExtension}.js'; |
+ var tempHTMLFile = '$tempDirPath$pathSep${Meta.filenameNoExtension}.html'; |
+ var tempCSSFile = '$tempDirPath$pathSep${Meta.filenameNoExtension}.css'; |
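+  // As an illustration (the actual expansion rules live in meta.dart, not |
+  // shown here): for a test file such as /work/foo_test.dart, the |
+  // ${Meta.filenameNoExtension} token would presumably expand to 'foo_test', |
+  // so tempDartFile becomes '<tempdir>/foo_test.dart' and tempHTMLFile |
+  // becomes '<tempdir>/foo_test.html'. |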
+ |
+ // Add step for wrapping in Dart scaffold. |
+ pipeline.add(new DartWrapTask(Meta.fullFilePath, tempDartFile)); |
+ |
+ // Add the compiler step, unless we are running native Dart. |
+ if (runtime != 'vm' && runtime != 'drt-dart') { |
+ mustCleanupJavascript = true; |
+ if (checkedMode) { |
+ pipeline.add(new RunProcessTask(configuration['dart2js'], |
+ [ '--enable_checked_mode', '--out=$tempJsFile', |
+ tempDartFile ], timeout)); |
+ } else { |
+ pipeline.add(new RunProcessTask(configuration['dart2js'], |
+ [ '--out=$tempJsFile', tempDartFile ], timeout)); |
+ } |
+ } |
+ |
+ // Add step for wrapping in HTML, if we are running in DRT. |
+ if (runtime == 'drt-dart' || runtime == 'drt-js') { |
+ mustCleanupHtml = true; |
+    // The user can have pre-existing HTML and CSS files for the test in the |
+    // same directory, using the same base name. The paths to these are |
+    // matched by these two templates. |
+ var HTMLFile = '${Meta.directory}$pathSep${Meta.filenameNoExtension}.html'; |
+ var CSSFile = '${Meta.directory}$pathSep${Meta.filenameNoExtension}.css'; |
+ pipeline.add(new HtmlWrapTask(Meta.fullFilePath, |
+ HTMLFile, tempHTMLFile, CSSFile, tempCSSFile)); |
+ } |
+ |
+ // Add the execution step. |
+ if (runtime == 'vm') { |
+ if (checkedMode) { |
+ pipeline.add(new RunProcessTask(configuration['dart'], |
+ [ '--enable_asserts', '--enable_typechecks', tempDartFile ], |
+ timeout)); |
+ } else { |
+ pipeline.add(new RunProcessTask(configuration['dart'], |
+ [ tempDartFile ], timeout)); |
+ } |
+ } else if (runtime == 'drt-dart' || runtime == 'drt-js') { |
+ pipeline.add(new RunProcessTask(configuration['drt'], |
+ [ '--no-timeout', tempHTMLFile ], timeout)); |
+ } else if (runtime == 'd8') { |
+ pipeline.add(new RunProcessTask(configuration['d8'], |
+ [ tempJsFile ], timeout)); |
+ } |
+ |
+ // Add the cleanup steps. |
+ if (!keepTests) { |
+ pipeline.add(new DeleteTask(tempDartFile)); |
+ |
+ if (mustCleanupJavascript) { |
+ pipeline.add(new DeleteTask(tempJsFile)); |
+ } |
+ |
+ if (mustCleanupHtml) { |
+ pipeline.add(new DeleteTask(tempHTMLFile)); |
+ pipeline.add(new DeleteTask(tempCSSFile)); |
+ } |
+ } |
+ |
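+  // For example, with runtime 'drt-js' in checked mode (and without |
+  // --keep-generated-tests) the pipeline assembled above is: DartWrapTask, |
+  // RunProcessTask(dart2js --enable_checked_mode), HtmlWrapTask, |
+  // RunProcessTask(drt --no-timeout), and four DeleteTasks; with runtime |
+  // 'vm' it is just DartWrapTask, RunProcessTask(dart), and one DeleteTask. |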
+ return pipeline; |
+} |
+ |
+// Once we have enumerated all the test files, we call [processTests] to |
+// handle the next step - either listing the files or creating and executing |
+// pipelines for the files. |
+void processTests(List pipelineTemplate, List testFiles) { |
+ outStream = getStream(configuration['out']); |
+ logStream = getStream(configuration['log']); |
+ if (configuration['list-files']) { |
+ if (outStream != null) { |
+ for (var i = 0; i < testFiles.length; i++) { |
+ outStream.writeString(testFiles[i]); |
+ outStream.writeString('\n'); |
+ } |
+ } |
+ } else { |
+ // Create execution pipelines for each test file from the pipeline |
+ // template and the concrete test file path, and then kick |
+ // off execution of the first batch. |
+ tasks = new List(); |
+ for (var i = 0; i < testFiles.length; i++) { |
+ tasks.add(new PipelineRunner(pipelineTemplate, testFiles[i], verbose, |
+ completeHandler)); |
+ } |
+ |
+ maxTasks = min(parseInt(configuration['tasks']), testFiles.length); |
+ numTasks = 0; |
+ nextTask = 0; |
+ spawnTasks(); |
+ } |
+} |
+ |
+// Execute as many tasks as we can up to the maxTasks limit. |
+void spawnTasks() { |
+ while (numTasks < maxTasks && nextTask < tasks.length) { |
+ ++numTasks; |
+ tasks[nextTask++].execute(); |
+ } |
+} |
+ |
+// Handle the completion of a task. Kick off more tasks if we |
+// have them. |
+void completeHandler(String testFile, |
+ int exitCode, |
+ List _stdout, |
+ List _stderr) { |
+ writelog(_stdout, outStream, logStream); |
+ writelog(_stderr, outStream, logStream); |
+ --numTasks; |
+ if (exitCode == 0 || !configuration['stop-on-failure']) { |
+ spawnTasks(); |
+ } |
+ if (numTasks == 0) { |
+ // No outstanding tasks; we're all done. |
+ // We could later print a summary report here. |
+ } |
+} |
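+// A worked sketch of the scheduling above: with the 'tasks' limit set to 4 |
+// and ten test files, spawnTasks starts four pipelines immediately; each |
+// call to completeHandler decrements numTasks and, unless a failure stops |
+// the run, starts the next pending pipeline until nextTask reaches ten. |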
+ |
+// Our tests are configured so that critical messages have a '###' prefix. |
+// [writelog] takes the output from a pipeline execution and writes it to |
+// our output streams. It strips the '###' prefix from critical messages |
+// before writing them; other messages are only written if verbose output |
+// was specified. |
+void writelog(List messages, OutputStream out, OutputStream log) { |
+ for (var i = 0; i < messages.length; i++) { |
+ var msg = messages[i]; |
+ if (msg.startsWith('###')) { |
+ if (out != null) { |
+ out.writeString(msg.substring(3)); |
+ out.writeString('\n'); |
+ } |
+ } else if (verbose) { |
+ if (log != null) { |
+ log.writeString(msg); |
+ log.writeString('\n'); |
+ } |
+ } |
+ } |
+} |
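+// For example (assuming the generated test wrapper emits results in this |
+// form), a pipeline output line of '###PASS: foo_test' would reach the out |
+// stream as 'PASS: foo_test', while other tool output is copied to the log |
+// stream only when verbose output is enabled. |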
+ |
+main() { |
+ var optionsParser = getOptionParser(); |
+ configuration = loadConfiguration(optionsParser); |
+ if (sane(configuration)) { |
+ if (configuration['list-options']) { |
+ printOptions(optionsParser, configuration, false, stdout); |
+ } else if (configuration['list-all-options']) { |
+ printOptions(optionsParser, configuration, true, stdout); |
+ } else { |
+ // Pull out some useful config stuff. |
+ timeout = parseInt(configuration['timeout']); |
+ verbose = configuration['log'] != 'none'; |
+ if (configuration['list-groups']) { |
+ verbose = false; |
+ } |
+ // Build the command templates needed for test compile and execute. |
+ var runtime = configuration['runtime']; |
+ var checkedMode = configuration['checked']; |
+ var keepTests = configuration['keep-generated-tests'] && |
+ !(configuration['list-groups'] || configuration['list-tests']); |
+ var pipelineTemplate = |
+ getPipelineTemplate(runtime, checkedMode, keepTests); |
+ |
+ if (pipelineTemplate != null) { |
+ // Build the list of tests and then execute them. |
+ List dirs = configuration.rest; |
+ bool recurse = configuration['recurse']; |
+ if (dirs.length == 0) { |
+ dirs.add('.'); // Use current working directory as default. |
+ } |
+ buildFileList(dirs, |
+ new RegExp(configuration['test-file-pat']), recurse, |
+        (f) => processTests(pipelineTemplate, f)); |
+ } |
+ } |
+ } |
+} |
+ |
+ |
Property changes on: utils/testrunner/testrunner.dart |
___________________________________________________________________ |
Added: svn:executable |
+ * |