//#!/usr/bin/env dart
// Copyright (c) 2012, the Dart project authors. Please see the AUTHORS file
// for details. All rights reserved. Use of this source code is governed by a
// BSD-style license that can be found in the LICENSE file.

/**
 * testrunner is a program to run Dart unit tests. Unlike $DART/tools/test.dart,
 * this program is intended for 3rd parties to be able to run unit tests in
 * a batched fashion. As such, it adds some features and removes others. Some
 * of the removed features are:
 *
 * - No support for test.status files. The assumption is that tests are
 *   expected to pass.
 * - A restricted set of runtimes. The assumption here is that the Dart
 *   libraries deal with platform dependencies, and so the primary
 *   SKUs that a user of this app would be concerned with would be
 *   Dart-native versus compiled, and client (browser) vs server. To
 *   support these, three runtimes are allowed: 'drt-dart' and 'drt-js' (for
 *   client native and client-compiled, respectively), and 'vm'
 *   (for server-side native).
 * - No sharding of test processes.
 *
 * On the other hand, a number of features have been added:
 *
 * - The ability to filter tests by group or name.
 * - The ability to run tests in isolates.
 * - The ability to customize the format of the test result messages.
 * - The ability to list the tests available.
 *
 * By default, testrunner will run all tests in the current directory.
 * With a -R option, it will recurse into subdirectories.
 * Directories can also be specified on the command line; if
 * any are specified they will override the use of the current directory.
 * All files that match the `--test-file-pattern` will be included; by default
 * these are files whose names end in _test.dart.
 *
 * Options can be specified on the command line, via a configuration
 * file (`--config`), or via a test.config file in the test directory,
 * in decreasing order of priority.
 *
 * The three runtimes are:
 *
 *   vm - run native Dart in the VM; i.e. using $DARTSDK/dart-sdk/bin/dart.
 *   drt-dart - run native Dart in DumpRenderTree, the headless version of
 *       Dartium, which is located in $DARTSDK/chromium/DumpRenderTree.
 *   drt-js - run Dart compiled to JavaScript in DumpRenderTree.
 */
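
// Example invocations (illustrative; the exact flag names and defaults are
// defined in options.dart):
//
//   dart testrunner.dart                    # run all *_test.dart files in '.'
//   dart testrunner.dart -R path/to/tests   # recurse into path/to/tests
//   dart testrunner.dart --list-options     # show the supported options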

/* TODO(gram) - Layout tests. The plan here will be to look for a file
 * with a .layout extension that corresponds to the .dart file, that contains
 * multiple layouts, one for each test. Each test will be run in its own
 * instance of DRT and the result compared with the expected layout.
 *
 * TODO(gram) - for TDD, add the ability to indicate that some test cases are
 * expected to fail temporarily.
 */
#library('testrunner');
#import('dart:io');
#import('dart:isolate');
#import('dart:math');
#import('../../pkg/args/args.dart');

#source('configuration.dart');
#source('dart_task.dart');
#source('dart_wrap_task.dart');
#source('dart2js_task.dart');
#source('delete_task.dart');
#source('drt_task.dart');
#source('html_wrap_task.dart');
#source('macros.dart');
#source('options.dart');
#source('pipeline_runner.dart');
#source('pipeline_task.dart');
#source('run_process_task.dart');
#source('utils.dart');

/** The set of [PipelineRunner]s to execute. */
List _tasks;

/** The maximum number of pipelines that can run concurrently. */
int _maxTasks;

/** The number of pipelines currently running. */
int _numTasks;

/** The index of the next pipeline runner to execute. */
int _nextTask;

/** The stream to use for high-value messages, like test results. */
OutputStream _outStream;

/** The stream to use for low-value messages, like verbose output. */
OutputStream _logStream;

/** The full set of options. */
Configuration config;

/**
 * The user can specify output streams on the command line, using 'none',
 * 'stdout', 'stderr', or a file path; [getStream] will take such a name
 * and return an appropriate [OutputStream].
 */
OutputStream getStream(String name) {
  if (name == 'none') {
    return null;
  }
  if (name == 'stdout') {
    return stdout;
  }
  if (name == 'stderr') {
    return stderr;
  }
  return new File(name).openOutputStream(FileMode.WRITE);
}
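
// For example (illustrative): getStream('stderr') returns the process's
// stderr stream, getStream('results.txt') opens results.txt for writing, and
// getStream('none') returns null, so callers must check for null before
// writing.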

/**
 * Generate a templated list of commands that should be executed for each test
 * file. Each command is an instance of a [PipelineTask].
 * The commands can make use of a number of metatokens that will be
 * expanded before execution (see the [Meta] class for details).
 */
List getPipelineTemplate(String runtime, bool checkedMode, bool keepTests) {
  var pipeline = new List();
  var pathSep = Platform.pathSeparator;
  Directory tempDir = new Directory(config.tempDir);

  if (!tempDir.existsSync()) {
    tempDir.createSync();
  }

  // Templates for the generated files that are used to run the wrapped test.
  var basePath =
      '${config.tempDir}$pathSep${Macros.flattenedDirectory}_'
      '${Macros.filenameNoExtension}';
  var tempDartFile = '${basePath}.dart';
  var tempJsFile = '${basePath}.js';
  var tempHTMLFile = '${basePath}.html';
  var tempCSSFile = '${basePath}.css';

  // Add step for wrapping in Dart scaffold.
  pipeline.add(new DartWrapTask(Macros.fullFilePath, tempDartFile));

  // Add the compiler step, unless we are running native Dart.
  if (runtime == 'drt-js') {
    if (checkedMode) {
      pipeline.add(new Dart2jsTask.checked(tempDartFile, tempJsFile));
    } else {
      pipeline.add(new Dart2jsTask(tempDartFile, tempJsFile));
    }
  }

  // Add step for wrapping in HTML, if we are running in DRT.
  if (runtime != 'vm') {
    // The user can have pre-existing HTML and CSS files for the test in the
    // same directory and using the same name. The paths to these are matched
    // by these two templates.
    var HTMLFile =
        '${Macros.directory}$pathSep${Macros.filenameNoExtension}.html';
    var CSSFile =
        '${Macros.directory}$pathSep${Macros.filenameNoExtension}.css';
    pipeline.add(new HtmlWrapTask(Macros.fullFilePath,
        HTMLFile, tempHTMLFile, CSSFile, tempCSSFile));
  }

  // Add the execution step.
  if (runtime == 'vm') {
    if (checkedMode) {
      pipeline.add(new DartTask.checked(tempDartFile));
    } else {
      pipeline.add(new DartTask(tempDartFile));
    }
  } else {
    pipeline.add(new DrtTask(tempHTMLFile));
  }
  return pipeline;
}
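
// Illustrative sketch (not part of the original pipeline code): for a
// hypothetical test file foo_test.dart run with runtime 'drt-js', the
// template above expands, per test file, to roughly this sequence:
//   1. DartWrapTask  - wrap foo_test.dart in a Dart scaffold under tempDir
//   2. Dart2jsTask   - compile the wrapped test to JavaScript
//   3. HtmlWrapTask  - generate the HTML (and CSS) page that loads the test
//   4. DrtTask       - run the page in DumpRenderTree and collect its output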

/**
 * Given a [List] of [testFiles], either print the list or create
 * and execute pipelines for the files.
 */
void processTests(List pipelineTemplate, List testFiles) {
  _outStream = getStream(config.outputStream);
  _logStream = getStream(config.logStream);
  if (config.listFiles) {
    if (_outStream != null) {
      for (var i = 0; i < testFiles.length; i++) {
        _outStream.writeString(testFiles[i]);
        _outStream.writeString('\n');
      }
    }
  } else {
    // Create execution pipelines for each test file from the pipeline
    // template and the concrete test file path, and then kick
    // off execution of the first batch.
    _tasks = new List();
    for (var i = 0; i < testFiles.length; i++) {
      _tasks.add(new PipelineRunner(pipelineTemplate, testFiles[i],
          config.verbose, completeHandler));
    }

    _maxTasks = min(config.maxTasks, testFiles.length);
    _numTasks = 0;
    _nextTask = 0;
    spawnTasks();
  }
}

/** Execute as many tasks as possible up to the maxTasks limit. */
void spawnTasks() {
  while (_numTasks < _maxTasks && _nextTask < _tasks.length) {
    ++_numTasks;
    _tasks[_nextTask++].execute();
  }
}
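
// Illustrative example: with config.maxTasks == 4 and ten test files, the
// initial call starts pipelines 0-3; each completion reported to
// completeHandler below then starts the next queued pipeline.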

/**
 * Handle the completion of a task. Kick off more tasks if we
 * have them.
 */
void completeHandler(String testFile,
                     int exitCode,
                     List _stdout,
                     List _stderr) {
  writelog(_stdout, _outStream, _logStream);
  writelog(_stderr, _outStream, _logStream);
  --_numTasks;
  if (exitCode == 0 || !config.stopOnFailure) {
    spawnTasks();
  }
  if (_numTasks == 0) {
    // No outstanding tasks; we're all done.
    // We could later print a summary report here.
  }
}

/**
 * Our tests are configured so that critical messages have a '###' prefix.
 * [writelog] takes the output from a pipeline execution and writes it to
 * our output streams. It will strip the '###' if necessary on critical
 * messages; other messages will only be written if verbose output was
 * specified.
 */
void writelog(List messages, OutputStream out, OutputStream log) {
  for (var i = 0; i < messages.length; i++) {
    var msg = messages[i];
    if (msg.startsWith('###')) {
      if (out != null) {
        out.writeString(msg.substring(3));
        out.writeString('\n');
      }
    } else if (config.verbose) {
      if (log != null) {
        log.writeString(msg);
        log.writeString('\n');
      }
    }
  }
}
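
// For example (the message text here is illustrative): a line such as
// '###PASS: my_test' is written to the output stream as 'PASS: my_test',
// while a line without the '###' prefix goes only to the log stream, and
// only when config.verbose is set.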

main() {
  var optionsParser = getOptionParser();
  var options = loadConfiguration(optionsParser);
  if (isSane(options)) {
    if (options['list-options']) {
      printOptions(optionsParser, options, false, stdout);
    } else if (options['list-all-options']) {
      printOptions(optionsParser, options, true, stdout);
    } else {
      config = new Configuration(optionsParser, options);
      // Build the command templates needed for test compile and execute.
      var pipelineTemplate = getPipelineTemplate(config.runtime,
                                                 config.checkedMode,
                                                 config.keepTests);
      if (pipelineTemplate != null) {
        // Build the list of tests and then execute them.
        List dirs = options.rest;
        if (dirs.length == 0) {
          dirs.add('.'); // Use current working directory as default.
        }
        buildFileList(dirs,
            new RegExp(options['test-file-pattern']), options['recurse'],
            (f) => processTests(pipelineTemplate, f));
      }
    }
  }
}