OLD | NEW |
(Empty) | |
| 1 //#!/usr/bin/env dart |
| 2 // Copyright (c) 2012, the Dart project authors. Please see the AUTHORS file |
| 3 // for details. All rights reserved. Use of this source code is governed by a |
| 4 // BSD-style license that can be found in the LICENSE file. |
| 5 |
| 6 /** |
| 7 * testrunner is a program to run Dart unit tests. Unlike $DART/tools/test.dart, |
| 8 * this program is intended for 3rd parties to be able to run unit tests in |
| 9 * a batched fashion. As such, it adds some features and removes others. Some |
| 10 * of the removed features are: |
| 11 * |
| 12 * - No support for test.status files. The assumption is that tests are |
| 13 * expected to pass. |
| 14 * - A restricted set of runtimes. The assumption here is that the Dart |
| 15 * libraries deal with platform dependencies, and so the primary |
| 16 * SKUs that a user of this app would be concerned with would be |
| 17 * Dart-native versus compiled, and client (browser) vs server. To |
| 18 * support these, three runtimes are allowed: 'drt-dart' and 'drt-js' (for |
| 19 * client native and client-compiled, respectively), and 'vm' |
| 20 * (for server-side native). |
| 21 * - No sharding of test processes. |
| 22 * |
| 23 * On the other hand, a number of features have been added: |
| 24 * |
| 25 * - The ability to filter tests by group or name. |
| 26 * - The ability to run tests in isolates. |
| 27 * - The ability to customize the format of the test result messages. |
| 28 * - The ability to list the tests available. |
| 29 * |
| 30 * By default, testrunner will run all tests in the current directory. |
| 31 * With a -R option, it will recurse into subdirectories. |
| 32 * Directories can also be specified on the command line; if |
| 33 * any are specified they will override the use of the current directory. |
| 34 * All files that match the `--test-file-pattern` will be included; by default |
| 35 * this is files with names that end in _test.dart. |
| 36 * |
| 37 * Options can be specified on the command line, via a configuration |
| 38 * file (`--config`) or via a test.config file in the test directory, |
| 39 * in decreasing order of priority. |
| 40 * |
| 41 * The three runtimes are: |
| 42 * |
| 43 * vm - run native Dart in the VM; i.e. using $DARTSDK/dart-sdk/bin/dart. |
| 44 * drt-dart - run native Dart in DumpRenderTree, the headless version of |
| 45 * Dartium, which is located in $DARTSDK/chromium/DumpRenderTree, if |
| 46 * you installed the SDK that is bundled with the editor, or available |
| 47 * from http://gsdview.appspot.com/dartium-archive/continuous/ |
| 48 * otherwise. |
| 49 * |
| 50 * drt-js - run Dart compiled to Javascript in DumpRenderTree. |
| 51 */ |
| 52 |
| 53 /* TODO(gram) - Layout tests. The plan here will be to look for a file |
| 54 * with a .layout extension that corresponds to the .dart file, that contains |
| 55 * multiple layouts, one for each test. Each test will be run in its own |
| 56 * instance of DRT and the result compared with the expected layout. |
| 57 */ |
| 58 #library('testrunner'); |
| 59 #import('dart:io'); |
| 60 #import('dart:isolate'); |
| 61 #import('dart:math'); |
| 62 #import('../../pkg/args/args.dart'); |
| 63 |
| 64 #source('configuration.dart'); |
| 65 #source('dart_task.dart'); |
| 66 #source('dart_wrap_task.dart'); |
| 67 #source('dart2js_task.dart'); |
| 68 #source('delete_task.dart'); |
| 69 #source('drt_task.dart'); |
| 70 #source('html_wrap_task.dart'); |
| 71 #source('macros.dart'); |
| 72 #source('options.dart'); |
| 73 #source('pipeline_runner.dart'); |
| 74 #source('pipeline_task.dart'); |
| 75 #source('run_process_task.dart'); |
| 76 #source('utils.dart'); |
| 77 |
| // Module-level mutable state shared by processTests, spawnTasks and |
| // completeHandler; initialized in processTests before spawnTasks runs. |
| 78 /** The set of [PipelineRunner]s to execute. */ |
| 79 List _tasks; |
| 80 |
| 81 /** The maximum number of pipelines that can run concurrently. */ |
| 82 int _maxTasks; |
| 83 |
| 84 /** The number of pipelines currently running. */ |
| 85 int _numTasks; |
| 86 |
| 87 /** The index of the next pipeline runner to execute. */ |
| 88 int _nextTask; |
| 89 |
| 90 /** The stream to use for high-value messages, like test results. */ |
| 91 OutputStream _outStream; |
| 92 |
| 93 /** The stream to use for low-value messages, like verbose output. */ |
| 94 OutputStream _logStream; |
| 95 |
| 96 /** The full set of options. */ |
| 97 Configuration config; |
| 98 |
| 99 /** |
| 100 * The user can specify output streams on the command line, using 'none', |
| 101 * 'stdout', 'stderr', or a file path; [getStream] will take such a name |
| 102 * and return an appropriate [OutputStream]. |
| 103 */ |
| 104 OutputStream getStream(String name) { |
| // 'none' yields a null stream; callers (processTests, writelog) must |
| // null-check before writing. |
| 105 if (name == 'none') { |
| 106 return null; |
| 107 } |
| 108 if (name == 'stdout') { |
| 109 return stdout; |
| 110 } |
| 111 if (name == 'stderr') { |
| 112 return stderr; |
| 113 } |
| // Anything else is treated as a file path, opened for writing. |
| 114 return new File(name).openOutputStream(FileMode.WRITE); |
| 115 } |
| 116 |
| 117 /** |
| 118 * Generate a templated list of commands that should be executed for each test |
| 119 * file. Each command is an instance of a [PipelineTask]. |
| 120 * The commands can make use of a number of metatokens that will be |
| 121 * expanded before execution (see the [Meta] class for details). |
| 122 */ |
| 123 List getPipelineTemplate(String runtime, bool checkedMode, bool keepTests) { |
| // NOTE(review): keepTests is not referenced anywhere in this body — |
| // confirm whether it should control cleanup of the temp files, or be removed. |
| 124 var pipeline = new List(); |
| 125 var pathSep = Platform.pathSeparator; |
| 126 Directory tempDir = new Directory(config.tempDir); |
| 127 |
| // Ensure the temp directory exists before any task writes into it. |
| 128 if (!tempDir.existsSync()) { |
| 129 tempDir.createSync(); |
| 130 } |
| 131 |
| 132 // Templates for the generated files that are used to run the wrapped test. |
| 133 var basePath = |
| 134 '${config.tempDir}$pathSep${Macros.flattenedDirectory}_' |
| 135 '${Macros.filenameNoExtension}'; |
| 136 var tempDartFile = '${basePath}.dart'; |
| 137 var tempJsFile = '${basePath}.js'; |
| 138 var tempHTMLFile = '${basePath}.html'; |
| 139 var tempCSSFile = '${basePath}.css'; |
| 140 |
| 141 // Add step for wrapping in Dart scaffold. |
| 142 pipeline.add(new DartWrapTask(Macros.fullFilePath, tempDartFile)); |
| 143 |
| 144 // Add the compiler step, unless we are running native Dart. |
| 145 if (runtime == 'drt-js') { |
| 146 if (checkedMode) { |
| 147 pipeline.add(new Dart2jsTask.checked(tempDartFile, tempJsFile)); |
| 148 } else { |
| 149 pipeline.add(new Dart2jsTask(tempDartFile, tempJsFile)); |
| 150 } |
| 151 } |
| 152 |
| 153 // Add step for wrapping in HTML, if we are running in DRT. |
| 154 if (runtime != 'vm') { |
| 155 // The user can have pre-existing HTML and CSS files for the test in the |
| 156 // same directory and using the same name. The paths to these are matched |
| 157 // by these two templates. |
| 158 var HTMLFile = |
| 159 '${Macros.directory}$pathSep${Macros.filenameNoExtension}.html'; |
| 160 var CSSFile = |
| 161 '${Macros.directory}$pathSep${Macros.filenameNoExtension}.css'; |
| 162 pipeline.add(new HtmlWrapTask(Macros.fullFilePath, |
| 163 HTMLFile, tempHTMLFile, CSSFile, tempCSSFile)); |
| 164 } |
| 165 |
| 166 // Add the execution step. |
| 167 if (runtime == 'vm') { |
| 168 if (checkedMode) { |
| 169 pipeline.add(new DartTask.checked(tempDartFile)); |
| 170 } else { |
| 171 pipeline.add(new DartTask(tempDartFile)); |
| 172 } |
| 173 } else { |
| 174 pipeline.add(new DrtTask(tempHTMLFile)); |
| 175 } |
| // This function always returns a (possibly short) list, never null. |
| 176 return pipeline; |
| 177 } |
| 178 |
| 179 /** |
| 180 * Given a [List] of [testFiles], either print the list or create |
| 181 * and execute pipelines for the files. |
| 182 */ |
| 183 void processTests(List pipelineTemplate, List testFiles) { |
| // (Re)open the output and log streams from the current configuration; |
| // either may be null if configured as 'none'. |
| 184 _outStream = getStream(config.outputStream); |
| 185 _logStream = getStream(config.logStream); |
| 186 if (config.listFiles) { |
| // List-only mode: print the matched file names and do not run anything. |
| 187 if (_outStream != null) { |
| 188 for (var i = 0; i < testFiles.length; i++) { |
| 189 _outStream.writeString(testFiles[i]); |
| 190 _outStream.writeString('\n'); |
| 191 } |
| 192 } |
| 193 } else { |
| 194 // Create execution pipelines for each test file from the pipeline |
| 195 // template and the concrete test file path, and then kick |
| 196 // off execution of the first batch. |
| 197 _tasks = new List(); |
| 198 for (var i = 0; i < testFiles.length; i++) { |
| 199 _tasks.add(new PipelineRunner(pipelineTemplate, testFiles[i], |
| 200 config.verbose, completeHandler)); |
| 201 } |
| 202 |
| // Throttle concurrency to the smaller of the configured cap and the |
| // number of test files. |
| 203 _maxTasks = min(config.maxTasks, testFiles.length); |
| 204 _numTasks = 0; |
| 205 _nextTask = 0; |
| 206 spawnTasks(); |
| 207 } |
| 208 } |
| 209 |
| 210 /** Execute as many tasks as possible up to the maxTasks limit. */ |
| 211 void spawnTasks() { |
| // Keep launching pipelines until we hit the concurrency cap (_maxTasks) |
| // or run out of queued tasks; completeHandler calls back in to refill. |
| 212 while (_numTasks < _maxTasks && _nextTask < _tasks.length) { |
| 213 ++_numTasks; |
| 214 _tasks[_nextTask++].execute(); |
| 215 } |
| 216 } |
| 217 |
| 218 /** |
| 219 * Handle the completion of a task. Kick off more tasks if we |
| 220 * have them. |
| 221 */ |
| // NOTE(review): testFile is not used in this body — presumably kept to |
| // match the PipelineRunner callback signature; confirm. |
| 222 void completeHandler(String testFile, |
| 223 int exitCode, |
| 224 List _stdout, |
| 225 List _stderr) { |
| // Forward the captured output of the finished pipeline to our streams. |
| 226 writelog(_stdout, _outStream, _logStream); |
| 227 writelog(_stderr, _outStream, _logStream); |
| 228 --_numTasks; |
| // On failure with stopOnFailure set, no new tasks are started; tasks |
| // already in flight still run to completion. |
| 229 if (exitCode == 0 || !config.stopOnFailure) { |
| 230 spawnTasks(); |
| 231 } |
| 232 if (_numTasks == 0) { |
| 233 // No outstanding tasks; we're all done. |
| 234 // We could later print a summary report here. |
| 235 } |
| 236 } |
| 237 |
| 238 /** |
| 239 * Our tests are configured so that critical messages have a '###' prefix. |
| 240 * [writelog] takes the output from a pipeline execution and writes it to |
| 241 * our output streams. It will strip the '###' if necessary on critical |
| 242 * messages; other messages will only be written if verbose output was |
| 243 * specified. |
| 244 */ |
| 245 void writelog(List messages, OutputStream out, OutputStream log) { |
| 246 for (var i = 0; i < messages.length; i++) { |
| 247 var msg = messages[i]; |
| // Critical messages carry a '###' prefix: strip it and send them to |
| // the high-value output stream. |
| 248 if (msg.startsWith('###')) { |
| 249 if (out != null) { |
| 250 out.writeString(msg.substring(3)); |
| 251 out.writeString('\n'); |
| 252 } |
| // Everything else goes to the log stream, and only in verbose mode. |
| 253 } else if (config.verbose) { |
| 254 if (log != null) { |
| 255 log.writeString(msg); |
| 256 log.writeString('\n'); |
| 257 } |
| 258 } |
| 259 } |
| 260 } |
| 261 |
| // Entry point: parse options, then either print option help/listings or |
| // build the pipeline template and run the matched test files through it. |
| 262 main() { |
| 263 var optionsParser = getOptionParser(); |
| 264 var options = loadConfiguration(optionsParser); |
| 265 if (isSane(options)) { |
| 266 if (options['list-options']) { |
| 267 printOptions(optionsParser, options, false, stdout); |
| 268 } else if (options['list-all-options']) { |
| 269 printOptions(optionsParser, options, true, stdout); |
| 270 } else { |
| 271 config = new Configuration(optionsParser, options); |
| 272 // Build the command templates needed for test compile and execute. |
| 273 var pipelineTemplate = getPipelineTemplate(config.runtime, |
| 274 config.checkedMode, |
| 275 config.keepTests); |
| // Defensive: getPipelineTemplate as written always returns a list, |
| // never null. |
| 276 if (pipelineTemplate != null) { |
| 277 // Build the list of tests and then execute them. |
| 278 List dirs = options.rest; |
| 279 if (dirs.length == 0) { |
| 280 dirs.add('.'); // Use current working directory as default. |
| 281 } |
| 282 buildFileList(dirs, |
| 283 new RegExp(options['test-file-pattern']), options['recurse'], |
| 284 (f) => processTests(pipelineTemplate, f)); |
| 285 } |
| 286 } |
| 287 } |
| 288 } |
| 289 |
| 290 |
OLD | NEW |