|
OLD | NEW |
---|---|
(Empty) | |
1 //#!/usr/bin/env dart | |
2 // Copyright (c) 2012, the Dart project authors. Please see the AUTHORS file | |
3 // for details. All rights reserved. Use of this source code is governed by a | |
4 // BSD-style license that can be found in the LICENSE file. | |
5 | |
6 /** | |
7 * testrunner is a program to run Dart unit tests. Unlike $DART/tools/test.dart, | |
8 * this program is intended for 3rd parties to be able to run unit tests in | |
9 * a batched fashion. As such, it adds some features and removes others. Some | |
10 * of the removed features are: | |
11 * | |
12 * - No support for test.status files. The assumption is that tests are | |
13 * expected to pass. | |
14 * - A restricted set of runtimes. The assumption here is that the Dart | |
15 * libraries deal with platform dependencies, and so the primary | |
16 * SKUs that a user of this app would be concerned with would be | |
17 * Dart-native versus compiled, and client (browser) vs server. To | |
18 * support these, three runtimes are allowed: 'drt-dart' and 'drt-js' (for | |
19 * client native and client-compiled, respectively), and 'vm' | |
20 * (for server-side native). | |
21 * - No sharding of test processes. | |
22 * | |
23 * On the other hand, a number of features have been added: | |
24 * | |
25 * - The ability to filter tests by group or name. | |
26 * - The ability to run tests in isolates. | |
27 * - The ability to customize the format of the test result messages. | |
28 * - The ability to list the tests available. | |
29 * | |
30 * By default, testrunner will run all tests in the current directory. | |
31 * With a -R option, it will recurse into subdirectories. | |
32 * Directories can also be specified on the command line; if | |
33 * any are specified they will override the use of the current directory. | |
34 * All files that match the `--test-file-pattern` will be included; by default | |
35 * this is files with names that end in _test.dart. | |
36 * | |
37 * Options can be specified on the command line, via a configuration | |
38 * file (`--config`) or via a test.config file in the test directory, | |
39 * in decreasing order of priority. | |
40 * | |
41 * The three runtimes are: | |
Siggi Cherem (dart-lang)
2012/08/29 20:47:35
four => 3
gram
2012/08/30 00:16:55
Done.
| |
42 * | |
43 * vm - run native Dart in the VM; i.e. using $DARTSDK/dart-sdk/bin/dart. | |
44 * drt-dart - run native Dart in DumpRenderTree, the headless version of | |
45 * Dartium, which is located in $DARTSDK/chromium/DumpRenderTree. | |
46 * drt-js - run Dart compiled to Javascript in DumpRenderTree. | |
47 */ | |
48 | |
49 /* TODO(gram) - Layout tests. The plan here will be to look for a file | |
50 * with a .layout extension that corresponds to the .dart file, that contains | |
51 * multiple layouts, one for each test. Each test will be run in its own | |
52 * instance of DRT and the result compared with the expected layout. | |
53 * | |
54 * TODO(gram) - for TDD, add the ability to indicate that some test cases are | |
Siggi Cherem (dart-lang)
2012/08/29 20:47:35
Unless you want them in the documentation, move th
gram
2012/08/30 00:16:55
They are /*, not /**
| |
55 * expected to fail temporarily. | |
56 */ | |
57 #library('testrunner'); | |
58 #import('dart:io'); | |
59 #import('dart:isolate'); | |
60 #import('dart:math'); | |
61 #import('../../pkg/args/args.dart'); | |
62 | |
63 #source('configuration.dart'); | |
64 #source('dart_wrap_task.dart'); | |
65 #source('delete_task.dart'); | |
66 #source('html_wrap_task.dart'); | |
67 #source('macros.dart'); | |
68 #source('options.dart'); | |
69 #source('pipeline_runner.dart'); | |
70 #source('pipeline_task.dart'); | |
71 #source('run_process_task.dart'); | |
72 #source('utils.dart'); | |
73 | |
74 /** The set of [PipelineRunner]s to execute. */ | |
75 List _tasks; | |
76 | |
77 /** The maximum number of pipelines that can run concurrently. */ | |
78 int _maxTasks; | |
79 | |
80 /** The number of pipelines currently running. */ | |
81 int _numTasks; | |
82 | |
83 /** The index of the next pipeline runner to execute. */ | |
84 int _nextTask; | |
85 | |
86 /** The stream to use for high-value messages, like test results. */ | |
87 OutputStream _outStream; | |
88 | |
89 /** The stream to use for low-value messages, like verbose output. */ | |
90 OutputStream _logStream; | |
91 | |
92 /** The full set of options. */ | |
93 Configuration config; | |
94 | |
/**
 * Map a user-supplied stream name to an [OutputStream]. The name may be
 * 'none' (yields null, i.e. discard output), 'stdout', 'stderr', or a
 * file path (yields a stream that overwrites that file).
 */
OutputStream getStream(String name) {
  if (name == 'none') return null;
  return (name == 'stdout') ? stdout :
         (name == 'stderr') ? stderr :
         new File(name).openOutputStream(FileMode.WRITE);
}
112 | |
/**
 * Generate a templated list of commands that should be executed for each test
 * file. Each command is an instance of a [PipelineTask].
 * The commands can make use of a number of metatokens that will be
 * expanded before execution (see the [Macros] class for details).
 */
List getPipelineTemplate(String runtime, bool checkedMode, bool keepTests) {
  var pipeline = new List();
  var cleanup = new List();
  var pathSep = Platform.pathSeparator;
  Directory tempDir = new Directory(config.tempDir);

  if (!tempDir.existsSync()) {
    tempDir.createSync();
  }

  // Templates for the generated files that are used to run the wrapped test.
  var basePath =
      '${config.tempDir}$pathSep${Macros.flattenedDirectory}_'
      '${Macros.filenameNoExtension}';
  var tempDartFile = '${basePath}.dart';
  var tempJsFile = '${basePath}.js';
  var tempHTMLFile = '${basePath}.html';
  var tempCSSFile = '${basePath}.css';

  // Add step for wrapping in Dart scaffold.
  pipeline.add(new DartWrapTask(Macros.fullFilePath, tempDartFile));
  cleanup.add(new DeleteTask(tempDartFile));

  // Add the compiler step, unless we are running native Dart.
  if (runtime == 'drt-js') {
    if (checkedMode) {
      pipeline.add(new RunProcessTask(config.dart2jsPath,
          [ '--enable_checked_mode', '--out=$tempJsFile',
            tempDartFile ], config.timeout));
    } else {
      pipeline.add(new RunProcessTask(config.dart2jsPath,
          [ '--out=$tempJsFile', tempDartFile ], config.timeout));
    }
    cleanup.add(new DeleteTask(tempJsFile));
  }

  // Add step for wrapping in HTML, if we are running in DRT.
  if (runtime != 'vm') {
    // The user can have pre-existing HTML and CSS files for the test in the
    // same directory and using the same name. The paths to these are matched
    // by these two templates.
    var htmlFile =
        '${Macros.directory}$pathSep${Macros.filenameNoExtension}.html';
    var cssFile =
        '${Macros.directory}$pathSep${Macros.filenameNoExtension}.css';
    pipeline.add(new HtmlWrapTask(Macros.fullFilePath,
        htmlFile, tempHTMLFile, cssFile, tempCSSFile));
    cleanup.add(new DeleteTask(tempHTMLFile));
    cleanup.add(new DeleteTask(tempCSSFile));
  }

  // Add the execution step.
  if (runtime == 'vm') {
    if (checkedMode) {
      pipeline.add(new RunProcessTask(config.dartPath,
          [ '--enable_asserts', '--enable_typechecks', tempDartFile ],
          config.timeout));
    } else {
      pipeline.add(new RunProcessTask(config.dartPath,
          [ tempDartFile ], config.timeout));
    }
  } else {
    pipeline.add(new RunProcessTask(config.drtPath,
        [ '--no-timeout', tempHTMLFile ], config.timeout));
  }

  // Add the cleanup steps, unless the user asked to keep the
  // generated test files around.
  if (!keepTests) {
    pipeline.addAll(cleanup);
  }

  return pipeline;
}
190 | |
/**
 * Given a [List] of [testFiles], either print the list (when the user asked
 * only for a file listing) or create and execute pipelines for the files.
 */
void processTests(List pipelineTemplate, List testFiles) {
  _outStream = getStream(config.outputStream);
  _logStream = getStream(config.logStream);
  if (config.listFiles) {
    // Listing mode: just report the matched test files; run nothing.
    if (_outStream != null) {
      for (var file in testFiles) {
        _outStream.writeString(file);
        _outStream.writeString('\n');
      }
    }
    return;
  }
  // Create an execution pipeline for each test file from the pipeline
  // template and the concrete test file path, then kick off
  // execution of the first batch.
  _tasks = new List();
  for (var file in testFiles) {
    _tasks.add(new PipelineRunner(pipelineTemplate, file,
        config.verbose, completeHandler));
  }

  _maxTasks = min(config.maxTasks, testFiles.length);
  _numTasks = 0;
  _nextTask = 0;
  spawnTasks();
}
221 | |
/** Start queued pipelines until we hit the concurrency cap or run out. */
void spawnTasks() {
  while (_nextTask < _tasks.length && _numTasks < _maxTasks) {
    _numTasks++;
    _tasks[_nextTask].execute();
    _nextTask++;
  }
}
229 | |
/**
 * Callback invoked when a pipeline finishes running a test file. Flushes
 * the pipeline's captured output and kicks off more pipelines if any
 * remain (and we are allowed to continue).
 */
void completeHandler(String testFile,
                     int exitCode,
                     List _stdout,
                     List _stderr) {
  writelog(_stdout, _outStream, _logStream);
  writelog(_stderr, _outStream, _logStream);
  _numTasks--;
  if (!config.stopOnFailure || exitCode == 0) {
    spawnTasks();
  }
  if (_numTasks == 0) {
    // No outstanding tasks; we're all done.
    // We could later print a summary report here.
  }
}
249 | |
/**
 * Write pipeline output to our streams. Our tests are configured so that
 * critical messages (like test results) carry a '###' prefix; [writelog]
 * strips that prefix and writes such messages to [out]. All other messages
 * go to [log], and only when verbose output was specified.
 */
void writelog(List messages, OutputStream out, OutputStream log) {
  for (var msg in messages) {
    if (!msg.startsWith('###')) {
      // Low-value message; surface it only in verbose mode.
      if (config.verbose && log != null) {
        log.writeString(msg);
        log.writeString('\n');
      }
    } else if (out != null) {
      // Critical message; strip the '###' marker before writing.
      out.writeString(msg.substring(3));
      out.writeString('\n');
    }
  }
}
273 | |
/** Entry point: parse options, build the pipeline, and run the tests. */
main() {
  var optionsParser = getOptionParser();
  var options = loadConfiguration(optionsParser);
  if (!isSane(options)) return;

  // Option-listing modes short-circuit the actual test run.
  if (options['list-options']) {
    printOptions(optionsParser, options, false, stdout);
    return;
  }
  if (options['list-all-options']) {
    printOptions(optionsParser, options, true, stdout);
    return;
  }

  config = new Configuration(optionsParser, options);
  // Build the command templates needed for test compile and execute.
  var pipelineTemplate = getPipelineTemplate(config.runtime,
                                             config.checkedMode,
                                             config.keepTests);
  if (pipelineTemplate == null) return;

  // Build the list of tests and then execute them.
  List dirs = options.rest;
  if (dirs.length == 0) {
    dirs.add('.'); // Use current working directory as default.
  }
  buildFileList(dirs,
      new RegExp(options['test-file-pattern']), options['recurse'],
      (f) => processTests(pipelineTemplate, f));
}
301 | |
302 | |
OLD | NEW |