1 // Copyright (c) 2011, the Dart project authors. Please see the AUTHORS file | |
2 // for details. All rights reserved. Use of this source code is governed by a | |
3 // BSD-style license that can be found in the LICENSE file. | |
4 | |
5 // TODO(jimhug): Error recovery needs major work! | |
6 /** | |
7  * A simple recursive descent parser for the Dart language. | |
8 * | |
9 * This parser is designed to be more permissive than the official | |
10 * Dart grammar. It is expected that many grammar errors would be | |
11 * reported by a later compiler phase. For example, a class is allowed | |
12 * to extend an arbitrary number of base classes - this can be | |
13 * very clearly detected and is reported in a later compiler phase. | |
14 */ | |
15 class Parser { | |
16 TokenSource tokenizer; | |
17 | |
18 final SourceFile source; | |
19 /** Enables diet parse, which skips function bodies. */ | |
20 final bool diet; | |
21 /** | |
22 * Throw an IncompleteSourceException if the parser encounters a premature end | |
23 * of file or an incomplete multiline string. | |
24 */ | |
25 final bool throwOnIncomplete; | |
26 | |
27 /** Allow semicolons to be omitted at the end of lines. */ | |
28 // TODO(nweiz): make this work for more than just end-of-file | |
29 final bool optionalSemicolons; | |
30 | |
31 /** | |
32 * Allow the await keyword, when the await transformation is available (see | |
33 * await/awaitc.dart). | |
34 */ | |
35 bool get enableAwait() => experimentalAwaitPhase != null; | |
36 | |
37 /** | |
38  * Used to resolve the ambiguity in initializers between a constructor body | |
39  * and a lambda expression. | |
40 */ | |
41 bool _inhibitLambda = false; | |
42 | |
43 Token _previousToken; | |
44 Token _peekToken; | |
45 | |
46 // When we encounter '(' in a method body we need to find the ')' to know | |
47 // whether we're parsing a lambda, paren-expr, or argument list. Closure | |
48 // formals are followed by '=>' or '{'. This list is used to cache the tokens | |
49 // after any nested parentheses we find while peeking. | |
50 // TODO(jmesserly): it's simpler and faster to cache this on the Token itself, | |
51 // but that might add too much complexity for tools that need to invalidate. | |
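// For example, after the '(' in '(a, b) => a + b' the token following the
// matching ')' is '=>', so it is a lambda; in '(a + b) * c' it is '*', so it
// is a parenthesized expression (see _atClosureParameters below).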
52 List<Token> _afterParens; | |
53 int _afterParensIndex = 0; | |
54 | |
55 bool _recover = false; | |
56 | |
57 Parser(this.source, [this.diet = false, this.throwOnIncomplete = false, | |
58 this.optionalSemicolons = false, int startOffset = 0]) { | |
59 tokenizer = new Tokenizer(source, true, startOffset); | |
60 _peekToken = tokenizer.next(); | |
61 _afterParens = <Token>[]; | |
62 } | |
63 | |
64 /** Generate an error if [source] has not been completely consumed. */ | |
65 void checkEndOfFile() { | |
66 _eat(TokenKind.END_OF_FILE); | |
67 } | |
68 | |
69 /** Guard to break out of parser when an unexpected end of file is found. */ | |
70 bool isPrematureEndOfFile() { | |
71 if (throwOnIncomplete && _maybeEat(TokenKind.END_OF_FILE)) { | |
72 throw new IncompleteSourceException(_previousToken); | |
73 } else if (_maybeEat(TokenKind.END_OF_FILE)) { | |
74 _error('unexpected end of file', _peekToken.span); | |
75 return true; | |
76 } else { | |
77 return false; | |
78 } | |
79 } | |
80 | |
81 /** | |
82 * Recovers the parser after an error, by iterating until it finds one of | |
83  * the provided [TokenKind] values. | |
84 */ | |
85 bool _recoverTo(int kind1, [int kind2, int kind3]) { | |
86 assert(_recover); | |
87 while (!isPrematureEndOfFile()) { | |
88 int kind = _peek(); | |
89 if (kind == kind1 || kind == kind2 || kind == kind3) { | |
90 _recover = false; // Done recovering. Issue errors normally. | |
91 return true; | |
92 } | |
93 _next(); | |
94 } | |
95 // End of file without finding a match | |
96 return false; | |
97 } | |
98 | |
99 /////////////////////////////////////////////////////////////////// | |
100 // Basic support methods | |
101 /////////////////////////////////////////////////////////////////// | |
102 int _peek() => _peekToken.kind; | |
103 | |
104 Token _next() { | |
105 _previousToken = _peekToken; | |
106 _peekToken = tokenizer.next(); | |
107 return _previousToken; | |
108 } | |
109 | |
110 bool _peekKind(int kind) => _peekToken.kind == kind; | |
111 | |
112 /** Is the next token a legal identifier? This includes pseudo-keywords. */ | |
113 bool _peekIdentifier() => _isIdentifier(_peekToken.kind); | |
114 | |
115 bool _isIdentifier(kind) { | |
116 return TokenKind.isIdentifier(kind) | |
117 // Note: 'await' is not a pseudo-keyword. When [enableAwait] is true, it | |
118 // is illegal to consider 'await' an identifier. | |
119 || (!enableAwait && kind == TokenKind.AWAIT); | |
120 } | |
121 | |
122 bool _maybeEat(int kind) { | |
123 if (_peekToken.kind == kind) { | |
124 _previousToken = _peekToken; | |
125 _peekToken = tokenizer.next(); | |
126 return true; | |
127 } else { | |
128 return false; | |
129 } | |
130 } | |
131 | |
132 void _eat(int kind) { | |
133 if (!_maybeEat(kind)) { | |
134 _errorExpected(TokenKind.kindToString(kind)); | |
135 } | |
136 } | |
137 | |
138 void _eatSemicolon() { | |
139 if (optionalSemicolons && _peekKind(TokenKind.END_OF_FILE)) return; | |
140 _eat(TokenKind.SEMICOLON); | |
141 } | |
142 | |
143 void _errorExpected(String expected) { | |
144 // Throw an IncompleteSourceException if that's the problem and | |
145 // throwOnIncomplete is true | |
146 if (throwOnIncomplete) isPrematureEndOfFile(); | |
147 var tok = _next(); | |
148 if (tok is ErrorToken && tok.message != null) { | |
149 // give priority to tokenizer errors | |
150 _error(tok.message, tok.span); | |
151 } else { | |
152 _error('expected $expected, but found $tok', tok.span); | |
153 } | |
154 } | |
155 | |
156 void _error(String message, [SourceSpan location=null]) { | |
157 // Suppress error messages while we're trying to recover. | |
158 if (_recover) return; | |
159 | |
160 if (location == null) { | |
161 location = _peekToken.span; | |
162 } | |
163 world.fatal(message, location); // syntax errors are fatal for now | |
164 _recover = true; // start error recovery | |
165 } | |
166 | |
167 /** Skips from an opening '{' to the syntactically matching '}'. */ | |
168 void _skipBlock() { | |
169 int depth = 1; | |
170 _eat(TokenKind.LBRACE); | |
171 while (true) { | |
172 var tok = _next(); | |
173 if (tok.kind == TokenKind.LBRACE) { | |
174 depth += 1; | |
175 } else if (tok.kind == TokenKind.RBRACE) { | |
176 depth -= 1; | |
177 if (depth == 0) return; | |
178 } else if (tok.kind == TokenKind.END_OF_FILE) { | |
179 _error('unexpected end of file during diet parse', tok.span); | |
180 return; | |
181 } | |
182 } | |
183 } | |
184 | |
185 SourceSpan _makeSpan(int start) { | |
186 return new SourceSpan(source, start, _previousToken.end); | |
187 } | |
188 | |
189 /////////////////////////////////////////////////////////////////// | |
190 // Top level productions | |
191 /////////////////////////////////////////////////////////////////// | |
192 | |
193 /** Entry point to the parser for parsing a compilation unit (i.e. a file). */ | |
194 List<Definition> compilationUnit() { | |
195 var ret = []; | |
196 _maybeEat(TokenKind.HASHBANG); | |
197 | |
198 while (_peekKind(TokenKind.HASH)) { | |
199 ret.add(directive()); | |
200 } | |
201 _recover = false; | |
202 while (!_maybeEat(TokenKind.END_OF_FILE)) { | |
203 ret.add(topLevelDefinition()); | |
204 } | |
205 _recover = false; | |
206 return ret; | |
207 } | |
208 | |
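// Parses a directive, e.g. #library('foo'); or #import('dart:coreimpl');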
209 directive() { | |
210 int start = _peekToken.start; | |
211 _eat(TokenKind.HASH); | |
212 var name = identifier(); | |
213 var args = arguments(); | |
214 _eatSemicolon(); | |
215 return new DirectiveDefinition(name, args, _makeSpan(start)); | |
216 } | |
217 | |
218 topLevelDefinition() { | |
219 switch (_peek()) { | |
220 case TokenKind.CLASS: | |
221 return classDefinition(TokenKind.CLASS); | |
222 case TokenKind.INTERFACE: | |
223 return classDefinition(TokenKind.INTERFACE); | |
224 case TokenKind.TYPEDEF: | |
225 return functionTypeAlias(); | |
226 default: | |
227 return declaration(); | |
228 } | |
229 } | |
230 | |
231 /** Entry point to the parser for an eval unit (i.e. a repl command). */ | |
232 evalUnit() { | |
233 switch (_peek()) { | |
234 case TokenKind.CLASS: | |
235 return classDefinition(TokenKind.CLASS); | |
236 case TokenKind.INTERFACE: | |
237 return classDefinition(TokenKind.INTERFACE); | |
238 case TokenKind.TYPEDEF: | |
239 return functionTypeAlias(); | |
240 default: | |
241 return statement(); | |
242 } | |
243 _recover = false; | |
244 } | |
245 | |
246 /////////////////////////////////////////////////////////////////// | |
247 // Definition productions | |
248 /////////////////////////////////////////////////////////////////// | |
249 | |
250 classDefinition(int kind) { | |
251 int start = _peekToken.start; | |
252 _eat(kind); | |
253 var name = identifierForType(); | |
254 | |
255 var typeParams = null; | |
256 if (_peekKind(TokenKind.LT)) { | |
257 typeParams = typeParameters(); | |
258 } | |
259 | |
260 var _extends = null; | |
261 if (_maybeEat(TokenKind.EXTENDS)) { | |
262 _extends = typeList(); | |
263 } | |
264 | |
265 var _implements = null; | |
266 if (_maybeEat(TokenKind.IMPLEMENTS)) { | |
267 _implements = typeList(); | |
268 } | |
269 | |
270 var _native = null; | |
271 if (_maybeEat(TokenKind.NATIVE)) { | |
272 _native = maybeStringLiteral(); | |
273 if (_native != null) _native = new NativeType(_native); | |
274 } | |
275 | |
276 bool oldFactory = _maybeEat(TokenKind.FACTORY); | |
277 var defaultType = null; | |
278 if (oldFactory || _maybeEat(TokenKind.DEFAULT)) { | |
279 // TODO(jmesserly): keep old factory support for now. Remove soon. | |
280 if (oldFactory) { | |
281 world.warning('factory no longer supported, use "default" instead', | |
282 _previousToken.span); | |
283 } | |
284 | |
285 // Note: this can't be type() because it has type parameters, not type | |
286 // arguments. | |
287 var baseType = nameTypeReference(); | |
288 var factTypeParams = null; | |
289 if (_peekKind(TokenKind.LT)) { | |
290 factTypeParams = typeParameters(); | |
291 } | |
292 defaultType = new DefaultTypeReference(oldFactory, | |
293 baseType, factTypeParams, _makeSpan(baseType.span.start)); | |
294 } | |
295 | |
296 var body = []; | |
297 if (_maybeEat(TokenKind.LBRACE)) { | |
298 while (!_maybeEat(TokenKind.RBRACE)) { | |
299 body.add(declaration()); | |
300 if (_recover) { | |
301 if (!_recoverTo(TokenKind.RBRACE, TokenKind.SEMICOLON)) break; | |
302 _maybeEat(TokenKind.SEMICOLON); | |
303 } | |
304 } | |
305 } else { | |
306 _errorExpected('block starting with "{" or ";"'); | |
307 } | |
308 return new TypeDefinition(kind == TokenKind.CLASS, name, typeParams, | |
309 _extends, _implements, _native, defaultType, body, _makeSpan(start)); | |
310 } | |
311 | |
312 functionTypeAlias() { | |
313 int start = _peekToken.start; | |
314 _eat(TokenKind.TYPEDEF); | |
315 | |
316 var di = declaredIdentifier(false); | |
317 var typeParams = null; | |
318 if (_peekKind(TokenKind.LT)) { | |
319 typeParams = typeParameters(); | |
320 } | |
321 var formals = formalParameterList(); | |
322 _eatSemicolon(); | |
323 | |
324 // TODO(jimhug): Validate that di.name is not a pseudo-keyword | |
325 var func = new FunctionDefinition(null, di.type, di.name, formals, | |
326 null, null, null, _makeSpan(start)); | |
327 | |
328 return new FunctionTypeDefinition(func, typeParams, _makeSpan(start)); | |
329 } | |
330 | |
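// Parses a constructor initializer list, e.g. the 'x = 1, super(y)' part of
// 'Foo(y) : x = 1, super(y) { ... }'. The caller has already consumed the ':'.
// Lambdas are inhibited so a following '{' is read as the constructor body.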
331 initializers() { | |
332 _inhibitLambda = true; | |
333 var ret = []; | |
334 do { | |
335 ret.add(expression()); | |
336 } while (_maybeEat(TokenKind.COMMA)); | |
337 _inhibitLambda = false; | |
338 return ret; | |
339 } | |
340 | |
341 functionBody(bool inExpression) { | |
342 int start = _peekToken.start; | |
343 if (_maybeEat(TokenKind.ARROW)) { | |
344 var expr = expression(); | |
345 if (!inExpression) { | |
346 _eatSemicolon(); | |
347 } | |
348 return new ReturnStatement(expr, _makeSpan(start)); | |
349 } else if (_peekKind(TokenKind.LBRACE)) { | |
350 if (diet) { | |
351 _skipBlock(); | |
352 return new DietStatement(_makeSpan(start)); | |
353 } else { | |
354 return block(); | |
355 } | |
356 } else if (!inExpression) { | |
357 if (_maybeEat(TokenKind.SEMICOLON)) { | |
358 return null; | |
359 } | |
360 } | |
361 | |
362 _error('Expected function body (neither { nor => found)'); | |
363 } | |
364 | |
365 finishField(start, modifiers, type, name, value) { | |
366 var names = [name]; | |
367 var values = [value]; | |
368 | |
369 while (_maybeEat(TokenKind.COMMA)) { | |
370 names.add(identifier()); | |
371 if (_maybeEat(TokenKind.ASSIGN)) { | |
372 values.add(expression()); | |
373 } else { | |
374 values.add(null); | |
375 } | |
376 } | |
377 | |
378 _eatSemicolon(); | |
379 return new VariableDefinition(modifiers, type, names, values, | |
380 _makeSpan(start)); | |
381 } | |
382 | |
383 finishDefinition(int start, List<Token> modifiers, di) { | |
384 switch(_peek()) { | |
385 case TokenKind.LPAREN: | |
386 var formals = formalParameterList(); | |
387 var inits = null, native = null; | |
388 if (_maybeEat(TokenKind.COLON)) { | |
389 inits = initializers(); | |
390 } | |
391 if (_maybeEat(TokenKind.NATIVE)) { | |
392 native = maybeStringLiteral(); | |
393 if (native == null) native = ''; | |
394 } | |
395 var body = functionBody(/*inExpression:*/false); | |
396 if (di.name == null) { | |
397 // TODO(jimhug): Must be named constructor - verify how? | |
398 di.name = di.type.name; | |
399 } | |
400 return new FunctionDefinition(modifiers, di.type, di.name, formals, | |
401 inits, native, body, _makeSpan(start)); | |
402 | |
403 case TokenKind.ASSIGN: | |
404 _eat(TokenKind.ASSIGN); | |
405 var value = expression(); | |
406 return finishField(start, modifiers, di.type, di.name, value); | |
407 | |
408 case TokenKind.COMMA: | |
409 case TokenKind.SEMICOLON: | |
410 return finishField(start, modifiers, di.type, di.name, null); | |
411 | |
412 default: | |
413 // TODO(jimhug): This error message sucks. | |
414 _errorExpected('declaration'); | |
415 | |
416 return null; | |
417 } | |
418 } | |
419 | |
420 declaration([bool includeOperators=true]) { | |
421 int start = _peekToken.start; | |
422 if (_peekKind(TokenKind.FACTORY)) { | |
423 return factoryConstructorDeclaration(); | |
424 } | |
425 | |
426 var modifiers = _readModifiers(); | |
427 return finishDefinition(start, modifiers, | |
428 declaredIdentifier(includeOperators)); | |
429 } | |
430 | |
431 // TODO(jmesserly): do we still need this method? | |
432 // I left it here for now to support old-style factories | |
433 factoryConstructorDeclaration() { | |
434 int start = _peekToken.start; | |
435 var factoryToken = _next(); | |
436 | |
437 var names = [identifier()]; | |
438 while (_maybeEat(TokenKind.DOT)) { | |
439 names.add(identifier()); | |
440 } | |
441 if (_peekKind(TokenKind.LT)) { | |
442 var tp = typeParameters(); | |
443 world.warning('type parameters on factories are no longer supported, ' | |
444 'place them on the class instead', _makeSpan(tp[0].span.start)); | |
445 } | |
446 | |
447 var name = null; | |
448 var type = null; | |
449 if (_maybeEat(TokenKind.DOT)) { | |
450 name = identifier(); | |
451 } else { | |
452 if (names.length > 1) { | |
453 name = names.removeLast(); | |
454 } else { | |
455 name = new Identifier('', names[0].span); | |
456 } | |
457 } | |
458 | |
459 if (names.length > 1) { | |
460 // TODO(jimhug): This is nasty to support and currently unused. | |
461 _error('unsupported qualified name for factory', names[0].span); | |
462 } | |
463 type = new NameTypeReference(false, names[0], null, names[0].span); | |
464 var di = new DeclaredIdentifier(type, name, false, _makeSpan(start)); | |
465 return finishDefinition(start, [factoryToken], di); | |
466 } | |
467 | |
468 /////////////////////////////////////////////////////////////////// | |
469 // Statement productions | |
470 /////////////////////////////////////////////////////////////////// | |
471 Statement statement() { | |
472 switch (_peek()) { | |
473 case TokenKind.BREAK: | |
474 return breakStatement(); | |
475 case TokenKind.CONTINUE: | |
476 return continueStatement(); | |
477 case TokenKind.RETURN: | |
478 return returnStatement(); | |
479 case TokenKind.THROW: | |
480 return throwStatement(); | |
481 case TokenKind.ASSERT: | |
482 return assertStatement(); | |
483 | |
484 case TokenKind.WHILE: | |
485 return whileStatement(); | |
486 case TokenKind.DO: | |
487 return doStatement(); | |
488 case TokenKind.FOR: | |
489 return forStatement(); | |
490 | |
491 case TokenKind.IF: | |
492 return ifStatement(); | |
493 case TokenKind.SWITCH: | |
494 return switchStatement(); | |
495 | |
496 case TokenKind.TRY: | |
497 return tryStatement(); | |
498 | |
499 case TokenKind.LBRACE: | |
500 return block(); | |
501 case TokenKind.SEMICOLON: | |
502 return emptyStatement(); | |
503 | |
504 case TokenKind.FINAL: | |
505 return declaration(false); | |
506 case TokenKind.VAR: | |
507 return declaration(false); | |
508 | |
509 default: | |
510 // Covers var decl, func decl, labeled stmt and real expressions. | |
511 return finishExpressionAsStatement(expression()); | |
512 } | |
513 } | |
514 | |
515 finishExpressionAsStatement(expr) { | |
516 // TODO(jimhug): This method looks very inefficient - bundle tests. | |
517 int start = expr.span.start; | |
518 | |
519 if (_maybeEat(TokenKind.COLON)) { | |
520 var label = _makeLabel(expr); | |
521 return new LabeledStatement(label, statement(), _makeSpan(start)); | |
522 } | |
523 | |
524 if (expr is LambdaExpression) { | |
525 if (expr.func.body is! BlockStatement) { | |
526 _eatSemicolon(); | |
527 expr.func.span = _makeSpan(start); | |
528 } | |
529 return expr.func; | |
530 } else if (expr is DeclaredIdentifier) { | |
531 var value = null; | |
532 if (_maybeEat(TokenKind.ASSIGN)) { | |
533 value = expression(); | |
534 } | |
535 return finishField(start, null, expr.type, expr.name, value); | |
536 } else if (_isBin(expr, TokenKind.ASSIGN) && | |
537 (expr.x is DeclaredIdentifier)) { | |
538 DeclaredIdentifier di = expr.x; // TODO(jimhug): inference should handle! | |
539 return finishField(start, null, di.type, di.name, expr.y); | |
540 } else if (_isBin(expr, TokenKind.LT) && _maybeEat(TokenKind.COMMA)) { | |
541 var baseType = _makeType(expr.x); | |
542 var typeArgs = [_makeType(expr.y)]; | |
543 var gt = _finishTypeArguments(baseType, 0, typeArgs); | |
544 var name = identifier(); | |
545 var value = null; | |
546 if (_maybeEat(TokenKind.ASSIGN)) { | |
547 value = expression(); | |
548 } | |
549 return finishField(expr.span.start, null, gt, name, value); | |
550 } else { | |
551 _eatSemicolon(); | |
552 return new ExpressionStatement(expr, _makeSpan(expr.span.start)); | |
553 } | |
554 } | |
555 | |
556 Expression testCondition() { | |
557 _eatLeftParen(); | |
558 var ret = expression(); | |
559 _eat(TokenKind.RPAREN); | |
560 return ret; | |
561 } | |
562 | |
563 /** Parses a block. Also serves as an entry point when parsing a [DietStatement]. */ | |
564 BlockStatement block() { | |
565 int start = _peekToken.start; | |
566 _eat(TokenKind.LBRACE); | |
567 var stmts = []; | |
568 while (!_maybeEat(TokenKind.RBRACE)) { | |
569 stmts.add(statement()); | |
570 if (_recover && !_recoverTo(TokenKind.RBRACE, TokenKind.SEMICOLON)) break; | |
571 } | |
572 _recover = false; | |
573 return new BlockStatement(stmts, _makeSpan(start)); | |
574 } | |
575 | |
576 EmptyStatement emptyStatement() { | |
577 int start = _peekToken.start; | |
578 _eat(TokenKind.SEMICOLON); | |
579 return new EmptyStatement(_makeSpan(start)); | |
580 } | |
581 | |
582 | |
583 IfStatement ifStatement() { | |
584 int start = _peekToken.start; | |
585 _eat(TokenKind.IF); | |
586 var test = testCondition(); | |
587 var trueBranch = statement(); | |
588 var falseBranch = null; | |
589 if (_maybeEat(TokenKind.ELSE)) { | |
590 falseBranch = statement(); | |
591 } | |
592 return new IfStatement(test, trueBranch, falseBranch, _makeSpan(start)); | |
593 } | |
594 | |
595 WhileStatement whileStatement() { | |
596 int start = _peekToken.start; | |
597 _eat(TokenKind.WHILE); | |
598 var test = testCondition(); | |
599 var body = statement(); | |
600 return new WhileStatement(test, body, _makeSpan(start)); | |
601 } | |
602 | |
603 DoStatement doStatement() { | |
604 int start = _peekToken.start; | |
605 _eat(TokenKind.DO); | |
606 var body = statement(); | |
607 _eat(TokenKind.WHILE); | |
608 var test = testCondition(); | |
609 _eatSemicolon(); | |
610 return new DoStatement(body, test, _makeSpan(start)); | |
611 } | |
612 | |
613 forStatement() { | |
614 int start = _peekToken.start; | |
615 _eat(TokenKind.FOR); | |
616 _eatLeftParen(); | |
617 | |
618 var init = forInitializerStatement(start); | |
619 if (init is ForInStatement) { | |
620 return init; | |
621 } | |
622 var test = null; | |
623 if (!_maybeEat(TokenKind.SEMICOLON)) { | |
624 test = expression(); | |
625 _eatSemicolon(); | |
626 } | |
627 var step = []; | |
628 if (!_maybeEat(TokenKind.RPAREN)) { | |
629 step.add(expression()); | |
630 while (_maybeEat(TokenKind.COMMA)) { | |
631 step.add(expression()); | |
632 } | |
633 _eat(TokenKind.RPAREN); | |
634 } | |
635 | |
636 var body = statement(); | |
637 | |
638 return new ForStatement(init, test, step, body, _makeSpan(start)); | |
639 } | |
640 | |
641 forInitializerStatement(int start) { | |
642 if (_maybeEat(TokenKind.SEMICOLON)) { | |
643 return null; | |
644 } else { | |
645 var init = expression(); | |
646 // Weird code here is needed to handle generic types and for-in. | |
647 // TODO(jmesserly): unify with block in finishExpressionAsStatement | |
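// e.g. in 'for (Map<K, V> x in y)', expression() parses 'Map < K' as a
// comparison; the ',' reveals it was really the start of a generic type.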
648 if (_peekKind(TokenKind.COMMA) && _isBin(init, TokenKind.LT)) { | |
649 _eat(TokenKind.COMMA); | |
650 var baseType = _makeType(init.x); | |
651 var typeArgs = [_makeType(init.y)]; | |
652 var gt = _finishTypeArguments(baseType, 0, typeArgs); | |
653 var name = identifier(); | |
654 init = new DeclaredIdentifier(gt, name, false, _makeSpan(init.span.start)); | |
655 } | |
656 | |
657 if (_maybeEat(TokenKind.IN)) { | |
658 return _finishForIn(start, _makeDeclaredIdentifier(init)); | |
659 } else { | |
660 return finishExpressionAsStatement(init); | |
661 } | |
662 } | |
663 } | |
664 | |
665 _finishForIn(int start, DeclaredIdentifier di) { | |
666 var expr = expression(); | |
667 _eat(TokenKind.RPAREN); | |
668 var body = statement(); | |
669 return new ForInStatement(di, expr, body, | |
670 _makeSpan(start)); | |
671 } | |
672 | |
673 tryStatement() { | |
674 int start = _peekToken.start; | |
675 _eat(TokenKind.TRY); | |
676 var body = block(); | |
677 var catches = []; | |
678 | |
679 while (_peekKind(TokenKind.CATCH)) { | |
680 catches.add(catchNode()); | |
681 } | |
682 | |
683 var finallyBlock = null; | |
684 if (_maybeEat(TokenKind.FINALLY)) { | |
685 finallyBlock = block(); | |
686 } | |
687 return new TryStatement(body, catches, finallyBlock, _makeSpan(start)); | |
688 } | |
689 | |
690 catchNode() { | |
691 int start = _peekToken.start; | |
692 _eat(TokenKind.CATCH); | |
693 _eatLeftParen(); | |
694 var exc = declaredIdentifier(); | |
695 var trace = null; | |
696 if (_maybeEat(TokenKind.COMMA)) { | |
697 trace = declaredIdentifier(); | |
698 } | |
699 _eat(TokenKind.RPAREN); | |
700 var body = block(); | |
701 return new CatchNode(exc, trace, body, _makeSpan(start)); | |
702 } | |
703 | |
704 switchStatement() { | |
705 int start = _peekToken.start; | |
706 _eat(TokenKind.SWITCH); | |
707 var test = testCondition(); | |
708 var cases = []; | |
709 _eat(TokenKind.LBRACE); | |
710 while (!_maybeEat(TokenKind.RBRACE)) { | |
711 cases.add(caseNode()); | |
712 } | |
713 return new SwitchStatement(test, cases, _makeSpan(start)); | |
714 } | |
715 | |
716 _peekCaseEnd() { | |
717 var kind = _peek(); | |
718 // TODO(efortuna): also if the first is an identifier followed by a colon, we | |
719 // have a label for the case statement. | |
720 return kind == TokenKind.RBRACE || kind == TokenKind.CASE || | |
721 kind == TokenKind.DEFAULT; | |
722 } | |
723 | |
724 caseNode() { | |
725 int start = _peekToken.start; | |
726 var label = null; | |
727 if (_peekIdentifier()) { | |
728 label = identifier(); | |
729 _eat(TokenKind.COLON); | |
730 } | |
731 var cases = []; | |
732 while (true) { | |
733 if (_maybeEat(TokenKind.CASE)) { | |
734 cases.add(expression()); | |
735 _eat(TokenKind.COLON); | |
736 } else if (_maybeEat(TokenKind.DEFAULT)) { | |
737 cases.add(null); | |
738 _eat(TokenKind.COLON); | |
739 } else { | |
740 break; | |
741 } | |
742 } | |
743 if (cases.length == 0) { | |
744 _error('case or default'); | |
745 } | |
746 var stmts = []; | |
747 while (!_peekCaseEnd()) { | |
748 stmts.add(statement()); | |
749 if (_recover && !_recoverTo( | |
750 TokenKind.RBRACE, TokenKind.CASE, TokenKind.DEFAULT)) { | |
751 break; | |
752 } | |
753 } | |
754 return new CaseNode(label, cases, stmts, _makeSpan(start)); | |
755 } | |
756 | |
757 returnStatement() { | |
758 int start = _peekToken.start; | |
759 _eat(TokenKind.RETURN); | |
760 var expr; | |
761 if (_maybeEat(TokenKind.SEMICOLON)) { | |
762 expr = null; | |
763 } else { | |
764 expr = expression(); | |
765 _eatSemicolon(); | |
766 } | |
767 return new ReturnStatement(expr, _makeSpan(start)); | |
768 } | |
769 | |
770 throwStatement() { | |
771 int start = _peekToken.start; | |
772 _eat(TokenKind.THROW); | |
773 var expr; | |
774 if (_maybeEat(TokenKind.SEMICOLON)) { | |
775 expr = null; | |
776 } else { | |
777 expr = expression(); | |
778 _eatSemicolon(); | |
779 } | |
780 return new ThrowStatement(expr, _makeSpan(start)); | |
781 } | |
782 | |
783 assertStatement() { | |
784 int start = _peekToken.start; | |
785 _eat(TokenKind.ASSERT); | |
786 _eatLeftParen(); | |
787 var expr = expression(); | |
788 _eat(TokenKind.RPAREN); | |
789 _eatSemicolon(); | |
790 return new AssertStatement(expr, _makeSpan(start)); | |
791 } | |
792 | |
793 breakStatement() { | |
794 int start = _peekToken.start; | |
795 _eat(TokenKind.BREAK); | |
796 var name = null; | |
797 if (_peekIdentifier()) { | |
798 name = identifier(); | |
799 } | |
800 _eatSemicolon(); | |
801 return new BreakStatement(name, _makeSpan(start)); | |
802 } | |
803 | |
804 continueStatement() { | |
805 int start = _peekToken.start; | |
806 _eat(TokenKind.CONTINUE); | |
807 var name = null; | |
808 if (_peekIdentifier()) { | |
809 name = identifier(); | |
810 } | |
811 _eatSemicolon(); | |
812 return new ContinueStatement(name, _makeSpan(start)); | |
813 } | |
814 | |
815 | |
816 /////////////////////////////////////////////////////////////////// | |
817 // Expression productions | |
818 /////////////////////////////////////////////////////////////////// | |
819 expression() { | |
820 return infixExpression(0); | |
821 } | |
822 | |
823 _makeType(expr) { | |
824 if (expr is VarExpression) { | |
825 return new NameTypeReference(false, expr.name, null, expr.span); | |
826 } else if (expr is DotExpression) { | |
827 var type = _makeType(expr.self); | |
828 if (type.names == null) { | |
829 type.names = [expr.name]; | |
830 } else { | |
831 type.names.add(expr.name); | |
832 } | |
833 type.span = expr.span; | |
834 return type; | |
835 } else { | |
836 _error('expected type reference'); | |
837 return null; | |
838 } | |
839 } | |
840 | |
841 infixExpression(int precedence) { | |
842 return finishInfixExpression(unaryExpression(), precedence); | |
843 } | |
844 | |
845 _finishDeclaredId(type) { | |
846 var name = identifier(); | |
847 return finishPostfixExpression( | |
848 new DeclaredIdentifier(type, name, false, _makeSpan(type.span.start))); | |
849 } | |
850 | |
851 /** | |
852 * Takes an initial binary expression of A < B and turns it into a | |
853  * declared identifier, including the A < B piece in the type. | |
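 * For example, when 'A < B' is followed by '>', it is reinterpreted as the
 * generic type A<B> of a declared identifier such as 'A<B> foo'.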
854 */ | |
855 _fixAsType(BinaryExpression x) { | |
856 assert(_isBin(x, TokenKind.LT)); | |
857 // TODO(jimhug): good errors when expectations are violated | |
858 if (_maybeEat(TokenKind.GT)) { | |
859 // The simple case of A < B > just becomes a generic type | |
860 var base = _makeType(x.x); | |
861 var typeParam = _makeType(x.y); | |
862 var type = new GenericTypeReference(base, [typeParam], 0, | |
863 _makeSpan(x.span.start)); | |
864 return _finishDeclaredId(type); | |
865 } else { | |
866 // The case of A < B < kicks off a lot more parsing. | |
867 assert(_peekKind(TokenKind.LT)); | |
868 | |
869 var base = _makeType(x.x); | |
870 var paramBase = _makeType(x.y); | |
871 var firstParam = addTypeArguments(paramBase, 1); | |
872 | |
873 var type; | |
874 if (firstParam.depth <= 0) { | |
875 type = new GenericTypeReference(base, [firstParam], 0, | |
876 _makeSpan(x.span.start)); | |
877 } else if (_maybeEat(TokenKind.COMMA)) { | |
878 type = _finishTypeArguments(base, 0, [firstParam]); | |
879 } else { | |
880 _eat(TokenKind.GT); | |
881 type = new GenericTypeReference(base, [firstParam], 0, | |
882 _makeSpan(x.span.start)); | |
883 } | |
884 return _finishDeclaredId(type); | |
885 } | |
886 } | |
887 | |
888 finishInfixExpression(Expression x, int precedence) { | |
889 while (true) { | |
890 int kind = _peek(); | |
891 var prec = TokenKind.infixPrecedence(_peek()); | |
892 if (prec >= precedence) { | |
893 if (kind == TokenKind.LT || kind == TokenKind.GT) { | |
894 if (_isBin(x, TokenKind.LT)) { | |
895 // This must be a generic type according to the Dart grammar. | |
896 // This rule is in the grammar to forbid A < B < C and | |
897 // A < B > C as expressions both because they don't make sense | |
898 // and to make it easier to disambiguate the generic types. | |
899 // There are a number of other comparison operators that are | |
900 // also disallowed from nesting in this way, but in the spirit of this | |
901 // "friendly" parser, those will be allowed until a later phase. | |
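// e.g. 'A < B > c' or 'List<List<int>> x' reaches this point once 'A < B'
// has been parsed as a comparison.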
902 return _fixAsType(x); | |
903 } | |
904 } | |
905 var op = _next(); | |
906 if (op.kind == TokenKind.IS) { | |
907 var isTrue = !_maybeEat(TokenKind.NOT); | |
908 var typeRef = type(); | |
909 x = new IsExpression(isTrue, x, typeRef, _makeSpan(x.span.start)); | |
910 continue; | |
911 } | |
912 // Using prec + 1 ensures that a - b - c will group correctly. | |
913 // Using prec for ASSIGN ops ensures that a = b = c groups correctly. | |
914 var y = infixExpression(prec == 2 ? prec: prec+1); | |
915 if (op.kind == TokenKind.CONDITIONAL) { | |
916 _eat(TokenKind.COLON); | |
917 // Using prec here so that "a ? b : c ? d : e" groups correctly as | |
918 // "a ? b : (c ? d : e)" | |
919 var z = infixExpression(prec); | |
920 x = new ConditionalExpression(x, y, z, _makeSpan(x.span.start)); | |
921 } else { | |
922 x = new BinaryExpression(op, x, y, _makeSpan(x.span.start)); | |
923 } | |
924 } else { | |
925 break; | |
926 } | |
927 } | |
928 return x; | |
929 } | |
930 | |
931 _isPrefixUnaryOperator(int kind) { | |
932 switch(kind) { | |
933 case TokenKind.ADD: | |
934 case TokenKind.SUB: | |
935 case TokenKind.NOT: | |
936 case TokenKind.BIT_NOT: | |
937 case TokenKind.INCR: | |
938 case TokenKind.DECR: | |
939 return true; | |
940 default: | |
941 return false; | |
942 } | |
943 } | |
944 | |
945 unaryExpression() { | |
946 int start = _peekToken.start; | |
947 // peek for prefixOperators and incrementOperators | |
948 if (_isPrefixUnaryOperator(_peek())) { | |
949 var tok = _next(); | |
950 var expr = unaryExpression(); | |
951 return new UnaryExpression(tok, expr, _makeSpan(start)); | |
952 } else if (enableAwait && _maybeEat(TokenKind.AWAIT)) { | |
953 var expr = unaryExpression(); | |
954 return new AwaitExpression(expr, _makeSpan(start)); | |
955 } | |
956 | |
957 return finishPostfixExpression(primary()); | |
958 } | |
959 | |
960 argument() { | |
961 int start = _peekToken.start; | |
962 var expr; | |
963 var label = null; | |
964 if (_maybeEat(TokenKind.ELLIPSIS)) { | |
965 label = new Identifier('...', _makeSpan(start)); | |
966 } | |
967 expr = expression(); | |
968 if (label == null && _maybeEat(TokenKind.COLON)) { | |
969 label = _makeLabel(expr); | |
970 expr = expression(); | |
971 } | |
972 return new ArgumentNode(label, expr, _makeSpan(start)); | |
973 } | |
974 | |
975 arguments() { | |
976 var args = []; | |
977 _eatLeftParen(); | |
978 var saved = _inhibitLambda; | |
979 _inhibitLambda = false; | |
980 if (!_maybeEat(TokenKind.RPAREN)) { | |
981 do { | |
982 args.add(argument()); | |
983 } while (_maybeEat(TokenKind.COMMA)); | |
984 _eat(TokenKind.RPAREN); | |
985 } | |
986 _inhibitLambda = saved; | |
987 return args; | |
988 } | |
989 | |
990 finishPostfixExpression(expr) { | |
991 switch(_peek()) { | |
992 case TokenKind.LPAREN: | |
993 return finishCallOrLambdaExpression(expr); | |
994 case TokenKind.LBRACK: | |
995 _eat(TokenKind.LBRACK); | |
996 var index = expression(); | |
997 _eat(TokenKind.RBRACK); | |
998 return finishPostfixExpression(new IndexExpression(expr, index, | |
999 _makeSpan(expr.span.start))); | |
1000 case TokenKind.DOT: | |
1001 _eat(TokenKind.DOT); | |
1002 var name = identifier(); | |
1003 var ret = new DotExpression(expr, name, _makeSpan(expr.span.start)); | |
1004 return finishPostfixExpression(ret); | |
1005 | |
1006 case TokenKind.INCR: | |
1007 case TokenKind.DECR: | |
1008 var tok = _next(); | |
1009 return new PostfixExpression(expr, tok, _makeSpan(expr.span.start)); | |
1010 | |
1011 // These are pseudo-expressions supported for the cover grammar; | |
1012 // they must be forbidden when parsing initializers. | |
1013 // TODO(jmesserly): is this still needed? | |
1014 case TokenKind.ARROW: | |
1015 case TokenKind.LBRACE: | |
1016 return expr; | |
1017 | |
1018 default: | |
1019 if (_peekIdentifier()) { | |
1020 return finishPostfixExpression( | |
1021 new DeclaredIdentifier(_makeType(expr), identifier(), | |
1022 false, _makeSpan(expr.span.start))); | |
1023 } else { | |
1024 return expr; | |
1025 } | |
1026 } | |
1027 } | |
1028 | |
1029 finishCallOrLambdaExpression(expr) { | |
1030 if (_atClosureParameters()) { | |
1031 var formals = formalParameterList(); | |
1032 var body = functionBody(true); | |
1033 return _makeFunction(expr, formals, body); | |
1034 } else { | |
1035 if (expr is DeclaredIdentifier) { | |
1036 _error('illegal target for call, did you mean to declare a function?', | |
1037 expr.span); | |
1038 } | |
1039 var args = arguments(); | |
1040 return finishPostfixExpression( | |
1041 new CallExpression(expr, args, _makeSpan(expr.span.start))); | |
1042 } | |
1043 } | |
1044 | |
1045 /** Checks if the given expression is a binary op of the given kind. */ | |
1046 _isBin(expr, kind) { | |
1047 return expr is BinaryExpression && expr.op.kind == kind; | |
1048 } | |
1049 | |
1050 _makeLiteral(Value value) { | |
1051 return new LiteralExpression(value, value.span); | |
1052 } | |
1053 | |
1054 primary() { | |
1055 int start = _peekToken.start; | |
1056 switch (_peek()) { | |
1057 case TokenKind.THIS: | |
1058 _eat(TokenKind.THIS); | |
1059 return new ThisExpression(_makeSpan(start)); | |
1060 | |
1061 case TokenKind.SUPER: | |
1062 _eat(TokenKind.SUPER); | |
1063 return new SuperExpression(_makeSpan(start)); | |
1064 | |
1065 case TokenKind.CONST: | |
1066 _eat(TokenKind.CONST); | |
1067 if (_peekKind(TokenKind.LBRACK) || _peekKind(TokenKind.INDEX)) { | |
1068 return finishListLiteral(start, true, null); | |
1069 } else if (_peekKind(TokenKind.LBRACE)) { | |
1070 return finishMapLiteral(start, true, null, null); | |
1071 } else if (_peekKind(TokenKind.LT)) { | |
1072 return finishTypedLiteral(start, true); | |
1073 } else { | |
1074 return finishNewExpression(start, true); | |
1075 } | |
1076 | |
1077 case TokenKind.NEW: | |
1078 _eat(TokenKind.NEW); | |
1079 return finishNewExpression(start, false); | |
1080 | |
1081 case TokenKind.LPAREN: | |
1082 return _parenOrLambda(); | |
1083 | |
1084 case TokenKind.LBRACK: | |
1085 case TokenKind.INDEX: | |
1086 return finishListLiteral(start, false, null); | |
1087 case TokenKind.LBRACE: | |
1088 return finishMapLiteral(start, false, null, null); | |
1089 | |
1090 // Literals | |
1091 case TokenKind.NULL: | |
1092 _eat(TokenKind.NULL); | |
1093 return _makeLiteral(Value.fromNull(_makeSpan(start))); | |
1094 | |
1095 // TODO(jimhug): Make Literal creation less wasteful - no dup span/text. | |
1096 case TokenKind.TRUE: | |
1097 _eat(TokenKind.TRUE); | |
1098 return _makeLiteral(Value.fromBool(true, _makeSpan(start))); | |
1099 | |
1100 case TokenKind.FALSE: | |
1101 _eat(TokenKind.FALSE); | |
1102 return _makeLiteral(Value.fromBool(false, _makeSpan(start))); | |
1103 | |
1104 case TokenKind.HEX_INTEGER: | |
1105 var t = _next(); | |
1106 return _makeLiteral(Value.fromInt(t.value, t.span)); | |
1107 | |
1108 case TokenKind.INTEGER: | |
1109 var t = _next(); | |
1110 return _makeLiteral(Value.fromInt(Math.parseInt(t.text), t.span)); | |
1111 | |
1112 case TokenKind.DOUBLE: | |
1113 var t = _next(); | |
1114 return _makeLiteral( | |
1115 Value.fromDouble(Math.parseDouble(t.text), t.span)); | |
1116 | |
1117 case TokenKind.STRING: | |
1118 case TokenKind.STRING_PART: | |
1119 return adjacentStrings(); | |
1120 | |
1121 case TokenKind.LT: | |
1122 return finishTypedLiteral(start, false); | |
1123 | |
1124 case TokenKind.VOID: | |
1125 case TokenKind.VAR: | |
1126 case TokenKind.FINAL: | |
1127 return declaredIdentifier(false); | |
1128 | |
1129 default: | |
1130 if (!_peekIdentifier()) { | |
1131 // TODO(jimhug): Better error message. | |
1132 _errorExpected('expression'); | |
1133 } | |
1134 return new VarExpression(identifier(), _makeSpan(start)); | |
1135 } | |
1136 } | |
1137 | |
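// Parses one or more adjacent string literals, e.g. 'a' 'b' 'c', which are
// implicitly concatenated into a single expression.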
1138 adjacentStrings() { | |
1139 int start = _peekToken.start; | |
1140 List<Expression> strings = []; | |
1141 while (_peek() == TokenKind.STRING || _peek() == TokenKind.STRING_PART) { | |
1142 Expression part = null; | |
1143 if (_peek() == TokenKind.STRING) { | |
1144 var t = _next(); | |
1145 part = _makeLiteral(Value.fromString(t.value, t.span)); | |
1146 } else { | |
1147 part = stringInterpolation(); | |
1148 } | |
1149 strings.add(part); | |
1150 } | |
1151 if (strings.length == 1) { | |
1152 return strings[0]; | |
1153 } else { | |
1154 assert(!strings.isEmpty()); | |
1155 return new StringConcatExpression(strings, _makeSpan(start)); | |
1156 } | |
1157 } | |
1158 | |
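// Parses an interpolated string, e.g. 'x is $x' or 'sum is ${a + b}'.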
1159 stringInterpolation() { | |
1160 int start = _peekToken.start; | |
1161 var pieces = new List<Expression>(); | |
1162 var startQuote = null, endQuote = null; | |
1163 while(_peekKind(TokenKind.STRING_PART)) { | |
1164 var token = _next(); | |
1165 pieces.add(_makeLiteral(Value.fromString(token.value, token.span))); | |
1166 if (_maybeEat(TokenKind.LBRACE)) { | |
1167 pieces.add(expression()); | |
1168 _eat(TokenKind.RBRACE); | |
1169 } else if (_maybeEat(TokenKind.THIS)) { | |
1170 pieces.add(new ThisExpression(_previousToken.span)); | |
1171 } else { | |
1172 var id = identifier(); | |
1173 pieces.add(new VarExpression(id, id.span)); | |
1174 } | |
1175 } | |
1176 var tok = _next(); | |
1177 if (tok.kind != TokenKind.STRING) { | |
1178 _errorExpected('interpolated string'); | |
1179 } | |
1180 pieces.add(_makeLiteral(Value.fromString(tok.value, tok.span))); | |
1181 var span = _makeSpan(start); | |
1182 return new StringInterpExpression(pieces, span); | |
1183 } | |
1184 | |
1185 String maybeStringLiteral() { | |
1186 var kind = _peek(); | |
1187 if (kind == TokenKind.STRING) { | |
1188 var t = _next(); | |
1189 return t.value; | |
1190 } else if (kind == TokenKind.STRING_PART) { | |
1191 _next(); | |
1192 _errorExpected('string literal, but found interpolated string start'); | |
1193 } | |
1194 return null; | |
1195 } | |
1196 | |
1197 _parenOrLambda() { | |
1198 int start = _peekToken.start; | |
1199 if (_atClosureParameters()) { | |
1200 var formals = formalParameterList(); | |
1201 var body = functionBody(true); | |
1202 var func = new FunctionDefinition(null, null, null, formals, null, null, | |
1203 body, _makeSpan(start)); | |
1204 return new LambdaExpression(func, func.span); | |
1205 } else { | |
1206 _eatLeftParen(); | |
1207 var saved = _inhibitLambda; | |
1208 _inhibitLambda = false; | |
1209 var expr = expression(); | |
1210 _eat(TokenKind.RPAREN); | |
1211 _inhibitLambda = saved; | |
1212 return new ParenExpression(expr, _makeSpan(start)); | |
1213 } | |
1214 } | |
1215 | |
1216 bool _atClosureParameters() { | |
1217 if (_inhibitLambda) return false; | |
1218 Token after = _peekAfterCloseParen(); | |
1219 return after.kind == TokenKind.ARROW || after.kind == TokenKind.LBRACE; | |
1220 } | |
1221 | |
1222 /** Eats an LPAREN, and advances our after-RPAREN lookahead. */ | |
1223 _eatLeftParen() { | |
1224 _eat(TokenKind.LPAREN); | |
1225 _afterParensIndex++; | |
1226 } | |
1227 | |
1228 Token _peekAfterCloseParen() { | |
1229 if (_afterParensIndex < _afterParens.length) { | |
1230 return _afterParens[_afterParensIndex]; | |
1231 } | |
1232 | |
1233 // Reset the queue | |
1234 _afterParensIndex = 0; | |
1235 _afterParens.clear(); | |
1236 | |
1237 // Start copying tokens as we look ahead | |
1238 var tokens = <Token>[_next()]; // LPAREN | |
1239 _lookaheadAfterParens(tokens); | |
1240 | |
1241 // Put all the lookahead tokens back into the parser's token stream. | |
1242 var after = _peekToken; | |
1243 tokens.add(after); | |
1244 tokenizer = new DivertedTokenSource(tokens, this, tokenizer); | |
1245 _next(); // Re-synchronize parser lookahead state. | |
1246 return after; | |
1247 } | |
1248 | |
1249 /** | |
1250  * This scans for the RPAREN matching the current LPAREN and saves the | |
1251  * result for all nested parentheses so we don't need to look ahead again. | |
1252 */ | |
1253 _lookaheadAfterParens(List<Token> tokens) { | |
1254 // Save a slot in the array. This will hold the token after the parens. | |
1255 int saved = _afterParens.length; | |
1256 _afterParens.add(null); // save a slot | |
1257 while (true) { | |
1258 Token token = _next(); | |
1259 tokens.add(token); | |
1260 int kind = token.kind; | |
1261 if (kind == TokenKind.RPAREN || kind == TokenKind.END_OF_FILE) { | |
1262 _afterParens[saved] = _peekToken; | |
1263 return; | |
1264 } else if (kind == TokenKind.LPAREN) { | |
1265 // Scan anything inside these nested parentheses | |
1266 _lookaheadAfterParens(tokens); | |
1267 } | |
1268 } | |
1269 } | |
1270 | |
1271 _typeAsIdentifier(type) { | |
1272 if (type.name.name == 'void') { | |
1273 _errorExpected('identifier, but found "${type.name.name}"'); | |
1274 } | |
1275 | |
1276 // TODO(jimhug): lots of errors to check for | |
1277 return type.name; | |
1278 } | |
1279 | |
1280 _specialIdentifier(bool includeOperators) { | |
1281 int start = _peekToken.start; | |
1282 String name; | |
1283 | |
1284 switch (_peek()) { | |
1285 case TokenKind.ELLIPSIS: | |
1286 _eat(TokenKind.ELLIPSIS); | |
1287 _error('rest no longer supported', _previousToken.span); | |
1288 name = identifier().name; | |
1289 break; | |
1290 case TokenKind.THIS: | |
1291 _eat(TokenKind.THIS); | |
1292 _eat(TokenKind.DOT); | |
1293 name = 'this.${identifier().name}'; | |
1294 break; | |
1295 case TokenKind.GET: | |
1296 if (!includeOperators) return null; | |
1297 _eat(TokenKind.GET); | |
1298 if (_peekIdentifier()) { | |
1299 name = 'get:${identifier().name}'; | |
1300 } else { | |
1301 name = 'get'; | |
1302 } | |
1303 break; | |
1304 case TokenKind.SET: | |
1305 if (!includeOperators) return null; | |
1306 _eat(TokenKind.SET); | |
1307 if (_peekIdentifier()) { | |
1308 name = 'set:${identifier().name}'; | |
1309 } else { | |
1310 name = 'set'; | |
1311 } | |
1312 break; | |
1313 case TokenKind.OPERATOR: | |
1314 if (!includeOperators) return null; | |
1315 _eat(TokenKind.OPERATOR); | |
1316 var kind = _peek(); | |
1317 if (kind == TokenKind.NEGATE) { | |
1318 name = ':negate'; | |
1319 _next(); | |
1320 } else { | |
1321 name = TokenKind.binaryMethodName(kind); | |
1322 if (name == null) { | |
1323 // TODO(jimhug): This is a very useful error, but we have to | |
1324 // lose it because operator is a pseudo-keyword... | |
1325 //_errorExpected('legal operator name, but found: ${tok}'); | |
1326 name = 'operator'; | |
1327 } else { | |
1328 _next(); | |
1329 } | |
1330 } | |
1331 break; | |
1332 default: | |
1333 return null; | |
1334 } | |
1335 return new Identifier(name, _makeSpan(start)); | |
1336 } | |
1337 | |
1338 // Always includes 'this.' and '...' as legal names to simplify other code. | |
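// e.g. 'int x', 'final y', 'this.field', 'get length', or 'operator +'.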
1339 declaredIdentifier([bool includeOperators=false]) { | |
1340 int start = _peekToken.start; | |
1341 var myType = null; | |
1342 var name = _specialIdentifier(includeOperators); | |
1343 bool isFinal = false; | |
1344 if (name == null) { | |
1345 myType = type(); | |
1346 name = _specialIdentifier(includeOperators); | |
1347 if (name == null) { | |
1348 if (_peekIdentifier()) { | |
1349 name = identifier(); | |
1350 } else if (myType is NameTypeReference && myType.names == null) { | |
1351 name = _typeAsIdentifier(myType); | |
1352 isFinal = myType.isFinal; | |
1353 myType = null; | |
1354 } else { | |
1355 // TODO(jimhug): Where do these errors get handled? | |
1356 } | |
1357 } | |
1358 } | |
1359 return new DeclaredIdentifier(myType, name, isFinal, _makeSpan(start)); | |
1360 } | |
1361 | |
1362 finishNewExpression(int start, bool isConst) { | |
1363 var type = type(); | |
1364 var name = null; | |
1365 if (_maybeEat(TokenKind.DOT)) { | |
1366 name = identifier(); | |
1367 } | |
1368 var args = arguments(); | |
1369 return new NewExpression(isConst, type, name, args, _makeSpan(start)); | |
1370 } | |
1371 | |
1372 finishListLiteral(int start, bool isConst, TypeReference itemType) { | |
1373 if (_maybeEat(TokenKind.INDEX)) { | |
1374 // This is an empty list. | |
1375 return new ListExpression(isConst, itemType, [], _makeSpan(start)); | |
1376 } | |
1377 | |
1378 var values = []; | |
1379 _eat(TokenKind.LBRACK); | |
1380 while (!_maybeEat(TokenKind.RBRACK)) { | |
1381 values.add(expression()); | |
1382 if (_recover && !_recoverTo(TokenKind.RBRACK, TokenKind.COMMA)) break; | |
1383 if (!_maybeEat(TokenKind.COMMA)) { | |
1384 _eat(TokenKind.RBRACK); | |
1385 break; | |
1386 } | |
1387 } | |
1388 return new ListExpression(isConst, itemType, values, _makeSpan(start)); | |
1389 } | |
1390 | |
1391 finishMapLiteral(int start, bool isConst, | |
1392 TypeReference keyType, TypeReference valueType) { | |
1393 var items = []; | |
1394 _eat(TokenKind.LBRACE); | |
1395 while (!_maybeEat(TokenKind.RBRACE)) { | |
1396 // This is deliberately overly permissive - checked in later pass. | |
1397 items.add(expression()); | |
1398 _eat(TokenKind.COLON); | |
1399 items.add(expression()); | |
1400 if (_recover && !_recoverTo(TokenKind.RBRACE, TokenKind.COMMA)) break; | |
1401 if (!_maybeEat(TokenKind.COMMA)) { | |
1402 _eat(TokenKind.RBRACE); | |
1403 break; | |
1404 } | |
1405 } | |
1406 return new MapExpression(isConst, keyType, valueType, items, | |
1407 _makeSpan(start)); | |
1408 } | |
1409 | |
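// Parses a list or map literal prefixed with type arguments, e.g. '<int>[1, 2]'
// or '<String>{"a": "b"}'. The leading '<' has not been consumed yet.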
1410 finishTypedLiteral(int start, bool isConst) { | |
1411 var span = _makeSpan(start); | |
1412 | |
1413 final typeToBeNamedLater = new NameTypeReference(false, null, null, span); | |
1414 final genericType = addTypeArguments(typeToBeNamedLater, 0); | |
1415 final typeArgs = genericType.typeArguments; | |
1416 | |
1417 if (_peekKind(TokenKind.LBRACK) || _peekKind(TokenKind.INDEX)) { | |
1418 if (typeArgs.length != 1) { | |
1419 world.error('exactly one type argument expected for list', | |
1420 genericType.span); | |
1421 } | |
1422 return finishListLiteral(start, isConst, typeArgs[0]); | |
1423 } else if (_peekKind(TokenKind.LBRACE)) { | |
1424 var keyType, valueType; | |
1425 if (typeArgs.length == 1) { | |
1426 keyType = null; | |
1427 valueType = typeArgs[0]; | |
1428 } else if (typeArgs.length == 2) { | |
1429 keyType = typeArgs[0]; | |
1430 // Making the key type explicit is just a warning. | |
1431 world.warning( | |
1432 'a map literal takes one type argument specifying the value type', | |
1433 keyType.span); | |
1434 valueType = typeArgs[1]; | |
1435 } // Otherwise the type system will detect the mismatch in type arguments. | |
1436 return finishMapLiteral(start, isConst, keyType, valueType); | |
1437 } else { | |
1438 _errorExpected('array or map literal'); | |
1439 } | |
1440 } | |
1441 | |
1442 /////////////////////////////////////////////////////////////////// | |
1443 // Some auxiliary productions. | |
1444 /////////////////////////////////////////////////////////////////// | |
1445 _readModifiers() { | |
1446 var modifiers = null; | |
1447 while (true) { | |
1448 switch(_peek()) { | |
1449 case TokenKind.STATIC: | |
1450 case TokenKind.FINAL: | |
1451 case TokenKind.CONST: | |
1452 case TokenKind.ABSTRACT: | |
1453 case TokenKind.FACTORY: | |
1454 if (modifiers == null) modifiers = []; | |
1455 modifiers.add(_next()); | |
1456 break; | |
1457 default: | |
1458 return modifiers; | |
1459 } | |
1460 } | |
1461 | |
1462 return null; | |
1463 } | |
1464 | |
1465 ParameterType typeParameter() { | |
1466 // non-recursive - so always starts from zero depth | |
1467 int start = _peekToken.start; | |
1468 var name = identifier(); | |
1469 var myType = null; | |
1470 if (_maybeEat(TokenKind.EXTENDS)) { | |
1471 myType = type(1); | |
1472 } | |
1473 | |
1474 var tp = new TypeParameter(name, myType, _makeSpan(start)); | |
1475 return new ParameterType(name.name, tp); | |
1476 } | |
1477 | |
1478 List<ParameterType> typeParameters() { | |
1479 // always starts from zero depth | |
1480 _eat(TokenKind.LT); | |
1481 | |
1482 bool closed = false; | |
1483 var ret = []; | |
1484 do { | |
1485 var tp = typeParameter(); | |
1486 ret.add(tp); | |
1487 if (tp.typeParameter.extendsType is GenericTypeReference && | |
1488 tp.typeParameter.extendsType.dynamic.depth == 0) { | |
1489 closed = true; | |
1490 break; | |
1491 } | |
1492 } while (_maybeEat(TokenKind.COMMA)); | |
1493 if (!closed) { | |
1494 _eat(TokenKind.GT); | |
1495 } | |
1496 return ret; | |
1497 } | |
1498 | |
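// Consumes the '>' closing a type argument list. A single token may close more
// than one pending '<' (e.g. the '>>' ending 'List<List<int>>'), in which case
// the returned depth drops by the extra levels closed.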
1499 int _eatClosingAngle(int depth) { | |
1500 if (_maybeEat(TokenKind.GT)) { | |
1501 return depth; | |
1502 } else if (depth > 0 && _maybeEat(TokenKind.SAR)) { | |
1503 return depth-1; | |
1504 } else if (depth > 1 && _maybeEat(TokenKind.SHR)) { | |
1505 return depth-2; | |
1506 } else { | |
1507 _errorExpected('>'); | |
1508 return depth; | |
1509 } | |
1510 } | |
1511 | |
1512 addTypeArguments(TypeReference baseType, int depth) { | |
1513 _eat(TokenKind.LT); | |
1514 return _finishTypeArguments(baseType, depth, []); | |
1515 } | |
1516 | |
1517 _finishTypeArguments(TypeReference baseType, int depth, types) { | |
1518 var delta = -1; | |
1519 do { | |
1520 var myType = type(depth+1); | |
1521 types.add(myType); | |
1522 if (myType is GenericTypeReference && myType.depth <= depth) { | |
1523 // TODO(jimhug): Friendly error if peek(COMMA). | |
1524 delta = depth - myType.depth; | |
1525 break; | |
1526 } | |
1527 } while (_maybeEat(TokenKind.COMMA)); | |
1528 if (delta >= 0) { | |
1529 depth -= delta; | |
1530 } else { | |
1531 depth = _eatClosingAngle(depth); | |
1532 } | |
1533 | |
1534 var span = _makeSpan(baseType.span.start); | |
1535 return new GenericTypeReference(baseType, types, depth, span); | |
1536 } | |
1537 | |
1538 typeList() { | |
1539 var types = []; | |
1540 do { | |
1541 types.add(type()); | |
1542 } while (_maybeEat(TokenKind.COMMA)); | |
1543 | |
1544 return types; | |
1545 } | |
1546 | |
1547 nameTypeReference() { | |
1548 int start = _peekToken.start; | |
1549 var name; | |
1550 var names = null; | |
1551 var typeArgs = null; | |
1552 var isFinal = false; | |
1553 | |
1554 switch (_peek()) { | |
1555 case TokenKind.VOID: | |
1556 return new SimpleTypeReference(world.voidType, _next().span); | |
1557 case TokenKind.VAR: | |
1558 return new SimpleTypeReference(world.varType, _next().span); | |
1559 case TokenKind.FINAL: | |
1560 _eat(TokenKind.FINAL); | |
1561 isFinal = true; | |
1562 name = identifier(); | |
1563 break; | |
1564 default: | |
1565 name = identifier(); | |
1566 break; | |
1567 } | |
1568 | |
1569 while (_maybeEat(TokenKind.DOT)) { | |
1570 if (names == null) names = []; | |
1571 names.add(identifier()); | |
1572 } | |
1573 | |
1574 return new NameTypeReference(isFinal, name, names, _makeSpan(start)); | |
1575 } | |
1576 | |
1577 type([int depth = 0]) { | |
1578 var typeRef = nameTypeReference(); | |
1579 | |
1580 if (_peekKind(TokenKind.LT)) { | |
1581 return addTypeArguments(typeRef, depth); | |
1582 } else { | |
1583 return typeRef; | |
1584 } | |
1585 } | |
1586 | |
1587 formalParameter(bool inOptionalBlock) { | |
1588 int start = _peekToken.start; | |
1589 var isThis = false; | |
1590 var isRest = false; | |
1591 var di = declaredIdentifier(false); | |
1592 var type = di.type; | |
1593 var name = di.name; | |
1594 | |
1595 if (name == null) { | |
1596 _error('Formal parameter invalid', _makeSpan(start)); | |
1597 } | |
1598 | |
1599 var value = null; | |
1600 if (_maybeEat(TokenKind.ASSIGN)) { | |
1601 if (!inOptionalBlock) { | |
1602 _error('default values only allowed inside [optional] section'); | |
1603 } | |
1604 value = expression(); | |
1605 } else if (_peekKind(TokenKind.LPAREN)) { | |
1606 var formals = formalParameterList(); | |
1607 var func = new FunctionDefinition(null, type, name, formals, | |
1608 null, null, null, _makeSpan(start)); | |
1609 type = new FunctionTypeReference(false, func, func.span); | |
1610 } | |
1611 if (inOptionalBlock && value == null) { | |
1612 value = _makeLiteral(Value.fromNull(_makeSpan(start))); | |
1613 } | |
1614 | |
1615 return new FormalNode(isThis, isRest, type, name, value, _makeSpan(start)); | |
1616 } | |
1617 | |
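// Parses a formal parameter list, e.g. '(int a, [int b = 0])'; parameters in
// the '[...]' block are optional and default to null if no value is given.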
1618 formalParameterList() { | |
1619 _eatLeftParen(); | |
1620 var formals = []; | |
1621 var inOptionalBlock = false; | |
1622 if (!_maybeEat(TokenKind.RPAREN)) { | |
1623 if (_maybeEat(TokenKind.LBRACK)) { | |
1624 inOptionalBlock = true; | |
1625 } | |
1626 formals.add(formalParameter(inOptionalBlock)); | |
1627 while (_maybeEat(TokenKind.COMMA)) { | |
1628 if (_maybeEat(TokenKind.LBRACK)) { | |
1629 if (inOptionalBlock) { | |
1630 _error('already inside an optional block', _previousToken.span); | |
1631 } | |
1632 inOptionalBlock = true; | |
1633 } | |
1634 formals.add(formalParameter(inOptionalBlock)); | |
1635 } | |
1636 if (inOptionalBlock) { | |
1637 _eat(TokenKind.RBRACK); | |
1638 } | |
1639 _eat(TokenKind.RPAREN); | |
1640 } | |
1641 return formals; | |
1642 } | |
1643 | |
1644 // Type names are not allowed to use pseudo-keywords. | |
1645 identifierForType() { | |
1646 var tok = _next(); | |
1647 if (!_isIdentifier(tok.kind)) { | |
1648 _error('expected identifier, but found $tok', tok.span); | |
1649 } | |
1650 if (tok.kind !== TokenKind.IDENTIFIER && tok.kind != TokenKind.NATIVE) { | |
1651 _error('$tok may not be used as a type name', tok.span); | |
1652 } | |
1653 return new Identifier(tok.text, _makeSpan(tok.start)); | |
1654 } | |
1655 | |
1656 identifier() { | |
1657 var tok = _next(); | |
1658 if (!_isIdentifier(tok.kind)) { | |
1659 _error('expected identifier, but found $tok', tok.span); | |
1660 } | |
1661 | |
1662 return new Identifier(tok.text, _makeSpan(tok.start)); | |
1663 } | |
1664 | |
1665 /////////////////////////////////////////////////////////////////// | |
1666 // These last productions handle most ambiguities in grammar | |
1667 // They will convert expressions into other types. | |
1668 /////////////////////////////////////////////////////////////////// | |
1669 | |
1670 /** | |
1671  * Converts an [Expression], [Formals] and a [Statement] body into a | |
1672 * [FunctionDefinition]. | |
1673 */ | |
1674 _makeFunction(expr, formals, body) { | |
1675 var name, type; | |
1676 if (expr is VarExpression) { | |
1677 name = expr.name; | |
1678 type = null; | |
1679 } else if (expr is DeclaredIdentifier) { | |
1680 name = expr.name; | |
1681 type = expr.type; | |
1682 if (name == null) { | |
1683 _error('expected name and type', expr.span); | |
1684 } | |
1685 } else { | |
1686 _error('bad function body', expr.span); | |
1687 } | |
1688 var span = new SourceSpan(expr.span.file, expr.span.start, body.span.end); | |
1689 var func = new FunctionDefinition(null, type, name, formals, null, null, | |
1690 body, span); | |
1691 return new LambdaExpression(func, func.span); | |
1692 } | |
1693 | |
1694 /** Converts an expression to a [DeclaredIdentifier]. */ | |
1695 _makeDeclaredIdentifier(e) { | |
1696 if (e is VarExpression) { | |
1697 return new DeclaredIdentifier(null, e.name, false, e.span); | |
1698 } else if (e is DeclaredIdentifier) { | |
1699 return e; | |
1700 } else { | |
1701 _error('expected declared identifier'); | |
1702 return new DeclaredIdentifier(null, null, false, e.span); | |
1703 } | |
1704 } | |
1705 | |
1706 /** Converts an expression into a label. */ | |
1707 _makeLabel(expr) { | |
1708 if (expr is VarExpression) { | |
1709 return expr.name; | |
1710 } else { | |
1711 _errorExpected('label'); | |
1712 return null; | |
1713 } | |
1714 } | |
1715 } | |
1716 | |
1717 class IncompleteSourceException implements Exception { | |
1718 final Token token; | |
1719 | |
1720 IncompleteSourceException(this.token); | |
1721 | |
1722 String toString() { | |
1723 if (token.span == null) return 'Unexpected $token'; | |
1724 return token.span.toMessageString('Unexpected $token'); | |
1725 } | |
1726 } | |
1727 | |
1728 /** | |
1729 * Stores a token stream that will be used by the parser. Once the parser has | |
1730 * reached the end of this [TokenSource], it switches back to the | |
1731  * [previousTokenizer]. | |
1732 */ | |
1733 class DivertedTokenSource implements TokenSource { | |
1734 final List<Token> tokens; | |
1735 final Parser parser; | |
1736 final TokenSource previousTokenizer; | |
1737 DivertedTokenSource(this.tokens, this.parser, this.previousTokenizer); | |
1738 | |
1739 int _pos = 0; | |
1740 next() { | |
1741 var token = tokens[_pos]; | |
1742 ++_pos; | |
1743 if (_pos == tokens.length) { | |
1744 parser.tokenizer = previousTokenizer; | |
1745 } | |
1746 return token; | |
1747 } | |
1748 } | |