Chromium Code Reviews
chromiumcodereview-hr@appspot.gserviceaccount.com (chromiumcodereview-hr) | Please choose your nickname with Settings | Help | Chromium Project | Gerrit Changes | Sign out
(9)

Side by Side Diff: utils/css/tokenizer.dart

Issue 9695048: Template parser (Closed) Base URL: https://dart.googlecode.com/svn/branches/bleeding_edge/dart
Patch Set: Siggi's comments Created 8 years, 9 months ago
Use n/p to move between diff chunks; N/P to move between comments. Draft comments are only viewable by you.
Jump to:
View unified diff | Download patch | Annotate | Revision Log
« no previous file with comments | « utils/css/token.dart ('k') | utils/css/tokenizer_base.dart » ('j') | no next file with comments »
Toggle Intra-line Diffs ('i') | Expand Comments ('e') | Collapse Comments ('c') | Show Comments Hide Comments ('s')
OLDNEW
1 // Copyright (c) 2011, the Dart project authors. Please see the AUTHORS file 1 // Copyright (c) 2011, the Dart project authors. Please see the AUTHORS file
2 // for details. All rights reserved. Use of this source code is governed by a 2 // for details. All rights reserved. Use of this source code is governed by a
3 // BSD-style license that can be found in the LICENSE file. 3 // BSD-style license that can be found in the LICENSE file.
4 4
5 class Tokenizer extends lang.TokenizerBase { 5 class Tokenizer extends CSSTokenizerBase {
6 TokenKind cssTokens; 6 TokenKind cssTokens;
7 7
8 bool _selectorParsing; 8 bool _selectorParsing;
9 9
10 Tokenizer(lang.SourceFile source, bool skipWhitespace, [int index = 0]) 10 Tokenizer(SourceFile source, bool skipWhitespace, [int index = 0])
11 : super(source, skipWhitespace, index), _selectorParsing = false { 11 : super(source, skipWhitespace, index), _selectorParsing = false {
12 cssTokens = new TokenKind(); 12 cssTokens = new TokenKind();
13 } 13 }
14 14
15 lang.Token next() { 15 int get startIndex() => _startIndex;
16
17 Token next() {
16 // keep track of our starting position 18 // keep track of our starting position
17 _startIndex = _index; 19 _startIndex = _index;
18 20
19 if (_interpStack != null && _interpStack.depth == 0) { 21 if (_interpStack != null && _interpStack.depth == 0) {
20 var istack = _interpStack; 22 var istack = _interpStack;
21 _interpStack = _interpStack.pop(); 23 _interpStack = _interpStack.pop();
22 24
23 /* TODO(terry): Enable for variable and string interpolation. 25 /* TODO(terry): Enable for variable and string interpolation.
24 * if (istack.isMultiline) { 26 * if (istack.isMultiline) {
25 * return finishMultilineStringBody(istack.quote); 27 * return finishMultilineStringBody(istack.quote);
(...skipping 14 matching lines...) Expand all
40 case cssTokens.tokens[TokenKind.RETURN]: 42 case cssTokens.tokens[TokenKind.RETURN]:
41 return finishWhitespace(); 43 return finishWhitespace();
42 case cssTokens.tokens[TokenKind.END_OF_FILE]: 44 case cssTokens.tokens[TokenKind.END_OF_FILE]:
43 return _finishToken(TokenKind.END_OF_FILE); 45 return _finishToken(TokenKind.END_OF_FILE);
44 case cssTokens.tokens[TokenKind.AT]: 46 case cssTokens.tokens[TokenKind.AT]:
45 return _finishToken(TokenKind.AT); 47 return _finishToken(TokenKind.AT);
46 case cssTokens.tokens[TokenKind.DOT]: 48 case cssTokens.tokens[TokenKind.DOT]:
47 int start = _startIndex; // Start where the dot started. 49 int start = _startIndex; // Start where the dot started.
48 if (maybeEatDigit()) { 50 if (maybeEatDigit()) {
49 // looks like a number dot followed by digit(s). 51 // looks like a number dot followed by digit(s).
50 lang.Token num = finishNumber(); 52 Token number = finishNumber();
51 if (num.kind == TokenKind.INTEGER) { 53 if (number.kind == TokenKind.INTEGER) {
 52 // It's a number but it's preceded by a dot, so make it a double. 54 // It's a number but it's preceded by a dot, so make it a double.
53 _startIndex = start; 55 _startIndex = start;
54 return _finishToken(TokenKind.DOUBLE); 56 return _finishToken(TokenKind.DOUBLE);
55 } else { 57 } else {
56 // Don't allow dot followed by a double (e.g, '..1'). 58 // Don't allow dot followed by a double (e.g, '..1').
57 return _errorToken(); 59 return _errorToken();
58 } 60 }
59 } else { 61 } else {
60 // It's really a dot. 62 // It's really a dot.
61 return _finishToken(TokenKind.DOT); 63 return _finishToken(TokenKind.DOT);
(...skipping 15 matching lines...) Expand all
77 case cssTokens.tokens[TokenKind.PLUS]: 79 case cssTokens.tokens[TokenKind.PLUS]:
78 if (maybeEatDigit()) { 80 if (maybeEatDigit()) {
79 return finishNumber(); 81 return finishNumber();
80 } else { 82 } else {
81 return _finishToken(TokenKind.PLUS); 83 return _finishToken(TokenKind.PLUS);
82 } 84 }
83 case cssTokens.tokens[TokenKind.MINUS]: 85 case cssTokens.tokens[TokenKind.MINUS]:
84 if (maybeEatDigit()) { 86 if (maybeEatDigit()) {
85 return finishNumber(); 87 return finishNumber();
86 } else if (TokenizerHelpers.isIdentifierStart(ch)) { 88 } else if (TokenizerHelpers.isIdentifierStart(ch)) {
87 return this.finishIdentifier(); 89 return this.finishIdentifier(ch);
88 } else { 90 } else {
89 return _finishToken(TokenKind.MINUS); 91 return _finishToken(TokenKind.MINUS);
90 } 92 }
91 case cssTokens.tokens[TokenKind.GREATER]: 93 case cssTokens.tokens[TokenKind.GREATER]:
92 return _finishToken(TokenKind.GREATER); 94 return _finishToken(TokenKind.GREATER);
93 case cssTokens.tokens[TokenKind.TILDE]: 95 case cssTokens.tokens[TokenKind.TILDE]:
94 if (_maybeEatChar(cssTokens.tokens[TokenKind.EQUALS])) { 96 if (_maybeEatChar(cssTokens.tokens[TokenKind.EQUALS])) {
95 return _finishToken(TokenKind.INCLUDES); // ~= 97 return _finishToken(TokenKind.INCLUDES); // ~=
96 } else { 98 } else {
97 return _finishToken(TokenKind.TILDE); 99 return _finishToken(TokenKind.TILDE);
(...skipping 46 matching lines...) Expand 10 before | Expand all | Expand 10 after
144 } else { 146 } else {
145 return _finishToken(TokenKind.CARET); 147 return _finishToken(TokenKind.CARET);
146 } 148 }
147 case cssTokens.tokens[TokenKind.DOLLAR]: 149 case cssTokens.tokens[TokenKind.DOLLAR]:
148 if (_maybeEatChar(cssTokens.tokens[TokenKind.EQUALS])) { 150 if (_maybeEatChar(cssTokens.tokens[TokenKind.EQUALS])) {
149 return _finishToken(TokenKind.SUFFIX_MATCH); // $= 151 return _finishToken(TokenKind.SUFFIX_MATCH); // $=
150 } else { 152 } else {
151 return _finishToken(TokenKind.DOLLAR); 153 return _finishToken(TokenKind.DOLLAR);
152 } 154 }
153 case cssTokens.tokens[TokenKind.BANG]: 155 case cssTokens.tokens[TokenKind.BANG]:
154 lang.Token tok = finishIdentifier(); 156 Token tok = finishIdentifier(ch);
155 return (tok == null) ? _finishToken(TokenKind.BANG) : tok; 157 return (tok == null) ? _finishToken(TokenKind.BANG) : tok;
156 default: 158 default:
157 if (TokenizerHelpers.isIdentifierStart(ch)) { 159 if (TokenizerHelpers.isIdentifierStart(ch)) {
158 return this.finishIdentifier(); 160 return this.finishIdentifier(ch);
159 } else if (isDigit(ch)) { 161 } else if (isDigit(ch)) {
160 return this.finishNumber(); 162 return this.finishNumber();
161 } else { 163 } else {
162 return _errorToken(); 164 return _errorToken();
163 } 165 }
164 } 166 }
165 } 167 }
166 168
167 // TODO(jmesserly): we need a way to emit human readable error messages from 169 // TODO(jmesserly): we need a way to emit human readable error messages from
168 // the tokenizer. 170 // the tokenizer.
169 lang.Token _errorToken() { 171 Token _errorToken([String message = null]) {
170 return _finishToken(TokenKind.ERROR); 172 return _finishToken(TokenKind.ERROR);
171 } 173 }
172 174
173 int getIdentifierKind() { 175 int getIdentifierKind() {
174 // Is the identifier a unit type? 176 // Is the identifier a unit type?
175 int tokId = TokenKind.matchUnits(_text, _startIndex, _index - _startIndex); 177 int tokId = TokenKind.matchUnits(_text, _startIndex, _index - _startIndex);
176 if (tokId == -1) { 178 if (tokId == -1) {
177 // No, is it a directive? 179 // No, is it a directive?
 178 tokId = TokenKind.matchDirectives(_text, _startIndex, _index - _startIndex); 180 tokId = TokenKind.matchDirectives(_text, _startIndex, _index - _startIndex);
179 } 181 }
180 if (tokId == -1) { 182 if (tokId == -1) {
181 tokId = (_text.substring(_startIndex, _index) == '!important') ? 183 tokId = (_text.substring(_startIndex, _index) == '!important') ?
182 TokenKind.IMPORTANT : -1; 184 TokenKind.IMPORTANT : -1;
183 } 185 }
184 186
185 return tokId >= 0 ? tokId : TokenKind.IDENTIFIER; 187 return tokId >= 0 ? tokId : TokenKind.IDENTIFIER;
186 } 188 }
187 189
188 // Need to override so CSS version of isIdentifierPart is used. 190 // Need to override so CSS version of isIdentifierPart is used.
189 lang.Token finishIdentifier() { 191 Token finishIdentifier(int ch) {
190 while (_index < _text.length) { 192 while (_index < _text.length) {
191 // if (!TokenizerHelpers.isIdentifierPart(_text.charCodeAt(_index++))) { 193 // if (!TokenizerHelpers.isIdentifierPart(_text.charCodeAt(_index++))) {
192 if (!TokenizerHelpers.isIdentifierPart(_text.charCodeAt(_index))) { 194 if (!TokenizerHelpers.isIdentifierPart(_text.charCodeAt(_index))) {
193 // _index--; 195 // _index--;
194 break; 196 break;
195 } else { 197 } else {
196 _index += 1; 198 _index += 1;
197 } 199 }
198 } 200 }
199 if (_interpStack != null && _interpStack.depth == -1) { 201 if (_interpStack != null && _interpStack.depth == -1) {
200 _interpStack.depth = 0; 202 _interpStack.depth = 0;
201 } 203 }
202 int kind = getIdentifierKind(); 204 int kind = getIdentifierKind();
203 if (kind == TokenKind.IDENTIFIER) { 205 if (kind == TokenKind.IDENTIFIER) {
204 return _finishToken(TokenKind.IDENTIFIER); 206 return _finishToken(TokenKind.IDENTIFIER);
205 } else { 207 } else {
206 return _finishToken(kind); 208 return _finishToken(kind);
207 } 209 }
208 } 210 }
209 211
210 lang.Token finishImportant() { 212 Token finishImportant() {
211 213
212 } 214 }
213 215
214 lang.Token finishNumber() { 216 Token finishNumber() {
215 eatDigits(); 217 eatDigits();
216 218
217 if (_peekChar() == 46/*.*/) { 219 if (_peekChar() == 46/*.*/) {
218 // Handle the case of 1.toString(). 220 // Handle the case of 1.toString().
219 _nextChar(); 221 _nextChar();
220 if (isDigit(_peekChar())) { 222 if (isDigit(_peekChar())) {
221 eatDigits(); 223 eatDigits();
222 return _finishToken(TokenKind.DOUBLE); 224 return _finishToken(TokenKind.DOUBLE);
223 } else { 225 } else {
224 _index -= 1; 226 _index -= 1;
(...skipping 22 matching lines...) Expand all
247 } 249 }
248 250
249 bool maybeEatHexDigit() { 251 bool maybeEatHexDigit() {
250 if (_index < _text.length && isHexDigit(_text.charCodeAt(_index))) { 252 if (_index < _text.length && isHexDigit(_text.charCodeAt(_index))) {
251 _index += 1; 253 _index += 1;
252 return true; 254 return true;
253 } 255 }
254 return false; 256 return false;
255 } 257 }
256 258
257 lang.Token finishMultiLineComment() { 259 Token finishMultiLineComment() {
258 while (true) { 260 while (true) {
259 int ch = _nextChar(); 261 int ch = _nextChar();
260 if (ch == 0) { 262 if (ch == 0) {
261 return _finishToken(TokenKind.INCOMPLETE_COMMENT); 263 return _finishToken(TokenKind.INCOMPLETE_COMMENT);
262 } else if (ch == 42/*'*'*/) { 264 } else if (ch == 42/*'*'*/) {
263 if (_maybeEatChar(47/*'/'*/)) { 265 if (_maybeEatChar(47/*'/'*/)) {
264 if (_skipWhitespace) { 266 if (_skipWhitespace) {
265 return next(); 267 return next();
266 } else { 268 } else {
267 return _finishToken(TokenKind.COMMENT); 269 return _finishToken(TokenKind.COMMENT);
(...skipping 11 matching lines...) Expand all
279 } 281 }
280 } 282 }
281 } 283 }
282 } 284 }
283 return _errorToken(); 285 return _errorToken();
284 } 286 }
285 287
286 } 288 }
287 289
288 /** Static helper methods. */ 290 /** Static helper methods. */
291 /** Static helper methods. */
289 class TokenizerHelpers { 292 class TokenizerHelpers {
290 static bool isIdentifierStart(int c) => 293
291 lang.TokenizerHelpers.isIdentifierStart(c) || c == 95 /*_*/ || 294 static bool isIdentifierStart(int c) {
292 c == 45; /*-*/ 295 return ((c >= 97/*a*/ && c <= 122/*z*/) || (c >= 65/*A*/ && c <= 90/*Z*/) ||
296 c == 95/*_*/ || c == 45 /*-*/);
297 }
293 298
294 static bool isDigit(int c) => lang.TokenizerHelpers.isDigit(c); 299 static bool isDigit(int c) {
300 return (c >= 48/*0*/ && c <= 57/*9*/);
301 }
295 302
296 static bool isHexDigit(int c) => lang.TokenizerHelpers.isHexDigit(c); 303 static bool isHexDigit(int c) {
304 return (isDigit(c) || (c >= 97/*a*/ && c <= 102/*f*/) || (c >= 65/*A*/ && c <= 70/*F*/));
305 }
297 306
298 static bool isWhitespace(int c) => lang.TokenizerHelpers.isWhitespace(c); 307 static bool isWhitespace(int c) {
 308 return (c == 32/*' '*/ || c == 9/*'\t'*/ || c == 10/*'\n'*/ || c == 13/*'\r'*/);
309 }
299 310
300 static bool isIdentifierPart(int c) => 311 static bool isIdentifierPart(int c) {
301 lang.TokenizerHelpers.isIdentifierPart(c) || c == 45 /*-*/; 312 return (isIdentifierStart(c) || isDigit(c) || c == 45 /*-*/);
313 }
314
315 static bool isInterpIdentifierPart(int c) {
316 return (isIdentifierStart(c) || isDigit(c));
317 }
302 } 318 }
319
OLDNEW
« no previous file with comments | « utils/css/token.dart ('k') | utils/css/tokenizer_base.dart » ('j') | no next file with comments »

Powered by Google App Engine
This is Rietveld 408576698