OLD | NEW |
1 # Copyright (c) 2013 The Chromium Authors. All rights reserved. | 1 # Copyright (c) 2013 The Chromium Authors. All rights reserved. |
2 # Use of this source code is governed by a BSD-style license that can be | 2 # Use of this source code is governed by a BSD-style license that can be |
3 # found in the LICENSE file. | 3 # found in the LICENSE file. |
4 | 4 |
5 """ Lexer for PPAPI IDL | 5 """ Lexer for PPAPI IDL |
6 | 6 |
7 The lexer uses the PLY library to build a tokenizer which understands both | 7 The lexer uses the PLY library to build a tokenizer which understands both |
8 WebIDL and Pepper tokens. | 8 WebIDL and Pepper tokens. |
9 | 9 |
10 WebIDL, and WebIDL regular expressions can be found at: | 10 WebIDL, and WebIDL regular expressions can be found at: |
11 http://www.w3.org/TR/2012/CR-WebIDL-20120419/ | 11 http://www.w3.org/TR/2012/CR-WebIDL-20120419/ |
12 PLY can be found at: | 12 PLY can be found at: |
13 http://www.dabeaz.com/ply/ | 13 http://www.dabeaz.com/ply/ |
14 """ | 14 """ |
15 | 15 |
16 from idl_lexer import IDLLexer | 16 from idl_lexer import IDLLexer |
17 import optparse | 17 import optparse |
18 import os.path | 18 import os.path |
19 import sys | 19 import sys |
20 | 20 |
21 | 21 |
22 # | 22 # |
23 # IDL PPAPI Lexer | 23 # IDL PPAPI Lexer |
24 # | 24 # |
25 class IDLPPAPILexer(IDLLexer): | 25 class IDLPPAPILexer(IDLLexer): |
26 # 'tokens' is a value required by lex which specifies the complete list | 26 # Special multi-character operators |
27 # of valid token types. To WebIDL we add the following token types | 27 def t_LSHIFT(self, t): |
28 IDLLexer.tokens += [ | 28 r'<<' |
29 # Operators | 29 return t |
30 'LSHIFT', | |
31 'RSHIFT', | |
32 | 30 |
33 # Pepper Extras | 31 def t_RSHIFT(self, t): |
34 'INLINE', | 32 r'>>' |
35 ] | 33 return t |
36 | 34 |
37 # 'keywords' is a map of string to token type. All tokens matching | |
38 # KEYWORD_OR_SYMBOL are matched against keywords dictionary, to determine | |
39 # if the token is actually a keyword. Add the new keywords to the | |
40 # dictionary and set of tokens | |
41 ppapi_keywords = ['LABEL', 'NAMESPACE', 'STRUCT'] | |
42 for keyword in ppapi_keywords: | |
43 IDLLexer.keywords[ keyword.lower() ] = keyword | |
44 IDLLexer.tokens.append(keyword) | |
45 | |
46 # Special multi-character operators | |
47 t_LSHIFT = r'<<' | |
48 t_RSHIFT = r'>>' | |
49 | |
50 # Return a "preprocessor" inline block | 35 # Return a "preprocessor" inline block |
51 def t_INLINE(self, t): | 36 def t_INLINE(self, t): |
52 r'\#inline (.|\n)*?\#endinl.*' | 37 r'\#inline (.|\n)*?\#endinl.*' |
53 self.AddLines(t.value.count('\n')) | 38 self.AddLines(t.value.count('\n')) |
54 return t | 39 return t |
 | 40 |
| 41 def __init__(self): |
| 42 IDLLexer.__init__(self) |
| 43 self._AddTokens(['LSHIFT', 'RSHIFT', 'INLINE']) |
| 44 self._AddKeywords(['label', 'namespace', 'struct']) |
| 45 |
| 46 |
| 47 # If run by itself, attempt to build the lexer |
| 48 if __name__ == '__main__': |
| 49 lexer = IDLPPAPILexer() |
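
The net effect of the change above is that PPAPI-specific token and keyword registration moves out of class-definition time (where the OLD code mutated `IDLLexer.tokens` and `IDLLexer.keywords` as an import side effect) and into the instance constructor via `_AddTokens()` and `_AddKeywords()`. As a rough illustration, the sketch below shows how the rebuilt lexer might be driven over a small Pepper IDL fragment. It assumes the base `IDLLexer` (Chromium's `tools/idl_parser/idl_lexer.py`) exposes the `Tokenize()` and `GetTokens()` helpers it provides there, and that this module is importable as `idl_ppapi_lexer`; the sample input is invented for illustration, not taken from the change itself.

```python
# Minimal sketch, not part of the change above. Assumes idl_ppapi_lexer.py and
# the base idl_lexer.py (with Tokenize()/GetTokens(), as in Chromium's
# tools/idl_parser) are on sys.path alongside PLY.
from idl_ppapi_lexer import IDLPPAPILexer

# A tiny, made-up Pepper IDL fragment exercising the PPAPI additions:
# the 'label' keyword, the '<<' operator (LSHIFT), and an #inline block.
SAMPLE = """\
label Chrome {
  M14 = 1 << 4
};

#inline c
/* raw text swallowed by the INLINE token */
#endinl
"""

def main():
  lexer = IDLPPAPILexer()
  lexer.Tokenize(SAMPLE, 'sample.idl')   # hand the source text to PLY
  for tok in lexer.GetTokens():          # drain the resulting token stream
    print('%-10s %r' % (tok.type, tok.value))

if __name__ == '__main__':
  main()
```

With input like this, the token stream should contain the PPAPI-specific LABEL, LSHIFT, and INLINE types alongside the base lexer's identifier and integer tokens; everything between `#inline` and `#endinl` arrives as a single INLINE token thanks to the non-greedy `(.|\n)*?` match in `t_INLINE`.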