# Copyright 2014 The Chromium Authors. All rights reserved.
# Use of this source code is governed by a BSD-style license that can be
# found in the LICENSE file.

import imp
import os.path
import sys
import unittest

def _GetDirAbove(dirname):
  """Returns the directory "above" this file containing |dirname| (which must
  also be "above" this file)."""
  path = os.path.abspath(__file__)
  while True:
    path, tail = os.path.split(path)
    assert tail
    if tail == dirname:
      return path

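# Example (for orientation): this file lives at
# .../mojo/public/tools/bindings/pylib/mojom_tests/parse/lexer_unittest.py,
# so _GetDirAbove("mojo") returns the directory containing "mojo", and
# _GetDirAbove("pylib") returns .../tools/bindings.
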
try:
  imp.find_module("ply")
except ImportError:
  sys.path.append(os.path.join(_GetDirAbove("mojo"), "third_party"))
from ply import lex

try:
  imp.find_module("mojom")
except ImportError:
  sys.path.append(os.path.join(_GetDirAbove("pylib"), "pylib"))
import mojom.parse.lexer


# This (monkey-patching LexToken to make comparison value-based) is evil, but
# we'll do it anyway. (I'm pretty sure ply's lexer never cares about comparing
# for object identity.)
def _LexTokenEq(self, other):
  return self.type == other.type and self.value == other.value and \
         self.lineno == other.lineno and self.lexpos == other.lexpos
setattr(lex.LexToken, '__eq__', _LexTokenEq)


def _MakeLexToken(token_type, value, lineno=1, lexpos=0):
  """Makes a LexToken with the given parameters. (Note that lineno is 1-based,
  but lexpos is 0-based.)"""
  rv = lex.LexToken()
  rv.type, rv.value, rv.lineno, rv.lexpos = token_type, value, lineno, lexpos
  return rv

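# With the value-based __eq__ patch above, two independently constructed
# tokens compare equal exactly when all four fields match (illustrative):
#   _MakeLexToken("NAME", "abcd") == _MakeLexToken("NAME", "abcd")            # True
#   _MakeLexToken("NAME", "abcd") == _MakeLexToken("NAME", "abcd", lineno=2)  # False
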

def _MakeLexTokenForKeyword(keyword, **kwargs):
  """Makes a LexToken for the given keyword."""
  return _MakeLexToken(keyword.upper(), keyword.lower(), **kwargs)

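# E.g., _MakeLexTokenForKeyword("module") == _MakeLexToken("MODULE", "module"):
# keyword token types are the upper-cased keyword, and their values the
# lower-cased keyword.
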

class LexerTest(unittest.TestCase):
  """Tests |mojom.parse.lexer.Lexer|."""

  def __init__(self, *args, **kwargs):
    unittest.TestCase.__init__(self, *args, **kwargs)
    # Clone all lexer instances from this one, since making a lexer is slow.
    self._zygote_lexer = lex.lex(mojom.parse.lexer.Lexer("my_file.mojom"))
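    # (lex.lex() above compiles the token rules; clone(), used in
    # _TokensForInput below, makes a cheap copy that reuses the compiled
    # rules instead of re-running lex.lex() for every test.)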

  def testValidKeywords(self):
    """Tests valid keywords."""
    self.assertEquals(self._SingleTokenForInput("handle"),
                      _MakeLexTokenForKeyword("handle"))
    self.assertEquals(self._SingleTokenForInput("import"),
                      _MakeLexTokenForKeyword("import"))
    self.assertEquals(self._SingleTokenForInput("module"),
                      _MakeLexTokenForKeyword("module"))
    self.assertEquals(self._SingleTokenForInput("struct"),
                      _MakeLexTokenForKeyword("struct"))
    self.assertEquals(self._SingleTokenForInput("interface"),
                      _MakeLexTokenForKeyword("interface"))
    self.assertEquals(self._SingleTokenForInput("enum"),
                      _MakeLexTokenForKeyword("enum"))
    self.assertEquals(self._SingleTokenForInput("const"),
                      _MakeLexTokenForKeyword("const"))
    self.assertEquals(self._SingleTokenForInput("true"),
                      _MakeLexTokenForKeyword("true"))
    self.assertEquals(self._SingleTokenForInput("false"),
                      _MakeLexTokenForKeyword("false"))
    self.assertEquals(self._SingleTokenForInput("default"),
                      _MakeLexTokenForKeyword("default"))
    self.assertEquals(self._SingleTokenForInput("array"),
                      _MakeLexTokenForKeyword("array"))
    self.assertEquals(self._SingleTokenForInput("map"),
                      _MakeLexTokenForKeyword("map"))

  def testValidIdentifiers(self):
    """Tests identifiers."""
    self.assertEquals(self._SingleTokenForInput("abcd"),
                      _MakeLexToken("NAME", "abcd"))
    self.assertEquals(self._SingleTokenForInput("AbC_d012_"),
                      _MakeLexToken("NAME", "AbC_d012_"))
    self.assertEquals(self._SingleTokenForInput("_0123"),
                      _MakeLexToken("NAME", "_0123"))

  def testInvalidIdentifiers(self):
    with self.assertRaisesRegexp(
        mojom.parse.lexer.LexError,
        r"^my_file\.mojom:1: Error: Illegal character '\$'$"):
      self._TokensForInput("$abc")
    with self.assertRaisesRegexp(
        mojom.parse.lexer.LexError,
        r"^my_file\.mojom:1: Error: Illegal character '\$'$"):
      self._TokensForInput("a$bc")

  def testDecimalIntegerConstants(self):
    self.assertEquals(self._SingleTokenForInput("0"),
                      _MakeLexToken("INT_CONST_DEC", "0"))
    self.assertEquals(self._SingleTokenForInput("1"),
                      _MakeLexToken("INT_CONST_DEC", "1"))
    self.assertEquals(self._SingleTokenForInput("123"),
                      _MakeLexToken("INT_CONST_DEC", "123"))
    self.assertEquals(self._SingleTokenForInput("10"),
                      _MakeLexToken("INT_CONST_DEC", "10"))

  def testValidTokens(self):
    """Tests valid tokens (which aren't tested elsewhere)."""
    # Keywords tested in |testValidKeywords|.
    # NAME tested in |testValidIdentifiers|.
    self.assertEquals(self._SingleTokenForInput("@123"),
                      _MakeLexToken("ORDINAL", "@123"))
    self.assertEquals(self._SingleTokenForInput("456"),
                      _MakeLexToken("INT_CONST_DEC", "456"))
    self.assertEquals(self._SingleTokenForInput("0x01aB2eF3"),
                      _MakeLexToken("INT_CONST_HEX", "0x01aB2eF3"))
    self.assertEquals(self._SingleTokenForInput("123.456"),
                      _MakeLexToken("FLOAT_CONST", "123.456"))
    self.assertEquals(self._SingleTokenForInput("\"hello\""),
                      _MakeLexToken("STRING_LITERAL", "\"hello\""))
    self.assertEquals(self._SingleTokenForInput("+"),
                      _MakeLexToken("PLUS", "+"))
    self.assertEquals(self._SingleTokenForInput("-"),
                      _MakeLexToken("MINUS", "-"))
    self.assertEquals(self._SingleTokenForInput("&"),
                      _MakeLexToken("AMP", "&"))
    self.assertEquals(self._SingleTokenForInput("?"),
                      _MakeLexToken("QSTN", "?"))
    self.assertEquals(self._SingleTokenForInput("="),
                      _MakeLexToken("EQUALS", "="))
    self.assertEquals(self._SingleTokenForInput("=>"),
                      _MakeLexToken("RESPONSE", "=>"))
    self.assertEquals(self._SingleTokenForInput("("),
                      _MakeLexToken("LPAREN", "("))
    self.assertEquals(self._SingleTokenForInput(")"),
                      _MakeLexToken("RPAREN", ")"))
    self.assertEquals(self._SingleTokenForInput("["),
                      _MakeLexToken("LBRACKET", "["))
    self.assertEquals(self._SingleTokenForInput("]"),
                      _MakeLexToken("RBRACKET", "]"))
    self.assertEquals(self._SingleTokenForInput("{"),
                      _MakeLexToken("LBRACE", "{"))
    self.assertEquals(self._SingleTokenForInput("}"),
                      _MakeLexToken("RBRACE", "}"))
    self.assertEquals(self._SingleTokenForInput("<"),
                      _MakeLexToken("LANGLE", "<"))
    self.assertEquals(self._SingleTokenForInput(">"),
                      _MakeLexToken("RANGLE", ">"))
    self.assertEquals(self._SingleTokenForInput(";"),
                      _MakeLexToken("SEMI", ";"))
    self.assertEquals(self._SingleTokenForInput(","),
                      _MakeLexToken("COMMA", ","))
    self.assertEquals(self._SingleTokenForInput("."),
                      _MakeLexToken("DOT", "."))

  def _TokensForInput(self, input_string):
    """Gets a list of tokens for the given input string."""
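    # Hedged example (assumes the mojom lexer treats whitespace as a token
    # separator): _TokensForInput("enum MyEnum") should yield an ENUM keyword
    # token followed by a NAME token for "MyEnum".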
    lexer = self._zygote_lexer.clone()
    lexer.input(input_string)
    rv = []
    while True:
      tok = lexer.token()
      if not tok:
        return rv
      rv.append(tok)

  def _SingleTokenForInput(self, input_string):
    """Gets the single token for the given input string. (Raises an exception
    if the input string does not result in exactly one token.)"""
    toks = self._TokensForInput(input_string)
    assert len(toks) == 1
    return toks[0]


if __name__ == "__main__":
  unittest.main()