Upstream version 9.38.198.0
[platform/framework/web/crosswalk.git] / src / third_party / closure_linter / closure_linter / tokenutil_test.py
1 #!/usr/bin/env python
2 #
3 # Copyright 2012 The Closure Linter Authors. All Rights Reserved.
4 # Licensed under the Apache License, Version 2.0 (the "License");
5 # you may not use this file except in compliance with the License.
6 # You may obtain a copy of the License at
7 #
8 #      http://www.apache.org/licenses/LICENSE-2.0
9 #
10 # Unless required by applicable law or agreed to in writing, software
11 # distributed under the License is distributed on an "AS-IS" BASIS,
12 # WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
13 # See the License for the specific language governing permissions and
14 # limitations under the License.
15
16 """Unit tests for the scopeutil module."""
17
18 # Allow non-Google copyright
19 # pylint: disable=g-bad-file-header
20
21 __author__ = ('nnaze@google.com (Nathan Naze)')
22
23 import unittest as googletest
24
25 from closure_linter import ecmametadatapass
26 from closure_linter import javascripttokens
27 from closure_linter import testutil
28 from closure_linter import tokenutil
29
30
class FakeToken(object):
  """Minimal token stand-in; tests assign attributes (next, string, ...) ad hoc."""
  pass
33
34
class TokenUtilTest(googletest.TestCase):
  """Unit tests for the tokenutil module."""

  # NOTE: uses assertEqual / assertIsNone rather than the deprecated
  # assertEquals alias (deprecated since Python 2.7, removed in 3.12).

  @staticmethod
  def _GetFirstTokenStartingWith(tokens, prefix):
    """Returns the first token whose string starts with the given prefix.

    Args:
      tokens: Iterable of tokens, each with a .string attribute.
      prefix: String prefix to match against token.string.

    Returns:
      The first matching token, or None if no token matches.
    """
    for token in tokens:
      if token.string.startswith(prefix):
        return token
    return None

  def testGetTokenRange(self):
    """Tests that GetTokenRange returns the inclusive chain from start to end."""
    a = FakeToken()
    b = FakeToken()
    c = FakeToken()
    d = FakeToken()
    e = FakeToken()

    a.next = b
    b.next = c
    c.next = d

    self.assertEqual([a, b, c, d], tokenutil.GetTokenRange(a, d))

    # This is an error as e does not come after a in the token chain.
    self.assertRaises(Exception, lambda: tokenutil.GetTokenRange(a, e))

  def testTokensToString(self):
    """Tests line-number-aware joining of token strings."""
    a = FakeToken()
    b = FakeToken()
    c = FakeToken()
    d = FakeToken()
    e = FakeToken()

    a.string = 'aaa'
    b.string = 'bbb'
    c.string = 'ccc'
    d.string = 'ddd'
    e.string = 'eee'

    a.line_number = 5
    b.line_number = 6
    c.line_number = 6
    d.line_number = 10
    e.line_number = 11

    # Gaps in line numbers become blank lines; same-line tokens concatenate.
    self.assertEqual(
        'aaa\nbbbccc\n\n\n\nddd\neee',
        tokenutil.TokensToString([a, b, c, d, e]))

    self.assertEqual(
        'ddd\neee\naaa\nbbbccc',
        tokenutil.TokensToString([d, e, a, b, c]),
        'Neighboring tokens not in line_number order should have a newline '
        'between them.')

  def testGetPreviousCodeToken(self):
    """Tests that GetPreviousCodeToken skips comments and whitespace."""
    tokens = testutil.TokenizeSource("""
start1. // comment
    /* another comment */
    end1
""")

    self.assertEqual(
        None,
        tokenutil.GetPreviousCodeToken(
            self._GetFirstTokenStartingWith(tokens, 'start1')))

    self.assertEqual(
        'start1.',
        tokenutil.GetPreviousCodeToken(
            self._GetFirstTokenStartingWith(tokens, 'end1')).string)

  def testGetNextCodeToken(self):
    """Tests that GetNextCodeToken skips comments and whitespace."""
    tokens = testutil.TokenizeSource("""
start1. // comment
    /* another comment */
    end1
""")

    self.assertEqual(
        'end1',
        tokenutil.GetNextCodeToken(
            self._GetFirstTokenStartingWith(tokens, 'start1')).string)

    self.assertEqual(
        None,
        tokenutil.GetNextCodeToken(
            self._GetFirstTokenStartingWith(tokens, 'end1')))

  def testGetIdentifierStart(self):
    """Tests locating the first token of a multi-token identifier."""
    tokens = testutil.TokenizeSource("""
start1 . // comment
    prototype. /* another comment */
    end1

['edge'][case].prototype.
    end2 = function() {}
""")

    self.assertEqual(
        'start1',
        tokenutil.GetIdentifierStart(
            self._GetFirstTokenStartingWith(tokens, 'end1')).string)

    self.assertEqual(
        'start1',
        tokenutil.GetIdentifierStart(
            self._GetFirstTokenStartingWith(tokens, 'start1')).string)

    # Identifiers rooted in a bracketed expression have no simple start token.
    self.assertEqual(
        None,
        tokenutil.GetIdentifierStart(
            self._GetFirstTokenStartingWith(tokens, 'end2')))

  def testInsertTokenBefore(self):
    """Tests InsertTokenBefore via the shared insertion assertions."""
    self.AssertInsertTokenAfterBefore(False)

  def testInsertTokenAfter(self):
    """Tests InsertTokenAfter via the shared insertion assertions."""
    self.AssertInsertTokenAfterBefore(True)

  def AssertInsertTokenAfterBefore(self, after):
    """Inserts a token between two existing tokens and checks the links.

    Args:
      after: If True, exercise InsertTokenAfter; otherwise InsertTokenBefore.
        Both paths must produce the identical resulting chain.
    """
    new_token = javascripttokens.JavaScriptToken(
        'a', javascripttokens.JavaScriptTokenType.IDENTIFIER, 1, 1)

    existing_token1 = javascripttokens.JavaScriptToken(
        'var', javascripttokens.JavaScriptTokenType.KEYWORD, 1, 1)
    existing_token1.start_index = 0
    existing_token1.metadata = ecmametadatapass.EcmaMetaData()

    existing_token2 = javascripttokens.JavaScriptToken(
        ' ', javascripttokens.JavaScriptTokenType.WHITESPACE, 1, 1)
    existing_token2.start_index = 3
    existing_token2.metadata = ecmametadatapass.EcmaMetaData()
    existing_token2.metadata.last_code = existing_token1

    existing_token1.next = existing_token2
    existing_token2.previous = existing_token1

    if after:
      tokenutil.InsertTokenAfter(new_token, existing_token1)
    else:
      tokenutil.InsertTokenBefore(new_token, existing_token2)

    # The doubly-linked chain must be token1 <-> new_token <-> token2.
    self.assertEqual(existing_token1, new_token.previous)
    self.assertEqual(existing_token2, new_token.next)

    self.assertEqual(new_token, existing_token1.next)
    self.assertEqual(new_token, existing_token2.previous)

    # last_code metadata must be rethreaded through the new token.
    self.assertEqual(existing_token1, new_token.metadata.last_code)
    self.assertEqual(new_token, existing_token2.metadata.last_code)

    # start_index of following tokens shifts by the inserted token's length.
    self.assertEqual(0, existing_token1.start_index)
    self.assertEqual(3, new_token.start_index)
    self.assertEqual(4, existing_token2.start_index)

  def testGetIdentifierForToken(self):
    """Tests reassembling dotted identifiers across lines and comments."""
    tokens = testutil.TokenizeSource("""
start1.abc.def.prototype.
  onContinuedLine

(start2.abc.def
  .hij.klm
  .nop)

start3.abc.def
   .hij = function() {};

// An absurd multi-liner.
start4.abc.def.
   hij.
   klm = function() {};

start5 . aaa . bbb . ccc
  shouldntBePartOfThePreviousSymbol

start6.abc.def ghi.shouldntBePartOfThePreviousSymbol

var start7 = 42;

function start8() {

}

start9.abc. // why is there a comment here?
  def /* another comment */
  shouldntBePart

start10.abc // why is there a comment here?
  .def /* another comment */
  shouldntBePart

start11.abc. middle1.shouldNotBeIdentifier
""")

    self.assertEqual(
        'start1.abc.def.prototype.onContinuedLine',
        tokenutil.GetIdentifierForToken(
            self._GetFirstTokenStartingWith(tokens, 'start1')))

    self.assertEqual(
        'start2.abc.def.hij.klm.nop',
        tokenutil.GetIdentifierForToken(
            self._GetFirstTokenStartingWith(tokens, 'start2')))

    self.assertEqual(
        'start3.abc.def.hij',
        tokenutil.GetIdentifierForToken(
            self._GetFirstTokenStartingWith(tokens, 'start3')))

    self.assertEqual(
        'start4.abc.def.hij.klm',
        tokenutil.GetIdentifierForToken(
            self._GetFirstTokenStartingWith(tokens, 'start4')))

    self.assertEqual(
        'start5.aaa.bbb.ccc',
        tokenutil.GetIdentifierForToken(
            self._GetFirstTokenStartingWith(tokens, 'start5')))

    self.assertEqual(
        'start6.abc.def',
        tokenutil.GetIdentifierForToken(
            self._GetFirstTokenStartingWith(tokens, 'start6')))

    self.assertEqual(
        'start7',
        tokenutil.GetIdentifierForToken(
            self._GetFirstTokenStartingWith(tokens, 'start7')))

    self.assertEqual(
        'start8',
        tokenutil.GetIdentifierForToken(
            self._GetFirstTokenStartingWith(tokens, 'start8')))

    self.assertEqual(
        'start9.abc.def',
        tokenutil.GetIdentifierForToken(
            self._GetFirstTokenStartingWith(tokens, 'start9')))

    self.assertEqual(
        'start10.abc.def',
        tokenutil.GetIdentifierForToken(
            self._GetFirstTokenStartingWith(tokens, 'start10')))

    # A token in the middle of an identifier is not an identifier start.
    self.assertIsNone(
        tokenutil.GetIdentifierForToken(
            self._GetFirstTokenStartingWith(tokens, 'middle1')))
286
287
# Run all test cases when this module is executed directly.
if __name__ == '__main__':
  googletest.main()