3 # Copyright 2008 The Closure Linter Authors. All Rights Reserved.
5 # Licensed under the Apache License, Version 2.0 (the "License");
6 # you may not use this file except in compliance with the License.
7 # You may obtain a copy of the License at
9 # http://www.apache.org/licenses/LICENSE-2.0
11 # Unless required by applicable law or agreed to in writing, software
12 # distributed under the License is distributed on an "AS-IS" BASIS,
13 # WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
14 # See the License for the specific language governing permissions and
15 # limitations under the License.
17 """Classes to represent tokens and positions within them."""
19 __author__ = ('robbyw@google.com (Robert Walker)',
20 'ajp@google.com (Andy Perelson)')
class TokenType(object):
  """Token types common to all languages."""

  # Generic structural tokens shared by every tokenizer built on this module.
  WHITESPACE = 'whitespace'
  BLANK_LINE = 'blank line'
31 """Token class for intelligent text splitting.
33 The token class represents a string of characters and an identifying type.
36 type: The type of token.
37 string: The characters the token comprises.
38 length: The length of the token.
39 line: The text of the line the token is found in.
40 line_number: The number of the line the token is found in.
41 values: Dictionary of values returned from the tokens regex match.
42 previous: The token before this one.
43 next: The token after this one.
44 start_index: The character index in the line where this token starts.
45 attached_object: Object containing more information about this token.
46 metadata: Object containing metadata about this token. Must be added by
47 a separate metadata pass.
def __init__(self, string, token_type, line, line_number, values=None):
  """Creates a new Token object.

  Args:
    string: The string of input the token contains.
    token_type: The type of token.
    line: The text of the line this token is in.
    line_number: The line number of the token.
    values: A dict of named values within the token.  For instance, a
      function declaration may have a value called 'name' which captures the
      name of the function.
  """
  self.type = token_type
  self.string = string
  self.length = len(string)
  self.line = line
  self.line_number = line_number
  self.values = values

  # These parts can only be computed when the file is fully tokenized,
  # so they start out unset and are filled in by a later pass.
  self.previous = None
  self.next = None
  self.start_index = None

  # This part is set in statetracker.py.
  # TODO(robbyw): Wrap this in to metadata.
  self.attached_object = None

  # This part is set in *metadatapass.py.
  self.metadata = None
def IsFirstInLine(self):
  """Tests if this token is the first token in its line.

  Returns:
    Whether the token is the first token in its line.
  """
  previous = self.previous
  if not previous:
    # No predecessor at all: trivially first.
    return True
  return previous.line_number != self.line_number
def IsLastInLine(self):
  """Tests if this token is the last token in its line.

  Returns:
    Whether the token is the last token in its line.
  """
  following = self.next
  if not following:
    # Nothing after this token: trivially last.
    return True
  return following.line_number != self.line_number
def IsType(self, token_type):
  """Tests if this token is of the given type.

  Args:
    token_type: The type to test for.

  Returns:
    True if the type of this token matches the type passed in.
  """
  return token_type == self.type
def IsAnyType(self, *token_types):
  """Tests if this token is any of the given types.

  Args:
    token_types: The types to check.  Also accepts a single sequence
      (list or tuple) of types.

  Returns:
    True if the type of this token is any of the types passed in.
  """
  if not token_types:
    # No types supplied: vacuously false rather than IndexError below.
    return False
  first = token_types[0]
  # Callers may pass one container of types instead of varargs.  Testing
  # for container types here (rather than Python 2's `basestring`) keeps
  # the method working under both Python 2 and Python 3.
  if isinstance(first, (list, tuple, set, frozenset)):
    return self.type in first
  return self.type in token_types
def __repr__(self):
  """Returns a debugging representation of this token.

  The format string expects five values; the fifth is the metadata
  attribute (the only documented attribute not already present) —
  NOTE(review): confirm against upstream, the visible statement was
  truncated after the fourth value.
  """
  return '<Token: %s, "%s", %r, %d, %r>' % (self.type, self.string,
                                            self.values, self.line_number,
                                            self.metadata)
128 """Returns a token iterator."""
134 def __reversed__(self):
135 """Returns a reverse-direction token iterator."""