3 # Copyright 2007 The Closure Linter Authors. All Rights Reserved.
5 # Licensed under the Apache License, Version 2.0 (the "License");
6 # you may not use this file except in compliance with the License.
7 # You may obtain a copy of the License at
9 # http://www.apache.org/licenses/LICENSE-2.0
11 # Unless required by applicable law or agreed to in writing, software
12 # distributed under the License is distributed on an "AS-IS" BASIS,
13 # WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
14 # See the License for the specific language governing permissions and
15 # limitations under the License.
17 """Light weight EcmaScript state tracker that reads tokens and tracks state."""
19 __author__ = ('robbyw@google.com (Robert Walker)',
20 'ajp@google.com (Andy Perelson)')
import re

from closure_linter import javascripttokenizer
from closure_linter import javascripttokens
from closure_linter import tokenutil
# Shorthand: the JavaScript token-type enum is referenced throughout this
# module as `Type`.
Type = javascripttokens.JavaScriptTokenType
class DocFlag(object):
  """Generic doc flag object.

  Attributes:
    flag_type: param, return, define, type, etc.
    flag_token: The flag token.
    type_start_token: The first token specifying the flag type,
        including braces.
    type_end_token: The last token specifying the flag type,
        including braces.
    type: The type spec string, or None.
    name_token: The token specifying the flag name.
    name: The flag name string, or None.
    description_start_token: The first token in the description.
    description_end_token: The end token in the description.
    description: The description string, or None.
  """

  # Please keep these lists alphabetized.

  # The list of standard jsdoc tags.
  # NOTE(review): entries reconstructed around elided source lines — verify
  # against the upstream Closure Linter tag list.
  STANDARD_DOC = frozenset([
      'author',
      'bug',
      'classTemplate',
      'consistentIdGenerator',
      'const',
      'constructor',
      'define',
      'deprecated',
      'dict',
      'enum',
      'export',
      'expose',
      'extends',
      'externs',
      'fileoverview',
      'idGenerator',
      'implements',
      'implicitCast',
      'interface',
      'lends',
      'license',
      'ngInject',  # This annotation is specific to AngularJS.
      'noalias',
      'nocompile',
      'nosideeffects',
      'override',
      'owner',
      'package',
      'param',
      'preserve',
      'private',
      'protected',
      'public',
      'return',
      'see',
      'stableIdGenerator',
      'struct',
      'supported',
      'template',
      'this',
      'type',
      'typedef',
      'unrestricted',
      ])

  ANNOTATION = frozenset(['preserveTry', 'suppress'])

  LEGAL_DOC = STANDARD_DOC | ANNOTATION

  # Includes all Closure Compiler @suppress types.
  # Not all of these annotations are interpreted by Closure Linter.
  #
  # Specific cases:
  # - accessControls is supported by the compiler at the expression
  #   and method level to suppress warnings about private/protected
  #   access (method level applies to all references in the method).
  #   The linter mimics the compiler behavior.
  # NOTE(review): entries reconstructed around elided source lines — verify
  # against the Closure Compiler @suppress documentation.
  SUPPRESS_TYPES = frozenset([
      'accessControls',
      'ambiguousFunctionDecl',
      'checkRegExp',
      'checkStructDictInheritance',
      'checkTypes',
      'checkVars',
      'const',
      'constantProperty',
      'deprecated',
      'duplicate',
      'es5Strict',
      'externsValidation',
      'extraProvide',
      'extraRequire',
      'fileoverviewTags',
      'globalThis',
      'internetExplorerChecks',
      'invalidCasts',
      'missingProperties',
      'missingProvide',
      'missingRequire',
      'missingReturn',
      'nonStandardJsDocs',
      'strictModuleDepCheck',
      'suspiciousCode',
      'tweakValidation',
      'typeInvalidation',
      'undefinedNames',
      'undefinedVars',
      'underscore',
      'unknownDefines',
      'unusedPrivateMembers',
      'uselessCode',
      'visibility',
      'with',
      ])

  # Flags whose content after the type/name is free-form description text.
  HAS_DESCRIPTION = frozenset([
      'define', 'deprecated', 'desc', 'fileoverview', 'license', 'param',
      'preserve', 'return', 'supported'])

  # Flags that may carry a {type} annotation.
  HAS_TYPE = frozenset([
      'define', 'enum', 'extends', 'implements', 'param', 'return', 'type',
      'suppress', 'const', 'package', 'private', 'protected', 'public'])

  # Subset of HAS_TYPE for which the {type} may be omitted entirely.
  CAN_OMIT_TYPE = frozenset(['enum', 'const', 'package', 'private',
                             'protected', 'public'])

  # Flags whose only payload is a type (no name, no description).
  TYPE_ONLY = frozenset(['enum', 'extends', 'implements', 'suppress', 'type',
                         'const', 'package', 'private', 'protected', 'public'])

  # Flags that are followed by a name (e.g. @param {string} name desc).
  HAS_NAME = frozenset(['param'])

  # A comment line containing nothing but whitespace and an optional '*'.
  EMPTY_COMMENT_LINE = re.compile(r'^\s*\*?\s*$')
  EMPTY_STRING = re.compile(r'^\s*$')

  def __init__(self, flag_token):
    """Creates the DocFlag object and attaches it to the given start token.

    Args:
      flag_token: The starting token of the flag.
    """
    self.flag_token = flag_token
    # '@param' -> 'param'
    self.flag_type = flag_token.string.strip().lstrip('@')

    # Extract type, if applicable.
    self.type = None
    self.type_start_token = None
    self.type_end_token = None
    if self.flag_type in self.HAS_TYPE:
      # Look for a '{' before the flag ends.
      brace = tokenutil.SearchUntil(flag_token, [Type.DOC_START_BRACE],
                                    Type.FLAG_ENDING_TYPES)
      if brace:
        end_token, contents = _GetMatchingEndBraceAndContents(brace)
        self.type = contents
        self.type_start_token = brace
        self.type_end_token = end_token
      elif (self.flag_type in self.TYPE_ONLY and
            flag_token.next.type not in Type.FLAG_ENDING_TYPES and
            flag_token.line_number == flag_token.next.line_number):
        # b/10407058. If the flag is expected to be followed by a type then
        # search for type in same line only. If no token after flag in same
        # line then conclude that no type is specified.
        self.type_start_token = flag_token.next
        self.type_end_token, self.type = _GetEndTokenAndContents(
            self.type_start_token)
        if self.type is not None:
          self.type = self.type.strip()

    # Extract name, if applicable.
    self.name_token = None
    self.name = None
    if self.flag_type in self.HAS_NAME:
      # Handle bad case, name could be immediately after flag token.
      self.name_token = _GetNextPartialIdentifierToken(flag_token)

      # Handle good case, if found token is after type start, look for
      # a identifier (substring to cover cases like [cnt] b/4197272) after
      # type end, since types contain identifiers.
      if (self.type and self.name_token and
          tokenutil.Compare(self.name_token, self.type_start_token) > 0):
        self.name_token = _GetNextPartialIdentifierToken(self.type_end_token)

      if self.name_token:
        self.name = self.name_token.string

    # Extract description, if applicable.
    self.description_start_token = None
    self.description_end_token = None
    self.description = None
    if self.flag_type in self.HAS_DESCRIPTION:
      # The description starts after whichever of type/name comes last.
      search_from = flag_token
      if self.name_token and self.type_end_token:
        if tokenutil.Compare(self.type_end_token, self.name_token) > 0:
          search_from = self.type_end_token
        else:
          search_from = self.name_token
      elif self.name_token:
        search_from = self.name_token
      elif self.type:
        search_from = self.type_end_token

      desc_token = tokenutil.Search(
          search_from, Type.FLAG_DESCRIPTION_TYPES | Type.FLAG_ENDING_TYPES)
      if desc_token.type in Type.FLAG_DESCRIPTION_TYPES:
        self.description_start_token = desc_token
        self.description_end_token, self.description = (
            _GetEndTokenAndContents(desc_token))
class DocComment(object):
  """JavaScript doc comment object.

  Attributes:
    ordered_params: Ordered list of parameters documented.
    start_token: The token that starts the doc comment.
    end_token: The token that ends the doc comment.
    suppressions: Map of suppression type to the token that added it.
  """

  def __init__(self, start_token):
    """Create the doc comment object.

    Args:
      start_token: The first token in the doc comment.
    """
    self.__flags = []
    self.start_token = start_token
    self.end_token = None
    self.suppressions = {}
    self.invalidated = False

  @property
  def ordered_params(self):
    """Gives the list of parameter names as a list of strings."""
    params = []
    for flag in self.__flags:
      if flag.flag_type == 'param' and flag.name:
        params.append(flag.name)
    return params

  def Invalidate(self):
    """Indicate that the JSDoc is well-formed but we had problems parsing it.

    This is a short-circuiting mechanism so that we don't emit false
    positives about well-formed doc comments just because we don't support
    hot new syntaxes.
    """
    self.invalidated = True

  def IsInvalidated(self):
    """Test whether Invalidate() has been called."""
    return self.invalidated

  def AddSuppression(self, token):
    """Add a new error suppression flag.

    Args:
      token: The suppression flag token.
    """
    # TODO(user): Error if no braces
    brace = tokenutil.SearchUntil(token, [Type.DOC_START_BRACE],
                                  [Type.DOC_FLAG])
    if brace:
      end_token, contents = _GetMatchingEndBraceAndContents(brace)
      # Multiple suppressions may be or'ed together: @suppress {a|b}.
      for suppression in contents.split('|'):
        self.suppressions[suppression] = token

  def SuppressionOnly(self):
    """Returns whether this comment contains only suppression flags."""
    if not self.__flags:
      return False

    for flag in self.__flags:
      if flag.flag_type != 'suppress':
        return False

    return True

  def AddFlag(self, flag):
    """Add a new document flag.

    Args:
      flag: DocFlag object.
    """
    self.__flags.append(flag)

  def InheritsDocumentation(self):
    """Test if the jsdoc implies documentation inheritance.

    Returns:
      True if documentation may be pulled off the superclass.
    """
    return self.HasFlag('inheritDoc') or self.HasFlag('override')

  def HasFlag(self, flag_type):
    """Test if the given flag has been set.

    Args:
      flag_type: The type of the flag to check.

    Returns:
      True if the flag is set.
    """
    for flag in self.__flags:
      if flag.flag_type == flag_type:
        return True

    return False

  def GetFlag(self, flag_type):
    """Gets the last flag of the given type.

    Args:
      flag_type: The type of the flag to get.

    Returns:
      The last instance of the given flag type in this doc comment.
    """
    for flag in reversed(self.__flags):
      if flag.flag_type == flag_type:
        return flag

  def GetDocFlags(self):
    """Return the doc flags for this comment."""
    return list(self.__flags)

  def _YieldDescriptionTokens(self):
    """Yields the tokens that make up the comment's free-form description."""
    for token in self.start_token:
      # Stop at the comment end, the first doc flag, or any non-comment token.
      if (token is self.end_token or
          token.type is javascripttokens.JavaScriptTokenType.DOC_FLAG or
          token.type not in javascripttokens.JavaScriptTokenType.COMMENT_TYPES):
        return

      # Skip structural comment tokens; yield only real content.
      if token.type not in [
          javascripttokens.JavaScriptTokenType.START_DOC_COMMENT,
          javascripttokens.JavaScriptTokenType.END_DOC_COMMENT,
          javascripttokens.JavaScriptTokenType.DOC_PREFIX]:
        yield token

  @property
  def description(self):
    """The free-form description text of this comment, as a string."""
    return tokenutil.TokensToString(
        self._YieldDescriptionTokens())

  def GetTargetIdentifier(self):
    """Returns the identifier (as a string) that this is a comment for.

    Note that this uses method uses GetIdentifierForToken to get the full
    identifier, even if broken up by whitespace, newlines, or comments,
    and thus could be longer than GetTargetToken().string.

    Returns:
      The identifier for the token this comment is for.
    """
    token = self.GetTargetToken()
    if token:
      return tokenutil.GetIdentifierForToken(token)

  def GetTargetToken(self):
    """Get this comment's target token.

    Returns:
      The token that is the target of this comment, or None if there isn't one.
    """
    # File overviews describe the file, not a token.
    if self.HasFlag('fileoverview'):
      return

    # NOTE(review): skip/target type sets reconstructed around elided source
    # lines — confirm against upstream.
    skip_types = frozenset([
        Type.WHITESPACE,
        Type.BLANK_LINE,
        Type.START_PAREN])

    target_types = frozenset([
        Type.FUNCTION_NAME,
        Type.IDENTIFIER,
        Type.SIMPLE_LVALUE])

    token = self.end_token.next
    while token:
      if token.type in target_types:
        return token

      # Handles the case of a comment on "var foo = ...'
      if token.IsKeyword('var'):
        next_code_token = tokenutil.CustomSearch(
            token,
            lambda t: t.type not in Type.NON_CODE_TYPES)

        if (next_code_token and
            next_code_token.IsType(Type.SIMPLE_LVALUE)):
          return next_code_token

        return

      # Handles the case of a comment on "function foo () {}"
      if token.type is Type.FUNCTION_DECLARATION:
        next_code_token = tokenutil.CustomSearch(
            token,
            lambda t: t.type not in Type.NON_CODE_TYPES)

        if next_code_token.IsType(Type.FUNCTION_NAME):
          return next_code_token

        return

      # Skip types will end the search.
      if token.type not in skip_types:
        return

      token = token.next

  def CompareParameters(self, params):
    """Computes the edit distance and list from the function params to the docs.

    Uses the Levenshtein edit distance algorithm, with code modified from
    http://en.wikibooks.org/wiki/Algorithm_implementation/Strings/Levenshtein_distance#Python

    Args:
      params: The parameter list for the function declaration.

    Returns:
      The edit distance, the edit list.
    """
    source_len, target_len = len(self.ordered_params), len(params)
    edit_lists = [[]]
    distance = [[]]
    for i in range(target_len + 1):
      edit_lists[0].append(['I'] * i)
      distance[0].append(i)

    for j in range(1, source_len + 1):
      edit_lists.append([['D'] * j])
      distance.append([j])

    for i in range(source_len):
      for j in range(target_len):
        cost = 1
        if self.ordered_params[i] == params[j]:
          cost = 0

        deletion = distance[i][j + 1] + 1
        insertion = distance[i + 1][j] + 1
        substitution = distance[i][j] + cost

        edit_list = None
        best = None
        if deletion <= insertion and deletion <= substitution:
          # Deletion is best.
          best = deletion
          edit_list = list(edit_lists[i][j + 1])
          edit_list.append('D')

        elif insertion <= substitution:
          # Insertion is best.
          best = insertion
          edit_list = list(edit_lists[i + 1][j])
          edit_list.append('I')
          # BUGFIX: the original also appended edit_list to edit_lists[i + 1]
          # here, in addition to the common append below. That double-counted
          # the insertion column, shifting every later edit_lists[i + 1][j]
          # lookup and corrupting the returned edit list. Each cell must be
          # appended exactly once, which now happens below for all branches.

        else:
          # Substitution is best.
          best = substitution
          edit_list = list(edit_lists[i][j])
          if cost:
            edit_list.append('S')
          else:
            edit_list.append('=')

        edit_lists[i + 1].append(edit_list)
        distance[i + 1].append(best)

    return distance[source_len][target_len], edit_lists[source_len][target_len]

  def __str__(self):
    """Returns a string representation of this object.

    Returns:
      A string representation of this object.
    """
    return '<DocComment: %s, %s>' % (
        str(self.ordered_params), str(self.__flags))
517 # Helper methods used by DocFlag and DocComment to parse out flag information.
def _GetMatchingEndBraceAndContents(start_brace):
  """Returns the matching end brace and contents between the two braces.

  If any FLAG_ENDING_TYPE token is encountered before a matching end brace,
  then that token is used as the matching ending token. Contents will have all
  comment prefixes stripped out of them, and all comment prefixes in between
  the start and end tokens will be split out into separate DOC_PREFIX tokens.

  Args:
    start_brace: The DOC_START_BRACE token immediately before desired contents.

  Returns:
    The matching ending token (DOC_END_BRACE or FLAG_ENDING_TYPE) and a string
    of the contents between the matching tokens, minus any comment prefixes.
  """
  open_count = 1
  close_count = 0
  contents = []

  # We don't consider the start brace part of the type string.
  token = start_brace.next
  while open_count != close_count:
    # Track nested brace depth.
    if token.type == Type.DOC_START_BRACE:
      open_count += 1
    elif token.type == Type.DOC_END_BRACE:
      close_count += 1

    # Comment prefixes are not part of the contents.
    if token.type != Type.DOC_PREFIX:
      contents.append(token.string)

    # A flag-ending token terminates the scan even without a matching brace.
    if token.type in Type.FLAG_ENDING_TYPES:
      break
    token = token.next

  # Don't include the end token (end brace, end doc comment, etc.) in type.
  token = token.previous
  contents = contents[:-1]

  return token, ''.join(contents)
def _GetNextPartialIdentifierToken(start_token):
  """Returns the first token having identifier as substring after a token.

  Searches each token after the start to see if it contains an identifier.
  If found, token is returned. If no identifier is found returns None.
  Search is abandoned when a FLAG_ENDING_TYPE token is found.

  Args:
    start_token: The token to start searching after.

  Returns:
    The token found containing identifier, None otherwise.
  """
  token = start_token.next

  while token and token.type not in Type.FLAG_ENDING_TYPES:
    # An identifier may appear as a substring of a comment token
    # (e.g. "[cnt]"), so search rather than match.
    match = javascripttokenizer.JavaScriptTokenizer.IDENTIFIER.search(
        token.string)
    if match is not None and token.type == Type.COMMENT:
      return token

    token = token.next

  return None
def _GetEndTokenAndContents(start_token):
  """Returns last content token and all contents before FLAG_ENDING_TYPE token.

  Comment prefixes are split into DOC_PREFIX tokens and stripped from the
  returned contents.

  Args:
    start_token: The token immediately before the first content token.

  Returns:
    The last content token and a string of all contents including start and
    end tokens, with comment prefixes stripped.
  """
  iterator = start_token
  last_line = iterator.line_number
  last_token = None
  contents = ''
  doc_depth = 0

  while not iterator.type in Type.FLAG_ENDING_TYPES or doc_depth > 0:
    if (iterator.IsFirstInLine() and
        DocFlag.EMPTY_COMMENT_LINE.match(iterator.line)):
      # If we have a blank comment line, consider that an implicit
      # ending of the description. This handles a case like:
      #
      # * @return {boolean} True
      # *
      # * Note: This is a sentence.
      #
      # The note is not part of the @return description, but there was
      # no definitive ending token. Rather there was a line containing
      # only a doc comment prefix or whitespace.
      break

    # Don't prematurely match against a @flag if inside a doc flag.
    # TODO: need to think about what is the correct behavior for unterminated
    # inline doc flags.
    if (iterator.type == Type.DOC_START_BRACE and
        iterator.next.type == Type.DOC_INLINE_FLAG):
      doc_depth += 1
    elif (iterator.type == Type.DOC_END_BRACE and
          doc_depth > 0):
      doc_depth -= 1

    if iterator.type in Type.FLAG_DESCRIPTION_TYPES:
      contents += iterator.string
      last_token = iterator

    iterator = iterator.next
    if iterator.line_number != last_line:
      contents += '\n'
      last_line = iterator.line_number

  end_token = last_token
  if DocFlag.EMPTY_STRING.match(contents):
    contents = None
  else:
    # Strip trailing newline.
    contents = contents[:-1]

  return end_token, contents
class Function(object):
  """Data about a JavaScript function.

  Attributes:
    block_depth: Block depth the function began at.
    doc: The DocComment associated with the function.
    has_return: If the function has a return value.
    has_this: If the function references the 'this' object.
    has_throw: If the function throws an exception.
    is_assigned: If the function is part of an assignment.
    is_constructor: If the function is a constructor.
    is_interface: If the function is an interface method.
    name: The name of the function, whether given in the function keyword or
        as the lvalue the function is assigned to.
    start_token: First token of the function (the function' keyword token).
    end_token: Last token of the function (the closing '}' token).
    parameters: List of parameter names.
  """

  def __init__(self, block_depth, is_assigned, doc, name):
    self.block_depth = block_depth
    self.is_assigned = is_assigned
    self.name = name
    self.doc = doc
    # `doc` may be None; `doc and ...` then yields the falsy doc itself,
    # which still reads as "not a constructor / not an interface".
    self.is_constructor = doc and doc.HasFlag('constructor')
    self.is_interface = doc and doc.HasFlag('interface')
    # Filled in as tokens are processed by the state tracker.
    self.has_return = False
    self.has_throw = False
    self.has_this = False
    self.start_token = None
    self.end_token = None
    self.parameters = None
class StateTracker(object):
  """EcmaScript state tracker.

  Tracks block depth, function names, etc. within an EcmaScript token stream.
  """

  # Block type markers stored in self._block_types.
  # NOTE(review): values reconstructed from elided source lines — verify.
  OBJECT_LITERAL = 'o'
  CODE = 'c'

  def __init__(self, doc_flag=DocFlag):
    """Initializes a JavaScript token stream state tracker.

    Args:
      doc_flag: An optional custom DocFlag used for validating
          documentation flags.
    """
    self._doc_flag = doc_flag
    self.Reset()

  def Reset(self):
    """Resets the state tracker to prepare for processing a new page."""
    self._block_depth = 0
    self._is_block_close = False
    self._paren_depth = 0
    self._function_stack = []
    self._functions_by_name = {}
    self._last_comment = None
    self._doc_comment = None
    self._cumulative_params = None
    self._block_types = []
    self._last_non_space_token = None
    self._last_line = None
    self._first_token = None
    self._documented_identifiers = set()
    self._variables_in_scope = []

  def InFunction(self):
    """Returns true if the current token is within a function.

    Returns:
      True if the current token is within a function.
    """
    return bool(self._function_stack)

  def InConstructor(self):
    """Returns true if the current token is within a constructor.

    Returns:
      True if the current token is within a constructor.
    """
    return self.InFunction() and self._function_stack[-1].is_constructor

  def InInterfaceMethod(self):
    """Returns true if the current token is within an interface method.

    Returns:
      True if the current token is within an interface method.
    """
    if self.InFunction():
      if self._function_stack[-1].is_interface:
        return True
      else:
        name = self._function_stack[-1].name
        prototype_index = name.find('.prototype.')
        if prototype_index != -1:
          class_function_name = name[0:prototype_index]
          if (class_function_name in self._functions_by_name and
              self._functions_by_name[class_function_name].is_interface):
            return True

    return False

  def InTopLevelFunction(self):
    """Returns true if the current token is within a top level function.

    Returns:
      True if the current token is within a top level function.
    """
    return len(self._function_stack) == 1 and self.InTopLevel()

  def InAssignedFunction(self):
    """Returns true if the current token is within a function variable.

    Returns:
      True if if the current token is within a function variable
    """
    return self.InFunction() and self._function_stack[-1].is_assigned

  def IsFunctionOpen(self):
    """Returns true if the current token is a function block open.

    Returns:
      True if the current token is a function block open.
    """
    return (self._function_stack and
            self._function_stack[-1].block_depth == self._block_depth - 1)

  def IsFunctionClose(self):
    """Returns true if the current token is a function block close.

    Returns:
      True if the current token is a function block close.
    """
    return (self._function_stack and
            self._function_stack[-1].block_depth == self._block_depth)

  def InBlock(self):
    """Returns true if the current token is within a block.

    Returns:
      True if the current token is within a block.
    """
    return bool(self._block_depth)

  def IsBlockClose(self):
    """Returns true if the current token is a block close.

    Returns:
      True if the current token is a block close.
    """
    return self._is_block_close

  def InObjectLiteral(self):
    """Returns true if the current token is within an object literal.

    Returns:
      True if the current token is within an object literal.
    """
    return self._block_depth and self._block_types[-1] == self.OBJECT_LITERAL

  def InObjectLiteralDescendant(self):
    """Returns true if the current token has an object literal ancestor.

    Returns:
      True if the current token has an object literal ancestor.
    """
    return self.OBJECT_LITERAL in self._block_types

  def InParentheses(self):
    """Returns true if the current token is within parentheses.

    Returns:
      True if the current token is within parentheses.
    """
    return bool(self._paren_depth)

  def ParenthesesDepth(self):
    """Returns the number of parens surrounding the token.

    Returns:
      The number of parenthesis surrounding the token.
    """
    return self._paren_depth

  def BlockDepth(self):
    """Returns the number of blocks in which the token is nested.

    Returns:
      The number of blocks in which the token is nested.
    """
    return self._block_depth

  def FunctionDepth(self):
    """Returns the number of functions in which the token is nested.

    Returns:
      The number of functions in which the token is nested.
    """
    return len(self._function_stack)

  def InTopLevel(self):
    """Whether we are at the top level in the class.

    This function call is language specific. In some languages like
    JavaScript, a function is top level if it is not inside any parenthesis.
    In languages such as ActionScript, a function is top level if it is
    directly within a class.
    """
    raise TypeError('Abstract method InTopLevel not implemented')

  def GetBlockType(self, token):
    """Determine the block type given a START_BLOCK token.

    Code blocks come after parameters, keywords like else, and closing parens.

    Args:
      token: The current token. Can be assumed to be type START_BLOCK.

    Returns:
      Code block type for current token.
    """
    raise TypeError('Abstract method GetBlockType not implemented')

  def GetParams(self):
    """Returns the accumulated input params as an array.

    In some EcmasSript languages, input params are specified like
    (param:Type, param2:Type2, ...)
    in other they are specified just as
    (param, param2)
    We handle both formats for specifying parameters here and leave
    it to the compilers for each language to detect compile errors.
    This allows more code to be reused between lint checkers for various
    EcmaScript languages.

    Returns:
      The accumulated input params as an array.
    """
    params = []
    if self._cumulative_params:
      params = re.compile(r'\s+').sub('', self._cumulative_params).split(',')
      # Strip out the type from parameters of the form name:Type.
      # BUGFIX: was map(lambda ...), which returns a lazy iterator on
      # Python 3 while callers index and extend the result; a list
      # comprehension is identical on Python 2 and correct on Python 3.
      params = [param.split(':')[0] for param in params]

    return params

  def GetLastComment(self):
    """Return the last plain comment that could be used as documentation.

    Returns:
      The last plain comment that could be used as documentation.
    """
    return self._last_comment

  def GetDocComment(self):
    """Return the most recent applicable documentation comment.

    Returns:
      The last applicable documentation comment.
    """
    return self._doc_comment

  def HasDocComment(self, identifier):
    """Returns whether the identifier has been documented yet.

    Args:
      identifier: The identifier.

    Returns:
      Whether the identifier has been documented yet.
    """
    return identifier in self._documented_identifiers

  def InDocComment(self):
    """Returns whether the current token is in a doc comment.

    Returns:
      Whether the current token is in a doc comment.
    """
    return self._doc_comment and self._doc_comment.end_token is None

  def GetDocFlag(self):
    """Returns the current documentation flags.

    Returns:
      The current documentation flags.
    """
    return self._doc_flag

  def IsTypeToken(self, t):
    """Returns whether the given token is part of a doc flag's {type} span."""
    if self.InDocComment() and t.type not in (Type.START_DOC_COMMENT,
        Type.DOC_FLAG, Type.DOC_INLINE_FLAG, Type.DOC_PREFIX):
      f = tokenutil.SearchUntil(t, [Type.DOC_FLAG], [Type.START_DOC_COMMENT],
                                None, True)
      if (f and f.attached_object.type_start_token is not None and
          f.attached_object.type_end_token is not None):
        return (tokenutil.Compare(t, f.attached_object.type_start_token) > 0 and
                tokenutil.Compare(t, f.attached_object.type_end_token) < 0)
    return False

  def GetFunction(self):
    """Return the function the current code block is a part of.

    Returns:
      The current Function object.
    """
    if self._function_stack:
      return self._function_stack[-1]

  def GetBlockDepth(self):
    """Return the block depth.

    Returns:
      The current block depth.
    """
    return self._block_depth

  def GetLastNonSpaceToken(self):
    """Return the last non whitespace token."""
    return self._last_non_space_token

  def GetLastLine(self):
    """Return the last line."""
    return self._last_line

  def GetFirstToken(self):
    """Return the very first token in the file."""
    return self._first_token

  def IsVariableInScope(self, token_string):
    """Checks if string is variable in current scope.

    For given string it checks whether the string is a defined variable
    (including function param) in current state.

    E.g. if variables defined (variables in current scope) is docs
    then docs, docs.length etc will be considered as variable in current
    scope. This will help in avoding extra goog.require for variables.

    Args:
      token_string: String to check if its is a variable in current scope.

    Returns:
      true if given string is a variable in current scope.
    """
    for variable in self._variables_in_scope:
      if (token_string == variable
          or token_string.startswith(variable + '.')):
        return True

    return False

  def HandleToken(self, token, last_non_space_token):
    """Handles the given token and updates state.

    Args:
      token: The token to handle.
      last_non_space_token: The last non-space token encountered before this
          one; may be None at the start of the stream.
    """
    self._is_block_close = False

    if not self._first_token:
      self._first_token = token

    # Track block depth.
    # Renamed from `type` to avoid shadowing the builtin.
    token_type = token.type
    if token_type == Type.START_BLOCK:
      self._block_depth += 1

      # Subclasses need to handle block start very differently because
      # whether a block is a CODE or OBJECT_LITERAL block varies significantly
      # by language.
      self._block_types.append(self.GetBlockType(token))

      # When entering a function body, record its parameters.
      if self.InFunction():
        function = self._function_stack[-1]
        if self._block_depth == function.block_depth + 1:
          function.parameters = self.GetParams()

    # Track block depth.
    elif token_type == Type.END_BLOCK:
      self._is_block_close = not self.InObjectLiteral()
      self._block_depth -= 1
      self._block_types.pop()

    # Track parentheses depth.
    elif token_type == Type.START_PAREN:
      self._paren_depth += 1

    # Track parentheses depth.
    elif token_type == Type.END_PAREN:
      self._paren_depth -= 1

    elif token_type == Type.COMMENT:
      self._last_comment = token.string

    elif token_type == Type.START_DOC_COMMENT:
      self._last_comment = None
      self._doc_comment = DocComment(token)

    elif token_type == Type.END_DOC_COMMENT:
      self._doc_comment.end_token = token

    elif token_type in (Type.DOC_FLAG, Type.DOC_INLINE_FLAG):
      flag = self._doc_flag(token)
      token.attached_object = flag
      self._doc_comment.AddFlag(flag)

      if flag.flag_type == 'suppress':
        self._doc_comment.AddSuppression(token)

    elif token_type == Type.FUNCTION_DECLARATION:
      last_code = tokenutil.SearchExcept(token, Type.NON_CODE_TYPES, None,
                                         True)
      doc = None
      # Only functions outside of parens are eligible for documentation.
      if not self._paren_depth:
        doc = self._doc_comment

      name = ''
      is_assigned = last_code and (last_code.IsOperator('=') or
          last_code.IsOperator('||') or last_code.IsOperator('&&') or
          (last_code.IsOperator(':') and not self.InObjectLiteral()))

      if is_assigned:
        # TODO(robbyw): This breaks for x[2] = ...
        # Must use loop to find full function name in the case of line-wrapped
        # declarations (bug 1220601) like:
        # my.function.foo.
        #     bar = function() ...
        identifier = tokenutil.Search(last_code, Type.SIMPLE_LVALUE, None,
                                      True)
        while identifier and identifier.type in (
            Type.IDENTIFIER, Type.SIMPLE_LVALUE):
          name = identifier.string + name
          # Traverse behind us, skipping whitespace and comments.
          while True:
            identifier = identifier.previous
            if not identifier or not identifier.type in Type.NON_CODE_TYPES:
              break

      else:
        next_token = tokenutil.SearchExcept(token, Type.NON_CODE_TYPES)
        while next_token and next_token.IsType(Type.FUNCTION_NAME):
          name += next_token.string
          next_token = tokenutil.Search(next_token, Type.FUNCTION_NAME, 2)

      function = Function(self._block_depth, is_assigned, doc, name)
      function.start_token = token

      self._function_stack.append(function)
      self._functions_by_name[name] = function

      # Add a delimiter in stack for scope variables to define start of
      # function. This helps in popping variables of this function when
      # function declaration ends.
      self._variables_in_scope.append('')

    elif token_type == Type.START_PARAMETERS:
      self._cumulative_params = ''

    elif token_type == Type.PARAMETERS:
      self._cumulative_params += token.string
      self._variables_in_scope.extend(self.GetParams())

    elif token_type == Type.KEYWORD and token.string == 'return':
      next_token = tokenutil.SearchExcept(token, Type.NON_CODE_TYPES)
      if not next_token.IsType(Type.SEMICOLON):
        function = self.GetFunction()
        if function:
          function.has_return = True

    elif token_type == Type.KEYWORD and token.string == 'throw':
      function = self.GetFunction()
      if function:
        function.has_throw = True

    elif token_type == Type.KEYWORD and token.string == 'var':
      function = self.GetFunction()
      next_token = tokenutil.Search(token, [Type.IDENTIFIER,
                                            Type.SIMPLE_LVALUE])
      if next_token:
        if next_token.type == Type.SIMPLE_LVALUE:
          self._variables_in_scope.append(next_token.values['identifier'])
        else:
          self._variables_in_scope.append(next_token.string)

    elif token_type == Type.SIMPLE_LVALUE:
      identifier = token.values['identifier']
      jsdoc = self.GetDocComment()
      if jsdoc:
        self._documented_identifiers.add(identifier)

      self._HandleIdentifier(identifier, True)

    elif token_type == Type.IDENTIFIER:
      self._HandleIdentifier(token.string, False)

      # Detect documented non-assignments.
      next_token = tokenutil.SearchExcept(token, Type.NON_CODE_TYPES)
      if next_token and next_token.IsType(Type.SEMICOLON):
        if (self._last_non_space_token and
            self._last_non_space_token.IsType(Type.END_DOC_COMMENT)):
          self._documented_identifiers.add(token.string)

  def _HandleIdentifier(self, identifier, is_assignment):
    """Process the given identifier.

    Currently checks if it references 'this' and annotates the function
    accordingly.

    Args:
      identifier: The identifer to process.
      is_assignment: Whether the identifer is being written to.
    """
    if identifier == 'this' or identifier.startswith('this.'):
      function = self.GetFunction()
      if function:
        function.has_this = True

  def HandleAfterToken(self, token):
    """Handle updating state after a token has been checked.

    This function should be used for destructive state changes such as
    deleting a tracked object.

    Args:
      token: The token to handle.
    """
    # Renamed from `type` to avoid shadowing the builtin.
    token_type = token.type
    if token_type == Type.SEMICOLON or token_type == Type.END_PAREN or (
        token_type == Type.END_BRACKET and
        self._last_non_space_token.type not in (
            Type.SINGLE_QUOTE_STRING_END, Type.DOUBLE_QUOTE_STRING_END)):
      # We end on any numeric array index, but keep going for string based
      # array indices so that we pick up manually exported identifiers.
      self._doc_comment = None
      self._last_comment = None

    elif token_type == Type.END_BLOCK:
      self._doc_comment = None
      self._last_comment = None

      if self.InFunction() and self.IsFunctionClose():
        # TODO(robbyw): Detect the function's name for better errors.
        function = self._function_stack.pop()
        function.end_token = token

        # Pop all variables till delimiter ('') those were defined in the
        # function being closed so make them out of scope.
        while self._variables_in_scope and self._variables_in_scope[-1]:
          self._variables_in_scope.pop()

        # Pop the delimiter itself.
        if self._variables_in_scope:
          self._variables_in_scope.pop()

    elif token_type == Type.END_PARAMETERS and self._doc_comment:
      self._doc_comment = None
      self._last_comment = None

    if not token.IsAnyType(Type.WHITESPACE, Type.BLANK_LINE):
      self._last_non_space_token = token

    self._last_line = token.line