[Common] Add javascript code minifier.
author: Kamil Lysik <k.lysik@samsung.com>
Tue, 31 Mar 2015 10:31:34 +0000 (12:31 +0200)
committer: Pawel Andruszkiewicz <p.andruszkie@samsung.com>
Wed, 24 Jun 2015 07:02:49 +0000 (16:02 +0900)
With an external JS minifier, we can reduce the JavaScript code size
at build time.

Slimit: http://slimit.readthedocs.org/en/latest/
PLY: http://www.dabeaz.com/ply/

Signed-off-by: Kamil Lysik <k.lysik@samsung.com>
Change-Id: Iecfedcf1db6771bd12bb2febb0dffa04d87e0d4d

33 files changed:
tools/js_minimize.py [new file with mode: 0755]
tools/mergejs.py
tools/slimit/CHANGES [new file with mode: 0644]
tools/slimit/CREDIT [new file with mode: 0644]
tools/slimit/LICENSE [new file with mode: 0644]
tools/slimit/__init__.py [new file with mode: 0644]
tools/slimit/ast.py [new file with mode: 0644]
tools/slimit/lexer.py [new file with mode: 0644]
tools/slimit/lextab.py [new file with mode: 0644]
tools/slimit/mangler.py [new file with mode: 0644]
tools/slimit/minifier.py [new file with mode: 0644]
tools/slimit/parser.py [new file with mode: 0644]
tools/slimit/ply/__init__.py [new file with mode: 0644]
tools/slimit/ply/cpp.py [new file with mode: 0644]
tools/slimit/ply/ctokens.py [new file with mode: 0644]
tools/slimit/ply/lex.py [new file with mode: 0644]
tools/slimit/ply/yacc.py [new file with mode: 0644]
tools/slimit/scope.py [new file with mode: 0644]
tools/slimit/tests/__init__.py [new file with mode: 0644]
tools/slimit/tests/test_cmd.py [new file with mode: 0644]
tools/slimit/tests/test_ecmavisitor.py [new file with mode: 0644]
tools/slimit/tests/test_lexer.py [new file with mode: 0644]
tools/slimit/tests/test_mangler.py [new file with mode: 0644]
tools/slimit/tests/test_minifier.py [new file with mode: 0644]
tools/slimit/tests/test_nodevisitor.py [new file with mode: 0644]
tools/slimit/tests/test_parser.py [new file with mode: 0644]
tools/slimit/unicode_chars.py [new file with mode: 0644]
tools/slimit/visitors/__init__.py [new file with mode: 0644]
tools/slimit/visitors/ecmavisitor.py [new file with mode: 0644]
tools/slimit/visitors/minvisitor.py [new file with mode: 0644]
tools/slimit/visitors/nodevisitor.py [new file with mode: 0644]
tools/slimit/visitors/scopevisitor.py [new file with mode: 0644]
tools/slimit/yacctab.py [new file with mode: 0644]

diff --git a/tools/js_minimize.py b/tools/js_minimize.py
new file mode 100755 (executable)
index 0000000..f78126f
--- /dev/null
@@ -0,0 +1,7 @@
#!/usr/bin/python
"""Thin wrapper around the bundled slimit JavaScript minifier."""

import slimit


def minimize(code):
    """Return *code* minified by slimit.

    :param code: JavaScript source text as a string.
    :return: the minified JavaScript string.
    """
    return slimit.minify(code)
index 8b311023ff5e6b83306a3a3abb23d89d86b348c9..36b776a74ba252fde09426292ed472158fcbef42 100755 (executable)
@@ -7,12 +7,14 @@ import sys
 import getopt
 import glob
 import os
+import js_minimize
 
 class Utils:
     reqfiles = []
     searchfile = '*_api.js'
     startwith = "//= require('"
     endwith = "')"
+    code = ""
 
     @classmethod
     def get_require(self, s):
@@ -37,7 +39,7 @@ class Utils:
     def print_lines(self, filename):
         with open(filename, 'r') as file:
             for line in file:
-                print line
+                self.code += line
 
     @classmethod
     def merge_js_files(self, path):
@@ -56,6 +58,10 @@ class Utils:
                 if fname in filenames:
                     self.print_lines(fname)
 
+    @classmethod
+    def minize_code(self):
+        self.code = js_minimize.minimize(self.code)
+
     @classmethod
     def main(self, argv):
         path = 'js'
@@ -82,6 +88,8 @@ class Utils:
               elif opt in ("-p", "--path"):
                   path = arg
         self.merge_js_files(path)
+        self.minize_code()
+        print self.code
 
# Standard script entry-point guard. ``Utils.__module__`` happens to equal
# ``__name__`` here (a class's __module__ is the defining module's name),
# but the idiomatic spelling is clearer and what other tools expect.
if __name__ == "__main__":
    Utils.main(sys.argv[1:])
diff --git a/tools/slimit/CHANGES b/tools/slimit/CHANGES
new file mode 100644 (file)
index 0000000..e776080
--- /dev/null
@@ -0,0 +1,115 @@
+Change History
+==============
+0.8.1 (2013-03-26)
+------------------
+- Bug fix: https://github.com/rspivak/slimit/pull/45
+  Fix syntax error in the output of for statement with some form of expressions
+
+0.8.0 (2013-03-23)
+------------------
+- Python 3.x support
+- Bug fix: https://github.com/rspivak/slimit/issues/42
+  slimit removes parentheses from ternary expression, causes syntax error in jQuery
+- Bug fix: https://github.com/rspivak/slimit/issues/37
+  simple identifier in FOR init
+- Bug fix: https://github.com/rspivak/slimit/issues/36
+  using $ for mangled function names conflicts with jQuery
+
+0.7.4 (2012-06-5)
+------------------
+- Bug fix: https://github.com/rspivak/slimit/issues/34
+  'class' is reserved keyword now
+
+0.7.3 (2012-05-21)
+------------------
+- Bug fix (unary op in FOR init): https://github.com/rspivak/slimit/pull/33
+
+0.7.2 (2012-05-17)
+------------------
+- Added support for get/set properties:
+  https://github.com/rspivak/slimit/issues/32
+
+0.7.1 (2012-05-10)
+------------------
+- Function call support in FOR init section:
+  https://github.com/rspivak/slimit/pull/31
+
+0.7 (2012-04-16)
+----------------
+- Multiline string support: https://github.com/rspivak/slimit/issues/24
+
+0.6.2 (2012-04-07)
+------------------
+- Bug fix: https://github.com/rspivak/slimit/issues/29
+- Bug fix: https://github.com/rspivak/slimit/issues/28
+
+0.6.1 (2012-03-15)
+------------------
+- Added command-line option *-t/--mangle-toplevel* to turn on
+  global scope name mangling. As of this version it's off by
+  default: https://github.com/rspivak/slimit/issues/27
+- Removed dependency on a 'distribute' package
+- Bug fix: https://github.com/rspivak/slimit/issues/26
+- Bug fix: https://github.com/rspivak/slimit/issues/25
+
+0.6 (2012-02-04)
+----------------
+- Added optimization: foo["bar"] ==> foo.bar
+- Added base class for custom AST node visitors
+- Documentation updates
+- Bug fix: https://github.com/rspivak/slimit/issues/22
+- Bug fix: https://github.com/rspivak/slimit/issues/21
+
+0.5.5 (2011-10-05)
+------------------
+- Bugfix: https://github.com/rspivak/slimit/issues/7
+
+0.5.4 (2011-10-01)
+------------------
+- Bugfix: https://github.com/rspivak/slimit/issues/6
+  Division with "this" fails
+
+0.5.3 (2011-06-29)
+------------------
+- Bugfix: https://github.com/rspivak/slimit/issues/5
+
+0.5.2 (2011-06-14)
+------------------
+- Bugfix: https://github.com/rspivak/slimit/issues/4
+- Bugfix: https://github.com/rspivak/slimit/issues/3
+
+0.5.1 (2011-06-06)
+------------------
+- Bugfix: https://github.com/rspivak/slimit/issues/2
+
+0.5 (2011-06-06)
+----------------
+- Added name mangling
+
+0.4 (2011-05-12)
+----------------
+- Minify more by removing block braces { }
+- More tests
+
+0.3.2 (2011-05-09)
+------------------
+- More hacks to use pre-generated lex and yacc tables when called from
+  the command line
+
+0.3.1 (2011-05-09)
+------------------
+- Use pre-generated lex and yacc tables when called from the command line
+
+0.3 (2011-05-09)
+----------------
+- Added minifier
+
+0.2 (2011-05-07)
+----------------
+- Added a JavaScript parser
+- Added pretty printer
+- Added node visitor
+
+0.1 (2011-05-02)
+----------------
+- Initial public version. It contains only a JavaScript lexer
diff --git a/tools/slimit/CREDIT b/tools/slimit/CREDIT
new file mode 100644 (file)
index 0000000..fc7557c
--- /dev/null
@@ -0,0 +1,27 @@
+Patches
+-------
+
+- Waldemar Kornewald
+- Maurizio Sambati https://github.com/duilio
+- Aron Griffis https://github.com/agriffis
+- lelit https://github.com/lelit
+- Dan McDougall https://github.com/liftoff
+- harig https://github.com/harig
+- Mike Taylor https://github.com/miketaylr
+
+
+Bug reports
+-----------
+
+- Rui Pereira
+- Dima Kozlov
+- BadKnees https://github.com/BadKnees
+- Waldemar Kornewald
+- Michał Bartoszkiewicz https://github.com/embe
+- Hasan Yasin Öztürk https://github.com/hasanyasin
+- David K. Hess https://github.com/davidkhess
+- Robert Cadena https://github.com/rcadena
+- rivol https://github.com/rivol
+- Maurizio Sambati https://github.com/duilio
+- fdev31 https://github.com/fdev31
+- edmellum https://github.com/edmellum
\ No newline at end of file
diff --git a/tools/slimit/LICENSE b/tools/slimit/LICENSE
new file mode 100644 (file)
index 0000000..c134376
--- /dev/null
@@ -0,0 +1,19 @@
+Copyright (c) 2011 Ruslan Spivak
+
+Permission is hereby granted, free of charge, to any person obtaining a copy
+of this software and associated documentation files (the "Software"), to deal
+in the Software without restriction, including without limitation the rights
+to use, copy, modify, merge, publish, distribute, sublicense, and/or sell
+copies of the Software, and to permit persons to whom the Software is
+furnished to do so, subject to the following conditions:
+
+The above copyright notice and this permission notice shall be included in
+all copies or substantial portions of the Software.
+
+THE SOFTWARE IS PROVIDED "AS IS", WITHOUT WARRANTY OF ANY KIND, EXPRESS OR
+IMPLIED, INCLUDING BUT NOT LIMITED TO THE WARRANTIES OF MERCHANTABILITY,
+FITNESS FOR A PARTICULAR PURPOSE AND NONINFRINGEMENT. IN NO EVENT SHALL THE
+AUTHORS OR COPYRIGHT HOLDERS BE LIABLE FOR ANY CLAIM, DAMAGES OR OTHER
+LIABILITY, WHETHER IN AN ACTION OF CONTRACT, TORT OR OTHERWISE, ARISING FROM,
+OUT OF OR IN CONNECTION WITH THE SOFTWARE OR THE USE OR OTHER DEALINGS IN
+THE SOFTWARE.
diff --git a/tools/slimit/__init__.py b/tools/slimit/__init__.py
new file mode 100644 (file)
index 0000000..8b5dd72
--- /dev/null
@@ -0,0 +1,27 @@
+###############################################################################
+#
+# Copyright (c) 2011 Ruslan Spivak
+#
+# Permission is hereby granted, free of charge, to any person obtaining a copy
+# of this software and associated documentation files (the "Software"), to deal
+# in the Software without restriction, including without limitation the rights
+# to use, copy, modify, merge, publish, distribute, sublicense, and/or sell
+# copies of the Software, and to permit persons to whom the Software is
+# furnished to do so, subject to the following conditions:
+#
+# The above copyright notice and this permission notice shall be included in
+# all copies or substantial portions of the Software.
+#
+# THE SOFTWARE IS PROVIDED "AS IS", WITHOUT WARRANTY OF ANY KIND, EXPRESS OR
+# IMPLIED, INCLUDING BUT NOT LIMITED TO THE WARRANTIES OF MERCHANTABILITY,
+# FITNESS FOR A PARTICULAR PURPOSE AND NONINFRINGEMENT. IN NO EVENT SHALL THE
+# AUTHORS OR COPYRIGHT HOLDERS BE LIABLE FOR ANY CLAIM, DAMAGES OR OTHER
+# LIABILITY, WHETHER IN AN ACTION OF CONTRACT, TORT OR OTHERWISE, ARISING FROM,
+# OUT OF OR IN CONNECTION WITH THE SOFTWARE OR THE USE OR OTHER DEALINGS IN
+# THE SOFTWARE.
+#
+###############################################################################
+
+__author__ = 'Ruslan Spivak <ruslan.spivak@gmail.com>'
+
+from minifier import minify
diff --git a/tools/slimit/ast.py b/tools/slimit/ast.py
new file mode 100644 (file)
index 0000000..7458967
--- /dev/null
@@ -0,0 +1,415 @@
+###############################################################################
+#
+# Copyright (c) 2011 Ruslan Spivak
+#
+# Permission is hereby granted, free of charge, to any person obtaining a copy
+# of this software and associated documentation files (the "Software"), to deal
+# in the Software without restriction, including without limitation the rights
+# to use, copy, modify, merge, publish, distribute, sublicense, and/or sell
+# copies of the Software, and to permit persons to whom the Software is
+# furnished to do so, subject to the following conditions:
+#
+# The above copyright notice and this permission notice shall be included in
+# all copies or substantial portions of the Software.
+#
+# THE SOFTWARE IS PROVIDED "AS IS", WITHOUT WARRANTY OF ANY KIND, EXPRESS OR
+# IMPLIED, INCLUDING BUT NOT LIMITED TO THE WARRANTIES OF MERCHANTABILITY,
+# FITNESS FOR A PARTICULAR PURPOSE AND NONINFRINGEMENT. IN NO EVENT SHALL THE
+# AUTHORS OR COPYRIGHT HOLDERS BE LIABLE FOR ANY CLAIM, DAMAGES OR OTHER
+# LIABILITY, WHETHER IN AN ACTION OF CONTRACT, TORT OR OTHERWISE, ARISING FROM,
+# OUT OF OR IN CONNECTION WITH THE SOFTWARE OR THE USE OR OTHER DEALINGS IN
+# THE SOFTWARE.
+#
+###############################################################################
+
+__author__ = 'Ruslan Spivak <ruslan.spivak@gmail.com>'
+
+
+class Node(object):
+    def __init__(self, children=None):
+        self._children_list = [] if children is None else children
+
+    def __iter__(self):
+        for child in self.children():
+            if child is not None:
+                yield child
+
+    def children(self):
+        return self._children_list
+
+    def to_ecma(self):
+        # Can't import at module level as ecmavisitor depends
+        # on ast module...
+        from slimit.visitors.ecmavisitor import ECMAVisitor
+        visitor = ECMAVisitor()
+        return visitor.visit(self)
+
+class Program(Node):
+    pass
+
+class Block(Node):
+    pass
+
+class Boolean(Node):
+    def __init__(self, value):
+        self.value = value
+
+    def children(self):
+        return []
+
+class Null(Node):
+    def __init__(self, value):
+        self.value = value
+
+    def children(self):
+        return []
+
+class Number(Node):
+    def __init__(self, value):
+        self.value = value
+
+    def children(self):
+        return []
+
+class Identifier(Node):
+    def __init__(self, value):
+        self.value = value
+
+    def children(self):
+        return []
+
+class String(Node):
+    def __init__(self, value):
+        self.value = value
+
+    def children(self):
+        return []
+
+class Regex(Node):
+    def __init__(self, value):
+        self.value = value
+
+    def children(self):
+        return []
+
+class Array(Node):
+    def __init__(self, items):
+        self.items = items
+
+    def children(self):
+        return self.items
+
+class Object(Node):
+    def __init__(self, properties=None):
+        self.properties = [] if properties is None else properties
+
+    def children(self):
+        return self.properties
+
+class NewExpr(Node):
+    def __init__(self, identifier, args=None):
+        self.identifier = identifier
+        self.args = [] if args is None else args
+
+    def children(self):
+        return [self.identifier, self.args]
+
+class FunctionCall(Node):
+    def __init__(self, identifier, args=None):
+        self.identifier = identifier
+        self.args = [] if args is None else args
+
+    def children(self):
+        return [self.identifier] + self.args
+
+class BracketAccessor(Node):
+    def __init__(self, node, expr):
+        self.node = node
+        self.expr = expr
+
+    def children(self):
+        return [self.node, self.expr]
+
+class DotAccessor(Node):
+    def __init__(self, node, identifier):
+        self.node = node
+        self.identifier = identifier
+
+    def children(self):
+        return [self.node, self.identifier]
+
+class Assign(Node):
+    def __init__(self, op, left, right):
+        self.op = op
+        self.left = left
+        self.right = right
+
+    def children(self):
+        return [self.left, self.right]
+
+class GetPropAssign(Node):
+    def __init__(self, prop_name, elements):
+        """elements - function body"""
+        self.prop_name = prop_name
+        self.elements = elements
+
+    def children(self):
+        return [self.prop_name] + self.elements
+
+class SetPropAssign(Node):
+    def __init__(self, prop_name, parameters, elements):
+        """elements - function body"""
+        self.prop_name = prop_name
+        self.parameters = parameters
+        self.elements = elements
+
+    def children(self):
+        return [self.prop_name] + self.parameters + self.elements
+
+class VarStatement(Node):
+    pass
+
+class VarDecl(Node):
+    def __init__(self, identifier, initializer=None):
+        self.identifier = identifier
+        self.identifier._mangle_candidate = True
+        self.initializer = initializer
+
+    def children(self):
+        return [self.identifier, self.initializer]
+
+class UnaryOp(Node):
+    def __init__(self, op, value, postfix=False):
+        self.op = op
+        self.value = value
+        self.postfix = postfix
+
+    def children(self):
+        return [self.value]
+
+class BinOp(Node):
+    def __init__(self, op, left, right):
+        self.op = op
+        self.left = left
+        self.right = right
+
+    def children(self):
+        return [self.left, self.right]
+
+class Conditional(Node):
+    """Conditional Operator ( ? : )"""
+    def __init__(self, predicate, consequent, alternative):
+        self.predicate = predicate
+        self.consequent = consequent
+        self.alternative = alternative
+
+    def children(self):
+        return [self.predicate, self.consequent, self.alternative]
+
+class If(Node):
+    def __init__(self, predicate, consequent, alternative=None):
+        self.predicate = predicate
+        self.consequent = consequent
+        self.alternative = alternative
+
+    def children(self):
+        return [self.predicate, self.consequent, self.alternative]
+
+class DoWhile(Node):
+    def __init__(self, predicate, statement):
+        self.predicate = predicate
+        self.statement = statement
+
+    def children(self):
+        return [self.predicate, self.statement]
+
+class While(Node):
+    def __init__(self, predicate, statement):
+        self.predicate = predicate
+        self.statement = statement
+
+    def children(self):
+        return [self.predicate, self.statement]
+
+class For(Node):
+    def __init__(self, init, cond, count, statement):
+        self.init = init
+        self.cond = cond
+        self.count = count
+        self.statement = statement
+
+    def children(self):
+        return [self.init, self.cond, self.count, self.statement]
+
+class ForIn(Node):
+    def __init__(self, item, iterable, statement):
+        self.item = item
+        self.iterable = iterable
+        self.statement = statement
+
+    def children(self):
+        return [self.item, self.iterable, self.statement]
+
+class Continue(Node):
+    def __init__(self, identifier=None):
+        self.identifier = identifier
+
+    def children(self):
+        return [self.identifier]
+
+class Break(Node):
+    def __init__(self, identifier=None):
+        self.identifier = identifier
+
+    def children(self):
+        return [self.identifier]
+
+class Return(Node):
+    def __init__(self, expr=None):
+        self.expr = expr
+
+    def children(self):
+        return [self.expr]
+
+class With(Node):
+    def __init__(self, expr, statement):
+        self.expr = expr
+        self.statement = statement
+
+    def children(self):
+        return [self.expr, self.statement]
+
+class Switch(Node):
+    def __init__(self, expr, cases, default=None):
+        self.expr = expr
+        self.cases = cases
+        self.default = default
+
+    def children(self):
+        return [self.expr] + self.cases + [self.default]
+
+class Case(Node):
+    def __init__(self, expr, elements):
+        self.expr = expr
+        self.elements = elements if elements is not None else []
+
+    def children(self):
+        return [self.expr] + self.elements
+
+class Default(Node):
+    def __init__(self, elements):
+        self.elements = elements if elements is not None else []
+
+    def children(self):
+        return self.elements
+
+class Label(Node):
+    def __init__(self, identifier, statement):
+        self.identifier = identifier
+        self.statement = statement
+
+    def children(self):
+        return [self.identifier, self.statement]
+
+class Throw(Node):
+    def __init__(self, expr):
+        self.expr = expr
+
+    def children(self):
+        return [self.expr]
+
+class Try(Node):
+    def __init__(self, statements, catch=None, fin=None):
+        self.statements = statements
+        self.catch = catch
+        self.fin = fin
+
+    def children(self):
+        return [self.statements] + [self.catch, self.fin]
+
+class Catch(Node):
+    def __init__(self, identifier, elements):
+        self.identifier = identifier
+        # CATCH identifiers are subject to name mangling. we need to mark them.
+        self.identifier._mangle_candidate = True
+        self.elements = elements
+
+    def children(self):
+        return [self.identifier, self.elements]
+
+class Finally(Node):
+    def __init__(self, elements):
+        self.elements = elements
+
+    def children(self):
+        return self.elements
+
+class Debugger(Node):
+    def __init__(self, value):
+        self.value = value
+
+    def children(self):
+        return []
+
+
+class FuncBase(Node):
+    def __init__(self, identifier, parameters, elements):
+        self.identifier = identifier
+        self.parameters = parameters if parameters is not None else []
+        self.elements = elements if elements is not None else []
+        self._init_ids()
+
+    def _init_ids(self):
+        # function declaration/expression name and parameters are identifiers
+        # and therefore are subject to name mangling. we need to mark them.
+        if self.identifier is not None:
+            self.identifier._mangle_candidate = True
+        for param in self.parameters:
+            param._mangle_candidate = True
+
+    def children(self):
+        return [self.identifier] + self.parameters + self.elements
+
+class FuncDecl(FuncBase):
+    pass
+
+# The only difference is that function expression might not have an identifier
+class FuncExpr(FuncBase):
+    pass
+
+
+class Comma(Node):
+    def __init__(self, left, right):
+        self.left = left
+        self.right = right
+
+    def children(self):
+        return [self.left, self.right]
+
+class EmptyStatement(Node):
+    def __init__(self, value):
+        self.value = value
+
+    def children(self):
+        return []
+
+class ExprStatement(Node):
+    def __init__(self, expr):
+        self.expr = expr
+
+    def children(self):
+        return [self.expr]
+
+class Elision(Node):
+    def __init__(self, value):
+        self.value = value
+
+    def children(self):
+        return []
+
+class This(Node):
+    def __init__(self):
+        pass
+
+    def children(self):
+        return []
diff --git a/tools/slimit/lexer.py b/tools/slimit/lexer.py
new file mode 100644 (file)
index 0000000..06cd7c5
--- /dev/null
@@ -0,0 +1,437 @@
+###############################################################################
+#
+# Copyright (c) 2011 Ruslan Spivak
+#
+# Permission is hereby granted, free of charge, to any person obtaining a copy
+# of this software and associated documentation files (the "Software"), to deal
+# in the Software without restriction, including without limitation the rights
+# to use, copy, modify, merge, publish, distribute, sublicense, and/or sell
+# copies of the Software, and to permit persons to whom the Software is
+# furnished to do so, subject to the following conditions:
+#
+# The above copyright notice and this permission notice shall be included in
+# all copies or substantial portions of the Software.
+#
+# THE SOFTWARE IS PROVIDED "AS IS", WITHOUT WARRANTY OF ANY KIND, EXPRESS OR
+# IMPLIED, INCLUDING BUT NOT LIMITED TO THE WARRANTIES OF MERCHANTABILITY,
+# FITNESS FOR A PARTICULAR PURPOSE AND NONINFRINGEMENT. IN NO EVENT SHALL THE
+# AUTHORS OR COPYRIGHT HOLDERS BE LIABLE FOR ANY CLAIM, DAMAGES OR OTHER
+# LIABILITY, WHETHER IN AN ACTION OF CONTRACT, TORT OR OTHERWISE, ARISING FROM,
+# OUT OF OR IN CONNECTION WITH THE SOFTWARE OR THE USE OR OTHER DEALINGS IN
+# THE SOFTWARE.
+#
+###############################################################################
+
+__author__ = 'Ruslan Spivak <ruslan.spivak@gmail.com>'
+
+import ply.lex
+
+from slimit.unicode_chars import (
+    LETTER,
+    DIGIT,
+    COMBINING_MARK,
+    CONNECTOR_PUNCTUATION,
+    )
+
+# See "Regular Expression Literals" at
+# http://www.mozilla.org/js/language/js20-2002-04/rationale/syntax.html
+TOKENS_THAT_IMPLY_DIVISON = frozenset([
+    'ID',
+    'NUMBER',
+    'STRING',
+    'REGEX',
+    'TRUE',
+    'FALSE',
+    'NULL',
+    'THIS',
+    'PLUSPLUS',
+    'MINUSMINUS',
+    'RPAREN',
+    'RBRACE',
+    'RBRACKET',
+    ])
+
+
+class Lexer(object):
+    """A JavaScript lexer.
+
+    >>> from slimit.lexer import Lexer
+    >>> lexer = Lexer()
+
+    Lexer supports iteration:
+
+    >>> lexer.input('a = 1;')
+    >>> for token in lexer:
+    ...     print token
+    ...
+    LexToken(ID,'a',1,0)
+    LexToken(EQ,'=',1,2)
+    LexToken(NUMBER,'1',1,4)
+    LexToken(SEMI,';',1,5)
+
+    Or call one token at a time with 'token' method:
+
+    >>> lexer.input('a = 1;')
+    >>> while True:
+    ...     token = lexer.token()
+    ...     if not token:
+    ...         break
+    ...     print token
+    ...
+    LexToken(ID,'a',1,0)
+    LexToken(EQ,'=',1,2)
+    LexToken(NUMBER,'1',1,4)
+    LexToken(SEMI,';',1,5)
+
+    >>> lexer.input('a = 1;')
+    >>> token = lexer.token()
+    >>> token.type, token.value, token.lineno, token.lexpos
+    ('ID', 'a', 1, 0)
+
+    For more information see:
+    http://www.ecma-international.org/publications/files/ECMA-ST/ECMA-262.pdf
+    """
+    def __init__(self):
+        self.prev_token = None
+        self.cur_token = None
+        self.next_tokens = []
+        self.build()
+
+    def build(self, **kwargs):
+        """Build the lexer."""
+        self.lexer = ply.lex.lex(object=self, **kwargs)
+
+    def input(self, text):
+        self.lexer.input(text)
+
+    def token(self):
+        if self.next_tokens:
+            return self.next_tokens.pop()
+
+        lexer = self.lexer
+        while True:
+            pos = lexer.lexpos
+            try:
+                char = lexer.lexdata[pos]
+                while char in ' \t':
+                    pos += 1
+                    char = lexer.lexdata[pos]
+                next_char = lexer.lexdata[pos + 1]
+            except IndexError:
+                tok = self._get_update_token()
+                if tok is not None and tok.type == 'LINE_TERMINATOR':
+                    continue
+                else:
+                    return tok
+
+            if char != '/' or (char == '/' and next_char in ('/', '*')):
+                tok = self._get_update_token()
+                if tok.type in ('LINE_TERMINATOR',
+                                'LINE_COMMENT', 'BLOCK_COMMENT'):
+                    continue
+                else:
+                    return tok
+
+            # current character is '/' which is either division or regex
+            cur_token = self.cur_token
+            is_division_allowed = (
+                cur_token is not None and
+                cur_token.type in TOKENS_THAT_IMPLY_DIVISON
+                )
+            if is_division_allowed:
+                return self._get_update_token()
+            else:
+                self.prev_token = self.cur_token
+                self.cur_token = self._read_regex()
+                return self.cur_token
+
+    def auto_semi(self, token):
+        if (token is None or token.type == 'RBRACE'
+            or self._is_prev_token_lt()
+            ):
+            if token:
+                self.next_tokens.append(token)
+            return self._create_semi_token(token)
+
+    def _is_prev_token_lt(self):
+        return self.prev_token and self.prev_token.type == 'LINE_TERMINATOR'
+
+    def _read_regex(self):
+        self.lexer.begin('regex')
+        token = self.lexer.token()
+        self.lexer.begin('INITIAL')
+        return token
+
+    def _get_update_token(self):
+        self.prev_token = self.cur_token
+        self.cur_token = self.lexer.token()
+        # insert semicolon before restricted tokens
+        # See section 7.9.1 ECMA262
+        if (self.cur_token is not None
+            and self.cur_token.type == 'LINE_TERMINATOR'
+            and self.prev_token is not None
+            and self.prev_token.type in ['BREAK', 'CONTINUE',
+                                         'RETURN', 'THROW']
+            ):
+            return self._create_semi_token(self.cur_token)
+        return self.cur_token
+
+    def _create_semi_token(self, orig_token):
+        token = ply.lex.LexToken()
+        token.type = 'SEMI'
+        token.value = ';'
+        if orig_token is not None:
+            token.lineno = orig_token.lineno
+            token.lexpos = orig_token.lexpos
+        else:
+            token.lineno = 0
+            token.lexpos = 0
+        return token
+
+    # iterator protocol
+    def __iter__(self):
+        return self
+
+    def next(self):
+        token = self.token()
+        if not token:
+            raise StopIteration
+
+        return token
+
+    states = (
+        ('regex', 'exclusive'),
+        )
+
+    keywords = (
+        'BREAK', 'CASE', 'CATCH', 'CONTINUE', 'DEBUGGER', 'DEFAULT', 'DELETE',
+        'DO', 'ELSE', 'FINALLY', 'FOR', 'FUNCTION', 'IF', 'IN',
+        'INSTANCEOF', 'NEW', 'RETURN', 'SWITCH', 'THIS', 'THROW', 'TRY',
+        'TYPEOF', 'VAR', 'VOID', 'WHILE', 'WITH', 'NULL', 'TRUE', 'FALSE',
+        # future reserved words - well, it's uncommented now to make
+        # IE8 happy because it chokes up on minification:
+        # obj["class"] -> obj.class
+        'CLASS', 'CONST', 'ENUM', 'EXPORT', 'EXTENDS', 'IMPORT', 'SUPER',
+        )
+    keywords_dict = dict((key.lower(), key) for key in keywords)
+
+    tokens = (
+        # Punctuators
+        'PERIOD', 'COMMA', 'SEMI', 'COLON',     # . , ; :
+        'PLUS', 'MINUS', 'MULT', 'DIV', 'MOD',  # + - * / %
+        'BAND', 'BOR', 'BXOR', 'BNOT',          # & | ^ ~
+        'CONDOP',                               # conditional operator ?
+        'NOT',                                  # !
+        'LPAREN', 'RPAREN',                     # ( and )
+        'LBRACE', 'RBRACE',                     # { and }
+        'LBRACKET', 'RBRACKET',                 # [ and ]
+        'EQ', 'EQEQ', 'NE',                     # = == !=
+        'STREQ', 'STRNEQ',                      # === and !==
+        'LT', 'GT',                             # < and >
+        'LE', 'GE',                             # <= and >=
+        'OR', 'AND',                            # || and &&
+        'PLUSPLUS', 'MINUSMINUS',               # ++ and --
+        'LSHIFT',                               # <<
+        'RSHIFT', 'URSHIFT',                    # >> and >>>
+        'PLUSEQUAL', 'MINUSEQUAL',              # += and -=
+        'MULTEQUAL', 'DIVEQUAL',                # *= and /=
+        'LSHIFTEQUAL',                          # <<=
+        'RSHIFTEQUAL', 'URSHIFTEQUAL',          # >>= and >>>=
+        'ANDEQUAL', 'MODEQUAL',                 # &= and %=
+        'XOREQUAL', 'OREQUAL',                  # ^= and |=
+
+        # Terminal types
+        'NUMBER', 'STRING', 'ID', 'REGEX',
+
+        # Properties
+        'GETPROP', 'SETPROP',
+
+        # Comments
+        'LINE_COMMENT', 'BLOCK_COMMENT',
+
+        'LINE_TERMINATOR',
+        ) + keywords
+
+    # adapted from https://bitbucket.org/ned/jslex
+    # Verbose-style pattern used in the exclusive 'regex' lexer state:
+    # matches one complete JavaScript regular expression literal,
+    # e.g. /ab+c/gi, including any trailing flags.
+    t_regex_REGEX = r"""(?:
+        /                       # opening slash
+        # First character is..
+        (?: [^*\\/[]            # anything but * \ / or [
+        |   \\.                 # or an escape sequence
+        |   \[                  # or a class, which has
+                (?: [^\]\\]     # anything but \ or ]
+                |   \\.         # or an escape sequence
+                )*              # many times
+            \]
+        )
+        # Following characters are same, except for excluding a star
+        (?: [^\\/[]             # anything but \ / or [
+        |   \\.                 # or an escape sequence
+        |   \[                  # or a class, which has
+                (?: [^\]\\]     # anything but \ or ]
+                |   \\.         # or an escape sequence
+                )*              # many times
+            \]
+        )*                      # many times
+        /                       # closing slash
+        [a-zA-Z0-9]*            # trailing flags
+        )
+        """
+
+    # While in the 'regex' state only spaces and tabs are skipped.
+    t_regex_ignore = ' \t'
+
+    def t_regex_error(self, token):
+        raise TypeError(
+            "Error parsing regular expression '%s' at %s" % (
+                token.value, token.lineno)
+            )
+
+    # Punctuators
+    # NOTE: ply.lex combines these simple string rules into one master
+    # regex, trying longer patterns first, so '===' is matched before
+    # '==' and '=' -- the declaration order below does not matter.
+    t_PERIOD        = r'\.'
+    t_COMMA         = r','
+    t_SEMI          = r';'
+    t_COLON         = r':'
+    t_PLUS          = r'\+'
+    t_MINUS         = r'-'
+    t_MULT          = r'\*'
+    t_DIV           = r'/'
+    t_MOD           = r'%'
+    t_BAND          = r'&'
+    t_BOR           = r'\|'
+    t_BXOR          = r'\^'
+    t_BNOT          = r'~'
+    t_CONDOP        = r'\?'
+    t_NOT           = r'!'
+    t_LPAREN        = r'\('
+    t_RPAREN        = r'\)'
+    t_LBRACE        = r'{'
+    t_RBRACE        = r'}'
+    t_LBRACKET      = r'\['
+    t_RBRACKET      = r'\]'
+    t_EQ            = r'='
+    t_EQEQ          = r'=='
+    t_NE            = r'!='
+    t_STREQ         = r'==='
+    t_STRNEQ       = r'!=='
+    t_LT            = r'<'
+    t_GT            = r'>'
+    t_LE            = r'<='
+    t_GE            = r'>='
+    t_OR            = r'\|\|'
+    t_AND           = r'&&'
+    t_PLUSPLUS      = r'\+\+'
+    t_MINUSMINUS    = r'--'
+    t_LSHIFT        = r'<<'
+    t_RSHIFT        = r'>>'
+    t_URSHIFT       = r'>>>'
+    t_PLUSEQUAL     = r'\+='
+    t_MINUSEQUAL    = r'-='
+    t_MULTEQUAL     = r'\*='
+    t_DIVEQUAL      = r'/='
+    t_LSHIFTEQUAL   = r'<<='
+    t_RSHIFTEQUAL   = r'>>='
+    t_URSHIFTEQUAL  = r'>>>='
+    t_ANDEQUAL      = r'&='
+    t_MODEQUAL      = r'%='
+    t_XOREQUAL      = r'\^='
+    t_OREQUAL       = r'\|='
+
+    # Comments and line terminators are produced as ordinary tokens
+    # (the parser decides what to do with them, e.g. ASI handling).
+    t_LINE_COMMENT  = r'//[^\r\n]*'
+    t_BLOCK_COMMENT = r'/\*[^*]*\*+([^/*][^*]*\*+)*/'
+
+    t_LINE_TERMINATOR = r'[\n\r]+'
+
+    # Default state: spaces and tabs are skipped.
+    t_ignore = ' \t'
+
+    # Numeric literal. Alternation order matters: hex and octal forms
+    # must be tried before the decimal form, otherwise the leading '0'
+    # of '0x2f' would match as a decimal_integer_literal on its own.
+    t_NUMBER = r"""
+    (?:
+        0[xX][0-9a-fA-F]+              # hex_integer_literal
+     |  0[0-7]+                        # or octal_integer_literal (spec B.1.1)
+     |  (?:                            # or decimal_literal
+            (?:0|[1-9][0-9]*)          # decimal_integer_literal
+            \.                         # dot
+            [0-9]*                     # decimal_digits_opt
+            (?:[eE][+-]?[0-9]+)?       # exponent_part_opt
+         |
+            \.                         # dot
+            [0-9]+                     # decimal_digits
+            (?:[eE][+-]?[0-9]+)?       # exponent_part_opt
+         |
+            (?:0|[1-9][0-9]*)          # decimal_integer_literal
+            (?:[eE][+-]?[0-9]+)?       # exponent_part_opt
+         )
+    )
+    """
+
+    # Source pattern for t_STRING below: a double- or single-quoted
+    # string literal, allowing escape sequences and backslash-newline
+    # line continuations, but no raw line terminators inside.
+    string = r"""
+    (?:
+        # double quoted string
+        (?:"                               # opening double quote
+            (?: [^"\\\n\r]                 # no \, line terminators or "
+                | \\[a-zA-Z!-\/:-@\[-`{-~] # or escaped characters
+                | \\x[0-9a-fA-F]{2}        # or hex_escape_sequence
+                | \\u[0-9a-fA-F]{4}        # or unicode_escape_sequence
+            )*?                            # zero or many times
+            (?: \\\n                       # multiline ?
+              (?:
+                [^"\\\n\r]                 # no \, line terminators or "
+                | \\[a-zA-Z!-\/:-@\[-`{-~] # or escaped characters
+                | \\x[0-9a-fA-F]{2}        # or hex_escape_sequence
+                | \\u[0-9a-fA-F]{4}        # or unicode_escape_sequence
+              )*?                          # zero or many times
+            )*
+        ")                                 # closing double quote
+        |
+        # single quoted string
+        (?:'                               # opening single quote
+            (?: [^'\\\n\r]                 # no \, line terminators or '
+                | \\[a-zA-Z!-\/:-@\[-`{-~] # or escaped characters
+                | \\x[0-9a-fA-F]{2}        # or hex_escape_sequence
+                | \\u[0-9a-fA-F]{4}        # or unicode_escape_sequence
+            )*?                            # zero or many times
+            (?: \\\n                       # multiline ?
+              (?:
+                [^'\\\n\r]                 # no \, line terminators or '
+                | \\[a-zA-Z!-\/:-@\[-`{-~] # or escaped characters
+                | \\x[0-9a-fA-F]{2}        # or hex_escape_sequence
+                | \\u[0-9a-fA-F]{4}        # or unicode_escape_sequence
+              )*?                          # zero or many times
+            )*
+        ')                                 # closing single quote
+    )
+    """  # "
+
+    @ply.lex.TOKEN(string)
+    def t_STRING(self, token):
+        # remove escape + new line sequence used for strings
+        # written across multiple lines of code
+        token.value = token.value.replace('\\\n', '')
+        return token
+
+    # XXX: <ZWNJ> <ZWJ> ?
+    # ECMAScript identifier pattern, built from the Unicode character
+    # classes (LETTER, DIGIT, COMBINING_MARK, CONNECTOR_PUNCTUATION)
+    # imported at module level.
+    # NOTE(review): identifier_start is quantified with '+', so it can
+    # consume several "start" characters in a row before identifier_part
+    # takes over -- presumably intentional upstream (slimit); confirm.
+    identifier_start = r'(?:' + r'[a-zA-Z_$]' + r'|' + LETTER + r')+'
+    identifier_part = (
+        r'(?:' + COMBINING_MARK + r'|' + r'[0-9a-zA-Z_$]' + r'|' + DIGIT +
+        r'|' + CONNECTOR_PUNCTUATION + r')*'
+        )
+    identifier = identifier_start + identifier_part
+
+    # 'get' followed (lookahead only) by whitespace and an identifier:
+    # a getter property in an object literal, e.g. "get name() {...}".
+    # NOTE(review): \s allows exactly one whitespace character here.
+    getprop = r'get' + r'(?=\s' + identifier + r')'
+    @ply.lex.TOKEN(getprop)
+    def t_GETPROP(self, token):
+        # Token value is just 'get'; the identifier stays in the input.
+        return token
+
+    # 'set' followed (lookahead only) by whitespace and an identifier:
+    # a setter property in an object literal, e.g. "set name(v) {...}".
+    # NOTE(review): \s allows exactly one whitespace character here.
+    setprop = r'set' + r'(?=\s' + identifier + r')'
+    @ply.lex.TOKEN(setprop)
+    def t_SETPROP(self, token):
+        # Token value is just 'set'; the identifier stays in the input.
+        return token
+
+    @ply.lex.TOKEN(identifier)
+    def t_ID(self, token):
+        token.type = self.keywords_dict.get(token.value, 'ID')
+        return token
+
+    def t_error(self, token):
+        print 'Illegal character %r at %s:%s after %s' % (
+            token.value[0], token.lineno, token.lexpos, self.prev_token)
+        token.lexer.skip(1)
diff --git a/tools/slimit/lextab.py b/tools/slimit/lextab.py
new file mode 100644 (file)
index 0000000..f2d1f6f
--- /dev/null
@@ -0,0 +1,9 @@
+# lextab.py. This file automatically created by PLY (version 3.4). Don't edit!
+_tabversion   = '3.4'
+_lextokens    = {'BOR': 1, 'LBRACKET': 1, 'WITH': 1, 'MINUS': 1, 'RPAREN': 1, 'PLUS': 1, 'VOID': 1, 'BLOCK_COMMENT': 1, 'GT': 1, 'RBRACE': 1, 'PERIOD': 1, 'GE': 1, 'VAR': 1, 'THIS': 1, 'MINUSEQUAL': 1, 'TYPEOF': 1, 'OR': 1, 'DELETE': 1, 'DIVEQUAL': 1, 'RETURN': 1, 'RSHIFTEQUAL': 1, 'EQEQ': 1, 'SETPROP': 1, 'BNOT': 1, 'URSHIFTEQUAL': 1, 'TRUE': 1, 'COLON': 1, 'FUNCTION': 1, 'LINE_COMMENT': 1, 'FOR': 1, 'PLUSPLUS': 1, 'ELSE': 1, 'TRY': 1, 'EQ': 1, 'AND': 1, 'LBRACE': 1, 'CONTINUE': 1, 'NOT': 1, 'OREQUAL': 1, 'MOD': 1, 'RSHIFT': 1, 'DEFAULT': 1, 'WHILE': 1, 'NEW': 1, 'CASE': 1, 'MODEQUAL': 1, 'NE': 1, 'MULTEQUAL': 1, 'SWITCH': 1, 'CATCH': 1, 'STREQ': 1, 'INSTANCEOF': 1, 'PLUSEQUAL': 1, 'GETPROP': 1, 'FALSE': 1, 'CONDOP': 1, 'BREAK': 1, 'LINE_TERMINATOR': 1, 'ANDEQUAL': 1, 'DO': 1, 'NUMBER': 1, 'LSHIFT': 1, 'DIV': 1, 'NULL': 1, 'MULT': 1, 'DEBUGGER': 1, 'LE': 1, 'SEMI': 1, 'BXOR': 1, 'LT': 1, 'COMMA': 1, 'REGEX': 1, 'STRING': 1, 'BAND': 1, 'FINALLY': 1, 'STRNEQ': 1, 'LPAREN': 1, 'IN': 1, 'MINUSMINUS': 1, 'ID': 1, 'IF': 1, 'XOREQUAL': 1, 'LSHIFTEQUAL': 1, 'URSHIFT': 1, 'RBRACKET': 1, 'THROW': 1}
+_lexreflags   = 0
+_lexliterals  = ''
+_lexstateinfo = {'regex': 'exclusive', 'INITIAL': 'inclusive'}
+_lexstatere   = {'regex': [('(?P<t_regex_REGEX>(?:\n        /                       # opening slash\n        # First character is..\n        (?: [^*\\\\/[]            # anything but * \\ / or [\n        |   \\\\.                 # or an escape sequence\n        |   \\[                  # or a class, which has\n                (?: [^\\]\\\\]     # anything but \\ or ]\n                |   \\\\.         # or an escape sequence\n                )*              # many times\n            \\]\n        )\n        # Following characters are same, except for excluding a star\n        (?: [^\\\\/[]             # anything but \\ / or [\n        |   \\\\.                 # or an escape sequence\n        |   \\[                  # or a class, which has\n                (?: [^\\]\\\\]     # anything but \\ or ]\n                |   \\\\.         # or an escape sequence\n                )*              # many times\n            \\]\n        )*                      # many times\n        /                       # closing slash\n        [a-zA-Z0-9]*            # trailing flags\n        )\n        )', [None, (None, 'REGEX')])], 'INITIAL': [(u'(?P<t_STRING>\n    (?:\n        # double quoted string\n        (?:"                               # opening double quote\n            (?: [^"\\\\\\n\\r]                 # no \\, line terminators or "\n                | \\\\[a-zA-Z!-\\/:-@\\[-`{-~] # or escaped characters\n                | \\\\x[0-9a-fA-F]{2}        # or hex_escape_sequence\n                | \\\\u[0-9a-fA-F]{4}        # or unicode_escape_sequence\n            )*?                            
# zero or many times\n            (?: \\\\\\n                       # multiline ?\n              (?:\n                [^"\\\\\\n\\r]                 # no \\, line terminators or "\n                | \\\\[a-zA-Z!-\\/:-@\\[-`{-~] # or escaped characters\n                | \\\\x[0-9a-fA-F]{2}        # or hex_escape_sequence\n                | \\\\u[0-9a-fA-F]{4}        # or unicode_escape_sequence\n              )*?                          # zero or many times\n            )*\n        ")                                 # closing double quote\n        |\n        # single quoted string\n        (?:\'                               # opening single quote\n            (?: [^\'\\\\\\n\\r]                 # no \\, line terminators or \'\n                | \\\\[a-zA-Z!-\\/:-@\\[-`{-~] # or escaped characters\n                | \\\\x[0-9a-fA-F]{2}        # or hex_escape_sequence\n                | \\\\u[0-9a-fA-F]{4}        # or unicode_escape_sequence\n            )*?                            # zero or many times\n            (?: \\\\\\n                       # multiline ?\n              (?:\n                [^\'\\\\\\n\\r]                 # no \\, line terminators or \'\n                | \\\\[a-zA-Z!-\\/:-@\\[-`{-~] # or escaped characters\n                | \\\\x[0-9a-fA-F]{2}        # or hex_escape_sequence\n                | \\\\u[0-9a-fA-F]{4}        # or unicode_escape_sequence\n              )*?                          
# zero or many times\n            )*\n        \')                                 # closing single quote\n    )\n    )|(?P<t_GETPROP>get(?=\\s(?:[a-zA-Z_$]|[A-Za-z\xaa\xb5\xba\xc0-\xd6\xd8-\xf6\xf8-\u02c1\u02c6-\u02d1\u02e0-\u02e4\u02ec\u02ee\u0370-\u0374\u0376\u0377\u037a-\u037d\u0386\u0388-\u038a\u038c\u038e-\u03a1\u03a3-\u03f5\u03f7-\u0481\u048a-\u0523\u0531-\u0556\u0559\u0561-\u0587\u05d0-\u05ea\u05f0-\u05f2\u0621-\u064a\u066e\u066f\u0671-\u06d3\u06d5\u06e5\u06e6\u06ee\u06ef\u06fa-\u06fc\u06ff\u0710\u0712-\u072f\u074d-\u07a5\u07b1\u07ca-\u07ea\u07f4\u07f5\u07fa\u0904-\u0939\u093d\u0950\u0958-\u0961\u0971\u0972\u097b-\u097f\u0985-\u098c\u098f\u0990\u0993-\u09a8\u09aa-\u09b0\u09b2\u09b6-\u09b9\u09bd\u09ce\u09dc\u09dd\u09df-\u09e1\u09f0\u09f1\u0a05-\u0a0a\u0a0f\u0a10\u0a13-\u0a28\u0a2a-\u0a30\u0a32\u0a33\u0a35\u0a36\u0a38\u0a39\u0a59-\u0a5c\u0a5e\u0a72-\u0a74\u0a85-\u0a8d\u0a8f-\u0a91\u0a93-\u0aa8\u0aaa-\u0ab0\u0ab2\u0ab3\u0ab5-\u0ab9\u0abd\u0ad0\u0ae0\u0ae1\u0b05-\u0b0c\u0b0f\u0b10\u0b13-\u0b28\u0b2a-\u0b30\u0b32\u0b33\u0b35-\u0b39\u0b3d\u0b5c\u0b5d\u0b5f-\u0b61\u0b71\u0b83\u0b85-\u0b8a\u0b8e-\u0b90\u0b92-\u0b95\u0b99\u0b9a\u0b9c\u0b9e\u0b9f\u0ba3\u0ba4\u0ba8-\u0baa\u0bae-\u0bb9\u0bd0\u0c05-\u0c0c\u0c0e-\u0c10\u0c12-\u0c28\u0c2a-\u0c33\u0c35-\u0c39\u0c3d\u0c58\u0c59\u0c60\u0c61\u0c85-\u0c8c\u0c8e-\u0c90\u0c92-\u0ca8\u0caa-\u0cb3\u0cb5-\u0cb9\u0cbd\u0cde\u0ce0\u0ce1\u0d05-\u0d0c\u0d0e-\u0d10\u0d12-\u0d28\u0d2a-\u0d39\u0d3d\u0d60\u0d61\u0d7a-\u0d7f\u0d85-\u0d96\u0d9a-\u0db1\u0db3-\u0dbb\u0dbd\u0dc0-\u0dc6\u0e01-\u0e30\u0e32\u0e33\u0e40-\u0e46\u0e81\u0e82\u0e84\u0e87\u0e88\u0e8a\u0e8d\u0e94-\u0e97\u0e99-\u0e9f\u0ea1-\u0ea3\u0ea5\u0ea7\u0eaa\u0eab\u0ead-\u0eb0\u0eb2\u0eb3\u0ebd\u0ec0-\u0ec4\u0ec6\u0edc\u0edd\u0f00\u0f40-\u0f47\u0f49-\u0f6c\u0f88-\u0f8b\u1000-\u102a\u103f\u1050-\u1055\u105a-\u105d\u1061\u1065\u1066\u106e-\u1070\u1075-\u1081\u108e\u10a0-\u10c5\u10d0-\u10fa\u10fc\u1100-\u1159\u115f-\u11a2\u11a8-\u11f9\u1200-\u1248\u124a-\u124d\u1250-\u1256\u1258\u125a-\u
125d\u1260-\u1288\u128a-\u128d\u1290-\u12b0\u12b2-\u12b5\u12b8-\u12be\u12c0\u12c2-\u12c5\u12c8-\u12d6\u12d8-\u1310\u1312-\u1315\u1318-\u135a\u1380-\u138f\u13a0-\u13f4\u1401-\u166c\u166f-\u1676\u1681-\u169a\u16a0-\u16ea\u1700-\u170c\u170e-\u1711\u1720-\u1731\u1740-\u1751\u1760-\u176c\u176e-\u1770\u1780-\u17b3\u17d7\u17dc\u1820-\u1877\u1880-\u18a8\u18aa\u1900-\u191c\u1950-\u196d\u1970-\u1974\u1980-\u19a9\u19c1-\u19c7\u1a00-\u1a16\u1b05-\u1b33\u1b45-\u1b4b\u1b83-\u1ba0\u1bae\u1baf\u1c00-\u1c23\u1c4d-\u1c4f\u1c5a-\u1c7d\u1d00-\u1dbf\u1e00-\u1f15\u1f18-\u1f1d\u1f20-\u1f45\u1f48-\u1f4d\u1f50-\u1f57\u1f59\u1f5b\u1f5d\u1f5f-\u1f7d\u1f80-\u1fb4\u1fb6-\u1fbc\u1fbe\u1fc2-\u1fc4\u1fc6-\u1fcc\u1fd0-\u1fd3\u1fd6-\u1fdb\u1fe0-\u1fec\u1ff2-\u1ff4\u1ff6-\u1ffc\u2071\u207f\u2090-\u2094\u2102\u2107\u210a-\u2113\u2115\u2119-\u211d\u2124\u2126\u2128\u212a-\u212d\u212f-\u2139\u213c-\u213f\u2145-\u2149\u214e\u2183\u2184\u2c00-\u2c2e\u2c30-\u2c5e\u2c60-\u2c6f\u2c71-\u2c7d\u2c80-\u2ce4\u2d00-\u2d25\u2d30-\u2d65\u2d6f\u2d80-\u2d96\u2da0-\u2da6\u2da8-\u2dae\u2db0-\u2db6\u2db8-\u2dbe\u2dc0-\u2dc6\u2dc8-\u2dce\u2dd0-\u2dd6\u2dd8-\u2dde\u2e2f\u3005\u3006\u3031-\u3035\u303b\u303c\u3041-\u3096\u309d-\u309f\u30a1-\u30fa\u30fc-\u30ff\u3105-\u312d\u3131-\u318e\u31a0-\u31b7\u31f0-\u31ff\u3400\u4db5\u4e00\u9fc3\ua000-\ua48c\ua500-\ua60c\ua610-\ua61f\ua62a\ua62b\ua640-\ua65f\ua662-\ua66e\ua67f-\ua697\ua717-\ua71f\ua722-\ua788\ua78b\ua78c\ua7fb-\ua801\ua803-\ua805\ua807-\ua80a\ua80c-\ua822\ua840-\ua873\ua882-\ua8b3\ua90a-\ua925\ua930-\ua946\uaa00-\uaa28\uaa40-\uaa42\uaa44-\uaa4b\uac00\ud7a3\uf900-\ufa2d\ufa30-\ufa6a\ufa70-\ufad9\ufb00-\ufb06\ufb13-\ufb17\ufb1d\ufb1f-\ufb28\ufb2a-\ufb36\ufb38-\ufb3c\ufb3e\ufb40\ufb41\ufb43\ufb44\ufb46-\ufbb1\ufbd3-\ufd3d\ufd50-\ufd8f\ufd92-\ufdc7\ufdf0-\ufdfb\ufe70-\ufe74\ufe76-\ufefc\uff21-\uff3a\uff41-\uff5a\uff66-\uffbe\uffc2-\uffc7\uffca-\uffcf\uffd2-\uffd7\uffda-\uffdc])+(?:[\u0300-\u036f\u0483-\u0487\u0591-\u05bd\u05bf\u05c1\u05c2\u05c4\u05c5\u05c7\u0610-\u061a\u064
b-\u065e\u0670\u06d6-\u06dc\u06df-\u06e4\u06e7\u06e8\u06ea-\u06ed\u0711\u0730-\u074a\u07a6-\u07b0\u07eb-\u07f3\u0816-\u0819\u081b-\u0823\u0825-\u0827\u0829-\u082d\u0900-\u0902\u093c\u0941-\u0948\u094d\u0951-\u0955\u0962\u0963\u0981\u09bc\u09c1-\u09c4\u09cd\u09e2\u09e3\u0a01\u0a02\u0a3c\u0a41\u0a42\u0a47\u0a48\u0a4b-\u0a4d\u0a51\u0a70\u0a71\u0a75\u0a81\u0a82\u0abc\u0ac1-\u0ac5\u0ac7\u0ac8\u0acd\u0ae2\u0ae3\u0b01\u0b3c\u0b3f\u0b41-\u0b44\u0b4d\u0b56\u0b62\u0b63\u0b82\u0bc0\u0bcd\u0c3e-\u0c40\u0c46-\u0c48\u0c4a-\u0c4d\u0c55\u0c56\u0c62\u0c63\u0cbc\u0cbf\u0cc6\u0ccc\u0ccd\u0ce2\u0ce3\u0d41-\u0d44\u0d4d\u0d62\u0d63\u0dca\u0dd2-\u0dd4\u0dd6\u0e31\u0e34-\u0e3a\u0e47-\u0e4e\u0eb1\u0eb4-\u0eb9\u0ebb\u0ebc\u0ec8-\u0ecd\u0f18\u0f19\u0f35\u0f37\u0f39\u0f71-\u0f7e\u0f80-\u0f84\u0f86\u0f87\u0f90-\u0f97\u0f99-\u0fbc\u0fc6\u102d-\u1030\u1032-\u1037\u1039\u103a\u103d\u103e\u1058\u1059\u105e-\u1060\u1071-\u1074\u1082\u1085\u1086\u108d\u109d\u135f\u1712-\u1714\u1732-\u1734\u1752\u1753\u1772\u1773\u17b7-\u17bd\u17c6\u17c9-\u17d3\u17dd\u180b-\u180d\u18a9\u1920-\u1922\u1927\u1928\u1932\u1939-\u193b\u1a17\u1a18\u1a56\u1a58-\u1a5e\u1a60\u1a62\u1a65-\u1a6c\u1a73-\u1a7c\u1a7f\u1b00-\u1b03\u1b34\u1b36-\u1b3a\u1b3c\u1b42\u1b6b-\u1b73\u1b80\u1b81\u1ba2-\u1ba5\u1ba8\u1ba9\u1c2c-\u1c33\u1c36\u1c37\u1cd0-\u1cd2\u1cd4-\u1ce0\u1ce2-\u1ce8\u1ced\u1dc0-\u1de6\u1dfd-\u1dff\u20d0-\u20dc\u20e1\u20e5-\u20f0\u2cef-\u2cf1\u2de0-\u2dff\u302a-\u302f\u3099\u309a\ua66f\ua67c\ua67d\ua6f0\ua6f1\ua802\ua806\ua80b\ua825\ua826\ua8c4\ua8e0-\ua8f1\ua926-\ua92d\ua947-\ua951\ua980-\ua982\ua9b3\ua9b6-\ua9b9\ua9bc\uaa29-\uaa2e\uaa31\uaa32\uaa35\uaa36\uaa43\uaa4c\uaab0\uaab2-\uaab4\uaab7\uaab8\uaabe\uaabf\uaac1\uabe5\uabe8\uabed\ufb1e\ufe00-\ufe0f\ufe20-\ufe26]|[\u0903\u093e-\u0940\u0949-\u094c\u094e\u0982\u0983\u09be-\u09c0\u09c7\u09c8\u09cb\u09cc\u09d7\u0a03\u0a3e-\u0a40\u0a83\u0abe-\u0ac0\u0ac9\u0acb\u0acc\u0b02\u0b03\u0b3e\u0b40\u0b47\u0b48\u0b4b\u0b4c\u0b57\u0bbe\u0bbf\u0bc1\u0bc2\u0bc6-\u0bc8\u0bca-\u0bcc\u0bd7\u0c01
-\u0c03\u0c41-\u0c44\u0c82\u0c83\u0cbe\u0cc0-\u0cc4\u0cc7\u0cc8\u0cca\u0ccb\u0cd5\u0cd6\u0d02\u0d03\u0d3e-\u0d40\u0d46-\u0d48\u0d4a-\u0d4c\u0d57\u0d82\u0d83\u0dcf-\u0dd1\u0dd8-\u0ddf\u0df2\u0df3\u0f3e\u0f3f\u0f7f\u102b\u102c\u1031\u1038\u103b\u103c\u1056\u1057\u1062-\u1064\u1067-\u106d\u1083\u1084\u1087-\u108c\u108f\u109a-\u109c\u17b6\u17be-\u17c5\u17c7\u17c8\u1923-\u1926\u1929-\u192b\u1930\u1931\u1933-\u1938\u19b0-\u19c0\u19c8\u19c9\u1a19-\u1a1b\u1a55\u1a57\u1a61\u1a63\u1a64\u1a6d-\u1a72\u1b04\u1b35\u1b3b\u1b3d-\u1b41\u1b43\u1b44\u1b82\u1ba1\u1ba6\u1ba7\u1baa\u1c24-\u1c2b\u1c34\u1c35\u1ce1\u1cf2\ua823\ua824\ua827\ua880\ua881\ua8b4-\ua8c3\ua952\ua953\ua983\ua9b4\ua9b5\ua9ba\ua9bb\ua9bd-\ua9c0\uaa2f\uaa30\uaa33\uaa34\uaa4d\uaa7b\uabe3\uabe4\uabe6\uabe7\uabe9\uabea\uabec]|[0-9a-zA-Z_$]|[0-9\u0660-\u0669\u06f0-\u06f9\u07c0-\u07c9\u0966-\u096f\u09e6-\u09ef\u0a66-\u0a6f\u0ae6-\u0aef\u0b66-\u0b6f\u0be6-\u0bef\u0c66-\u0c6f\u0ce6-\u0cef\u0d66-\u0d6f\u0e50-\u0e59\u0ed0-\u0ed9\u0f20-\u0f29\u1040-\u1049\u1090-\u1099\u17e0-\u17e9\u1810-\u1819\u1946-\u194f\u19d0-\u19da\u1a80-\u1a89\u1a90-\u1a99\u1b50-\u1b59\u1bb0-\u1bb9\u1c40-\u1c49\u1c50-\u1c59\ua620-\ua629\ua8d0-\ua8d9\ua900-\ua909\ua9d0-\ua9d9\uaa50-\uaa59\uabf0-\uabf9\uff10-\uff19]|[_\u203f\u2040\u2054\ufe33\ufe34\ufe4d-\ufe4f\uff3f])*))|(?P<t_SETPROP>set(?=\\s(?:[a-zA-Z_$]|[A-Za-z\xaa\xb5\xba\xc0-\xd6\xd8-\xf6\xf8-\u02c1\u02c6-\u02d1\u02e0-\u02e4\u02ec\u02ee\u0370-\u0374\u0376\u0377\u037a-\u037d\u0386\u0388-\u038a\u038c\u038e-\u03a1\u03a3-\u03f5\u03f7-\u0481\u048a-\u0523\u0531-\u0556\u0559\u0561-\u0587\u05d0-\u05ea\u05f0-\u05f2\u0621-\u064a\u066e\u066f\u0671-\u06d3\u06d5\u06e5\u06e6\u06ee\u06ef\u06fa-\u06fc\u06ff\u0710\u0712-\u072f\u074d-\u07a5\u07b1\u07ca-\u07ea\u07f4\u07f5\u07fa\u0904-\u0939\u093d\u0950\u0958-\u0961\u0971\u0972\u097b-\u097f\u0985-\u098c\u098f\u0990\u0993-\u09a8\u09aa-\u09b0\u09b2\u09b6-\u09b9\u09bd\u09ce\u09dc\u09dd\u09df-\u09e1\u09f0\u09f1\u0a05-\u0a0a\u0a0f\u0a10\u0a13-\u0a28\u0a2a-\u0a30\u0a32\u0a33\u0
a35\u0a36\u0a38\u0a39\u0a59-\u0a5c\u0a5e\u0a72-\u0a74\u0a85-\u0a8d\u0a8f-\u0a91\u0a93-\u0aa8\u0aaa-\u0ab0\u0ab2\u0ab3\u0ab5-\u0ab9\u0abd\u0ad0\u0ae0\u0ae1\u0b05-\u0b0c\u0b0f\u0b10\u0b13-\u0b28\u0b2a-\u0b30\u0b32\u0b33\u0b35-\u0b39\u0b3d\u0b5c\u0b5d\u0b5f-\u0b61\u0b71\u0b83\u0b85-\u0b8a\u0b8e-\u0b90\u0b92-\u0b95\u0b99\u0b9a\u0b9c\u0b9e\u0b9f\u0ba3\u0ba4\u0ba8-\u0baa\u0bae-\u0bb9\u0bd0\u0c05-\u0c0c\u0c0e-\u0c10\u0c12-\u0c28\u0c2a-\u0c33\u0c35-\u0c39\u0c3d\u0c58\u0c59\u0c60\u0c61\u0c85-\u0c8c\u0c8e-\u0c90\u0c92-\u0ca8\u0caa-\u0cb3\u0cb5-\u0cb9\u0cbd\u0cde\u0ce0\u0ce1\u0d05-\u0d0c\u0d0e-\u0d10\u0d12-\u0d28\u0d2a-\u0d39\u0d3d\u0d60\u0d61\u0d7a-\u0d7f\u0d85-\u0d96\u0d9a-\u0db1\u0db3-\u0dbb\u0dbd\u0dc0-\u0dc6\u0e01-\u0e30\u0e32\u0e33\u0e40-\u0e46\u0e81\u0e82\u0e84\u0e87\u0e88\u0e8a\u0e8d\u0e94-\u0e97\u0e99-\u0e9f\u0ea1-\u0ea3\u0ea5\u0ea7\u0eaa\u0eab\u0ead-\u0eb0\u0eb2\u0eb3\u0ebd\u0ec0-\u0ec4\u0ec6\u0edc\u0edd\u0f00\u0f40-\u0f47\u0f49-\u0f6c\u0f88-\u0f8b\u1000-\u102a\u103f\u1050-\u1055\u105a-\u105d\u1061\u1065\u1066\u106e-\u1070\u1075-\u1081\u108e\u10a0-\u10c5\u10d0-\u10fa\u10fc\u1100-\u1159\u115f-\u11a2\u11a8-\u11f9\u1200-\u1248\u124a-\u124d\u1250-\u1256\u1258\u125a-\u125d\u1260-\u1288\u128a-\u128d\u1290-\u12b0\u12b2-\u12b5\u12b8-\u12be\u12c0\u12c2-\u12c5\u12c8-\u12d6\u12d8-\u1310\u1312-\u1315\u1318-\u135a\u1380-\u138f\u13a0-\u13f4\u1401-\u166c\u166f-\u1676\u1681-\u169a\u16a0-\u16ea\u1700-\u170c\u170e-\u1711\u1720-\u1731\u1740-\u1751\u1760-\u176c\u176e-\u1770\u1780-\u17b3\u17d7\u17dc\u1820-\u1877\u1880-\u18a8\u18aa\u1900-\u191c\u1950-\u196d\u1970-\u1974\u1980-\u19a9\u19c1-\u19c7\u1a00-\u1a16\u1b05-\u1b33\u1b45-\u1b4b\u1b83-\u1ba0\u1bae\u1baf\u1c00-\u1c23\u1c4d-\u1c4f\u1c5a-\u1c7d\u1d00-\u1dbf\u1e00-\u1f15\u1f18-\u1f1d\u1f20-\u1f45\u1f48-\u1f4d\u1f50-\u1f57\u1f59\u1f5b\u1f5d\u1f5f-\u1f7d\u1f80-\u1fb4\u1fb6-\u1fbc\u1fbe\u1fc2-\u1fc4\u1fc6-\u1fcc\u1fd0-\u1fd3\u1fd6-\u1fdb\u1fe0-\u1fec\u1ff2-\u1ff4\u1ff6-\u1ffc\u2071\u207f\u2090-\u2094\u2102\u2107\u210a-\u2113\u2115\u2119-\u2
11d\u2124\u2126\u2128\u212a-\u212d\u212f-\u2139\u213c-\u213f\u2145-\u2149\u214e\u2183\u2184\u2c00-\u2c2e\u2c30-\u2c5e\u2c60-\u2c6f\u2c71-\u2c7d\u2c80-\u2ce4\u2d00-\u2d25\u2d30-\u2d65\u2d6f\u2d80-\u2d96\u2da0-\u2da6\u2da8-\u2dae\u2db0-\u2db6\u2db8-\u2dbe\u2dc0-\u2dc6\u2dc8-\u2dce\u2dd0-\u2dd6\u2dd8-\u2dde\u2e2f\u3005\u3006\u3031-\u3035\u303b\u303c\u3041-\u3096\u309d-\u309f\u30a1-\u30fa\u30fc-\u30ff\u3105-\u312d\u3131-\u318e\u31a0-\u31b7\u31f0-\u31ff\u3400\u4db5\u4e00\u9fc3\ua000-\ua48c\ua500-\ua60c\ua610-\ua61f\ua62a\ua62b\ua640-\ua65f\ua662-\ua66e\ua67f-\ua697\ua717-\ua71f\ua722-\ua788\ua78b\ua78c\ua7fb-\ua801\ua803-\ua805\ua807-\ua80a\ua80c-\ua822\ua840-\ua873\ua882-\ua8b3\ua90a-\ua925\ua930-\ua946\uaa00-\uaa28\uaa40-\uaa42\uaa44-\uaa4b\uac00\ud7a3\uf900-\ufa2d\ufa30-\ufa6a\ufa70-\ufad9\ufb00-\ufb06\ufb13-\ufb17\ufb1d\ufb1f-\ufb28\ufb2a-\ufb36\ufb38-\ufb3c\ufb3e\ufb40\ufb41\ufb43\ufb44\ufb46-\ufbb1\ufbd3-\ufd3d\ufd50-\ufd8f\ufd92-\ufdc7\ufdf0-\ufdfb\ufe70-\ufe74\ufe76-\ufefc\uff21-\uff3a\uff41-\uff5a\uff66-\uffbe\uffc2-\uffc7\uffca-\uffcf\uffd2-\uffd7\uffda-\uffdc])+(?:[\u0300-\u036f\u0483-\u0487\u0591-\u05bd\u05bf\u05c1\u05c2\u05c4\u05c5\u05c7\u0610-\u061a\u064b-\u065e\u0670\u06d6-\u06dc\u06df-\u06e4\u06e7\u06e8\u06ea-\u06ed\u0711\u0730-\u074a\u07a6-\u07b0\u07eb-\u07f3\u0816-\u0819\u081b-\u0823\u0825-\u0827\u0829-\u082d\u0900-\u0902\u093c\u0941-\u0948\u094d\u0951-\u0955\u0962\u0963\u0981\u09bc\u09c1-\u09c4\u09cd\u09e2\u09e3\u0a01\u0a02\u0a3c\u0a41\u0a42\u0a47\u0a48\u0a4b-\u0a4d\u0a51\u0a70\u0a71\u0a75\u0a81\u0a82\u0abc\u0ac1-\u0ac5\u0ac7\u0ac8\u0acd\u0ae2\u0ae3\u0b01\u0b3c\u0b3f\u0b41-\u0b44\u0b4d\u0b56\u0b62\u0b63\u0b82\u0bc0\u0bcd\u0c3e-\u0c40\u0c46-\u0c48\u0c4a-\u0c4d\u0c55\u0c56\u0c62\u0c63\u0cbc\u0cbf\u0cc6\u0ccc\u0ccd\u0ce2\u0ce3\u0d41-\u0d44\u0d4d\u0d62\u0d63\u0dca\u0dd2-\u0dd4\u0dd6\u0e31\u0e34-\u0e3a\u0e47-\u0e4e\u0eb1\u0eb4-\u0eb9\u0ebb\u0ebc\u0ec8-\u0ecd\u0f18\u0f19\u0f35\u0f37\u0f39\u0f71-\u0f7e\u0f80-\u0f84\u0f86\u0f87\u0f90-\u0f97\u0f99-\u0fbc\u0fc6\
u102d-\u1030\u1032-\u1037\u1039\u103a\u103d\u103e\u1058\u1059\u105e-\u1060\u1071-\u1074\u1082\u1085\u1086\u108d\u109d\u135f\u1712-\u1714\u1732-\u1734\u1752\u1753\u1772\u1773\u17b7-\u17bd\u17c6\u17c9-\u17d3\u17dd\u180b-\u180d\u18a9\u1920-\u1922\u1927\u1928\u1932\u1939-\u193b\u1a17\u1a18\u1a56\u1a58-\u1a5e\u1a60\u1a62\u1a65-\u1a6c\u1a73-\u1a7c\u1a7f\u1b00-\u1b03\u1b34\u1b36-\u1b3a\u1b3c\u1b42\u1b6b-\u1b73\u1b80\u1b81\u1ba2-\u1ba5\u1ba8\u1ba9\u1c2c-\u1c33\u1c36\u1c37\u1cd0-\u1cd2\u1cd4-\u1ce0\u1ce2-\u1ce8\u1ced\u1dc0-\u1de6\u1dfd-\u1dff\u20d0-\u20dc\u20e1\u20e5-\u20f0\u2cef-\u2cf1\u2de0-\u2dff\u302a-\u302f\u3099\u309a\ua66f\ua67c\ua67d\ua6f0\ua6f1\ua802\ua806\ua80b\ua825\ua826\ua8c4\ua8e0-\ua8f1\ua926-\ua92d\ua947-\ua951\ua980-\ua982\ua9b3\ua9b6-\ua9b9\ua9bc\uaa29-\uaa2e\uaa31\uaa32\uaa35\uaa36\uaa43\uaa4c\uaab0\uaab2-\uaab4\uaab7\uaab8\uaabe\uaabf\uaac1\uabe5\uabe8\uabed\ufb1e\ufe00-\ufe0f\ufe20-\ufe26]|[\u0903\u093e-\u0940\u0949-\u094c\u094e\u0982\u0983\u09be-\u09c0\u09c7\u09c8\u09cb\u09cc\u09d7\u0a03\u0a3e-\u0a40\u0a83\u0abe-\u0ac0\u0ac9\u0acb\u0acc\u0b02\u0b03\u0b3e\u0b40\u0b47\u0b48\u0b4b\u0b4c\u0b57\u0bbe\u0bbf\u0bc1\u0bc2\u0bc6-\u0bc8\u0bca-\u0bcc\u0bd7\u0c01-\u0c03\u0c41-\u0c44\u0c82\u0c83\u0cbe\u0cc0-\u0cc4\u0cc7\u0cc8\u0cca\u0ccb\u0cd5\u0cd6\u0d02\u0d03\u0d3e-\u0d40\u0d46-\u0d48\u0d4a-\u0d4c\u0d57\u0d82\u0d83\u0dcf-\u0dd1\u0dd8-\u0ddf\u0df2\u0df3\u0f3e\u0f3f\u0f7f\u102b\u102c\u1031\u1038\u103b\u103c\u1056\u1057\u1062-\u1064\u1067-\u106d\u1083\u1084\u1087-\u108c\u108f\u109a-\u109c\u17b6\u17be-\u17c5\u17c7\u17c8\u1923-\u1926\u1929-\u192b\u1930\u1931\u1933-\u1938\u19b0-\u19c0\u19c8\u19c9\u1a19-\u1a1b\u1a55\u1a57\u1a61\u1a63\u1a64\u1a6d-\u1a72\u1b04\u1b35\u1b3b\u1b3d-\u1b41\u1b43\u1b44\u1b82\u1ba1\u1ba6\u1ba7\u1baa\u1c24-\u1c2b\u1c34\u1c35\u1ce1\u1cf2\ua823\ua824\ua827\ua880\ua881\ua8b4-\ua8c3\ua952\ua953\ua983\ua9b4\ua9b5\ua9ba\ua9bb\ua9bd-\ua9c0\uaa2f\uaa30\uaa33\uaa34\uaa4d\uaa7b\uabe3\uabe4\uabe6\uabe7\uabe9\uabea\uabec]|[0-9a-zA-Z_$]|[0-9\u0660-\u0669\u06f0-
\u06f9\u07c0-\u07c9\u0966-\u096f\u09e6-\u09ef\u0a66-\u0a6f\u0ae6-\u0aef\u0b66-\u0b6f\u0be6-\u0bef\u0c66-\u0c6f\u0ce6-\u0cef\u0d66-\u0d6f\u0e50-\u0e59\u0ed0-\u0ed9\u0f20-\u0f29\u1040-\u1049\u1090-\u1099\u17e0-\u17e9\u1810-\u1819\u1946-\u194f\u19d0-\u19da\u1a80-\u1a89\u1a90-\u1a99\u1b50-\u1b59\u1bb0-\u1bb9\u1c40-\u1c49\u1c50-\u1c59\ua620-\ua629\ua8d0-\ua8d9\ua900-\ua909\ua9d0-\ua9d9\uaa50-\uaa59\uabf0-\uabf9\uff10-\uff19]|[_\u203f\u2040\u2054\ufe33\ufe34\ufe4d-\ufe4f\uff3f])*))|(?P<t_ID>(?:[a-zA-Z_$]|[A-Za-z\xaa\xb5\xba\xc0-\xd6\xd8-\xf6\xf8-\u02c1\u02c6-\u02d1\u02e0-\u02e4\u02ec\u02ee\u0370-\u0374\u0376\u0377\u037a-\u037d\u0386\u0388-\u038a\u038c\u038e-\u03a1\u03a3-\u03f5\u03f7-\u0481\u048a-\u0523\u0531-\u0556\u0559\u0561-\u0587\u05d0-\u05ea\u05f0-\u05f2\u0621-\u064a\u066e\u066f\u0671-\u06d3\u06d5\u06e5\u06e6\u06ee\u06ef\u06fa-\u06fc\u06ff\u0710\u0712-\u072f\u074d-\u07a5\u07b1\u07ca-\u07ea\u07f4\u07f5\u07fa\u0904-\u0939\u093d\u0950\u0958-\u0961\u0971\u0972\u097b-\u097f\u0985-\u098c\u098f\u0990\u0993-\u09a8\u09aa-\u09b0\u09b2\u09b6-\u09b9\u09bd\u09ce\u09dc\u09dd\u09df-\u09e1\u09f0\u09f1\u0a05-\u0a0a\u0a0f\u0a10\u0a13-\u0a28\u0a2a-\u0a30\u0a32\u0a33\u0a35\u0a36\u0a38\u0a39\u0a59-\u0a5c\u0a5e\u0a72-\u0a74\u0a85-\u0a8d\u0a8f-\u0a91\u0a93-\u0aa8\u0aaa-\u0ab0\u0ab2\u0ab3\u0ab5-\u0ab9\u0abd\u0ad0\u0ae0\u0ae1\u0b05-\u0b0c\u0b0f\u0b10\u0b13-\u0b28\u0b2a-\u0b30\u0b32\u0b33\u0b35-\u0b39\u0b3d\u0b5c\u0b5d\u0b5f-\u0b61\u0b71\u0b83\u0b85-\u0b8a\u0b8e-\u0b90\u0b92-\u0b95\u0b99\u0b9a\u0b9c\u0b9e\u0b9f\u0ba3\u0ba4\u0ba8-\u0baa\u0bae-\u0bb9\u0bd0\u0c05-\u0c0c\u0c0e-\u0c10\u0c12-\u0c28\u0c2a-\u0c33\u0c35-\u0c39\u0c3d\u0c58\u0c59\u0c60\u0c61\u0c85-\u0c8c\u0c8e-\u0c90\u0c92-\u0ca8\u0caa-\u0cb3\u0cb5-\u0cb9\u0cbd\u0cde\u0ce0\u0ce1\u0d05-\u0d0c\u0d0e-\u0d10\u0d12-\u0d28\u0d2a-\u0d39\u0d3d\u0d60\u0d61\u0d7a-\u0d7f\u0d85-\u0d96\u0d9a-\u0db1\u0db3-\u0dbb\u0dbd\u0dc0-\u0dc6\u0e01-\u0e30\u0e32\u0e33\u0e40-\u0e46\u0e81\u0e82\u0e84\u0e87\u0e88\u0e8a\u0e8d\u0e94-\u0e97\u0e99-\u0e9f\u0ea1-\u0ea3\u0
ea5\u0ea7\u0eaa\u0eab\u0ead-\u0eb0\u0eb2\u0eb3\u0ebd\u0ec0-\u0ec4\u0ec6\u0edc\u0edd\u0f00\u0f40-\u0f47\u0f49-\u0f6c\u0f88-\u0f8b\u1000-\u102a\u103f\u1050-\u1055\u105a-\u105d\u1061\u1065\u1066\u106e-\u1070\u1075-\u1081\u108e\u10a0-\u10c5\u10d0-\u10fa\u10fc\u1100-\u1159\u115f-\u11a2\u11a8-\u11f9\u1200-\u1248\u124a-\u124d\u1250-\u1256\u1258\u125a-\u125d\u1260-\u1288\u128a-\u128d\u1290-\u12b0\u12b2-\u12b5\u12b8-\u12be\u12c0\u12c2-\u12c5\u12c8-\u12d6\u12d8-\u1310\u1312-\u1315\u1318-\u135a\u1380-\u138f\u13a0-\u13f4\u1401-\u166c\u166f-\u1676\u1681-\u169a\u16a0-\u16ea\u1700-\u170c\u170e-\u1711\u1720-\u1731\u1740-\u1751\u1760-\u176c\u176e-\u1770\u1780-\u17b3\u17d7\u17dc\u1820-\u1877\u1880-\u18a8\u18aa\u1900-\u191c\u1950-\u196d\u1970-\u1974\u1980-\u19a9\u19c1-\u19c7\u1a00-\u1a16\u1b05-\u1b33\u1b45-\u1b4b\u1b83-\u1ba0\u1bae\u1baf\u1c00-\u1c23\u1c4d-\u1c4f\u1c5a-\u1c7d\u1d00-\u1dbf\u1e00-\u1f15\u1f18-\u1f1d\u1f20-\u1f45\u1f48-\u1f4d\u1f50-\u1f57\u1f59\u1f5b\u1f5d\u1f5f-\u1f7d\u1f80-\u1fb4\u1fb6-\u1fbc\u1fbe\u1fc2-\u1fc4\u1fc6-\u1fcc\u1fd0-\u1fd3\u1fd6-\u1fdb\u1fe0-\u1fec\u1ff2-\u1ff4\u1ff6-\u1ffc\u2071\u207f\u2090-\u2094\u2102\u2107\u210a-\u2113\u2115\u2119-\u211d\u2124\u2126\u2128\u212a-\u212d\u212f-\u2139\u213c-\u213f\u2145-\u2149\u214e\u2183\u2184\u2c00-\u2c2e\u2c30-\u2c5e\u2c60-\u2c6f\u2c71-\u2c7d\u2c80-\u2ce4\u2d00-\u2d25\u2d30-\u2d65\u2d6f\u2d80-\u2d96\u2da0-\u2da6\u2da8-\u2dae\u2db0-\u2db6\u2db8-\u2dbe\u2dc0-\u2dc6\u2dc8-\u2dce\u2dd0-\u2dd6\u2dd8-\u2dde\u2e2f\u3005\u3006\u3031-\u3035\u303b\u303c\u3041-\u3096\u309d-\u309f\u30a1-\u30fa\u30fc-\u30ff\u3105-\u312d\u3131-\u318e\u31a0-\u31b7\u31f0-\u31ff\u3400\u4db5\u4e00\u9fc3\ua000-\ua48c\ua500-\ua60c\ua610-\ua61f\ua62a\ua62b\ua640-\ua65f\ua662-\ua66e\ua67f-\ua697\ua717-\ua71f\ua722-\ua788\ua78b\ua78c\ua7fb-\ua801\ua803-\ua805\ua807-\ua80a\ua80c-\ua822\ua840-\ua873\ua882-\ua8b3\ua90a-\ua925\ua930-\ua946\uaa00-\uaa28\uaa40-\uaa42\uaa44-\uaa4b\uac00\ud7a3\uf900-\ufa2d\ufa30-\ufa6a\ufa70-\ufad9\ufb00-\ufb06\ufb13-\ufb17\ufb1d\uf
b1f-\ufb28\ufb2a-\ufb36\ufb38-\ufb3c\ufb3e\ufb40\ufb41\ufb43\ufb44\ufb46-\ufbb1\ufbd3-\ufd3d\ufd50-\ufd8f\ufd92-\ufdc7\ufdf0-\ufdfb\ufe70-\ufe74\ufe76-\ufefc\uff21-\uff3a\uff41-\uff5a\uff66-\uffbe\uffc2-\uffc7\uffca-\uffcf\uffd2-\uffd7\uffda-\uffdc])+(?:[\u0300-\u036f\u0483-\u0487\u0591-\u05bd\u05bf\u05c1\u05c2\u05c4\u05c5\u05c7\u0610-\u061a\u064b-\u065e\u0670\u06d6-\u06dc\u06df-\u06e4\u06e7\u06e8\u06ea-\u06ed\u0711\u0730-\u074a\u07a6-\u07b0\u07eb-\u07f3\u0816-\u0819\u081b-\u0823\u0825-\u0827\u0829-\u082d\u0900-\u0902\u093c\u0941-\u0948\u094d\u0951-\u0955\u0962\u0963\u0981\u09bc\u09c1-\u09c4\u09cd\u09e2\u09e3\u0a01\u0a02\u0a3c\u0a41\u0a42\u0a47\u0a48\u0a4b-\u0a4d\u0a51\u0a70\u0a71\u0a75\u0a81\u0a82\u0abc\u0ac1-\u0ac5\u0ac7\u0ac8\u0acd\u0ae2\u0ae3\u0b01\u0b3c\u0b3f\u0b41-\u0b44\u0b4d\u0b56\u0b62\u0b63\u0b82\u0bc0\u0bcd\u0c3e-\u0c40\u0c46-\u0c48\u0c4a-\u0c4d\u0c55\u0c56\u0c62\u0c63\u0cbc\u0cbf\u0cc6\u0ccc\u0ccd\u0ce2\u0ce3\u0d41-\u0d44\u0d4d\u0d62\u0d63\u0dca\u0dd2-\u0dd4\u0dd6\u0e31\u0e34-\u0e3a\u0e47-\u0e4e\u0eb1\u0eb4-\u0eb9\u0ebb\u0ebc\u0ec8-\u0ecd\u0f18\u0f19\u0f35\u0f37\u0f39\u0f71-\u0f7e\u0f80-\u0f84\u0f86\u0f87\u0f90-\u0f97\u0f99-\u0fbc\u0fc6\u102d-\u1030\u1032-\u1037\u1039\u103a\u103d\u103e\u1058\u1059\u105e-\u1060\u1071-\u1074\u1082\u1085\u1086\u108d\u109d\u135f\u1712-\u1714\u1732-\u1734\u1752\u1753\u1772\u1773\u17b7-\u17bd\u17c6\u17c9-\u17d3\u17dd\u180b-\u180d\u18a9\u1920-\u1922\u1927\u1928\u1932\u1939-\u193b\u1a17\u1a18\u1a56\u1a58-\u1a5e\u1a60\u1a62\u1a65-\u1a6c\u1a73-\u1a7c\u1a7f\u1b00-\u1b03\u1b34\u1b36-\u1b3a\u1b3c\u1b42\u1b6b-\u1b73\u1b80\u1b81\u1ba2-\u1ba5\u1ba8\u1ba9\u1c2c-\u1c33\u1c36\u1c37\u1cd0-\u1cd2\u1cd4-\u1ce0\u1ce2-\u1ce8\u1ced\u1dc0-\u1de6\u1dfd-\u1dff\u20d0-\u20dc\u20e1\u20e5-\u20f0\u2cef-\u2cf1\u2de0-\u2dff\u302a-\u302f\u3099\u309a\ua66f\ua67c\ua67d\ua6f0\ua6f1\ua802\ua806\ua80b\ua825\ua826\ua8c4\ua8e0-\ua8f1\ua926-\ua92d\ua947-\ua951\ua980-\ua982\ua9b3\ua9b6-\ua9b9\ua9bc\uaa29-\uaa2e\uaa31\uaa32\uaa35\uaa36\uaa43\uaa4c\uaab0\uaab2-\uaab4
\uaab7\uaab8\uaabe\uaabf\uaac1\uabe5\uabe8\uabed\ufb1e\ufe00-\ufe0f\ufe20-\ufe26]|[\u0903\u093e-\u0940\u0949-\u094c\u094e\u0982\u0983\u09be-\u09c0\u09c7\u09c8\u09cb\u09cc\u09d7\u0a03\u0a3e-\u0a40\u0a83\u0abe-\u0ac0\u0ac9\u0acb\u0acc\u0b02\u0b03\u0b3e\u0b40\u0b47\u0b48\u0b4b\u0b4c\u0b57\u0bbe\u0bbf\u0bc1\u0bc2\u0bc6-\u0bc8\u0bca-\u0bcc\u0bd7\u0c01-\u0c03\u0c41-\u0c44\u0c82\u0c83\u0cbe\u0cc0-\u0cc4\u0cc7\u0cc8\u0cca\u0ccb\u0cd5\u0cd6\u0d02\u0d03\u0d3e-\u0d40\u0d46-\u0d48\u0d4a-\u0d4c\u0d57\u0d82\u0d83\u0dcf-\u0dd1\u0dd8-\u0ddf\u0df2\u0df3\u0f3e\u0f3f\u0f7f\u102b\u102c\u1031\u1038\u103b\u103c\u1056\u1057\u1062-\u1064\u1067-\u106d\u1083\u1084\u1087-\u108c\u108f\u109a-\u109c\u17b6\u17be-\u17c5\u17c7\u17c8\u1923-\u1926\u1929-\u192b\u1930\u1931\u1933-\u1938\u19b0-\u19c0\u19c8\u19c9\u1a19-\u1a1b\u1a55\u1a57\u1a61\u1a63\u1a64\u1a6d-\u1a72\u1b04\u1b35\u1b3b\u1b3d-\u1b41\u1b43\u1b44\u1b82\u1ba1\u1ba6\u1ba7\u1baa\u1c24-\u1c2b\u1c34\u1c35\u1ce1\u1cf2\ua823\ua824\ua827\ua880\ua881\ua8b4-\ua8c3\ua952\ua953\ua983\ua9b4\ua9b5\ua9ba\ua9bb\ua9bd-\ua9c0\uaa2f\uaa30\uaa33\uaa34\uaa4d\uaa7b\uabe3\uabe4\uabe6\uabe7\uabe9\uabea\uabec]|[0-9a-zA-Z_$]|[0-9\u0660-\u0669\u06f0-\u06f9\u07c0-\u07c9\u0966-\u096f\u09e6-\u09ef\u0a66-\u0a6f\u0ae6-\u0aef\u0b66-\u0b6f\u0be6-\u0bef\u0c66-\u0c6f\u0ce6-\u0cef\u0d66-\u0d6f\u0e50-\u0e59\u0ed0-\u0ed9\u0f20-\u0f29\u1040-\u1049\u1090-\u1099\u17e0-\u17e9\u1810-\u1819\u1946-\u194f\u19d0-\u19da\u1a80-\u1a89\u1a90-\u1a99\u1b50-\u1b59\u1bb0-\u1bb9\u1c40-\u1c49\u1c50-\u1c59\ua620-\ua629\ua8d0-\ua8d9\ua900-\ua909\ua9d0-\ua9d9\uaa50-\uaa59\uabf0-\uabf9\uff10-\uff19]|[_\u203f\u2040\u2054\ufe33\ufe34\ufe4d-\ufe4f\uff3f])*)|(?P<t_NUMBER>\n    (?:\n        0[xX][0-9a-fA-F]+              # hex_integer_literal\n     |  0[0-7]+                        # or octal_integer_literal (spec B.1.1)\n     |  (?:                            # or decimal_literal\n            (?:0|[1-9][0-9]*)          # decimal_integer_literal\n            \\.                         
# dot\n            [0-9]*                     # decimal_digits_opt\n            (?:[eE][+-]?[0-9]+)?       # exponent_part_opt\n         |\n            \\.                         # dot\n            [0-9]+                     # decimal_digits\n            (?:[eE][+-]?[0-9]+)?       # exponent_part_opt\n         |\n            (?:0|[1-9][0-9]*)          # decimal_integer_literal\n            (?:[eE][+-]?[0-9]+)?       # exponent_part_opt\n         )\n    )\n    )|(?P<t_BLOCK_COMMENT>/\\*[^*]*\\*+([^/*][^*]*\\*+)*/)|(?P<t_LINE_COMMENT>//[^\\r\\n]*)|(?P<t_LINE_TERMINATOR>[\\n\\r]+)|(?P<t_PLUSPLUS>\\+\\+)|(?P<t_OR>\\|\\|)|(?P<t_URSHIFTEQUAL>>>>=)|(?P<t_XOREQUAL>\\^=)|(?P<t_OREQUAL>\\|=)|(?P<t_LSHIFTEQUAL><<=)|(?P<t_STRNEQ>!==)|(?P<t_RSHIFTEQUAL>>>=)|(?P<t_URSHIFT>>>>)|(?P<t_PLUSEQUAL>\\+=)|(?P<t_MULTEQUAL>\\*=)|(?P<t_STREQ>===)|(?P<t_PERIOD>\\.)|(?P<t_PLUS>\\+)|(?P<t_MODEQUAL>%=)|(?P<t_DIVEQUAL>/=)|(?P<t_RBRACKET>\\])|(?P<t_CONDOP>\\?)|(?P<t_BOR>\\|)|(?P<t_LSHIFT><<)|(?P<t_LE><=)|(?P<t_BXOR>\\^)|(?P<t_LPAREN>\\()|(?P<t_MULT>\\*)|(?P<t_NE>!=)|(?P<t_MINUSMINUS>--)|(?P<t_AND>&&)|(?P<t_LBRACKET>\\[)|(?P<t_GE>>=)|(?P<t_RPAREN>\\))|(?P<t_RSHIFT>>>)|(?P<t_ANDEQUAL>&=)|(?P<t_MINUSEQUAL>-=)|(?P<t_EQEQ>==)|(?P<t_LBRACE>{)|(?P<t_LT><)|(?P<t_COMMA>,)|(?P<t_EQ>=)|(?P<t_BNOT>~)|(?P<t_RBRACE>})|(?P<t_DIV>/)|(?P<t_MOD>%)|(?P<t_SEMI>;)|(?P<t_MINUS>-)|(?P<t_GT>>)|(?P<t_COLON>:)|(?P<t_BAND>&)|(?P<t_NOT>!)', [None, (u't_STRING', 'STRING'), (u't_GETPROP', 'GETPROP'), (u't_SETPROP', 'SETPROP'), (u't_ID', 'ID'), (None, 'NUMBER'), (None, 'BLOCK_COMMENT'), None, (None, 'LINE_COMMENT'), (None, 'LINE_TERMINATOR'), (None, 'PLUSPLUS'), (None, 'OR'), (None, 'URSHIFTEQUAL'), (None, 'XOREQUAL'), (None, 'OREQUAL'), (None, 'LSHIFTEQUAL'), (None, 'STRNEQ'), (None, 'RSHIFTEQUAL'), (None, 'URSHIFT'), (None, 'PLUSEQUAL'), (None, 'MULTEQUAL'), (None, 'STREQ'), (None, 'PERIOD'), (None, 'PLUS'), (None, 'MODEQUAL'), (None, 'DIVEQUAL'), (None, 'RBRACKET'), (None, 'CONDOP'), (None, 'BOR'), (None, 'LSHIFT'), 
(None, 'LE'), (None, 'BXOR'), (None, 'LPAREN'), (None, 'MULT'), (None, 'NE'), (None, 'MINUSMINUS'), (None, 'AND'), (None, 'LBRACKET'), (None, 'GE'), (None, 'RPAREN'), (None, 'RSHIFT'), (None, 'ANDEQUAL'), (None, 'MINUSEQUAL'), (None, 'EQEQ'), (None, 'LBRACE'), (None, 'LT'), (None, 'COMMA'), (None, 'EQ'), (None, 'BNOT'), (None, 'RBRACE'), (None, 'DIV'), (None, 'MOD'), (None, 'SEMI'), (None, 'MINUS'), (None, 'GT'), (None, 'COLON'), (None, 'BAND'), (None, 'NOT')])]}
+_lexstateignore = {'regex': ' \t', 'INITIAL': ' \t'}
+_lexstateerrorf = {'regex': 't_regex_error', 'INITIAL': 't_error'}
diff --git a/tools/slimit/mangler.py b/tools/slimit/mangler.py
new file mode 100644 (file)
index 0000000..a8cdb9e
--- /dev/null
@@ -0,0 +1,54 @@
+###############################################################################
+#
+# Copyright (c) 2011 Ruslan Spivak
+#
+# Permission is hereby granted, free of charge, to any person obtaining a copy
+# of this software and associated documentation files (the "Software"), to deal
+# in the Software without restriction, including without limitation the rights
+# to use, copy, modify, merge, publish, distribute, sublicense, and/or sell
+# copies of the Software, and to permit persons to whom the Software is
+# furnished to do so, subject to the following conditions:
+#
+# The above copyright notice and this permission notice shall be included in
+# all copies or substantial portions of the Software.
+#
+# THE SOFTWARE IS PROVIDED "AS IS", WITHOUT WARRANTY OF ANY KIND, EXPRESS OR
+# IMPLIED, INCLUDING BUT NOT LIMITED TO THE WARRANTIES OF MERCHANTABILITY,
+# FITNESS FOR A PARTICULAR PURPOSE AND NONINFRINGEMENT. IN NO EVENT SHALL THE
+# AUTHORS OR COPYRIGHT HOLDERS BE LIABLE FOR ANY CLAIM, DAMAGES OR OTHER
+# LIABILITY, WHETHER IN AN ACTION OF CONTRACT, TORT OR OTHERWISE, ARISING FROM,
+# OUT OF OR IN CONNECTION WITH THE SOFTWARE OR THE USE OR OTHER DEALINGS IN
+# THE SOFTWARE.
+#
+###############################################################################
+
+__author__ = 'Ruslan Spivak <ruslan.spivak@gmail.com>'
+
+from slimit.scope import SymbolTable
+from slimit.visitors.scopevisitor import (
+    ScopeTreeVisitor,
+    fill_scope_references,
+    mangle_scope_tree,
+    NameManglerVisitor,
+    )
+
+
+def mangle(tree, toplevel=False):
+    """Mangle (shorten) identifier names in *tree*.
+
+    Args:
+        tree: AST produced by slimit's parser; presumably rewritten in
+            place by the visitors below -- nothing is returned.
+        toplevel: defaults to False. Defines if global
+            scope should be mangled or not.
+    """
+    # Build the symbol/scope tree first; the mangling passes depend on it.
+    sym_table = SymbolTable()
+    visitor = ScopeTreeVisitor(sym_table)
+    visitor.visit(tree)
+
+    fill_scope_references(tree)
+    mangle_scope_tree(sym_table.globals, toplevel)
+
+    mangler = NameManglerVisitor()
+    mangler.visit(tree)
diff --git a/tools/slimit/minifier.py b/tools/slimit/minifier.py
new file mode 100644 (file)
index 0000000..a046eff
--- /dev/null
@@ -0,0 +1,81 @@
+###############################################################################
+#
+# Copyright (c) 2011 Ruslan Spivak
+#
+# Permission is hereby granted, free of charge, to any person obtaining a copy
+# of this software and associated documentation files (the "Software"), to deal
+# in the Software without restriction, including without limitation the rights
+# to use, copy, modify, merge, publish, distribute, sublicense, and/or sell
+# copies of the Software, and to permit persons to whom the Software is
+# furnished to do so, subject to the following conditions:
+#
+# The above copyright notice and this permission notice shall be included in
+# all copies or substantial portions of the Software.
+#
+# THE SOFTWARE IS PROVIDED "AS IS", WITHOUT WARRANTY OF ANY KIND, EXPRESS OR
+# IMPLIED, INCLUDING BUT NOT LIMITED TO THE WARRANTIES OF MERCHANTABILITY,
+# FITNESS FOR A PARTICULAR PURPOSE AND NONINFRINGEMENT. IN NO EVENT SHALL THE
+# AUTHORS OR COPYRIGHT HOLDERS BE LIABLE FOR ANY CLAIM, DAMAGES OR OTHER
+# LIABILITY, WHETHER IN AN ACTION OF CONTRACT, TORT OR OTHERWISE, ARISING FROM,
+# OUT OF OR IN CONNECTION WITH THE SOFTWARE OR THE USE OR OTHER DEALINGS IN
+# THE SOFTWARE.
+#
+###############################################################################
+
+__author__ = 'Ruslan Spivak <ruslan.spivak@gmail.com>'
+
+import sys
+import optparse
+import textwrap
+
+from slimit import mangler
+from slimit.parser import Parser
+from slimit.visitors.minvisitor import ECMAMinifier
+
+
+def minify(text, mangle=False, mangle_toplevel=False):
+    """Return the minified form of the JavaScript source *text*.
+
+    Args:
+        text: JavaScript source code to minify.
+        mangle: if True, mangle (shorten) names before minifying.
+        mangle_toplevel: if True, names in the global scope are mangled too.
+    """
+    parser = Parser()
+    tree = parser.parse(text)
+    if mangle:
+        mangler.mangle(tree, toplevel=mangle_toplevel)
+    minified = ECMAMinifier().visit(tree)
+    return minified
+
+
+def main(argv=None, inp=sys.stdin, out=sys.stdout):
+    """Command-line entry point: minify a file (or *inp*) onto *out*."""
+    usage = textwrap.dedent("""\
+    %prog [options] [input file]
+
+    If no input file is provided STDIN is used by default.
+    Minified JavaScript code is printed to STDOUT.
+    """)
+    parser = optparse.OptionParser(usage=usage)
+    parser.add_option('-m', '--mangle', action='store_true',
+                      dest='mangle', default=False, help='mangle names')
+    parser.add_option('-t', '--mangle-toplevel', action='store_true',
+                      dest='mangle_toplevel', default=False,
+                      help='mangle top level scope (defaults to False)')
+
+    if argv is None:
+        argv = sys.argv[1:]
+    options, args = parser.parse_args(argv)
+
+    if len(args) == 1:
+        # Use a context manager so the input file is always closed
+        # (the original version leaked the open file handle).
+        with open(args[0]) as source:
+            text = source.read()
+    else:
+        text = inp.read()
+
+    minified = minify(
+        text, mangle=options.mangle, mangle_toplevel=options.mangle_toplevel)
+    out.write(minified)
diff --git a/tools/slimit/parser.py b/tools/slimit/parser.py
new file mode 100644 (file)
index 0000000..d892166
--- /dev/null
@@ -0,0 +1,1223 @@
+###############################################################################
+#
+# Copyright (c) 2011 Ruslan Spivak
+#
+# Permission is hereby granted, free of charge, to any person obtaining a copy
+# of this software and associated documentation files (the "Software"), to deal
+# in the Software without restriction, including without limitation the rights
+# to use, copy, modify, merge, publish, distribute, sublicense, and/or sell
+# copies of the Software, and to permit persons to whom the Software is
+# furnished to do so, subject to the following conditions:
+#
+# The above copyright notice and this permission notice shall be included in
+# all copies or substantial portions of the Software.
+#
+# THE SOFTWARE IS PROVIDED "AS IS", WITHOUT WARRANTY OF ANY KIND, EXPRESS OR
+# IMPLIED, INCLUDING BUT NOT LIMITED TO THE WARRANTIES OF MERCHANTABILITY,
+# FITNESS FOR A PARTICULAR PURPOSE AND NONINFRINGEMENT. IN NO EVENT SHALL THE
+# AUTHORS OR COPYRIGHT HOLDERS BE LIABLE FOR ANY CLAIM, DAMAGES OR OTHER
+# LIABILITY, WHETHER IN AN ACTION OF CONTRACT, TORT OR OTHERWISE, ARISING FROM,
+# OUT OF OR IN CONNECTION WITH THE SOFTWARE OR THE USE OR OTHER DEALINGS IN
+# THE SOFTWARE.
+#
+###############################################################################
+
+__author__ = 'Ruslan Spivak <ruslan.spivak@gmail.com>'
+
+import ply.yacc
+
+from slimit import ast
+from slimit.lexer import Lexer
+
+try:
+    from slimit import lextab, yacctab
+except ImportError:
+    lextab, yacctab = 'lextab', 'yacctab'
+
+
+class Parser(object):
+    """JavaScript parser(ECMA-262 5th edition grammar).
+
+    The '*noin' variants are needed to avoid confusing the `in` operator in
+    a relational expression with the `in` operator in a `for` statement.
+
+    '*nobf' stands for 'no brace or function'
+    """
+
+    def __init__(self, lex_optimize=True, lextab=lextab,
+                 yacc_optimize=True, yacctab=yacctab, yacc_debug=False):
+        self.lex_optimize = lex_optimize
+        self.lextab = lextab
+        self.yacc_optimize = yacc_optimize
+        self.yacctab = yacctab
+        self.yacc_debug = yacc_debug
+
+        self.lexer = Lexer()
+        self.lexer.build(optimize=lex_optimize, lextab=lextab)
+        self.tokens = self.lexer.tokens
+
+        self.parser = ply.yacc.yacc(
+            module=self, optimize=yacc_optimize,
+            debug=yacc_debug, tabmodule=yacctab, start='program')
+
+        # https://github.com/rspivak/slimit/issues/29
+        # lexer.auto_semi can cause a loop in a parser
+        # when a parser error happens on a token right after
+        # a newline.
+        # We keep record of the tokens that caused p_error
+        # and if the token has already been seen - we raise
+        # a SyntaxError exception to avoid looping over and
+        # over again.
+        self._error_tokens = {}
+
+    def _has_been_seen_before(self, token):
+        if token is None:
+            return False
+        key = token.type, token.value, token.lineno, token.lexpos
+        return key in self._error_tokens
+
+    def _mark_as_seen(self, token):
+        if token is None:
+            return
+        key = token.type, token.value, token.lineno, token.lexpos
+        self._error_tokens[key] = True
+
+    def _raise_syntax_error(self, token):
+        raise SyntaxError(
+            'Unexpected token (%s, %r) at %s:%s between %s and %s' % (
+                token.type, token.value, token.lineno, token.lexpos,
+                self.lexer.prev_token, self.lexer.token())
+            )
+
+    def parse(self, text, debug=False):
+        return self.parser.parse(text, lexer=self.lexer, debug=debug)
+
+    def p_empty(self, p):
+        """empty :"""
+        pass
+
+    def p_auto_semi(self, p):
+        """auto_semi : error"""
+        pass
+
+    def p_error(self, token):
+        # https://github.com/rspivak/slimit/issues/29
+        if self._has_been_seen_before(token):
+            self._raise_syntax_error(token)
+
+        if token is None or token.type != 'SEMI':
+            next_token = self.lexer.auto_semi(token)
+            if next_token is not None:
+                # https://github.com/rspivak/slimit/issues/29
+                self._mark_as_seen(token)
+                self.parser.errok()
+                return next_token
+
+        self._raise_syntax_error(token)
+
+    # Comment rules
+    # def p_single_line_comment(self, p):
+    #     """single_line_comment : LINE_COMMENT"""
+    #     pass
+
+    # def p_multi_line_comment(self, p):
+    #     """multi_line_comment : BLOCK_COMMENT"""
+    #     pass
+
+    # Main rules
+
+    def p_program(self, p):
+        """program : source_elements"""
+        p[0] = ast.Program(p[1])
+
+    def p_source_elements(self, p):
+        """source_elements : empty
+                           | source_element_list
+        """
+        p[0] = p[1]
+
+    def p_source_element_list(self, p):
+        """source_element_list : source_element
+                               | source_element_list source_element
+        """
+        if len(p) == 2: # single source element
+            p[0] = [p[1]]
+        else:
+            p[1].append(p[2])
+            p[0] = p[1]
+
+    def p_source_element(self, p):
+        """source_element : statement
+                          | function_declaration
+        """
+        p[0] = p[1]
+
+    def p_statement(self, p):
+        """statement : block
+                     | variable_statement
+                     | empty_statement
+                     | expr_statement
+                     | if_statement
+                     | iteration_statement
+                     | continue_statement
+                     | break_statement
+                     | return_statement
+                     | with_statement
+                     | switch_statement
+                     | labelled_statement
+                     | throw_statement
+                     | try_statement
+                     | debugger_statement
+                     | function_declaration
+        """
+        p[0] = p[1]
+
+    # By having source_elements in the production we support
+    # also function_declaration inside blocks
+    def p_block(self, p):
+        """block : LBRACE source_elements RBRACE"""
+        p[0] = ast.Block(p[2])
+
+    def p_literal(self, p):
+        """literal : null_literal
+                   | boolean_literal
+                   | numeric_literal
+                   | string_literal
+                   | regex_literal
+        """
+        p[0] = p[1]
+
+    def p_boolean_literal(self, p):
+        """boolean_literal : TRUE
+                           | FALSE
+        """
+        p[0] = ast.Boolean(p[1])
+
+    def p_null_literal(self, p):
+        """null_literal : NULL"""
+        p[0] = ast.Null(p[1])
+
+    def p_numeric_literal(self, p):
+        """numeric_literal : NUMBER"""
+        p[0] = ast.Number(p[1])
+
+    def p_string_literal(self, p):
+        """string_literal : STRING"""
+        p[0] = ast.String(p[1])
+
+    def p_regex_literal(self, p):
+        """regex_literal : REGEX"""
+        p[0] = ast.Regex(p[1])
+
+    def p_identifier(self, p):
+        """identifier : ID"""
+        p[0] = ast.Identifier(p[1])
+
+    ###########################################
+    # Expressions
+    ###########################################
+    def p_primary_expr(self, p):
+        """primary_expr : primary_expr_no_brace
+                        | object_literal
+        """
+        p[0] = p[1]
+
+    def p_primary_expr_no_brace_1(self, p):
+        """primary_expr_no_brace : identifier"""
+        p[1]._mangle_candidate = True
+        p[1]._in_expression = True
+        p[0] = p[1]
+
+    def p_primary_expr_no_brace_2(self, p):
+        """primary_expr_no_brace : THIS"""
+        p[0] = ast.This()
+
+    def p_primary_expr_no_brace_3(self, p):
+        """primary_expr_no_brace : literal
+                                 | array_literal
+        """
+        p[0] = p[1]
+
+    def p_primary_expr_no_brace_4(self, p):
+        """primary_expr_no_brace : LPAREN expr RPAREN"""
+        p[2]._parens = True
+        p[0] = p[2]
+
+    def p_array_literal_1(self, p):
+        """array_literal : LBRACKET elision_opt RBRACKET"""
+        p[0] = ast.Array(items=p[2])
+
+    def p_array_literal_2(self, p):
+        """array_literal : LBRACKET element_list RBRACKET
+                         | LBRACKET element_list COMMA elision_opt RBRACKET
+        """
+        items = p[2]
+        if len(p) == 6:
+            items.extend(p[4])
+        p[0] = ast.Array(items=items)
+
+
+    def p_element_list(self, p):
+        """element_list : elision_opt assignment_expr
+                        | element_list COMMA elision_opt assignment_expr
+        """
+        if len(p) == 3:
+            p[0] = p[1] + [p[2]]
+        else:
+            p[1].extend(p[3])
+            p[1].append(p[4])
+            p[0] = p[1]
+
+    def p_elision_opt_1(self, p):
+        """elision_opt : empty"""
+        p[0] = []
+
+    def p_elision_opt_2(self, p):
+        """elision_opt : elision"""
+        p[0] = p[1]
+
+    def p_elision(self, p):
+        """elision : COMMA
+                   | elision COMMA
+        """
+        if len(p) == 2:
+            p[0] = [ast.Elision(p[1])]
+        else:
+            p[1].append(ast.Elision(p[2]))
+            p[0] = p[1]
+
+    def p_object_literal(self, p):
+        """object_literal : LBRACE RBRACE
+                          | LBRACE property_list RBRACE
+                          | LBRACE property_list COMMA RBRACE
+        """
+        if len(p) == 3:
+            p[0] = ast.Object()
+        else:
+            p[0] = ast.Object(properties=p[2])
+
+    def p_property_list(self, p):
+        """property_list : property_assignment
+                         | property_list COMMA property_assignment
+        """
+        if len(p) == 2:
+            p[0] = [p[1]]
+        else:
+            p[1].append(p[3])
+            p[0] = p[1]
+
+    # XXX: GET / SET
+    def p_property_assignment(self, p):
+        """property_assignment \
+             : property_name COLON assignment_expr
+             | GETPROP property_name LPAREN RPAREN LBRACE function_body RBRACE
+             | SETPROP property_name LPAREN formal_parameter_list RPAREN \
+                   LBRACE function_body RBRACE
+        """
+        if len(p) == 4:
+            p[0] = ast.Assign(left=p[1], op=p[2], right=p[3])
+        elif len(p) == 8:
+            p[0] = ast.GetPropAssign(prop_name=p[2], elements=p[6])
+        else:
+            p[0] = ast.SetPropAssign(
+                prop_name=p[2], parameters=p[4], elements=p[7])
+
+    def p_property_name(self, p):
+        """property_name : identifier
+                         | string_literal
+                         | numeric_literal
+        """
+        p[0] = p[1]
+
+    # 11.2 Left-Hand-Side Expressions
+    def p_member_expr(self, p):
+        """member_expr : primary_expr
+                       | function_expr
+                       | member_expr LBRACKET expr RBRACKET
+                       | member_expr PERIOD identifier
+                       | NEW member_expr arguments
+        """
+        if len(p) == 2:
+            p[0] = p[1]
+        elif p[1] == 'new':
+            p[0] = ast.NewExpr(p[2], p[3])
+        elif p[2] == '.':
+            p[0] = ast.DotAccessor(p[1], p[3])
+        else:
+            p[0] = ast.BracketAccessor(p[1], p[3])
+
+    def p_member_expr_nobf(self, p):
+        """member_expr_nobf : primary_expr_no_brace
+                            | function_expr
+                            | member_expr_nobf LBRACKET expr RBRACKET
+                            | member_expr_nobf PERIOD identifier
+                            | NEW member_expr arguments
+        """
+        if len(p) == 2:
+            p[0] = p[1]
+        elif p[1] == 'new':
+            p[0] = ast.NewExpr(p[2], p[3])
+        elif p[2] == '.':
+            p[0] = ast.DotAccessor(p[1], p[3])
+        else:
+            p[0] = ast.BracketAccessor(p[1], p[3])
+
+    def p_new_expr(self, p):
+        """new_expr : member_expr
+                    | NEW new_expr
+        """
+        if len(p) == 2:
+            p[0] = p[1]
+        else:
+            p[0] = ast.NewExpr(p[2])
+
+    def p_new_expr_nobf(self, p):
+        """new_expr_nobf : member_expr_nobf
+                         | NEW new_expr
+        """
+        if len(p) == 2:
+            p[0] = p[1]
+        else:
+            p[0] = ast.NewExpr(p[2])
+
+    def p_call_expr(self, p):
+        """call_expr : member_expr arguments
+                     | call_expr arguments
+                     | call_expr LBRACKET expr RBRACKET
+                     | call_expr PERIOD identifier
+        """
+        if len(p) == 3:
+            p[0] = ast.FunctionCall(p[1], p[2])
+        elif len(p) == 4:
+            p[0] = ast.DotAccessor(p[1], p[3])
+        else:
+            p[0] = ast.BracketAccessor(p[1], p[3])
+
+    def p_call_expr_nobf(self, p):
+        """call_expr_nobf : member_expr_nobf arguments
+                          | call_expr_nobf arguments
+                          | call_expr_nobf LBRACKET expr RBRACKET
+                          | call_expr_nobf PERIOD identifier
+        """
+        if len(p) == 3:
+            p[0] = ast.FunctionCall(p[1], p[2])
+        elif len(p) == 4:
+            p[0] = ast.DotAccessor(p[1], p[3])
+        else:
+            p[0] = ast.BracketAccessor(p[1], p[3])
+
+    def p_arguments(self, p):
+        """arguments : LPAREN RPAREN
+                     | LPAREN argument_list RPAREN
+        """
+        if len(p) == 4:
+            p[0] = p[2]
+
+    def p_argument_list(self, p):
+        """argument_list : assignment_expr
+                         | argument_list COMMA assignment_expr
+        """
+        if len(p) == 2:
+            p[0] = [p[1]]
+        else:
+            p[1].append(p[3])
+            p[0] = p[1]
+
+    def p_lef_hand_side_expr(self, p):
+        """left_hand_side_expr : new_expr
+                               | call_expr
+        """
+        p[0] = p[1]
+
+    def p_lef_hand_side_expr_nobf(self, p):
+        """left_hand_side_expr_nobf : new_expr_nobf
+                                    | call_expr_nobf
+        """
+        p[0] = p[1]
+
+    # 11.3 Postfix Expressions
+    def p_postfix_expr(self, p):
+        """postfix_expr : left_hand_side_expr
+                        | left_hand_side_expr PLUSPLUS
+                        | left_hand_side_expr MINUSMINUS
+        """
+        if len(p) == 2:
+            p[0] = p[1]
+        else:
+            p[0] = ast.UnaryOp(op=p[2], value=p[1], postfix=True)
+
+    def p_postfix_expr_nobf(self, p):
+        """postfix_expr_nobf : left_hand_side_expr_nobf
+                             | left_hand_side_expr_nobf PLUSPLUS
+                             | left_hand_side_expr_nobf MINUSMINUS
+        """
+        if len(p) == 2:
+            p[0] = p[1]
+        else:
+            p[0] = ast.UnaryOp(op=p[2], value=p[1], postfix=True)
+
+    # 11.4 Unary Operators
+    def p_unary_expr(self, p):
+        """unary_expr : postfix_expr
+                      | unary_expr_common
+        """
+        p[0] = p[1]
+
+    def p_unary_expr_nobf(self, p):
+        """unary_expr_nobf : postfix_expr_nobf
+                           | unary_expr_common
+        """
+        p[0] = p[1]
+
+    def p_unary_expr_common(self, p):
+        """unary_expr_common : DELETE unary_expr
+                             | VOID unary_expr
+                             | TYPEOF unary_expr
+                             | PLUSPLUS unary_expr
+                             | MINUSMINUS unary_expr
+                             | PLUS unary_expr
+                             | MINUS unary_expr
+                             | BNOT unary_expr
+                             | NOT unary_expr
+        """
+        p[0] = ast.UnaryOp(p[1], p[2])
+
+    # 11.5 Multiplicative Operators
+    def p_multiplicative_expr(self, p):
+        """multiplicative_expr : unary_expr
+                               | multiplicative_expr MULT unary_expr
+                               | multiplicative_expr DIV unary_expr
+                               | multiplicative_expr MOD unary_expr
+        """
+        if len(p) == 2:
+            p[0] = p[1]
+        else:
+            p[0] = ast.BinOp(op=p[2], left=p[1], right=p[3])
+
+    def p_multiplicative_expr_nobf(self, p):
+        """multiplicative_expr_nobf : unary_expr_nobf
+                                    | multiplicative_expr_nobf MULT unary_expr
+                                    | multiplicative_expr_nobf DIV unary_expr
+                                    | multiplicative_expr_nobf MOD unary_expr
+        """
+        if len(p) == 2:
+            p[0] = p[1]
+        else:
+            p[0] = ast.BinOp(op=p[2], left=p[1], right=p[3])
+
+    # 11.6 Additive Operators
+    def p_additive_expr(self, p):
+        """additive_expr : multiplicative_expr
+                         | additive_expr PLUS multiplicative_expr
+                         | additive_expr MINUS multiplicative_expr
+        """
+        if len(p) == 2:
+            p[0] = p[1]
+        else:
+            p[0] = ast.BinOp(op=p[2], left=p[1], right=p[3])
+
+    def p_additive_expr_nobf(self, p):
+        """additive_expr_nobf : multiplicative_expr_nobf
+                              | additive_expr_nobf PLUS multiplicative_expr
+                              | additive_expr_nobf MINUS multiplicative_expr
+        """
+        if len(p) == 2:
+            p[0] = p[1]
+        else:
+            p[0] = ast.BinOp(op=p[2], left=p[1], right=p[3])
+
+    # 11.7 Bitwise Shift Operators
+    def p_shift_expr(self, p):
+        """shift_expr : additive_expr
+                      | shift_expr LSHIFT additive_expr
+                      | shift_expr RSHIFT additive_expr
+                      | shift_expr URSHIFT additive_expr
+        """
+        if len(p) == 2:
+            p[0] = p[1]
+        else:
+            p[0] = ast.BinOp(op=p[2], left=p[1], right=p[3])
+
+    def p_shift_expr_nobf(self, p):
+        """shift_expr_nobf : additive_expr_nobf
+                           | shift_expr_nobf LSHIFT additive_expr
+                           | shift_expr_nobf RSHIFT additive_expr
+                           | shift_expr_nobf URSHIFT additive_expr
+        """
+        if len(p) == 2:
+            p[0] = p[1]
+        else:
+            p[0] = ast.BinOp(op=p[2], left=p[1], right=p[3])
+
+
+    # 11.8 Relational Operators
+    def p_relational_expr(self, p):
+        """relational_expr : shift_expr
+                           | relational_expr LT shift_expr
+                           | relational_expr GT shift_expr
+                           | relational_expr LE shift_expr
+                           | relational_expr GE shift_expr
+                           | relational_expr INSTANCEOF shift_expr
+                           | relational_expr IN shift_expr
+        """
+        if len(p) == 2:
+            p[0] = p[1]
+        else:
+            p[0] = ast.BinOp(op=p[2], left=p[1], right=p[3])
+
+    def p_relational_expr_noin(self, p):
+        """relational_expr_noin : shift_expr
+                                | relational_expr_noin LT shift_expr
+                                | relational_expr_noin GT shift_expr
+                                | relational_expr_noin LE shift_expr
+                                | relational_expr_noin GE shift_expr
+                                | relational_expr_noin INSTANCEOF shift_expr
+        """
+        if len(p) == 2:
+            p[0] = p[1]
+        else:
+            p[0] = ast.BinOp(op=p[2], left=p[1], right=p[3])
+
+    def p_relational_expr_nobf(self, p):
+        """relational_expr_nobf : shift_expr_nobf
+                                | relational_expr_nobf LT shift_expr
+                                | relational_expr_nobf GT shift_expr
+                                | relational_expr_nobf LE shift_expr
+                                | relational_expr_nobf GE shift_expr
+                                | relational_expr_nobf INSTANCEOF shift_expr
+                                | relational_expr_nobf IN shift_expr
+        """
+        if len(p) == 2:
+            p[0] = p[1]
+        else:
+            p[0] = ast.BinOp(op=p[2], left=p[1], right=p[3])
+
+    # 11.9 Equality Operators
+    def p_equality_expr(self, p):
+        """equality_expr : relational_expr
+                         | equality_expr EQEQ relational_expr
+                         | equality_expr NE relational_expr
+                         | equality_expr STREQ relational_expr
+                         | equality_expr STRNEQ relational_expr
+        """
+        if len(p) == 2:
+            p[0] = p[1]
+        else:
+            p[0] = ast.BinOp(op=p[2], left=p[1], right=p[3])
+
+    def p_equality_expr_noin(self, p):
+        """equality_expr_noin : relational_expr_noin
+                              | equality_expr_noin EQEQ relational_expr
+                              | equality_expr_noin NE relational_expr
+                              | equality_expr_noin STREQ relational_expr
+                              | equality_expr_noin STRNEQ relational_expr
+        """
+        if len(p) == 2:
+            p[0] = p[1]
+        else:
+            p[0] = ast.BinOp(op=p[2], left=p[1], right=p[3])
+
+    def p_equality_expr_nobf(self, p):
+        """equality_expr_nobf : relational_expr_nobf
+                              | equality_expr_nobf EQEQ relational_expr
+                              | equality_expr_nobf NE relational_expr
+                              | equality_expr_nobf STREQ relational_expr
+                              | equality_expr_nobf STRNEQ relational_expr
+        """
+        if len(p) == 2:
+            p[0] = p[1]
+        else:
+            p[0] = ast.BinOp(op=p[2], left=p[1], right=p[3])
+
+    # 11.10 Binary Bitwise Operators
+    def p_bitwise_and_expr(self, p):
+        """bitwise_and_expr : equality_expr
+                            | bitwise_and_expr BAND equality_expr
+        """
+        if len(p) == 2:
+            p[0] = p[1]
+        else:
+            p[0] = ast.BinOp(op=p[2], left=p[1], right=p[3])
+
+    def p_bitwise_and_expr_noin(self, p):
+        """bitwise_and_expr_noin \
+            : equality_expr_noin
+            | bitwise_and_expr_noin BAND equality_expr_noin
+        """
+        if len(p) == 2:
+            p[0] = p[1]
+        else:
+            p[0] = ast.BinOp(op=p[2], left=p[1], right=p[3])
+
+    def p_bitwise_and_expr_nobf(self, p):
+        """bitwise_and_expr_nobf \
+            : equality_expr_nobf
+            | bitwise_and_expr_nobf BAND equality_expr_nobf
+        """
+        if len(p) == 2:
+            p[0] = p[1]
+        else:
+            p[0] = ast.BinOp(op=p[2], left=p[1], right=p[3])
+
+    def p_bitwise_xor_expr(self, p):
+        """bitwise_xor_expr : bitwise_and_expr
+                            | bitwise_xor_expr BXOR bitwise_and_expr
+        """
+        if len(p) == 2:
+            p[0] = p[1]
+        else:
+            p[0] = ast.BinOp(op=p[2], left=p[1], right=p[3])
+
+    def p_bitwise_xor_expr_noin(self, p):
+        """
+        bitwise_xor_expr_noin \
+            : bitwise_and_expr_noin
+            | bitwise_xor_expr_noin BXOR bitwise_and_expr_noin
+        """
+        if len(p) == 2:
+            p[0] = p[1]
+        else:
+            p[0] = ast.BinOp(op=p[2], left=p[1], right=p[3])
+
+    def p_bitwise_xor_expr_nobf(self, p):
+        """
+        bitwise_xor_expr_nobf \
+            : bitwise_and_expr_nobf
+            | bitwise_xor_expr_nobf BXOR bitwise_and_expr_nobf
+        """
+        if len(p) == 2:
+            p[0] = p[1]
+        else:
+            p[0] = ast.BinOp(op=p[2], left=p[1], right=p[3])
+
+    def p_bitwise_or_expr(self, p):
+        """bitwise_or_expr : bitwise_xor_expr
+                           | bitwise_or_expr BOR bitwise_xor_expr
+        """
+        if len(p) == 2:
+            p[0] = p[1]
+        else:
+            p[0] = ast.BinOp(op=p[2], left=p[1], right=p[3])
+
+    def p_bitwise_or_expr_noin(self, p):
+        """
+        bitwise_or_expr_noin \
+            : bitwise_xor_expr_noin
+            | bitwise_or_expr_noin BOR bitwise_xor_expr_noin
+        """
+        if len(p) == 2:
+            p[0] = p[1]
+        else:
+            p[0] = ast.BinOp(op=p[2], left=p[1], right=p[3])
+
+    def p_bitwise_or_expr_nobf(self, p):
+        """
+        bitwise_or_expr_nobf \
+            : bitwise_xor_expr_nobf
+            | bitwise_or_expr_nobf BOR bitwise_xor_expr_nobf
+        """
+        if len(p) == 2:
+            p[0] = p[1]
+        else:
+            p[0] = ast.BinOp(op=p[2], left=p[1], right=p[3])
+
+    # 11.11 Binary Logical Operators
+    def p_logical_and_expr(self, p):
+        """logical_and_expr : bitwise_or_expr
+                            | logical_and_expr AND bitwise_or_expr
+        """
+        if len(p) == 2:
+            p[0] = p[1]
+        else:
+            p[0] = ast.BinOp(op=p[2], left=p[1], right=p[3])
+
+    def p_logical_and_expr_noin(self, p):
+        """
+        logical_and_expr_noin : bitwise_or_expr_noin
+                              | logical_and_expr_noin AND bitwise_or_expr_noin
+        """
+        if len(p) == 2:
+            p[0] = p[1]
+        else:
+            p[0] = ast.BinOp(op=p[2], left=p[1], right=p[3])
+
+    def p_logical_and_expr_nobf(self, p):
+        """
+        logical_and_expr_nobf : bitwise_or_expr_nobf
+                              | logical_and_expr_nobf AND bitwise_or_expr_nobf
+        """
+        if len(p) == 2:
+            p[0] = p[1]
+        else:
+            p[0] = ast.BinOp(op=p[2], left=p[1], right=p[3])
+
+    def p_logical_or_expr(self, p):
+        """logical_or_expr : logical_and_expr
+                           | logical_or_expr OR logical_and_expr
+        """
+        if len(p) == 2:
+            p[0] = p[1]
+        else:
+            p[0] = ast.BinOp(op=p[2], left=p[1], right=p[3])
+
+    def p_logical_or_expr_noin(self, p):
+        """logical_or_expr_noin : logical_and_expr_noin
+                                | logical_or_expr_noin OR logical_and_expr_noin
+        """
+        if len(p) == 2:
+            p[0] = p[1]
+        else:
+            p[0] = ast.BinOp(op=p[2], left=p[1], right=p[3])
+
+    def p_logical_or_expr_nobf(self, p):
+        """logical_or_expr_nobf : logical_and_expr_nobf
+                                | logical_or_expr_nobf OR logical_and_expr_nobf
+        """
+        if len(p) == 2:
+            p[0] = p[1]
+        else:
+            p[0] = ast.BinOp(op=p[2], left=p[1], right=p[3])
+
+    # 11.12 Conditional Operator ( ? : )
+    def p_conditional_expr(self, p):
+        """
+        conditional_expr \
+            : logical_or_expr
+            | logical_or_expr CONDOP assignment_expr COLON assignment_expr
+        """
+        if len(p) == 2:
+            p[0] = p[1]
+        else:
+            p[0] = ast.Conditional(
+                predicate=p[1], consequent=p[3], alternative=p[5])
+
+    def p_conditional_expr_noin(self, p):
+        """
+        conditional_expr_noin \
+            : logical_or_expr_noin
+            | logical_or_expr_noin CONDOP assignment_expr_noin COLON \
+                  assignment_expr_noin
+        """
+        if len(p) == 2:
+            p[0] = p[1]
+        else:
+            p[0] = ast.Conditional(
+                predicate=p[1], consequent=p[3], alternative=p[5])
+
+    def p_conditional_expr_nobf(self, p):
+        """
+        conditional_expr_nobf \
+            : logical_or_expr_nobf
+            | logical_or_expr_nobf CONDOP assignment_expr COLON assignment_expr
+        """
+        if len(p) == 2:
+            p[0] = p[1]
+        else:
+            p[0] = ast.Conditional(
+                predicate=p[1], consequent=p[3], alternative=p[5])
+
+    # 11.13 Assignment Operators
+    def p_assignment_expr(self, p):
+        """
+        assignment_expr \
+            : conditional_expr
+            | left_hand_side_expr assignment_operator assignment_expr
+        """
+        if len(p) == 2:
+            p[0] = p[1]
+        else:
+            p[0] = ast.Assign(left=p[1], op=p[2], right=p[3])
+
+    def p_assignment_expr_noin(self, p):
+        """
+        assignment_expr_noin \
+            : conditional_expr_noin
+            | left_hand_side_expr assignment_operator assignment_expr_noin
+        """
+        if len(p) == 2:
+            p[0] = p[1]
+        else:
+            p[0] = ast.Assign(left=p[1], op=p[2], right=p[3])
+
+    def p_assignment_expr_nobf(self, p):
+        """
+        assignment_expr_nobf \
+            : conditional_expr_nobf
+            | left_hand_side_expr_nobf assignment_operator assignment_expr
+        """
+        if len(p) == 2:
+            p[0] = p[1]
+        else:
+            p[0] = ast.Assign(left=p[1], op=p[2], right=p[3])
+
+    def p_assignment_operator(self, p):
+        """assignment_operator : EQ
+                               | MULTEQUAL
+                               | DIVEQUAL
+                               | MODEQUAL
+                               | PLUSEQUAL
+                               | MINUSEQUAL
+                               | LSHIFTEQUAL
+                               | RSHIFTEQUAL
+                               | URSHIFTEQUAL
+                               | ANDEQUAL
+                               | XOREQUAL
+                               | OREQUAL
+        """
+        p[0] = p[1]
+
+    # 11.4 Comma Operator
+    def p_expr(self, p):
+        """expr : assignment_expr
+                | expr COMMA assignment_expr
+        """
+        if len(p) == 2:
+            p[0] = p[1]
+        else:
+            p[0] = ast.Comma(left=p[1], right=p[3])
+
+    def p_expr_noin(self, p):
+        """expr_noin : assignment_expr_noin
+                     | expr_noin COMMA assignment_expr_noin
+        """
+        if len(p) == 2:
+            p[0] = p[1]
+        else:
+            p[0] = ast.Comma(left=p[1], right=p[3])
+
+    def p_expr_nobf(self, p):
+        """expr_nobf : assignment_expr_nobf
+                     | expr_nobf COMMA assignment_expr
+        """
+        if len(p) == 2:
+            p[0] = p[1]
+        else:
+            p[0] = ast.Comma(left=p[1], right=p[3])
+
+    # 12.2 Variable Statement
+    def p_variable_statement(self, p):
+        """variable_statement : VAR variable_declaration_list SEMI
+                              | VAR variable_declaration_list auto_semi
+        """
+        p[0] = ast.VarStatement(p[2])
+
+    def p_variable_declaration_list(self, p):
+        """
+        variable_declaration_list \
+            : variable_declaration
+            | variable_declaration_list COMMA variable_declaration
+        """
+        if len(p) == 2:
+            p[0] = [p[1]]
+        else:
+            p[1].append(p[3])
+            p[0] = p[1]
+
+    def p_variable_declaration_list_noin(self, p):
+        """
+        variable_declaration_list_noin \
+            : variable_declaration_noin
+            | variable_declaration_list_noin COMMA variable_declaration_noin
+        """
+        if len(p) == 2:
+            p[0] = [p[1]]
+        else:
+            p[1].append(p[3])
+            p[0] = p[1]
+
+    def p_variable_declaration(self, p):
+        """variable_declaration : identifier
+                                | identifier initializer
+        """
+        if len(p) == 2:
+            p[0] = ast.VarDecl(p[1])
+        else:
+            p[0] = ast.VarDecl(p[1], p[2])
+
+    def p_variable_declaration_noin(self, p):
+        """variable_declaration_noin : identifier
+                                     | identifier initializer_noin
+        """
+        if len(p) == 2:
+            p[0] = ast.VarDecl(p[1])
+        else:
+            p[0] = ast.VarDecl(p[1], p[2])
+
+    def p_initializer(self, p):
+        """initializer : EQ assignment_expr"""
+        p[0] = p[2]
+
+    def p_initializer_noin(self, p):
+        """initializer_noin : EQ assignment_expr_noin"""
+        p[0] = p[2]
+
    # 12.3 Empty Statement
    def p_empty_statement(self, p):
        """empty_statement : SEMI"""
        # Keeps the matched token value so the node can be re-emitted.
        p[0] = ast.EmptyStatement(p[1])

    # 12.4 Expression Statement
    def p_expr_statement(self, p):
        """expr_statement : expr_nobf SEMI
                          | expr_nobf auto_semi
        """
        # `auto_semi` is the parser's automatic-semicolon alternative to SEMI;
        # either way only the expression is wrapped.
        p[0] = ast.ExprStatement(p[1])

    # 12.5 The if Statement
    def p_if_statement_1(self, p):
        """if_statement : IF LPAREN expr RPAREN statement"""
        # if without else: no alternative branch.
        p[0] = ast.If(predicate=p[3], consequent=p[5])

    def p_if_statement_2(self, p):
        """if_statement : IF LPAREN expr RPAREN statement ELSE statement"""
        # if/else: the ELSE branch becomes the alternative.
        p[0] = ast.If(predicate=p[3], consequent=p[5], alternative=p[7])
+
    # 12.6 Iteration Statements
    def p_iteration_statement_1(self, p):
        """
        iteration_statement \
            : DO statement WHILE LPAREN expr RPAREN SEMI
            | DO statement WHILE LPAREN expr RPAREN auto_semi
        """
        # do-while: the body (p[2]) precedes the predicate (p[5]).
        p[0] = ast.DoWhile(predicate=p[5], statement=p[2])

    def p_iteration_statement_2(self, p):
        """iteration_statement : WHILE LPAREN expr RPAREN statement"""
        p[0] = ast.While(predicate=p[3], statement=p[5])

    def p_iteration_statement_3(self, p):
        """
        iteration_statement \
            : FOR LPAREN expr_noin_opt SEMI expr_opt SEMI expr_opt RPAREN \
                  statement
            | FOR LPAREN VAR variable_declaration_list_noin SEMI expr_opt SEMI\
                  expr_opt RPAREN statement
        """
        # Classic three-clause `for`.  The VAR alternative declares its loop
        # variables, so the init part is wrapped in a VarStatement first.
        if len(p) == 10:
            p[0] = ast.For(init=p[3], cond=p[5], count=p[7], statement=p[9])
        else:
            init = ast.VarStatement(p[4])
            p[0] = ast.For(init=init, cond=p[6], count=p[8], statement=p[10])

    def p_iteration_statement_4(self, p):
        """
        iteration_statement \
            : FOR LPAREN left_hand_side_expr IN expr RPAREN statement
        """
        # `for (lhs in obj)` without a declaration.
        p[0] = ast.ForIn(item=p[3], iterable=p[5], statement=p[7])

    def p_iteration_statement_5(self, p):
        """
        iteration_statement : \
            FOR LPAREN VAR identifier IN expr RPAREN statement
        """
        # `for (var x in obj)` -- the identifier becomes a VarDecl.
        p[0] = ast.ForIn(item=ast.VarDecl(p[4]), iterable=p[6], statement=p[8])

    def p_iteration_statement_6(self, p):
        """
        iteration_statement \
          : FOR LPAREN VAR identifier initializer_noin IN expr RPAREN statement
        """
        # `for (var x = init in obj)` -- declaration with an initializer.
        p[0] = ast.ForIn(item=ast.VarDecl(identifier=p[4], initializer=p[5]),
                         iterable=p[7], statement=p[9])
+
    def p_expr_opt(self, p):
        """expr_opt : empty
                    | expr
        """
        # Pass-through: yields whatever the matched production produced
        # (the `empty` production's value for the absent case).
        p[0] = p[1]

    def p_expr_noin_opt(self, p):
        """expr_noin_opt : empty
                         | expr_noin
        """
        # Same pass-through for the `in`-free variant used in for-headers.
        p[0] = p[1]
+
    # 12.7 The continue Statement
    def p_continue_statement_1(self, p):
        """continue_statement : CONTINUE SEMI
                              | CONTINUE auto_semi
        """
        # Bare `continue` -- no label.
        p[0] = ast.Continue()

    def p_continue_statement_2(self, p):
        """continue_statement : CONTINUE identifier SEMI
                              | CONTINUE identifier auto_semi
        """
        # `continue label;`
        p[0] = ast.Continue(p[2])

    # 12.8 The break Statement
    def p_break_statement_1(self, p):
        """break_statement : BREAK SEMI
                           | BREAK auto_semi
        """
        # Bare `break` -- no label.
        p[0] = ast.Break()

    def p_break_statement_2(self, p):
        """break_statement : BREAK identifier SEMI
                           | BREAK identifier auto_semi
        """
        # `break label;`
        p[0] = ast.Break(p[2])


    # 12.9 The return Statement
    def p_return_statement_1(self, p):
        """return_statement : RETURN SEMI
                            | RETURN auto_semi
        """
        # `return;` with no value.
        p[0] = ast.Return()

    def p_return_statement_2(self, p):
        """return_statement : RETURN expr SEMI
                            | RETURN expr auto_semi
        """
        p[0] = ast.Return(expr=p[2])

    # 12.10 The with Statement
    def p_with_statement(self, p):
        """with_statement : WITH LPAREN expr RPAREN statement"""
        p[0] = ast.With(expr=p[3], statement=p[5])
+
+    # 12.11 The switch Statement
+    def p_switch_statement(self, p):
+        """switch_statement : SWITCH LPAREN expr RPAREN case_block"""
+        cases = []
+        default = None
+        # iterate over return values from case_block
+        for item in p[5]:
+            if isinstance(item, ast.Default):
+                default = item
+            elif isinstance(item, list):
+                cases.extend(item)
+
+        p[0] = ast.Switch(expr=p[3], cases=cases, default=default)
+
+    def p_case_block(self, p):
+        """
+        case_block \
+            : LBRACE case_clauses_opt RBRACE
+            | LBRACE case_clauses_opt default_clause case_clauses_opt RBRACE
+        """
+        p[0] = p[2:-1]
+
+    def p_case_clauses_opt(self, p):
+        """case_clauses_opt : empty
+                            | case_clauses
+        """
+        p[0] = p[1]
+
+    def p_case_clauses(self, p):
+        """case_clauses : case_clause
+                        | case_clauses case_clause
+        """
+        if len(p) == 2:
+            p[0] = [p[1]]
+        else:
+            p[1].append(p[2])
+            p[0] = p[1]
+
    def p_case_clause(self, p):
        """case_clause : CASE expr COLON source_elements"""
        p[0] = ast.Case(expr=p[2], elements=p[4])

    def p_default_clause(self, p):
        """default_clause : DEFAULT COLON source_elements"""
        p[0] = ast.Default(elements=p[3])

    # 12.12 Labelled Statements
    def p_labelled_statement(self, p):
        """labelled_statement : identifier COLON statement"""
        p[0] = ast.Label(identifier=p[1], statement=p[3])

    # 12.13 The throw Statement
    def p_throw_statement(self, p):
        """throw_statement : THROW expr SEMI
                           | THROW expr auto_semi
        """
        p[0] = ast.Throw(expr=p[2])

    # 12.14 The try Statement
    def p_try_statement_1(self, p):
        """try_statement : TRY block catch"""
        # try/catch without finally.
        p[0] = ast.Try(statements=p[2], catch=p[3])

    def p_try_statement_2(self, p):
        """try_statement : TRY block finally"""
        # try/finally without catch.
        p[0] = ast.Try(statements=p[2], fin=p[3])

    def p_try_statement_3(self, p):
        """try_statement : TRY block catch finally"""
        # Full try/catch/finally.
        p[0] = ast.Try(statements=p[2], catch=p[3], fin=p[4])

    def p_catch(self, p):
        """catch : CATCH LPAREN identifier RPAREN block"""
        p[0] = ast.Catch(identifier=p[3], elements=p[5])

    def p_finally(self, p):
        """finally : FINALLY block"""
        p[0] = ast.Finally(elements=p[2])

    # 12.15 The debugger statement
    def p_debugger_statement(self, p):
        """debugger_statement : DEBUGGER SEMI
                              | DEBUGGER auto_semi
        """
        # Keeps the DEBUGGER token value for re-emission.
        p[0] = ast.Debugger(p[1])
+
+    # 13 Function Definition
+    def p_function_declaration(self, p):
+        """
+        function_declaration \
+            : FUNCTION identifier LPAREN RPAREN LBRACE function_body RBRACE
+            | FUNCTION identifier LPAREN formal_parameter_list RPAREN LBRACE \
+                 function_body RBRACE
+        """
+        if len(p) == 8:
+            p[0] = ast.FuncDecl(
+                identifier=p[2], parameters=None, elements=p[6])
+        else:
+            p[0] = ast.FuncDecl(
+                identifier=p[2], parameters=p[4], elements=p[7])
+
+    def p_function_expr_1(self, p):
+        """
+        function_expr \
+            : FUNCTION LPAREN RPAREN LBRACE function_body RBRACE
+            | FUNCTION LPAREN formal_parameter_list RPAREN \
+                LBRACE function_body RBRACE
+        """
+        if len(p) == 7:
+            p[0] = ast.FuncExpr(
+                identifier=None, parameters=None, elements=p[5])
+        else:
+            p[0] = ast.FuncExpr(
+                identifier=None, parameters=p[3], elements=p[6])
+
+    def p_function_expr_2(self, p):
+        """
+        function_expr \
+            : FUNCTION identifier LPAREN RPAREN LBRACE function_body RBRACE
+            | FUNCTION identifier LPAREN formal_parameter_list RPAREN \
+                LBRACE function_body RBRACE
+        """
+        if len(p) == 8:
+            p[0] = ast.FuncExpr(
+                identifier=p[2], parameters=None, elements=p[6])
+        else:
+            p[0] = ast.FuncExpr(
+                identifier=p[2], parameters=p[4], elements=p[7])
+
+
+    def p_formal_parameter_list(self, p):
+        """formal_parameter_list : identifier
+                                 | formal_parameter_list COMMA identifier
+        """
+        if len(p) == 2:
+            p[0] = [p[1]]
+        else:
+            p[1].append(p[3])
+            p[0] = p[1]
+
+    def p_function_body(self, p):
+        """function_body : source_elements"""
+        p[0] = p[1]
diff --git a/tools/slimit/ply/__init__.py b/tools/slimit/ply/__init__.py
new file mode 100644 (file)
index 0000000..853a985
--- /dev/null
@@ -0,0 +1,4 @@
# PLY package
# Author: David Beazley (dave@dabeaz.com)

# Submodules exported by `from ply import *`.
__all__ = ['lex','yacc']
diff --git a/tools/slimit/ply/cpp.py b/tools/slimit/ply/cpp.py
new file mode 100644 (file)
index 0000000..5cad682
--- /dev/null
@@ -0,0 +1,898 @@
+# -----------------------------------------------------------------------------
+# cpp.py
+#
+# Author:  David Beazley (http://www.dabeaz.com)
+# Copyright (C) 2007
+# All rights reserved
+#
+# This module implements an ANSI-C style lexical preprocessor for PLY. 
+# -----------------------------------------------------------------------------
+from __future__ import generators
+
+# -----------------------------------------------------------------------------
+# Default preprocessor lexer definitions.   These tokens are enough to get
+# a basic preprocessor working.   Other modules may import these if they want
+# -----------------------------------------------------------------------------
+
# Token type names exported to PLY's lexer builder.
tokens = (
   'CPP_ID','CPP_INTEGER', 'CPP_FLOAT', 'CPP_STRING', 'CPP_CHAR', 'CPP_WS', 'CPP_COMMENT', 'CPP_POUND','CPP_DPOUND'
)

# Single characters that are returned as literal one-character tokens.
literals = "+-*/%|&~^<>=!?()[]{}.,;:\\\'\""
+
+# Whitespace
def t_CPP_WS(t):
    r'\s+'
    # Every newline swallowed as whitespace still has to advance the
    # lexer's line counter.
    newline_count = t.value.count("\n")
    t.lexer.lineno += newline_count
    return t
+
# '#' (stringize) and '##' (token paste) preprocessor operators.
t_CPP_POUND = r'\#'
t_CPP_DPOUND = r'\#\#'

# Identifier
t_CPP_ID = r'[A-Za-z_][\w_]*'
+
# Integer literal
def CPP_INTEGER(t):
    r'(((((0x)|(0X))[0-9a-fA-F]+)|(\d+))([uU]|[lL]|[uU][lL]|[lL][uU])?)'
    # Hex or decimal with optional C suffixes (u/U, l/L and combinations).
    return t

# Also bind it under the t_-prefixed name that PLY scans for; keeping the
# plain name lets other token modules reuse the same rule.
t_CPP_INTEGER = CPP_INTEGER

# Floating literal
# NOTE(review): the spaces inside this pattern are presumably harmless
# because PLY compiles token rules with re.VERBOSE -- confirm.
t_CPP_FLOAT = r'((\d+)(\.\d+)(e(\+|-)?(\d+))? | (\d+)e(\+|-)?(\d+))([lL]|[fF])?'
+
# String literal
def t_CPP_STRING(t):
    r'\"([^\\\n]|(\\(.|\n)))*?\"'
    # Strings may contain escaped newlines; keep the line count accurate.
    t.lexer.lineno += t.value.count("\n")
    return t

# Character constant 'c' or L'c'
def t_CPP_CHAR(t):
    r'(L)?\'([^\\\n]|(\\(.|\n)))*?\''
    t.lexer.lineno += t.value.count("\n")
    return t

# Comment
def t_CPP_COMMENT(t):
    r'(/\*(.|\n)*?\*/)|(//.*?\n)'
    # Block comments may span multiple lines.
    t.lexer.lineno += t.value.count("\n")
    return t

def t_error(t):
    # Unknown characters are not fatal here: each becomes a one-character
    # token whose type is the character itself, and lexing continues.
    t.type = t.value[0]
    t.value = t.value[0]
    t.lexer.skip(1)
    return t
+
+import re
+import copy
+import time
+import os.path
+
+# -----------------------------------------------------------------------------
+# trigraph()
+# 
+# Given an input string, this function replaces all trigraph sequences. 
+# The following mapping is used:
+#
+#     ??=    #
+#     ??/    \
+#     ??'    ^
+#     ??(    [
+#     ??)    ]
+#     ??!    |
+#     ??<    {
+#     ??>    }
+#     ??-    ~
+# -----------------------------------------------------------------------------
+
_trigraph_pat = re.compile(r'''\?\?[=/\'\(\)\!<>\-]''')
# Third character of each trigraph mapped to its single-char replacement.
_trigraph_rep = dict(zip("=/'()!<>-", "#\\^[]|{}~"))

def trigraph(input):
    """Return *input* with every ISO C trigraph (??X) replaced."""
    def _replace(match):
        # The trigraph's last character selects the replacement.
        return _trigraph_rep[match.group()[-1]]
    return _trigraph_pat.sub(_replace, input)
+
+# ------------------------------------------------------------------
+# Macro object
+#
+# This object holds information about preprocessor macros
+#
+#    .name      - Macro name (string)
+#    .value     - Macro value (a list of tokens)
+#    .arglist   - List of argument names
+#    .variadic  - Boolean indicating whether or not variadic macro
+#    .vararg    - Name of the variadic parameter
+#
+# When a macro is created, the macro replacement token sequence is
+# pre-scanned and used to create patch lists that are later used
+# during macro expansion
+# ------------------------------------------------------------------
+
class Macro(object):
    # One preprocessor macro: its name, replacement token list, optional
    # formal-parameter names, and whether it is variadic.  `vararg` exists
    # only for variadic macros; `source` is filled in later (not visible
    # in this chunk -- presumably the defining file name).
    def __init__(self,name,value,arglist=None,variadic=False):
        self.name = name
        self.value = value
        self.arglist = arglist
        self.variadic = variadic
        if variadic:
            # The last formal parameter receives the variadic arguments.
            self.vararg = arglist[-1]
        self.source = None
+
+# ------------------------------------------------------------------
+# Preprocessor object
+#
+# Object representing a preprocessor.  Contains macro definitions,
+# include directories, and other information
+# ------------------------------------------------------------------
+
+class Preprocessor(object):
    def __init__(self,lexer=None):
        """Create a preprocessor bound to *lexer*.

        NOTE(review): when lexer is None this falls back to a module-global
        `lex`, but no `lex` import is visible in this file, so that path
        would raise NameError -- callers should always pass a lexer; confirm.
        """
        if lexer is None:
            lexer = lex.lexer
        self.lexer = lexer
        self.macros = { }       # name -> Macro
        self.path = []          # #include search path
        self.temp_path = []     # NOTE(review): usage not visible in this chunk

        # Probe the lexer for selected tokens
        self.lexprobe()

        # Predefine __DATE__/__TIME__ from the current local time.
        tm = time.localtime()
        self.define("__DATE__ \"%s\"" % time.strftime("%b %d %Y",tm))
        self.define("__TIME__ \"%s\"" % time.strftime("%H:%M:%S",tm))
        self.parser = None
+
+    # -----------------------------------------------------------------------------
+    # tokenize()
+    #
+    # Utility function. Given a string of text, tokenize into a list of tokens
+    # -----------------------------------------------------------------------------
+
+    def tokenize(self,text):
+        tokens = []
+        self.lexer.input(text)
+        while True:
+            tok = self.lexer.token()
+            if not tok: break
+            tokens.append(tok)
+        return tokens
+
+    # ---------------------------------------------------------------------
+    # error()
+    #
+    # Report a preprocessor error/warning of some kind
+    # ----------------------------------------------------------------------
+
    def error(self,file,line,msg):
        # Diagnostics go to stdout in "file:line message" form; subclasses
        # may override this for different error handling.
        print("%s:%d %s" % (file,line,msg))
+
+    # ----------------------------------------------------------------------
+    # lexprobe()
+    #
+    # This method probes the preprocessor lexer object to discover
+    # the token types of symbols that are important to the preprocessor.
+    # If this works right, the preprocessor will simply "work"
+    # with any suitable lexer regardless of how tokens have been named.
+    # ----------------------------------------------------------------------
+
    def lexprobe(self):
        """Probe the attached lexer to discover its token-type names.

        Feeds tiny sample inputs through the lexer and records the token
        types it assigns to identifiers, integers, strings, whitespace and
        newlines, so the preprocessor works with any suitably built lexer
        regardless of how the token types are named.
        """

        # Determine the token type for identifiers
        self.lexer.input("identifier")
        tok = self.lexer.token()
        if not tok or tok.value != "identifier":
            print("Couldn't determine identifier type")
        else:
            self.t_ID = tok.type

        # Determine the token type for integers
        self.lexer.input("12345")
        tok = self.lexer.token()
        if not tok or int(tok.value) != 12345:
            print("Couldn't determine integer type")
        else:
            self.t_INTEGER = tok.type
            self.t_INTEGER_TYPE = type(tok.value)

        # Determine the token type for strings enclosed in double quotes
        self.lexer.input("\"filename\"")
        tok = self.lexer.token()
        if not tok or tok.value != "\"filename\"":
            print("Couldn't determine string type")
        else:
            self.t_STRING = tok.type

        # Determine the token type for whitespace--if any
        self.lexer.input("  ")
        tok = self.lexer.token()
        if not tok or tok.value != "  ":
            self.t_SPACE = None
        else:
            self.t_SPACE = tok.type

        # Determine the token type for newlines
        self.lexer.input("\n")
        tok = self.lexer.token()
        if not tok or tok.value != "\n":
            self.t_NEWLINE = None
            print("Couldn't determine token for newlines")
        else:
            self.t_NEWLINE = tok.type

        # NOTE(review): t_SPACE and/or t_NEWLINE may be None here, in which
        # case None sits inside t_WS; presumably harmless as long as real
        # token types are never None -- confirm.
        self.t_WS = (self.t_SPACE, self.t_NEWLINE)

        # Check for other characters used by the preprocessor
        # ('##' is two characters; the lexer is expected to return it as one
        # token -- presumably via a dedicated rule such as t_CPP_DPOUND).
        chars = [ '<','>','#','##','\\','(',')',',','.']
        for c in chars:
            self.lexer.input(c)
            tok = self.lexer.token()
            if not tok or tok.value != c:
                print("Unable to lex '%s' required for preprocessor" % c)
+
+    # ----------------------------------------------------------------------
+    # add_path()
+    #
+    # Adds a search path to the preprocessor.  
+    # ----------------------------------------------------------------------
+
    def add_path(self,path):
        # Append a directory to the #include search path (searched in order).
        self.path.append(path)
+
+    # ----------------------------------------------------------------------
+    # group_lines()
+    #
+    # Given an input string, this function splits it into lines.  Trailing whitespace
+    # is removed.   Any line ending with \ is grouped with the next line.  This
+    # function forms the lowest level of the preprocessor---grouping into text into
+    # a line-by-line format.
+    # ----------------------------------------------------------------------
+
+    def group_lines(self,input):
+        lex = self.lexer.clone()
+        lines = [x.rstrip() for x in input.splitlines()]
+        for i in xrange(len(lines)):
+            j = i+1
+            while lines[i].endswith('\\') and (j < len(lines)):
+                lines[i] = lines[i][:-1]+lines[j]
+                lines[j] = ""
+                j += 1
+
+        input = "\n".join(lines)
+        lex.input(input)
+        lex.lineno = 1
+
+        current_line = []
+        while True:
+            tok = lex.token()
+            if not tok:
+                break
+            current_line.append(tok)
+            if tok.type in self.t_WS and '\n' in tok.value:
+                yield current_line
+                current_line = []
+
+        if current_line:
+            yield current_line
+
+    # ----------------------------------------------------------------------
+    # tokenstrip()
+    # 
+    # Remove leading/trailing whitespace tokens from a token list
+    # ----------------------------------------------------------------------
+
+    def tokenstrip(self,tokens):
+        i = 0
+        while i < len(tokens) and tokens[i].type in self.t_WS:
+            i += 1
+        del tokens[:i]
+        i = len(tokens)-1
+        while i >= 0 and tokens[i].type in self.t_WS:
+            i -= 1
+        del tokens[i+1:]
+        return tokens
+
+
+    # ----------------------------------------------------------------------
+    # collect_args()
+    #
+    # Collects comma separated arguments from a list of tokens.   The arguments
+    # must be enclosed in parenthesis.  Returns a tuple (tokencount,args,positions)
+    # where tokencount is the number of tokens consumed, args is a list of arguments,
+    # and positions is a list of integers containing the starting index of each
+    # argument.  Each argument is represented by a list of tokens.
+    #
+    # When collecting arguments, leading and trailing whitespace is removed
+    # from each argument.  
+    #
+    # This function properly handles nested parenthesis and commas---these do not
+    # define new arguments.
+    # ----------------------------------------------------------------------
+
    def collect_args(self,tokenlist):
        """Split a macro invocation's token list into arguments.

        Returns (tokencount, args, positions): the number of tokens
        consumed, the argument token lists (whitespace-stripped), and the
        starting index of each argument.  Nested parentheses and the commas
        inside them do not separate arguments.  On a malformed invocation an
        error is reported and (0, [], []) is returned.
        """
        args = []
        positions = []
        current_arg = []
        nesting = 1
        tokenlen = len(tokenlist)
    
        # Search for the opening '('.
        i = 0
        while (i < tokenlen) and (tokenlist[i].type in self.t_WS):
            i += 1

        if (i < tokenlen) and (tokenlist[i].value == '('):
            positions.append(i+1)
        else:
            self.error(self.source,tokenlist[0].lineno,"Missing '(' in macro arguments")
            return 0, [], []

        i += 1

        while i < tokenlen:
            t = tokenlist[i]
            if t.value == '(':
                current_arg.append(t)
                nesting += 1
            elif t.value == ')':
                nesting -= 1
                if nesting == 0:
                    # Closing paren of the invocation: flush the final
                    # argument (if any) and report the tokens consumed.
                    if current_arg:
                        args.append(self.tokenstrip(current_arg))
                        positions.append(i)
                    return i+1,args,positions
                current_arg.append(t)
            elif t.value == ',' and nesting == 1:
                # Top-level comma separates arguments.
                args.append(self.tokenstrip(current_arg))
                positions.append(i+1)
                current_arg = []
            else:
                current_arg.append(t)
            i += 1
    
        # Missing end argument
        self.error(self.source,tokenlist[-1].lineno,"Missing ')' in macro arguments")
        return 0, [],[]
+
+    # ----------------------------------------------------------------------
+    # macro_prescan()
+    #
+    # Examine the macro value (token sequence) and identify patch points
+    # This is used to speed up macro expansion later on---we'll know
+    # right away where to apply patches to the value to form the expansion
+    # ----------------------------------------------------------------------
+    
    def macro_prescan(self,macro):
        """Scan a macro's replacement tokens and record patch points.

        Fills in macro.patch (argument substitutions and '##' pastes),
        macro.str_patch (stringizing '#' operators) and
        macro.var_comma_patch (comma elision before an empty variadic
        argument) so expansion can apply them quickly later.  Mutates
        macro.value in place by deleting the '#'/'##' operator tokens.
        """
        macro.patch     = []             # Standard macro arguments 
        macro.str_patch = []             # String conversion expansion
        macro.var_comma_patch = []       # Variadic macro comma patch
        i = 0
        while i < len(macro.value):
            if macro.value[i].type == self.t_ID and macro.value[i].value in macro.arglist:
                argnum = macro.arglist.index(macro.value[i].value)
                # Conversion of argument to a string
                if i > 0 and macro.value[i-1].value == '#':
                    macro.value[i] = copy.copy(macro.value[i])
                    macro.value[i].type = self.t_STRING
                    del macro.value[i-1]
                    macro.str_patch.append((argnum,i-1))
                    continue
                # Concatenation
                elif (i > 0 and macro.value[i-1].value == '##'):
                    macro.patch.append(('c',argnum,i-1))
                    del macro.value[i-1]
                    continue
                elif ((i+1) < len(macro.value) and macro.value[i+1].value == '##'):
                    macro.patch.append(('c',argnum,i))
                    i += 1
                    continue
                # Standard expansion
                else:
                    macro.patch.append(('e',argnum,i))
            elif macro.value[i].value == '##':
                # ', ## __VA_ARGS__': remember the comma position so it can
                # be dropped when the variadic argument is empty.
                if macro.variadic and (i > 0) and (macro.value[i-1].value == ',') and \
                        ((i+1) < len(macro.value)) and (macro.value[i+1].type == self.t_ID) and \
                        (macro.value[i+1].value == macro.vararg):
                    macro.var_comma_patch.append(i-1)
            i += 1
        # Sort patches so they are applied from the end of the token list
        # backwards, keeping earlier indices valid as the sequence grows.
        macro.patch.sort(key=lambda x: x[2],reverse=True)
+
+    # ----------------------------------------------------------------------
+    # macro_expand_args()
+    #
+    # Given a Macro and list of arguments (each a token list), this method
+    # returns an expanded version of a macro.  The return value is a token sequence
+    # representing the replacement macro tokens
+    # ----------------------------------------------------------------------
+
    def macro_expand_args(self,macro,args):
        """Return macro's replacement token list with *args* substituted.

        Applies, in order: stringizing patches, variadic comma elision, and
        the argument patches recorded by macro_prescan (which were sorted in
        reverse index order so earlier patch positions stay valid).
        """
        # Make a copy of the macro token sequence
        rep = [copy.copy(_x) for _x in macro.value]

        # Make string expansion patches.  These do not alter the length of the replacement sequence
        
        str_expansion = {}
        for argnum, i in macro.str_patch:
            if argnum not in str_expansion:
                # Stringize each argument at most once; backslashes must be
                # doubled inside the generated string literal.
                str_expansion[argnum] = ('"%s"' % "".join([x.value for x in args[argnum]])).replace("\\","\\\\")
            rep[i] = copy.copy(rep[i])
            rep[i].value = str_expansion[argnum]

        # Make the variadic macro comma patch.  If the variadic macro argument is empty, we get rid
        comma_patch = False
        if macro.variadic and not args[-1]:
            for i in macro.var_comma_patch:
                rep[i] = None
                comma_patch = True

        # Make all other patches.   The order of these matters.  It is assumed that the patch list
        # has been sorted in reverse order of patch location since replacements will cause the
        # size of the replacement sequence to expand from the patch point.
        
        expanded = { }
        for ptype, argnum, i in macro.patch:
            # Concatenation.   Argument is left unexpanded
            if ptype == 'c':
                rep[i:i+1] = args[argnum]
            # Normal expansion.  Argument is macro expanded first
            elif ptype == 'e':
                if argnum not in expanded:
                    expanded[argnum] = self.expand_macros(args[argnum])
                rep[i:i+1] = expanded[argnum]

        # Get rid of removed comma if necessary
        if comma_patch:
            rep = [_i for _i in rep if _i]

        return rep
+
+
+    # ----------------------------------------------------------------------
+    # expand_macros()
+    #
+    # Given a list of tokens, this function performs macro expansion.
+    # The expanded argument is a dictionary that contains macros already
+    # expanded.  This is used to prevent infinite recursion.
+    # ----------------------------------------------------------------------
+
+    def expand_macros(self,tokens,expanded=None):
+        if expanded is None:
+            expanded = {}
+        i = 0
+        while i < len(tokens):
+            t = tokens[i]
+            if t.type == self.t_ID:
+                if t.value in self.macros and t.value not in expanded:
+                    # Yes, we found a macro match
+                    expanded[t.value] = True
+                    
+                    m = self.macros[t.value]
+                    if not m.arglist:
+                        # A simple macro
+                        ex = self.expand_macros([copy.copy(_x) for _x in m.value],expanded)
+                        for e in ex:
+                            e.lineno = t.lineno
+                        tokens[i:i+1] = ex
+                        i += len(ex)
+                    else:
+                        # A macro with arguments
+                        j = i + 1
+                        while j < len(tokens) and tokens[j].type in self.t_WS:
+                            j += 1
+                        if tokens[j].value == '(':
+                            tokcount,args,positions = self.collect_args(tokens[j:])
+                            if not m.variadic and len(args) !=  len(m.arglist):
+                                self.error(self.source,t.lineno,"Macro %s requires %d arguments" % (t.value,len(m.arglist)))
+                                i = j + tokcount
+                            elif m.variadic and len(args) < len(m.arglist)-1:
+                                if len(m.arglist) > 2:
+                                    self.error(self.source,t.lineno,"Macro %s must have at least %d arguments" % (t.value, len(m.arglist)-1))
+                                else:
+                                    self.error(self.source,t.lineno,"Macro %s must have at least %d argument" % (t.value, len(m.arglist)-1))
+                                i = j + tokcount
+                            else:
+                                if m.variadic:
+                                    if len(args) == len(m.arglist)-1:
+                                        args.append([])
+                                    else:
+                                        args[len(m.arglist)-1] = tokens[j+positions[len(m.arglist)-1]:j+tokcount-1]
+                                        del args[len(m.arglist):]
+                                        
+                                # Get macro replacement text
+                                rep = self.macro_expand_args(m,args)
+                                rep = self.expand_macros(rep,expanded)
+                                for r in rep:
+                                    r.lineno = t.lineno
+                                tokens[i:j+tokcount] = rep
+                                i += len(rep)
+                    del expanded[t.value]
+                    continue
+                elif t.value == '__LINE__':
+                    t.type = self.t_INTEGER
+                    t.value = self.t_INTEGER_TYPE(t.lineno)
+                
+            i += 1
+        return tokens
+
+    # ----------------------------------------------------------------------    
+    # evalexpr()
+    # 
+    # Evaluate an expression token sequence for the purposes of evaluating
+    # integral expressions.
+    # ----------------------------------------------------------------------
+
+    def evalexpr(self,tokens):
+        # tokens = tokenize(line)
+        # Search for defined macros
+        i = 0
+        while i < len(tokens):
+            if tokens[i].type == self.t_ID and tokens[i].value == 'defined':
+                j = i + 1
+                needparen = False
+                result = "0L"
+                while j < len(tokens):
+                    if tokens[j].type in self.t_WS:
+                        j += 1
+                        continue
+                    elif tokens[j].type == self.t_ID:
+                        if tokens[j].value in self.macros:
+                            result = "1L"
+                        else:
+                            result = "0L"
+                        if not needparen: break
+                    elif tokens[j].value == '(':
+                        needparen = True
+                    elif tokens[j].value == ')':
+                        break
+                    else:
+                        self.error(self.source,tokens[i].lineno,"Malformed defined()")
+                    j += 1
+                tokens[i].type = self.t_INTEGER
+                tokens[i].value = self.t_INTEGER_TYPE(result)
+                del tokens[i+1:j+1]
+            i += 1
+        tokens = self.expand_macros(tokens)
+        for i,t in enumerate(tokens):
+            if t.type == self.t_ID:
+                tokens[i] = copy.copy(t)
+                tokens[i].type = self.t_INTEGER
+                tokens[i].value = self.t_INTEGER_TYPE("0L")
+            elif t.type == self.t_INTEGER:
+                tokens[i] = copy.copy(t)
+                # Strip off any trailing suffixes
+                tokens[i].value = str(tokens[i].value)
+                while tokens[i].value[-1] not in "0123456789abcdefABCDEF":
+                    tokens[i].value = tokens[i].value[:-1]
+        
+        expr = "".join([str(x.value) for x in tokens])
+        expr = expr.replace("&&"," and ")
+        expr = expr.replace("||"," or ")
+        expr = expr.replace("!"," not ")
+        try:
+            result = eval(expr)
+        except StandardError:
+            self.error(self.source,tokens[0].lineno,"Couldn't evaluate expression")
+            result = 0
+        return result
+
    # ----------------------------------------------------------------------
    # parsegen()
    #
    # Parse an input string.
    # ----------------------------------------------------------------------
    def parsegen(self,input,source=None):
        """Generator: preprocess *input* and yield the resulting tokens.

        source is the filename used for __FILE__ and in error messages.
        """
        # Replace trigraph sequences
        t = trigraph(input)
        lines = self.group_lines(t)

        if not source:
            source = ""

        self.define("__FILE__ \"%s\"" % source)

        self.source = source
        chunk = []            # non-directive tokens accumulated for expansion
        enable = True         # False while inside a false conditional branch
        iftrigger = False     # True once a branch of the current #if chain ran
        ifstack = []          # saved (enable, iftrigger) per nested conditional

        for x in lines:
            # Find the first non-whitespace token on the line.
            for i,tok in enumerate(x):
                if tok.type not in self.t_WS: break
            if tok.value == '#':
                # Preprocessor directive

                # NOTE(review): `tok in self.t_WS` tests a token object against
                # a collection of type names, so this condition never matches;
                # upstream likely intended `tok.type in self.t_WS` -- left
                # unchanged to preserve behavior.
                for tok in x:
                    if tok in self.t_WS and '\n' in tok.value:
                        chunk.append(tok)

                dirtokens = self.tokenstrip(x[i+1:])
                if dirtokens:
                    name = dirtokens[0].value
                    args = self.tokenstrip(dirtokens[1:])
                else:
                    name = ""
                    args = []

                if name == 'define':
                    if enable:
                        for tok in self.expand_macros(chunk):
                            yield tok
                        chunk = []
                        self.define(args)
                elif name == 'include':
                    if enable:
                        for tok in self.expand_macros(chunk):
                            yield tok
                        chunk = []
                        # Preserve __FILE__ / source across the nested file.
                        oldfile = self.macros['__FILE__']
                        for tok in self.include(args):
                            yield tok
                        self.macros['__FILE__'] = oldfile
                        self.source = source
                elif name == 'undef':
                    if enable:
                        for tok in self.expand_macros(chunk):
                            yield tok
                        chunk = []
                        self.undef(args)
                elif name == 'ifdef':
                    ifstack.append((enable,iftrigger))
                    if enable:
                        if not args[0].value in self.macros:
                            enable = False
                            iftrigger = False
                        else:
                            iftrigger = True
                elif name == 'ifndef':
                    ifstack.append((enable,iftrigger))
                    if enable:
                        if args[0].value in self.macros:
                            enable = False
                            iftrigger = False
                        else:
                            iftrigger = True
                elif name == 'if':
                    ifstack.append((enable,iftrigger))
                    if enable:
                        result = self.evalexpr(args)
                        if not result:
                            enable = False
                            iftrigger = False
                        else:
                            iftrigger = True
                elif name == 'elif':
                    if ifstack:
                        if ifstack[-1][0]:     # We only pay attention if outer "if" allows this
                            if enable:         # If already true, we flip enable False
                                enable = False
                            elif not iftrigger:   # If False, but not triggered yet, we'll check expression
                                result = self.evalexpr(args)
                                if result:
                                    enable  = True
                                    iftrigger = True
                    else:
                        self.error(self.source,dirtokens[0].lineno,"Misplaced #elif")

                elif name == 'else':
                    if ifstack:
                        if ifstack[-1][0]:
                            if enable:
                                enable = False
                            elif not iftrigger:
                                enable = True
                                iftrigger = True
                    else:
                        self.error(self.source,dirtokens[0].lineno,"Misplaced #else")

                elif name == 'endif':
                    if ifstack:
                        enable,iftrigger = ifstack.pop()
                    else:
                        self.error(self.source,dirtokens[0].lineno,"Misplaced #endif")
                else:
                    # Unknown preprocessor directive
                    pass

            else:
                # Normal text
                if enable:
                    chunk.extend(x)

        # Flush whatever accumulated after the last directive.
        for tok in self.expand_macros(chunk):
            yield tok
        chunk = []
+
    # ----------------------------------------------------------------------
    # include()
    #
    # Implementation of file-inclusion
    # ----------------------------------------------------------------------

    def include(self,tokens):
        """Process a #include directive, yielding tokens from the included file.

        ``<...>`` includes search self.path before self.temp_path; quoted
        includes search self.temp_path first.  Problems are reported with
        print() (best effort), matching the rest of this module.
        """
        # Try to extract the filename and then process an include file
        if not tokens:
            return
        if tokens:
            # Computed includes: macro-expand unless the form is already
            # <...> or a string literal.
            if tokens[0].value != '<' and tokens[0].type != self.t_STRING:
                tokens = self.expand_macros(tokens)

            if tokens[0].value == '<':
                # Include <...>
                i = 1
                while i < len(tokens):
                    if tokens[i].value == '>':
                        break
                    i += 1
                else:
                    print("Malformed #include <...>")
                    return
                filename = "".join([x.value for x in tokens[1:i]])
                path = self.path + [""] + self.temp_path
            elif tokens[0].type == self.t_STRING:
                filename = tokens[0].value[1:-1]
                path = self.temp_path + [""] + self.path
            else:
                print("Malformed #include statement")
                return
        for p in path:
            iname = os.path.join(p,filename)
            try:
                data = open(iname,"r").read()
                dname = os.path.dirname(iname)
                if dname:
                    # Make nested includes resolve relative to this file.
                    self.temp_path.insert(0,dname)
                for tok in self.parsegen(data,filename):
                    yield tok
                if dname:
                    del self.temp_path[0]
                break
            except IOError:
                pass
        else:
            print("Couldn't find '%s'" % filename)
+
    # ----------------------------------------------------------------------
    # define()
    #
    # Define a new macro
    # ----------------------------------------------------------------------

    def define(self,tokens):
        """Define a macro from a token list or a ``"NAME value"`` string.

        Handles object-like macros, function-like macros ('(' immediately
        after the name) and variadic macros (``...`` or ``NAME...``).
        Malformed definitions are reported with print() (best effort).
        """
        # NOTE(review): `unicode` makes this branch Python-2-only.
        if isinstance(tokens,(str,unicode)):
            tokens = self.tokenize(tokens)

        linetok = tokens
        try:
            name = linetok[0]
            if len(linetok) > 1:
                mtype = linetok[1]
            else:
                mtype = None
            if not mtype:
                # Bare "#define NAME": empty object-like macro.
                m = Macro(name.value,[])
                self.macros[name.value] = m
            elif mtype.type in self.t_WS:
                # A normal macro
                m = Macro(name.value,self.tokenstrip(linetok[2:]))
                self.macros[name.value] = m
            elif mtype.value == '(':
                # A macro with arguments
                tokcount, args, positions = self.collect_args(linetok[1:])
                variadic = False
                for a in args:
                    if variadic:
                        print("No more arguments may follow a variadic argument")
                        break
                    astr = "".join([str(_i.value) for _i in a])
                    if astr == "...":
                        # Anonymous variadic parameter -> __VA_ARGS__.
                        variadic = True
                        a[0].type = self.t_ID
                        a[0].value = '__VA_ARGS__'
                        variadic = True
                        del a[1:]
                        continue
                    elif astr[-3:] == "..." and a[0].type == self.t_ID:
                        # GNU-style named variadic parameter (NAME...).
                        variadic = True
                        del a[1:]
                        # If, for some reason, "." is part of the identifier, strip off the name for the purposes
                        # of macro expansion
                        if a[0].value[-3:] == '...':
                            a[0].value = a[0].value[:-3]
                        continue
                    if len(a) > 1 or a[0].type != self.t_ID:
                        print("Invalid macro argument")
                        break
                else:
                    # All arguments valid: build the replacement list with
                    # whitespace around '##' removed so token pasting works.
                    mvalue = self.tokenstrip(linetok[1+tokcount:])
                    i = 0
                    while i < len(mvalue):
                        if i+1 < len(mvalue):
                            if mvalue[i].type in self.t_WS and mvalue[i+1].value == '##':
                                del mvalue[i]
                                continue
                            elif mvalue[i].value == '##' and mvalue[i+1].type in self.t_WS:
                                del mvalue[i+1]
                        i += 1
                    m = Macro(name.value,mvalue,[x[0].value for x in args],variadic)
                    self.macro_prescan(m)
                    self.macros[name.value] = m
            else:
                print("Bad macro definition")
        except LookupError:
            print("Bad macro definition")
+
+    # ----------------------------------------------------------------------
+    # undef()
+    #
+    # Undefine a macro
+    # ----------------------------------------------------------------------
+
+    def undef(self,tokens):
+        id = tokens[0].value
+        try:
+            del self.macros[id]
+        except LookupError:
+            pass
+
    # ----------------------------------------------------------------------
    # parse()
    #
    # Parse input text.
    # ----------------------------------------------------------------------
    def parse(self,input,source=None,ignore={}):
        """Start preprocessing *input*; retrieve results via token().

        ignore is a collection of token types to suppress in token().
        NOTE(review): ``ignore={}`` is a shared mutable default; it is only
        read (membership tests), so this is safe unless a caller mutates it.
        """
        self.ignore = ignore
        self.parser = self.parsegen(input,source)
+        
    # ----------------------------------------------------------------------
    # token()
    #
    # Method to return individual tokens
    # ----------------------------------------------------------------------
    def token(self):
        """Return the next preprocessed token, or None at end of input.

        Tokens whose type appears in self.ignore are silently skipped.
        """
        try:
            while True:
                tok = next(self.parser)
                if tok.type not in self.ignore: return tok
        except StopIteration:
            # Input exhausted; drop the generator so parse() can be reused.
            self.parser = None
            return None
+
if __name__ == '__main__':
    # Demo driver: preprocess the file named on the command line and dump
    # each resulting token.
    import ply.lex as lex
    # NOTE(review): lex.lex() builds a lexer from rules in the calling
    # context; this demo assumes those rules are supplied when run.
    lexer = lex.lex()

    # Run a preprocessor
    import sys
    f = open(sys.argv[1])
    input = f.read()

    p = Preprocessor(lexer)
    p.parse(input,sys.argv[1])
    while True:
        tok = p.token()
        if not tok: break
        print(p.source, tok)
+
+
+
+
+    
+
+
+
+
+
+
diff --git a/tools/slimit/ply/ctokens.py b/tools/slimit/ply/ctokens.py
new file mode 100644 (file)
index 0000000..dd5f102
--- /dev/null
@@ -0,0 +1,133 @@
# ----------------------------------------------------------------------
# ctokens.py
#
# Token specifications for symbols in ANSI C and C++.  This file is
# meant to be used as a library in other tokenizers.
# ----------------------------------------------------------------------

# Reserved words

# Every name listed here (except TYPEID, which consuming tokenizers
# define themselves) must match a t_<NAME> rule below, otherwise
# ply.lex rejects the token table.  The original list declared
# ICONST/FCONST/SCONST/CCONST/MOD/PLUSPLUS/MINUSMINUS, which did not
# match the rules actually defined in this module.
tokens = [
    # Literals (identifier, integer constant, float constant, string constant, char const)
    'ID', 'TYPEID', 'INTEGER', 'FLOAT', 'STRING', 'CHARACTER',

    # Operators (+,-,*,/,%,|,&,~,^,<<,>>, ||, &&, !, <, <=, >, >=, ==, !=)
    'PLUS', 'MINUS', 'TIMES', 'DIVIDE', 'MODULO',
    'OR', 'AND', 'NOT', 'XOR', 'LSHIFT', 'RSHIFT',
    'LOR', 'LAND', 'LNOT',
    'LT', 'LE', 'GT', 'GE', 'EQ', 'NE',

    # Assignment (=, *=, /=, %=, +=, -=, <<=, >>=, &=, ^=, |=)
    'EQUALS', 'TIMESEQUAL', 'DIVEQUAL', 'MODEQUAL', 'PLUSEQUAL', 'MINUSEQUAL',
    'LSHIFTEQUAL','RSHIFTEQUAL', 'ANDEQUAL', 'XOREQUAL', 'OREQUAL',

    # Increment/decrement (++,--)
    'INCREMENT', 'DECREMENT',

    # Structure dereference (->)
    'ARROW',

    # Ternary operator (?)
    'TERNARY',

    # Delimiters ( ) [ ] { } , . ; :
    'LPAREN', 'RPAREN',
    'LBRACKET', 'RBRACKET',
    'LBRACE', 'RBRACE',
    'COMMA', 'PERIOD', 'SEMI', 'COLON',

    # Ellipsis (...)
    'ELLIPSIS',

    # Comments (C and C++ style)
    'COMMENT', 'CPPCOMMENT',
]

# Operators
t_PLUS             = r'\+'
t_MINUS            = r'-'
t_TIMES            = r'\*'
t_DIVIDE           = r'/'
t_MODULO           = r'%'
t_OR               = r'\|'
t_AND              = r'&'
t_NOT              = r'~'
t_XOR              = r'\^'
t_LSHIFT           = r'<<'
t_RSHIFT           = r'>>'
t_LOR              = r'\|\|'
t_LAND             = r'&&'
t_LNOT             = r'!'
t_LT               = r'<'
t_GT               = r'>'
t_LE               = r'<='
t_GE               = r'>='
t_EQ               = r'=='
t_NE               = r'!='

# Assignment operators

t_EQUALS           = r'='
t_TIMESEQUAL       = r'\*='
t_DIVEQUAL         = r'/='
t_MODEQUAL         = r'%='
t_PLUSEQUAL        = r'\+='
t_MINUSEQUAL       = r'-='
t_LSHIFTEQUAL      = r'<<='
t_RSHIFTEQUAL      = r'>>='
t_ANDEQUAL         = r'&='
t_OREQUAL          = r'\|='
# '^' must be escaped: the original r'^=' was a start-of-string anchor
# followed by '=', which can never match the '^=' operator.
t_XOREQUAL         = r'\^='

# Increment/decrement
t_INCREMENT        = r'\+\+'
t_DECREMENT        = r'--'

# ->
t_ARROW            = r'->'

# ?
t_TERNARY          = r'\?'

# Delimiters
t_LPAREN           = r'\('
t_RPAREN           = r'\)'
t_LBRACKET         = r'\['
t_RBRACKET         = r'\]'
t_LBRACE           = r'\{'
t_RBRACE           = r'\}'
t_COMMA            = r','
t_PERIOD           = r'\.'
t_SEMI             = r';'
t_COLON            = r':'
t_ELLIPSIS         = r'\.\.\.'

# Identifiers
t_ID = r'[A-Za-z_][A-Za-z0-9_]*'

# Integer literal
t_INTEGER = r'\d+([uU]|[lL]|[uU][lL]|[lL][uU])?'

# Floating literal
t_FLOAT = r'((\d+)(\.\d+)(e(\+|-)?(\d+))? | (\d+)e(\+|-)?(\d+))([lL]|[fF])?'

# String literal
t_STRING = r'\"([^\\\n]|(\\.))*?\"'

# Character constant 'c' or L'c'
t_CHARACTER = r'(L)?\'([^\\\n]|(\\.))*?\''

# Comment (C-Style)
def t_COMMENT(t):
    r'/\*(.|\n)*?\*/'
    # Keep line numbers accurate across multi-line comments.
    t.lexer.lineno += t.value.count('\n')
    return t

# Comment (C++-Style)
def t_CPPCOMMENT(t):
    r'//.*\n'
    t.lexer.lineno += 1
    return t
+
+
+    
+
+
+
diff --git a/tools/slimit/ply/lex.py b/tools/slimit/ply/lex.py
new file mode 100644 (file)
index 0000000..bd32da9
--- /dev/null
@@ -0,0 +1,1058 @@
+# -----------------------------------------------------------------------------
+# ply: lex.py
+#
+# Copyright (C) 2001-2011,
+# David M. Beazley (Dabeaz LLC)
+# All rights reserved.
+#
+# Redistribution and use in source and binary forms, with or without
+# modification, are permitted provided that the following conditions are
+# met:
+# 
+# * Redistributions of source code must retain the above copyright notice,
+#   this list of conditions and the following disclaimer.  
+# * Redistributions in binary form must reproduce the above copyright notice, 
+#   this list of conditions and the following disclaimer in the documentation
+#   and/or other materials provided with the distribution.  
+# * Neither the name of the David Beazley or Dabeaz LLC may be used to
+#   endorse or promote products derived from this software without
+#  specific prior written permission. 
+#
+# THIS SOFTWARE IS PROVIDED BY THE COPYRIGHT HOLDERS AND CONTRIBUTORS
+# "AS IS" AND ANY EXPRESS OR IMPLIED WARRANTIES, INCLUDING, BUT NOT
+# LIMITED TO, THE IMPLIED WARRANTIES OF MERCHANTABILITY AND FITNESS FOR
+# A PARTICULAR PURPOSE ARE DISCLAIMED. IN NO EVENT SHALL THE COPYRIGHT
+# OWNER OR CONTRIBUTORS BE LIABLE FOR ANY DIRECT, INDIRECT, INCIDENTAL,
+# SPECIAL, EXEMPLARY, OR CONSEQUENTIAL DAMAGES (INCLUDING, BUT NOT
+# LIMITED TO, PROCUREMENT OF SUBSTITUTE GOODS OR SERVICES; LOSS OF USE,
+# DATA, OR PROFITS; OR BUSINESS INTERRUPTION) HOWEVER CAUSED AND ON ANY
+# THEORY OF LIABILITY, WHETHER IN CONTRACT, STRICT LIABILITY, OR TORT
+# (INCLUDING NEGLIGENCE OR OTHERWISE) ARISING IN ANY WAY OUT OF THE USE
+# OF THIS SOFTWARE, EVEN IF ADVISED OF THE POSSIBILITY OF SUCH DAMAGE.
+# -----------------------------------------------------------------------------
+
__version__    = "3.4"
__tabversion__ = "3.2"       # Version of table file used

import re, sys, types, copy, os

# This tuple contains known string types
try:
    # Python 2.6
    StringTypes = (types.StringType, types.UnicodeType)
except AttributeError:
    # Python 3.0
    StringTypes = (str, bytes)

# Extract the code attribute of a function. Different implementations
# are for Python 2/3 compatibility.

if sys.version_info[0] < 3:
    def func_code(f):
        # Python 2 spelling of the code object attribute.
        return f.func_code
else:
    def func_code(f):
        # Python 3 spelling of the code object attribute.
        return f.__code__

# This regular expression is used to match valid token names
_is_identifier = re.compile(r'^[a-zA-Z0-9_]+$')
+
# Exception thrown when invalid token encountered and no default error
# handler is defined.

class LexError(Exception):
    """Raised on illegal input when the lexer has no error rule."""

    def __init__(self, message, s):
        # Let Exception set self.args = (message,); keep the unconsumed
        # input text around for the caller to inspect.
        super(LexError, self).__init__(message)
        self.text = s
+
# Token class.  This class is used to represent the tokens produced.
class LexToken(object):
    """A single token; carries .type, .value, .lineno and .lexpos attributes."""

    def __str__(self):
        return "LexToken(%s,%r,%d,%d)" % (
            self.type, self.value, self.lineno, self.lexpos)

    # repr and str render identically.
    __repr__ = __str__
+
# This object is a stand-in for a logging object created by the
# logging module.

class PlyLogger(object):
    """Minimal logging.Logger look-alike that writes records to a file object."""

    def __init__(self, f):
        self.f = f

    def _emit(self, prefix, msg, args):
        # Single write per record; lazy %-formatting as in the logging module.
        self.f.write(prefix + (msg % args) + "\n")

    def critical(self, msg, *args, **kwargs):
        self._emit("", msg, args)

    def warning(self, msg, *args, **kwargs):
        self._emit("WARNING: ", msg, args)

    def error(self, msg, *args, **kwargs):
        self._emit("ERROR: ", msg, args)

    # info/debug carry no severity prefix, exactly like critical.
    info = critical
    debug = critical
+
# Null logger is used when no output is generated. Does nothing.
class NullLogger(object):
    """Sink logger: every attribute access and every call yields the logger."""

    def __getattribute__(self, name):
        # Swallow any attribute lookup (info, warning, ...) by answering
        # with the logger itself, which is in turn callable.
        return self

    def __call__(self, *args, **kwargs):
        return self
+
+# -----------------------------------------------------------------------------
+#                        === Lexing Engine ===
+#
+# The following Lexer class implements the lexer runtime.   There are only
+# a few public methods and attributes:
+#
+#    input()          -  Store a new string in the lexer
+#    token()          -  Get the next token
+#    clone()          -  Clone the lexer
+#
+#    lineno           -  Current line number
+#    lexpos           -  Current position in the input string
+# -----------------------------------------------------------------------------
+
+class Lexer:
    def __init__(self):
        """Create an empty lexer; lex() populates the tables before use."""
        self.lexre = None             # Master regular expression. This is a list of
                                      # tuples (re,findex) where re is a compiled
                                      # regular expression and findex is a list
                                      # mapping regex group numbers to rules
        self.lexretext = None         # Current regular expression strings
        self.lexstatere = {}          # Dictionary mapping lexer states to master regexs
        self.lexstateretext = {}      # Dictionary mapping lexer states to regex strings
        self.lexstaterenames = {}     # Dictionary mapping lexer states to symbol names
        self.lexstate = "INITIAL"     # Current lexer state
        self.lexstatestack = []       # Stack of lexer states
        self.lexstateinfo = None      # State information
        self.lexstateignore = {}      # Dictionary of ignored characters for each state
        self.lexstateerrorf = {}      # Dictionary of error functions for each state
        self.lexreflags = 0           # Optional re compile flags
        self.lexdata = None           # Actual input data (as a string)
        self.lexpos = 0               # Current position in input text
        self.lexlen = 0               # Length of the input text
        self.lexerrorf = None         # Error rule (if any)
        self.lextokens = None         # List of valid tokens
        self.lexignore = ""           # Ignored characters
        self.lexliterals = ""         # Literal characters that can be passed through
        self.lexmodule = None         # Module
        self.lineno = 1               # Current line number
        self.lexoptimize = 0          # Optimized mode
+
+    def clone(self,object=None):
+        c = copy.copy(self)
+
+        # If the object parameter has been supplied, it means we are attaching the
+        # lexer to a new object.  In this case, we have to rebind all methods in
+        # the lexstatere and lexstateerrorf tables.
+
+        if object:
+            newtab = { }
+            for key, ritem in self.lexstatere.items():
+                newre = []
+                for cre, findex in ritem:
+                     newfindex = []
+                     for f in findex:
+                         if not f or not f[0]:
+                             newfindex.append(f)
+                             continue
+                         newfindex.append((getattr(object,f[0].__name__),f[1]))
+                newre.append((cre,newfindex))
+                newtab[key] = newre
+            c.lexstatere = newtab
+            c.lexstateerrorf = { }
+            for key, ef in self.lexstateerrorf.items():
+                c.lexstateerrorf[key] = getattr(object,ef.__name__)
+            c.lexmodule = object
+        return c
+
    # ------------------------------------------------------------
    # writetab() - Write lexer information to a table file
    # ------------------------------------------------------------
    def writetab(self,tabfile,outputdir=""):
        """Serialize the lexer tables to <outputdir>/<tabfile>.py.

        No-op when tabfile is an already-imported module object.
        """
        if isinstance(tabfile,types.ModuleType):
            return
        basetabfilename = tabfile.split(".")[-1]
        filename = os.path.join(outputdir,basetabfilename)+".py"
        tf = open(filename,"w")
        # NOTE(review): tf is not closed on an intermediate exception; a
        # `with` block would be safer, though this is a build-time tool.
        tf.write("# %s.py. This file automatically created by PLY (version %s). Don't edit!\n" % (tabfile,__version__))
        tf.write("_tabversion   = %s\n" % repr(__version__))
        tf.write("_lextokens    = %s\n" % repr(self.lextokens))
        tf.write("_lexreflags   = %s\n" % repr(self.lexreflags))
        tf.write("_lexliterals  = %s\n" % repr(self.lexliterals))
        tf.write("_lexstateinfo = %s\n" % repr(self.lexstateinfo))

        tabre = { }
        # Collect all functions in the initial state
        initial = self.lexstatere["INITIAL"]
        initialfuncs = []
        for part in initial:
            for f in part[1]:
                if f and f[0]:
                    initialfuncs.append(f)

        # Function objects cannot be serialized directly; store their names
        # and resolve them again in readtab().
        for key, lre in self.lexstatere.items():
             titem = []
             for i in range(len(lre)):
                  titem.append((self.lexstateretext[key][i],_funcs_to_names(lre[i][1],self.lexstaterenames[key][i])))
             tabre[key] = titem

        tf.write("_lexstatere   = %s\n" % repr(tabre))
        tf.write("_lexstateignore = %s\n" % repr(self.lexstateignore))

        taberr = { }
        for key, ef in self.lexstateerrorf.items():
             if ef:
                  taberr[key] = ef.__name__
             else:
                  taberr[key] = None
        tf.write("_lexstateerrorf = %s\n" % repr(taberr))
        tf.close()
+
    # ------------------------------------------------------------
    # readtab() - Read lexer information from a tab file
    # ------------------------------------------------------------
    def readtab(self,tabfile,fdict):
        """Rebuild lexer tables from a module object or importable table name.

        fdict maps stored function names back to callables (the inverse of
        what writetab() recorded).  Raises ImportError when the table was
        written by a different PLY version.
        """
        if isinstance(tabfile,types.ModuleType):
            lextab = tabfile
        else:
            # Table given by name: exec-import because the module name is
            # only known at runtime.
            if sys.version_info[0] < 3:
                exec("import %s as lextab" % tabfile)
            else:
                env = { }
                exec("import %s as lextab" % tabfile, env,env)
                lextab = env['lextab']

        if getattr(lextab,"_tabversion","0.0") != __version__:
            raise ImportError("Inconsistent PLY version")

        self.lextokens      = lextab._lextokens
        self.lexreflags     = lextab._lexreflags
        self.lexliterals    = lextab._lexliterals
        self.lexstateinfo   = lextab._lexstateinfo
        self.lexstateignore = lextab._lexstateignore
        self.lexstatere     = { }
        self.lexstateretext = { }
        for key,lre in lextab._lexstatere.items():
             titem = []
             txtitem = []
             for i in range(len(lre)):
                  # Recompile each master regex and rebind its rule functions.
                  titem.append((re.compile(lre[i][0],lextab._lexreflags | re.VERBOSE),_names_to_funcs(lre[i][1],fdict)))
                  txtitem.append(lre[i][0])
             self.lexstatere[key] = titem
             self.lexstateretext[key] = txtitem
        self.lexstateerrorf = { }
        for key,ef in lextab._lexstateerrorf.items():
             self.lexstateerrorf[key] = fdict[ef]
        self.begin('INITIAL')
+
+    # ------------------------------------------------------------
+    # input() - Push a new string into the lexer
+    # ------------------------------------------------------------
+    def input(self,s):
+        # Pull off the first character to see if s looks like a string
+        c = s[:1]
+        if not isinstance(c,StringTypes):
+            raise ValueError("Expected a string")
+        self.lexdata = s
+        self.lexpos = 0
+        self.lexlen = len(s)
+
    # ------------------------------------------------------------
    # begin() - Changes the lexing state
    # ------------------------------------------------------------
    def begin(self,state):
        # Swap in the master regexes, ignore set and error handler that
        # belong to the requested state.
        if not state in self.lexstatere:
            raise ValueError("Undefined state")
        self.lexre = self.lexstatere[state]
        self.lexretext = self.lexstateretext[state]
        self.lexignore = self.lexstateignore.get(state,"")
        self.lexerrorf = self.lexstateerrorf.get(state,None)
        self.lexstate = state
+
    # ------------------------------------------------------------
    # push_state() - Changes the lexing state and saves old on stack
    # ------------------------------------------------------------
    def push_state(self,state):
        # Save the current state so pop_state() can restore it later.
        self.lexstatestack.append(self.lexstate)
        self.begin(state)
+
+    # ------------------------------------------------------------
+    # pop_state() - Restores the previous state
+    # ------------------------------------------------------------
+    def pop_state(self):
+        self.begin(self.lexstatestack.pop())
+
+    # ------------------------------------------------------------
+    # current_state() - Returns the current lexing state
+    # ------------------------------------------------------------
+    def current_state(self):
+        return self.lexstate
+
+    # ------------------------------------------------------------
+    # skip() - Skip ahead n characters
+    # ------------------------------------------------------------
+    def skip(self,n):
+        self.lexpos += n
+
+    # ------------------------------------------------------------
+    # opttoken() - Return the next token from the Lexer
+    #
+    # Note: This function has been carefully implemented to be as fast
+    # as possible.  Don't make changes unless you really know what
+    # you are doing
+    # ------------------------------------------------------------
    def token(self):
        """Return the next LexToken from the input, or None at end of input.

        Matching order: ignored characters are skipped, then each master
        regex for the current state is tried in sequence; literals and the
        t_error rule are consulted only when no regex matches.  Raises
        LexError on an illegal character when no t_error rule is defined.
        """
        # Make local copies of frequently referenced attributes
        lexpos    = self.lexpos
        lexlen    = self.lexlen
        lexignore = self.lexignore
        lexdata   = self.lexdata

        while lexpos < lexlen:
            # This code provides some short-circuit code for whitespace, tabs, and other ignored characters
            if lexdata[lexpos] in lexignore:
                lexpos += 1
                continue

            # Look for a regular expression match
            for lexre,lexindexfunc in self.lexre:
                m = lexre.match(lexdata,lexpos)
                if not m: continue

                # Create a token for return
                tok = LexToken()
                tok.value = m.group()
                tok.lineno = self.lineno
                tok.lexpos = lexpos

                # lastindex identifies which named group matched; the table
                # maps it to the rule function (if any) and the token type.
                i = m.lastindex
                func,tok.type = lexindexfunc[i]

                if not func:
                   # If no token type was set, it's an ignored token
                   if tok.type:
                      self.lexpos = m.end()
                      return tok
                   else:
                      lexpos = m.end()
                      break

                lexpos = m.end()

                # If token is processed by a function, call it

                tok.lexer = self      # Set additional attributes useful in token rules
                self.lexmatch = m
                self.lexpos = lexpos

                newtok = func(tok)

                # Every function must return a token, if nothing, we just move to next token
                if not newtok:
                    lexpos    = self.lexpos         # This is here in case user has updated lexpos.
                    lexignore = self.lexignore      # This is here in case there was a state change
                    break

                # Verify type of the token.  If not in the token map, raise an error
                if not self.lexoptimize:
                    if not newtok.type in self.lextokens:
                        raise LexError("%s:%d: Rule '%s' returned an unknown token type '%s'" % (
                            func_code(func).co_filename, func_code(func).co_firstlineno,
                            func.__name__, newtok.type),lexdata[lexpos:])

                return newtok
            # for/else: executed only when no master regex matched at lexpos.
            else:
                # No match, see if in literals
                if lexdata[lexpos] in self.lexliterals:
                    tok = LexToken()
                    tok.value = lexdata[lexpos]
                    tok.lineno = self.lineno
                    tok.type = tok.value
                    tok.lexpos = lexpos
                    self.lexpos = lexpos + 1
                    return tok

                # No match. Call t_error() if defined.
                if self.lexerrorf:
                    tok = LexToken()
                    tok.value = self.lexdata[lexpos:]
                    tok.lineno = self.lineno
                    tok.type = "error"
                    tok.lexer = self
                    tok.lexpos = lexpos
                    self.lexpos = lexpos
                    newtok = self.lexerrorf(tok)
                    if lexpos == self.lexpos:
                        # Error method didn't change text position at all. This is an error.
                        raise LexError("Scanning error. Illegal character '%s'" % (lexdata[lexpos]), lexdata[lexpos:])
                    lexpos = self.lexpos
                    if not newtok: continue
                    return newtok

                self.lexpos = lexpos
                raise LexError("Illegal character '%s' at index %d" % (lexdata[lexpos],lexpos), lexdata[lexpos:])

        # End of input reached.
        self.lexpos = lexpos + 1
        if self.lexdata is None:
             raise RuntimeError("No input string given with input()")
        return None
+
+    # Iterator interface
    def __iter__(self):
        """Lexer objects iterate over their own token stream."""
        return self
+
+    def next(self):
+        t = self.token()
+        if t is None:
+            raise StopIteration
+        return t
+
+    __next__ = next
+
+# -----------------------------------------------------------------------------
+#                           ==== Lex Builder ===
+#
+# The functions and classes below are used to collect lexing information
+# and build a Lexer object from it.
+# -----------------------------------------------------------------------------
+
+# -----------------------------------------------------------------------------
+# get_caller_module_dict()
+#
+# This function returns a dictionary containing all of the symbols defined within
+# a caller further down the call stack.  This is used to get the environment
+# associated with the yacc() call if none was provided.
+# -----------------------------------------------------------------------------
+
def get_caller_module_dict(levels):
    """Return the symbol table of a frame `levels` up the call stack.

    The result is the frame's globals, overlaid with its locals when the two
    dictionaries differ (i.e. the caller is not module-level code).  Used to
    pick up token/grammar definitions when no module argument is supplied.
    """
    # Raise and catch an exception purely to obtain a traceback whose frame
    # is this function's own; then walk back `levels` callers.
    try:
        raise RuntimeError
    except RuntimeError:
        frame = sys.exc_info()[2].tb_frame
    for _ in range(levels):
        frame = frame.f_back
    symbols = frame.f_globals.copy()
    if frame.f_globals != frame.f_locals:
        symbols.update(frame.f_locals)
    return symbols
+
+# -----------------------------------------------------------------------------
+# _funcs_to_names()
+#
+# Given a list of regular expression functions, this converts it to a list
+# suitable for output to a table file
+# -----------------------------------------------------------------------------
+
+def _funcs_to_names(funclist,namelist):
+    result = []
+    for f,name in zip(funclist,namelist):
+         if f and f[0]:
+             result.append((name, f[1]))
+         else:
+             result.append(f)
+    return result
+
+# -----------------------------------------------------------------------------
+# _names_to_funcs()
+#
+# Given a list of regular expression function names, this converts it back to
+# functions.
+# -----------------------------------------------------------------------------
+
+def _names_to_funcs(namelist,fdict):
+     result = []
+     for n in namelist:
+          if n and n[0]:
+              result.append((fdict[n[0]],n[1]))
+          else:
+              result.append(n)
+     return result
+
+# -----------------------------------------------------------------------------
+# _form_master_re()
+#
+# This function takes a list of all of the regex components and attempts to
+# form the master regular expression.  Given limitations in the Python re
+# module, it may be necessary to break the master regex into separate expressions.
+# -----------------------------------------------------------------------------
+
def _form_master_re(relist,reflags,ldict,toknames):
    """Combine the individual rule regexes in `relist` into one master regex.

    Returns a triple of parallel lists: compiled (regex, index->rule) pairs,
    the regex source strings, and the per-group rule names.  If compilation
    fails (e.g. the historical ~100 named-group limit of the re module), the
    list is split in half and each half is retried recursively.

    NOTE(review): the empty-input path returns a bare [] rather than a
    three-tuple of empty lists — presumably callers never pass an empty
    relist; verify before relying on it.
    """
    if not relist: return []
    regex = "|".join(relist)
    try:
        lexre = re.compile(regex,re.VERBOSE | reflags)

        # Build the index to function map for the matching engine
        lexindexfunc = [ None ] * (max(lexre.groupindex.values())+1)
        lexindexnames = lexindexfunc[:]

        for f,i in lexre.groupindex.items():
            handle = ldict.get(f,None)
            if type(handle) in (types.FunctionType, types.MethodType):
                lexindexfunc[i] = (handle,toknames[f])
                lexindexnames[i] = f
            elif handle is not None:
                lexindexnames[i] = f
                # t_*ignore_* string rules match but produce no token.
                if f.find("ignore_") > 0:
                    lexindexfunc[i] = (None,None)
                else:
                    lexindexfunc[i] = (None, toknames[f])

        return [(lexre,lexindexfunc)],[regex],[lexindexnames]
    except Exception:
        # Too many groups (or other compile failure): divide and conquer.
        m = int(len(relist)/2)
        if m == 0: m = 1
        llist, lre, lnames = _form_master_re(relist[:m],reflags,ldict,toknames)
        rlist, rre, rnames = _form_master_re(relist[m:],reflags,ldict,toknames)
        return llist+rlist, lre+rre, lnames+rnames
+
+# -----------------------------------------------------------------------------
+# def _statetoken(s,names)
+#
+# Given a declaration name s of the form "t_" and a dictionary whose keys are
+# state names, this function returns a tuple (states,tokenname) where states
+# is a tuple of state names and tokenname is the name of the token.  For example,
+# calling this with s = "t_foo_bar_SPAM" might return (('foo','bar'),'SPAM')
+# -----------------------------------------------------------------------------
+
+def _statetoken(s,names):
+    nonstate = 1
+    parts = s.split("_")
+    for i in range(1,len(parts)):
+         if not parts[i] in names and parts[i] != 'ANY': break
+    if i > 1:
+       states = tuple(parts[1:i])
+    else:
+       states = ('INITIAL',)
+
+    if 'ANY' in states:
+       states = tuple(names)
+
+    tokenname = "_".join(parts[i:])
+    return (states,tokenname)
+
+
+# -----------------------------------------------------------------------------
+# LexerReflect()
+#
+# This class represents information needed to build a lexer as extracted from a
+# user's input file.
+# -----------------------------------------------------------------------------
class LexerReflect(object):
    """Collects and validates lexer specifications (tokens, literals, states
    and t_* rules) extracted from a user-supplied module dictionary.

    Usage: construct with the module dict, call get_all() to harvest the
    specification, then validate_all() which returns nonzero on error.
    """
    def __init__(self,ldict,log=None,reflags=0):
        self.ldict      = ldict
        self.error_func = None
        self.tokens     = []
        self.reflags    = reflags
        self.stateinfo  = { 'INITIAL' : 'inclusive'}
        self.files      = {}
        self.error      = 0

        if log is None:
            self.log = PlyLogger(sys.stderr)
        else:
            self.log = log

    # Get all of the basic information
    def get_all(self):
        """Harvest tokens, literals, states and rules from the module dict."""
        self.get_tokens()
        self.get_literals()
        self.get_states()
        self.get_rules()

    # Validate all of the information
    def validate_all(self):
        """Run every validation pass; return the accumulated error flag."""
        self.validate_tokens()
        self.validate_literals()
        self.validate_rules()
        return self.error

    # Get the tokens map
    def get_tokens(self):
        """Read the required `tokens` list/tuple from the module dict."""
        tokens = self.ldict.get("tokens",None)
        if not tokens:
            self.log.error("No token list is defined")
            self.error = 1
            return

        if not isinstance(tokens,(list, tuple)):
            self.log.error("tokens must be a list or tuple")
            self.error = 1
            return

        if not tokens:
            self.log.error("tokens is empty")
            self.error = 1
            return

        self.tokens = tokens

    # Validate the tokens
    def validate_tokens(self):
        """Check token names are valid identifiers; warn on duplicates."""
        terminals = {}
        for n in self.tokens:
            if not _is_identifier.match(n):
                self.log.error("Bad token name '%s'",n)
                self.error = 1
            if n in terminals:
                self.log.warning("Token '%s' multiply defined", n)
            terminals[n] = 1

    # Get the literals specifier
    def get_literals(self):
        """Read the optional `literals` specification (defaults to "")."""
        self.literals = self.ldict.get("literals","")

    # Validate literals
    def validate_literals(self):
        """Check that every literal is a single-character string."""
        try:
            for c in self.literals:
                if not isinstance(c,StringTypes) or len(c) > 1:
                    self.log.error("Invalid literal %s. Must be a single character", repr(c))
                    self.error = 1
                    continue

        except TypeError:
            self.log.error("Invalid literals specification. literals must be a sequence of characters")
            self.error = 1

    def get_states(self):
        """Read and validate the optional `states` declaration, populating
        self.stateinfo with {name: 'inclusive'|'exclusive'} entries."""
        self.states = self.ldict.get("states",None)
        # Build statemap
        if self.states:
             if not isinstance(self.states,(tuple,list)):
                  self.log.error("states must be defined as a tuple or list")
                  self.error = 1
             else:
                  for s in self.states:
                        if not isinstance(s,tuple) or len(s) != 2:
                               self.log.error("Invalid state specifier %s. Must be a tuple (statename,'exclusive|inclusive')",repr(s))
                               self.error = 1
                               continue
                        name, statetype = s
                        if not isinstance(name,StringTypes):
                               self.log.error("State name %s must be a string", repr(name))
                               self.error = 1
                               continue
                        if not (statetype == 'inclusive' or statetype == 'exclusive'):
                               self.log.error("State type for state %s must be 'inclusive' or 'exclusive'",name)
                               self.error = 1
                               continue
                        if name in self.stateinfo:
                               self.log.error("State '%s' already defined",name)
                               self.error = 1
                               continue
                        self.stateinfo[name] = statetype

    # Get all of the symbols with a t_ prefix and sort them into various
    # categories (functions, strings, error functions, and ignore characters)

    def get_rules(self):
        """Partition all t_* symbols into per-state function rules, string
        rules, error handlers and ignore specifications, sorted for the
        master-regex builder (functions by line number, strings longest
        first)."""
        tsymbols = [f for f in self.ldict if f[:2] == 't_' ]

        # Now build up a list of functions and a list of strings

        self.toknames = { }        # Mapping of symbols to token names
        self.funcsym =  { }        # Symbols defined as functions
        self.strsym =   { }        # Symbols defined as strings
        self.ignore   = { }        # Ignore strings by state
        self.errorf   = { }        # Error functions by state

        for s in self.stateinfo:
             self.funcsym[s] = []
             self.strsym[s] = []

        if len(tsymbols) == 0:
            self.log.error("No rules of the form t_rulename are defined")
            self.error = 1
            return

        for f in tsymbols:
            t = self.ldict[f]
            states, tokname = _statetoken(f,self.stateinfo)
            self.toknames[f] = tokname

            if hasattr(t,"__call__"):
                if tokname == 'error':
                    for s in states:
                        self.errorf[s] = t
                elif tokname == 'ignore':
                    line = func_code(t).co_firstlineno
                    file = func_code(t).co_filename
                    self.log.error("%s:%d: Rule '%s' must be defined as a string",file,line,t.__name__)
                    self.error = 1
                else:
                    for s in states:
                        self.funcsym[s].append((f,t))
            elif isinstance(t, StringTypes):
                if tokname == 'ignore':
                    for s in states:
                        self.ignore[s] = t
                    if "\\" in t:
                        self.log.warning("%s contains a literal backslash '\\'",f)

                elif tokname == 'error':
                    self.log.error("Rule '%s' must be defined as a function", f)
                    self.error = 1
                else:
                    for s in states:
                        self.strsym[s].append((f,t))
            else:
                self.log.error("%s not defined as a function or string", f)
                self.error = 1

        # Sort the functions by line number
        for f in self.funcsym.values():
            if sys.version_info[0] < 3:
                f.sort(lambda x,y: cmp(func_code(x[1]).co_firstlineno,func_code(y[1]).co_firstlineno))
            else:
                # Python 3.0
                f.sort(key=lambda x: func_code(x[1]).co_firstlineno)

        # Sort the strings by regular expression length
        for s in self.strsym.values():
            if sys.version_info[0] < 3:
                s.sort(lambda x,y: (len(x[1]) < len(y[1])) - (len(x[1]) > len(y[1])))
            else:
                # Python 3.0
                s.sort(key=lambda x: len(x[1]),reverse=True)

    # Validate all of the t_rules collected
    def validate_rules(self):
        """Check every collected rule: argument counts for functions, regex
        validity and non-empty-match for both function and string rules, and
        that each state has at least one rule and a sane error handler."""
        for state in self.stateinfo:
            # Validate all rules defined by functions



            for fname, f in self.funcsym[state]:
                line = func_code(f).co_firstlineno
                file = func_code(f).co_filename
                self.files[file] = 1

                tokname = self.toknames[fname]
                if isinstance(f, types.MethodType):
                    reqargs = 2
                else:
                    reqargs = 1
                nargs = func_code(f).co_argcount
                if nargs > reqargs:
                    self.log.error("%s:%d: Rule '%s' has too many arguments",file,line,f.__name__)
                    self.error = 1
                    continue

                if nargs < reqargs:
                    self.log.error("%s:%d: Rule '%s' requires an argument", file,line,f.__name__)
                    self.error = 1
                    continue

                if not f.__doc__:
                    self.log.error("%s:%d: No regular expression defined for rule '%s'",file,line,f.__name__)
                    self.error = 1
                    continue

                try:
                    c = re.compile("(?P<%s>%s)" % (fname,f.__doc__), re.VERBOSE | self.reflags)
                    if c.match(""):
                        self.log.error("%s:%d: Regular expression for rule '%s' matches empty string", file,line,f.__name__)
                        self.error = 1
                except re.error:
                    _etype, e, _etrace = sys.exc_info()
                    self.log.error("%s:%d: Invalid regular expression for rule '%s'. %s", file,line,f.__name__,e)
                    if '#' in f.__doc__:
                        self.log.error("%s:%d. Make sure '#' in rule '%s' is escaped with '\\#'",file,line, f.__name__)
                    self.error = 1

            # Validate all rules defined by strings
            for name,r in self.strsym[state]:
                tokname = self.toknames[name]
                if tokname == 'error':
                    self.log.error("Rule '%s' must be defined as a function", name)
                    self.error = 1
                    continue

                if not tokname in self.tokens and tokname.find("ignore_") < 0:
                    self.log.error("Rule '%s' defined for an unspecified token %s",name,tokname)
                    self.error = 1
                    continue

                try:
                    c = re.compile("(?P<%s>%s)" % (name,r),re.VERBOSE | self.reflags)
                    if (c.match("")):
                         self.log.error("Regular expression for rule '%s' matches empty string",name)
                         self.error = 1
                except re.error:
                    _etype, e, _etrace = sys.exc_info()
                    self.log.error("Invalid regular expression for rule '%s'. %s",name,e)
                    if '#' in r:
                         self.log.error("Make sure '#' in rule '%s' is escaped with '\\#'",name)
                    self.error = 1

            if not self.funcsym[state] and not self.strsym[state]:
                self.log.error("No rules defined for state '%s'",state)
                self.error = 1

            # Validate the error function
            efunc = self.errorf.get(state,None)
            if efunc:
                f = efunc
                line = func_code(f).co_firstlineno
                file = func_code(f).co_filename
                self.files[file] = 1

                if isinstance(f, types.MethodType):
                    reqargs = 2
                else:
                    reqargs = 1
                nargs = func_code(f).co_argcount
                if nargs > reqargs:
                    self.log.error("%s:%d: Rule '%s' has too many arguments",file,line,f.__name__)
                    self.error = 1

                if nargs < reqargs:
                    self.log.error("%s:%d: Rule '%s' requires an argument", file,line,f.__name__)
                    self.error = 1

        for f in self.files:
            self.validate_file(f)


    # -----------------------------------------------------------------------------
    # validate_file()
    #
    # This checks to see if there are duplicated t_rulename() functions or strings
    # in the parser input file.  This is done using a simple regular expression
    # match on each line in the given file.
    # -----------------------------------------------------------------------------

    def validate_file(self,filename):
        """Scan `filename` for duplicate t_* definitions (textual check only;
        silently skips non-.py or unreadable files)."""
        import os.path
        base,ext = os.path.splitext(filename)
        if ext != '.py': return         # No idea what the file is. Return OK

        try:
            f = open(filename)
            lines = f.readlines()
            f.close()
        except IOError:
            return                      # Couldn't find the file.  Don't worry about it

        fre = re.compile(r'\s*def\s+(t_[a-zA-Z_0-9]*)\(')
        sre = re.compile(r'\s*(t_[a-zA-Z_0-9]*)\s*=')

        counthash = { }
        linen = 1
        for l in lines:
            m = fre.match(l)
            if not m:
                m = sre.match(l)
            if m:
                name = m.group(1)
                prev = counthash.get(name)
                if not prev:
                    counthash[name] = linen
                else:
                    self.log.error("%s:%d: Rule %s redefined. Previously defined on line %d",filename,linen,name,prev)
                    self.error = 1
            linen += 1
+            
+# -----------------------------------------------------------------------------
+# lex(module)
+#
+# Build all of the regular expression rules from definitions in the supplied module
+# -----------------------------------------------------------------------------
def lex(module=None,object=None,debug=0,optimize=0,lextab="lextab",reflags=0,nowarn=0,outputdir="", debuglog=None, errorlog=None):
    """Build and return a Lexer from t_* rule definitions.

    Rules are taken from `module`/`object` if given, otherwise from the
    caller's namespace.  With optimize=1 a precomputed `lextab` module is
    loaded (and written via writetab) instead of validating/rebuilding the
    master regexes.  Also installs module-level token()/input()/lexer
    aliases for the newly built lexer.  Raises SyntaxError when the rule
    specification fails validation.
    """
    global lexer
    ldict = None
    stateinfo  = { 'INITIAL' : 'inclusive'}
    lexobj = Lexer()
    lexobj.lexoptimize = optimize
    global token,input

    if errorlog is None:
        errorlog = PlyLogger(sys.stderr)

    if debug:
        if debuglog is None:
            debuglog = PlyLogger(sys.stderr)

    # Get the module dictionary used for the lexer
    if object: module = object

    if module:
        _items = [(k,getattr(module,k)) for k in dir(module)]
        ldict = dict(_items)
    else:
        ldict = get_caller_module_dict(2)

    # Collect parser information from the dictionary
    linfo = LexerReflect(ldict,log=errorlog,reflags=reflags)
    linfo.get_all()
    if not optimize:
        if linfo.validate_all():
            raise SyntaxError("Can't build lexer")

    # Fast path: load a previously generated table module if available.
    if optimize and lextab:
        try:
            lexobj.readtab(lextab,ldict)
            token = lexobj.token
            input = lexobj.input
            lexer = lexobj
            return lexobj

        except ImportError:
            pass

    # Dump some basic debugging information
    if debug:
        debuglog.info("lex: tokens   = %r", linfo.tokens)
        debuglog.info("lex: literals = %r", linfo.literals)
        debuglog.info("lex: states   = %r", linfo.stateinfo)

    # Build a dictionary of valid token names
    lexobj.lextokens = { }
    for n in linfo.tokens:
        lexobj.lextokens[n] = 1

    # Get literals specification
    if isinstance(linfo.literals,(list,tuple)):
        lexobj.lexliterals = type(linfo.literals[0])().join(linfo.literals)
    else:
        lexobj.lexliterals = linfo.literals

    # Get the stateinfo dictionary
    stateinfo = linfo.stateinfo

    regexs = { }
    # Build the master regular expressions
    for state in stateinfo:
        regex_list = []

        # Add rules defined by functions first
        for fname, f in linfo.funcsym[state]:
            line = func_code(f).co_firstlineno
            file = func_code(f).co_filename
            regex_list.append("(?P<%s>%s)" % (fname,f.__doc__))
            if debug:
                debuglog.info("lex: Adding rule %s -> '%s' (state '%s')",fname,f.__doc__, state)

        # Now add all of the simple rules
        for name,r in linfo.strsym[state]:
            regex_list.append("(?P<%s>%s)" % (name,r))
            if debug:
                debuglog.info("lex: Adding rule %s -> '%s' (state '%s')",name,r, state)

        regexs[state] = regex_list

    # Build the master regular expressions

    if debug:
        debuglog.info("lex: ==== MASTER REGEXS FOLLOW ====")

    for state in regexs:
        lexre, re_text, re_names = _form_master_re(regexs[state],reflags,ldict,linfo.toknames)
        lexobj.lexstatere[state] = lexre
        lexobj.lexstateretext[state] = re_text
        lexobj.lexstaterenames[state] = re_names
        if debug:
            for i in range(len(re_text)):
                debuglog.info("lex: state '%s' : regex[%d] = '%s'",state, i, re_text[i])

    # For inclusive states, we need to add the regular expressions from the INITIAL state
    for state,stype in stateinfo.items():
        if state != "INITIAL" and stype == 'inclusive':
             lexobj.lexstatere[state].extend(lexobj.lexstatere['INITIAL'])
             lexobj.lexstateretext[state].extend(lexobj.lexstateretext['INITIAL'])
             lexobj.lexstaterenames[state].extend(lexobj.lexstaterenames['INITIAL'])

    lexobj.lexstateinfo = stateinfo
    lexobj.lexre = lexobj.lexstatere["INITIAL"]
    lexobj.lexretext = lexobj.lexstateretext["INITIAL"]
    lexobj.lexreflags = reflags

    # Set up ignore variables
    lexobj.lexstateignore = linfo.ignore
    lexobj.lexignore = lexobj.lexstateignore.get("INITIAL","")

    # Set up error functions
    lexobj.lexstateerrorf = linfo.errorf
    lexobj.lexerrorf = linfo.errorf.get("INITIAL",None)
    if not lexobj.lexerrorf:
        errorlog.warning("No t_error rule is defined")

    # Check state information for ignore and error rules
    for s,stype in stateinfo.items():
        if stype == 'exclusive':
              if not s in linfo.errorf:
                   errorlog.warning("No error rule is defined for exclusive state '%s'", s)
              if not s in linfo.ignore and lexobj.lexignore:
                   errorlog.warning("No ignore rule is defined for exclusive state '%s'", s)
        elif stype == 'inclusive':
              if not s in linfo.errorf:
                   linfo.errorf[s] = linfo.errorf.get("INITIAL",None)
              if not s in linfo.ignore:
                   linfo.ignore[s] = linfo.ignore.get("INITIAL","")

    # Create global versions of the token() and input() functions
    token = lexobj.token
    input = lexobj.input
    lexer = lexobj

    # If in optimize mode, we write the lextab
    if lextab and optimize:
        lexobj.writetab(lextab,outputdir)

    return lexobj
+
+# -----------------------------------------------------------------------------
+# runmain()
+#
+# This runs the lexer as a main program
+# -----------------------------------------------------------------------------
+
def runmain(lexer=None, data=None):
    """Drive a lexer as a standalone program.

    Input comes from `data` if given, else from the file named by
    sys.argv[1], else from stdin.  Each token is printed to stdout as
    "(type,value,lineno,lexpos)".  With no `lexer` argument the
    module-level token()/input() functions are used.
    """
    if not data:
        try:
            source = open(sys.argv[1])
        except IndexError:
            # No filename on the command line: fall back to stdin.
            sys.stdout.write("Reading from standard input (type EOF to end):\n")
            data = sys.stdin.read()
        else:
            data = source.read()
            source.close()

    _input = lexer.input if lexer else input
    _input(data)
    _token = lexer.token if lexer else token

    while 1:
        tok = _token()
        if not tok:
            break
        sys.stdout.write("(%s,%r,%d,%d)\n" % (tok.type, tok.value, tok.lineno, tok.lexpos))
+
+# -----------------------------------------------------------------------------
+# @TOKEN(regex)
+#
+# This decorator function can be used to set the regex expression on a function
+# when its docstring might need to be set in an alternative way
+# -----------------------------------------------------------------------------
+
def TOKEN(r):
    """Decorator that sets a token rule's regex via its docstring.

    `r` may be the regex string itself, or a callable whose docstring is
    the regex — useful when the pattern must be computed rather than
    written literally in the rule's own docstring.
    """
    def set_doc(f):
        f.__doc__ = r.__doc__ if hasattr(r, "__call__") else r
        return f
    return set_doc

# Alternative spelling of the TOKEN decorator
Token = TOKEN
+
diff --git a/tools/slimit/ply/yacc.py b/tools/slimit/ply/yacc.py
new file mode 100644 (file)
index 0000000..f70439e
--- /dev/null
@@ -0,0 +1,3276 @@
+# -----------------------------------------------------------------------------
+# ply: yacc.py
+#
+# Copyright (C) 2001-2011,
+# David M. Beazley (Dabeaz LLC)
+# All rights reserved.
+#
+# Redistribution and use in source and binary forms, with or without
+# modification, are permitted provided that the following conditions are
+# met:
+# 
+# * Redistributions of source code must retain the above copyright notice,
+#   this list of conditions and the following disclaimer.  
+# * Redistributions in binary form must reproduce the above copyright notice, 
+#   this list of conditions and the following disclaimer in the documentation
+#   and/or other materials provided with the distribution.  
+# * Neither the name of the David Beazley or Dabeaz LLC may be used to
+#   endorse or promote products derived from this software without
+#  specific prior written permission. 
+#
+# THIS SOFTWARE IS PROVIDED BY THE COPYRIGHT HOLDERS AND CONTRIBUTORS
+# "AS IS" AND ANY EXPRESS OR IMPLIED WARRANTIES, INCLUDING, BUT NOT
+# LIMITED TO, THE IMPLIED WARRANTIES OF MERCHANTABILITY AND FITNESS FOR
+# A PARTICULAR PURPOSE ARE DISCLAIMED. IN NO EVENT SHALL THE COPYRIGHT
+# OWNER OR CONTRIBUTORS BE LIABLE FOR ANY DIRECT, INDIRECT, INCIDENTAL,
+# SPECIAL, EXEMPLARY, OR CONSEQUENTIAL DAMAGES (INCLUDING, BUT NOT
+# LIMITED TO, PROCUREMENT OF SUBSTITUTE GOODS OR SERVICES; LOSS OF USE,
+# DATA, OR PROFITS; OR BUSINESS INTERRUPTION) HOWEVER CAUSED AND ON ANY
+# THEORY OF LIABILITY, WHETHER IN CONTRACT, STRICT LIABILITY, OR TORT
+# (INCLUDING NEGLIGENCE OR OTHERWISE) ARISING IN ANY WAY OUT OF THE USE
+# OF THIS SOFTWARE, EVEN IF ADVISED OF THE POSSIBILITY OF SUCH DAMAGE.
+# -----------------------------------------------------------------------------
+#
+# This implements an LR parser that is constructed from grammar rules defined
+# as Python functions. The grammar is specified by supplying the BNF inside
+# Python documentation strings.  The inspiration for this technique was borrowed
+# from John Aycock's Spark parsing system.  PLY might be viewed as a cross between
+# Spark and the GNU bison utility.
+#
+# The current implementation is only somewhat object-oriented. The
+# LR parser itself is defined in terms of an object (which allows multiple
+# parsers to co-exist).  However, most of the variables used during table
+# construction are defined in terms of global variables.  Users shouldn't
+# notice unless they are trying to define multiple parsers at the same
+# time using threads (in which case they should have their head examined).
+#
+# This implementation supports both SLR and LALR(1) parsing.  LALR(1)
+# support was originally implemented by Elias Ioup (ezioup@alumni.uchicago.edu),
+# using the algorithm found in Aho, Sethi, and Ullman "Compilers: Principles,
+# Techniques, and Tools" (The Dragon Book).  LALR(1) has since been replaced
+# by the more efficient DeRemer and Pennello algorithm.
+#
+# :::::::: WARNING :::::::
+#
+# Construction of LR parsing tables is fairly complicated and expensive.
+# To make this module run fast, a *LOT* of work has been put into
+# optimization---often at the expense of readability and what one might
+# consider to be good Python "coding style."   Modify the code at your
+# own risk!
+# ----------------------------------------------------------------------------
+
+__version__    = "3.4"
+__tabversion__ = "3.2"       # Table version
+
+#-----------------------------------------------------------------------------
+#                     === User configurable parameters ===
+#
+# Change these to modify the default behavior of yacc (if you wish)
+#-----------------------------------------------------------------------------
+
+yaccdebug   = 1                # Debugging mode.  If set, yacc generates a
+                               # a 'parser.out' file in the current directory
+
+debug_file  = 'parser.out'     # Default name of the debugging file
+tab_module  = 'parsetab'       # Default name of the table module
+default_lr  = 'LALR'           # Default LR table generation method
+
+error_count = 3                # Number of symbols that must be shifted to leave recovery mode
+
+yaccdevel   = 0                # Set to True if developing yacc.  This turns off optimized
+                               # implementations of certain functions.
+
+resultlimit = 40               # Size limit of results when running in debug mode.
+
+pickle_protocol = 0            # Protocol to use when writing pickle files
+
+import re, types, sys, os.path
+
+# Compatibility function for python 2.6/3.0
+if sys.version_info[0] < 3:
+    def func_code(f):
+        # Python 2: the code object is stored in f.func_code
+        return f.func_code
+else:
+    def func_code(f):
+        # Python 3: the code object is stored in f.__code__
+        return f.__code__
+
+# Compatibility: sys.maxint was removed in Python 3; fall back to sys.maxsize
+try:
+    MAXINT = sys.maxint
+except AttributeError:
+    MAXINT = sys.maxsize
+
+# Python 2.x/3.0 compatibility.
+def load_ply_lex():
+    """Import and return the PLY lex module (Python 2.x/3.0 compatible)."""
+    if sys.version_info[0] < 3:
+        import lex
+    else:
+        import ply.lex as lex
+    return lex
+
+# This object is a stand-in for a logging object created by the 
+# logging module.   PLY will use this by default to create things
+# such as the parser.out file.  If a user wants more detailed
+# information, they can create their own logging object and pass
+# it into PLY.
+
+class PlyLogger(object):
+    """Stand-in for a logging.Logger that writes to the file object *f*.
+
+    PLY uses this by default (e.g. for the parser.out file) when the user
+    does not supply a real logging object.  debug/info/critical write the
+    %-formatted message verbatim; warning/error add a level prefix.
+    """
+    def __init__(self,f):
+        self.f = f
+    def debug(self,msg,*args,**kwargs):
+        self.f.write((msg % args) + "\n")
+    info     = debug
+
+    def warning(self,msg,*args,**kwargs):
+        self.f.write("WARNING: "+ (msg % args) + "\n")
+
+    def error(self,msg,*args,**kwargs):
+        self.f.write("ERROR: " + (msg % args) + "\n")
+
+    critical = debug
+
+# Null logger is used when no output is generated. Does nothing.
+class NullLogger(object):
+    """Logger that discards everything: any attribute access or call simply
+    returns the NullLogger itself, so arbitrary chained logging calls are
+    silent no-ops."""
+    def __getattribute__(self,name):
+        return self
+    def __call__(self,*args,**kwargs):
+        return self
+        
+# Exception raised for yacc-related errors reported by this module
+class YaccError(Exception):   pass
+
+# Format the result message that the parser produces when running in debug mode.
+def format_result(r):
+    """Return "<TypeName @ 0xADDR> (repr)" for *r*, flattening embedded
+    newlines and truncating the repr to resultlimit characters."""
+    repr_str = repr(r)
+    if '\n' in repr_str: repr_str = repr(repr_str)   # escape newlines for one-line output
+    if len(repr_str) > resultlimit:
+        repr_str = repr_str[:resultlimit]+" ..."
+    result = "<%s @ 0x%x> (%s)" % (type(r).__name__,id(r),repr_str)
+    return result
+
+
+# Format stack entries when the parser is running in debug mode
+def format_stack_entry(r):
+    """Return a short description of stack entry *r*: its repr when short
+    (fewer than 16 chars), otherwise just "<TypeName @ 0xADDR>"."""
+    repr_str = repr(r)
+    if '\n' in repr_str: repr_str = repr(repr_str)   # escape newlines for one-line output
+    if len(repr_str) < 16:
+        return repr_str
+    else:
+        return "<%s @ 0x%x>" % (type(r).__name__,id(r))
+
+#-----------------------------------------------------------------------------
+#                        ===  LR Parsing Engine ===
+#
+# The following classes are used for the LR parser itself.  These are not
+# used during table construction and are independent of the actual LR
+# table generation algorithm
+#-----------------------------------------------------------------------------
+
+# This class is used to hold non-terminal grammar symbols during parsing.
+# It normally has the following attributes set:
+#        .type       = Grammar symbol type
+#        .value      = Symbol value
+#        .lineno     = Starting line number
+#        .endlineno  = Ending line number (optional, set automatically)
+#        .lexpos     = Starting lex position
+#        .endlexpos  = Ending lex position (optional, set automatically)
+
+class YaccSymbol:
+    """Grammar symbol held on the parsing stacks.  Attributes such as
+    .type, .value, .lineno and .lexpos are assigned dynamically by the
+    parser (see the attribute list in the comment block above)."""
+    def __str__(self):    return self.type
+    def __repr__(self):   return str(self)
+
+# This class is a wrapper around the objects actually passed to each
+# grammar rule.   Index lookup and assignment actually assign the
+# .value attribute of the underlying YaccSymbol object.
+# The lineno() method returns the line number of a given
+# item (or 0 if not defined).   The linespan() method returns
+# a tuple of (startline,endline) representing the range of lines
+# for a symbol.  The lexspan() method returns a tuple (lexpos,endlexpos)
+# representing the range of positional information for a symbol.
+
+class YaccProduction:
+    """Wrapper around the symbols passed to each grammar rule.
+
+    Index access reads/writes the .value of the underlying YaccSymbol:
+    p[0] is the rule's result, p[1:] the right-hand-side values; negative
+    indices reach into the parser's symbol stack.  lineno()/linespan() and
+    lexpos()/lexspan() expose positional information for item *n*.
+    """
+    def __init__(self,s,stack=None):
+        self.slice = s
+        self.stack = stack
+        self.lexer = None
+        self.parser= None
+    def __getitem__(self,n):
+        # Non-negative index: current production slice; negative: parser stack
+        if n >= 0: return self.slice[n].value
+        else: return self.stack[n].value
+
+    def __setitem__(self,n,v):
+        self.slice[n].value = v
+
+    def __getslice__(self,i,j):
+        # Python 2 slicing protocol
+        return [s.value for s in self.slice[i:j]]
+
+    def __len__(self):
+        return len(self.slice)
+
+    def lineno(self,n):
+        # Starting line number of item n (0 if not recorded)
+        return getattr(self.slice[n],"lineno",0)
+
+    def set_lineno(self,n,lineno):
+        self.slice[n].lineno = lineno
+
+    def linespan(self,n):
+        # (startline, endline) range covered by item n
+        startline = getattr(self.slice[n],"lineno",0)
+        endline = getattr(self.slice[n],"endlineno",startline)
+        return startline,endline
+
+    def lexpos(self,n):
+        # Starting lex position of item n (0 if not recorded)
+        return getattr(self.slice[n],"lexpos",0)
+
+    def lexspan(self,n):
+        # (startpos, endpos) range covered by item n
+        startpos = getattr(self.slice[n],"lexpos",0)
+        endpos = getattr(self.slice[n],"endlexpos",startpos)
+        return startpos,endpos
+
+    def error(self):
+       """Signal a syntax error from inside a grammar rule."""
+       raise SyntaxError
+
+
+# -----------------------------------------------------------------------------
+#                               == LRParser ==
+#
+# The LR Parsing engine.
+# -----------------------------------------------------------------------------
+
+class LRParser:
+    def __init__(self,lrtab,errorf):
+        """Build a parser from an LR table object *lrtab* and the user's
+        p_error callback *errorf* (may be None)."""
+        self.productions = lrtab.lr_productions
+        self.action      = lrtab.lr_action
+        self.goto        = lrtab.lr_goto
+        self.errorfunc   = errorf
+
+    def errok(self):
+        # Invoked from the user's p_error handler to signal that error
+        # recovery has completed successfully
+        self.errorok     = 1
+
+    def restart(self):
+        """Reset the parser to its start state: empty both stacks and push
+        the $end marker / state 0."""
+        del self.statestack[:]
+        del self.symstack[:]
+        sym = YaccSymbol()
+        sym.type = '$end'
+        self.symstack.append(sym)
+        self.statestack.append(0)
+
+    def parse(self,input=None,lexer=None,debug=0,tracking=0,tokenfunc=None):
+        """Parse *input* and return the grammar's result value.
+
+        Dispatches to one of three engine variants with identical logic:
+        parsedebug() when debugging is requested (or yaccdevel is set),
+        parseopt() when position tracking is enabled, and the fastest
+        parseopt_notrack() otherwise.  An integer *debug* is promoted to a
+        PlyLogger writing to stderr.
+        """
+        if debug or yaccdevel:
+            if isinstance(debug,int):
+                debug = PlyLogger(sys.stderr)
+            return self.parsedebug(input,lexer,debug,tracking,tokenfunc)
+        elif tracking:
+            return self.parseopt(input,lexer,debug,tracking,tokenfunc)
+        else:
+            return self.parseopt_notrack(input,lexer,debug,tracking,tokenfunc)
+        
+
+    # !!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!
+    # parsedebug().
+    #
+    # This is the debugging enabled version of parse().  All changes made to the
+    # parsing engine should be made here.   For the non-debugging version,
+    # copy this code to a method parseopt() and delete all of the sections
+    # enclosed in:
+    #
+    #      #--! DEBUG
+    #      statements
+    #      #--! DEBUG
+    #
+    # !!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!
+
+    def parsedebug(self,input=None,lexer=None,debug=None,tracking=0,tokenfunc=None):
+        """Debug-enabled LR parsing engine.
+
+        This is the master copy of the parsing algorithm: parseopt() and
+        parseopt_notrack() are derived from it by deleting the #--! DEBUG
+        (and #--! TRACKING) sections, so behavioral changes must be made
+        here first and then copied (see the banner comment above).
+        """
+        lookahead = None                 # Current lookahead symbol
+        lookaheadstack = [ ]             # Stack of lookahead symbols
+        actions = self.action            # Local reference to action table (to avoid lookup on self.)
+        goto    = self.goto              # Local reference to goto table (to avoid lookup on self.)
+        prod    = self.productions       # Local reference to production list (to avoid lookup on self.)
+        pslice  = YaccProduction(None)   # Production object passed to grammar rules
+        errorcount = 0                   # Used during error recovery
+
+        # --! DEBUG
+        debug.info("PLY: PARSE DEBUG START")
+        # --! DEBUG
+
+        # If no lexer was given, we will try to use the lex module
+        if not lexer:
+            lex = load_ply_lex()
+            lexer = lex.lexer
+
+        # Set up the lexer and parser objects on pslice
+        pslice.lexer = lexer
+        pslice.parser = self
+
+        # If input was supplied, pass to lexer
+        if input is not None:
+            lexer.input(input)
+
+        if tokenfunc is None:
+           # Tokenize function
+           get_token = lexer.token
+        else:
+           get_token = tokenfunc
+
+        # Set up the state and symbol stacks
+
+        statestack = [ ]                # Stack of parsing states
+        self.statestack = statestack
+        symstack   = [ ]                # Stack of grammar symbols
+        self.symstack = symstack
+
+        pslice.stack = symstack         # Put in the production
+        errtoken   = None               # Err token
+
+        # The start state is assumed to be (0,$end)
+
+        statestack.append(0)
+        sym = YaccSymbol()
+        sym.type = "$end"
+        symstack.append(sym)
+        state = 0
+        while 1:
+            # Get the next symbol on the input.  If a lookahead symbol
+            # is already set, we just use that. Otherwise, we'll pull
+            # the next token off of the lookaheadstack or from the lexer
+
+            # --! DEBUG
+            debug.debug('')
+            debug.debug('State  : %s', state)
+            # --! DEBUG
+
+            if not lookahead:
+                if not lookaheadstack:
+                    lookahead = get_token()     # Get the next token
+                else:
+                    lookahead = lookaheadstack.pop()
+                if not lookahead:
+                    lookahead = YaccSymbol()
+                    lookahead.type = "$end"
+
+            # --! DEBUG
+            debug.debug('Stack  : %s',
+                        ("%s . %s" % (" ".join([xx.type for xx in symstack][1:]), str(lookahead))).lstrip())
+            # --! DEBUG
+
+            # Check the action table
+            ltype = lookahead.type
+            t = actions[state].get(ltype)
+
+            if t is not None:
+                if t > 0:
+                    # shift a symbol on the stack
+                    statestack.append(t)
+                    state = t
+
+                    # --! DEBUG
+                    debug.debug("Action : Shift and goto state %s", t)
+                    # --! DEBUG
+
+                    symstack.append(lookahead)
+                    lookahead = None
+
+                    # Decrease error count on successful shift
+                    if errorcount: errorcount -=1
+                    continue
+
+                if t < 0:
+                    # reduce a symbol on the stack, emit a production
+                    p = prod[-t]
+                    pname = p.name
+                    plen  = p.len
+
+                    # Get production function
+                    sym = YaccSymbol()
+                    sym.type = pname       # Production name
+                    sym.value = None
+
+                    # --! DEBUG
+                    if plen:
+                        debug.info("Action : Reduce rule [%s] with %s and goto state %d", p.str, "["+",".join([format_stack_entry(_v.value) for _v in symstack[-plen:]])+"]",-t)
+                    else:
+                        debug.info("Action : Reduce rule [%s] with %s and goto state %d", p.str, [],-t)
+
+                    # --! DEBUG
+
+                    if plen:
+                        targ = symstack[-plen-1:]
+                        targ[0] = sym
+
+                        # --! TRACKING
+                        if tracking:
+                           t1 = targ[1]
+                           sym.lineno = t1.lineno
+                           sym.lexpos = t1.lexpos
+                           t1 = targ[-1]
+                           sym.endlineno = getattr(t1,"endlineno",t1.lineno)
+                           sym.endlexpos = getattr(t1,"endlexpos",t1.lexpos)
+
+                        # --! TRACKING
+
+                        # !!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!
+                        # The code enclosed in this section is duplicated 
+                        # below as a performance optimization.  Make sure
+                        # changes get made in both locations.
+
+                        pslice.slice = targ
+
+                        try:
+                            # Call the grammar rule with our special slice object
+                            del symstack[-plen:]
+                            del statestack[-plen:]
+                            p.callable(pslice)
+                            # --! DEBUG
+                            debug.info("Result : %s", format_result(pslice[0]))
+                            # --! DEBUG
+                            symstack.append(sym)
+                            state = goto[statestack[-1]][pname]
+                            statestack.append(state)
+                        except SyntaxError:
+                            # If an error was set. Enter error recovery state
+                            lookaheadstack.append(lookahead)
+                            symstack.pop()
+                            statestack.pop()
+                            state = statestack[-1]
+                            sym.type = 'error'
+                            lookahead = sym
+                            errorcount = error_count
+                            self.errorok = 0
+                        continue
+                        # !!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!
+
+                    else:
+
+                        # --! TRACKING
+                        if tracking:
+                           sym.lineno = lexer.lineno
+                           sym.lexpos = lexer.lexpos
+                        # --! TRACKING
+
+                        targ = [ sym ]
+
+                        # !!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!
+                        # The code enclosed in this section is duplicated 
+                        # above as a performance optimization.  Make sure
+                        # changes get made in both locations.
+
+                        pslice.slice = targ
+
+                        try:
+                            # Call the grammar rule with our special slice object
+                            p.callable(pslice)
+                            # --! DEBUG
+                            debug.info("Result : %s", format_result(pslice[0]))
+                            # --! DEBUG
+                            symstack.append(sym)
+                            state = goto[statestack[-1]][pname]
+                            statestack.append(state)
+                        except SyntaxError:
+                            # If an error was set. Enter error recovery state
+                            lookaheadstack.append(lookahead)
+                            symstack.pop()
+                            statestack.pop()
+                            state = statestack[-1]
+                            sym.type = 'error'
+                            lookahead = sym
+                            errorcount = error_count
+                            self.errorok = 0
+                        continue
+                        # !!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!
+
+                if t == 0:
+                    n = symstack[-1]
+                    result = getattr(n,"value",None)
+                    # --! DEBUG
+                    debug.info("Done   : Returning %s", format_result(result))
+                    debug.info("PLY: PARSE DEBUG END")
+                    # --! DEBUG
+                    return result
+
+            if t == None:
+
+                # --! DEBUG
+                debug.error('Error  : %s',
+                            ("%s . %s" % (" ".join([xx.type for xx in symstack][1:]), str(lookahead))).lstrip())
+                # --! DEBUG
+
+                # We have some kind of parsing error here.  To handle
+                # this, we are going to push the current token onto
+                # the tokenstack and replace it with an 'error' token.
+                # If there are any synchronization rules, they may
+                # catch it.
+                #
+                # In addition to pushing the error token, we call
+                # the user defined p_error() function if this is the
+                # first syntax error.  This function is only called if
+                # errorcount == 0.
+                if errorcount == 0 or self.errorok:
+                    errorcount = error_count
+                    self.errorok = 0
+                    errtoken = lookahead
+                    if errtoken.type == "$end":
+                        errtoken = None               # End of file!
+                    if self.errorfunc:
+                        global errok,token,restart
+                        errok = self.errok        # Set some special functions available in error recovery
+                        token = get_token
+                        restart = self.restart
+                        if errtoken and not hasattr(errtoken,'lexer'):
+                            errtoken.lexer = lexer
+                        tok = self.errorfunc(errtoken)
+                        del errok, token, restart   # Delete special functions
+
+                        if self.errorok:
+                            # User must have done some kind of panic
+                            # mode recovery on their own.  The
+                            # returned token is the next lookahead
+                            lookahead = tok
+                            errtoken = None
+                            continue
+                    else:
+                        if errtoken:
+                            if hasattr(errtoken,"lineno"): lineno = lookahead.lineno
+                            else: lineno = 0
+                            if lineno:
+                                sys.stderr.write("yacc: Syntax error at line %d, token=%s\n" % (lineno, errtoken.type))
+                            else:
+                                sys.stderr.write("yacc: Syntax error, token=%s" % errtoken.type)
+                        else:
+                            sys.stderr.write("yacc: Parse error in input. EOF\n")
+                            return
+
+                else:
+                    errorcount = error_count
+
+                # case 1:  the statestack only has 1 entry on it.  If we're in this state, the
+                # entire parse has been rolled back and we're completely hosed.   The token is
+                # discarded and we just keep going.
+
+                if len(statestack) <= 1 and lookahead.type != "$end":
+                    lookahead = None
+                    errtoken = None
+                    state = 0
+                    # Nuke the pushback stack
+                    del lookaheadstack[:]
+                    continue
+
+                # case 2: the statestack has a couple of entries on it, but we're
+                # at the end of the file. nuke the top entry and generate an error token
+
+                # Start nuking entries on the stack
+                if lookahead.type == "$end":
+                    # Whoa. We're really hosed here. Bail out
+                    return
+
+                if lookahead.type != 'error':
+                    sym = symstack[-1]
+                    if sym.type == 'error':
+                        # Hmmm. Error is on top of stack, we'll just nuke input
+                        # symbol and continue
+                        lookahead = None
+                        continue
+                    t = YaccSymbol()
+                    t.type = 'error'
+                    if hasattr(lookahead,"lineno"):
+                        t.lineno = lookahead.lineno
+                    t.value = lookahead
+                    lookaheadstack.append(lookahead)
+                    lookahead = t
+                else:
+                    symstack.pop()
+                    statestack.pop()
+                    state = statestack[-1]       # Potential bug fix
+
+                continue
+
+            # Call an error function here
+            raise RuntimeError("yacc: internal parser error!!!\n")
+
+    # !!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!
+    # parseopt().
+    #
+    # Optimized version of parse() method.  DO NOT EDIT THIS CODE DIRECTLY.
+    # Edit the debug version above, then copy any modifications to the method
+    # below while removing #--! DEBUG sections.
+    # !!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!
+
+
+    def parseopt(self,input=None,lexer=None,debug=0,tracking=0,tokenfunc=None):
+        lookahead = None                 # Current lookahead symbol
+        lookaheadstack = [ ]             # Stack of lookahead symbols
+        actions = self.action            # Local reference to action table (to avoid lookup on self.)
+        goto    = self.goto              # Local reference to goto table (to avoid lookup on self.)
+        prod    = self.productions       # Local reference to production list (to avoid lookup on self.)
+        pslice  = YaccProduction(None)   # Production object passed to grammar rules
+        errorcount = 0                   # Used during error recovery 
+
+        # If no lexer was given, we will try to use the lex module
+        if not lexer:
+            lex = load_ply_lex()
+            lexer = lex.lexer
+        
+        # Set up the lexer and parser objects on pslice
+        pslice.lexer = lexer
+        pslice.parser = self
+
+        # If input was supplied, pass to lexer
+        if input is not None:
+            lexer.input(input)
+
+        if tokenfunc is None:
+           # Tokenize function
+           get_token = lexer.token
+        else:
+           get_token = tokenfunc
+
+        # Set up the state and symbol stacks
+
+        statestack = [ ]                # Stack of parsing states
+        self.statestack = statestack
+        symstack   = [ ]                # Stack of grammar symbols
+        self.symstack = symstack
+
+        pslice.stack = symstack         # Put in the production
+        errtoken   = None               # Err token
+
+        # The start state is assumed to be (0,$end)
+
+        statestack.append(0)
+        sym = YaccSymbol()
+        sym.type = '$end'
+        symstack.append(sym)
+        state = 0
+        while 1:
+            # Get the next symbol on the input.  If a lookahead symbol
+            # is already set, we just use that. Otherwise, we'll pull
+            # the next token off of the lookaheadstack or from the lexer
+
+            if not lookahead:
+                if not lookaheadstack:
+                    lookahead = get_token()     # Get the next token
+                else:
+                    lookahead = lookaheadstack.pop()
+                if not lookahead:
+                    lookahead = YaccSymbol()
+                    lookahead.type = '$end'
+
+            # Check the action table
+            ltype = lookahead.type
+            t = actions[state].get(ltype)
+
+            if t is not None:
+                if t > 0:
+                    # shift a symbol on the stack
+                    statestack.append(t)
+                    state = t
+
+                    symstack.append(lookahead)
+                    lookahead = None
+
+                    # Decrease error count on successful shift
+                    if errorcount: errorcount -=1
+                    continue
+
+                if t < 0:
+                    # reduce a symbol on the stack, emit a production
+                    p = prod[-t]
+                    pname = p.name
+                    plen  = p.len
+
+                    # Get production function
+                    sym = YaccSymbol()
+                    sym.type = pname       # Production name
+                    sym.value = None
+
+                    if plen:
+                        targ = symstack[-plen-1:]
+                        targ[0] = sym
+
+                        # --! TRACKING
+                        if tracking:
+                           t1 = targ[1]
+                           sym.lineno = t1.lineno
+                           sym.lexpos = t1.lexpos
+                           t1 = targ[-1]
+                           sym.endlineno = getattr(t1,"endlineno",t1.lineno)
+                           sym.endlexpos = getattr(t1,"endlexpos",t1.lexpos)
+
+                        # --! TRACKING
+
+                        # !!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!
+                        # The code enclosed in this section is duplicated 
+                        # below as a performance optimization.  Make sure
+                        # changes get made in both locations.
+
+                        pslice.slice = targ
+                        
+                        try:
+                            # Call the grammar rule with our special slice object
+                            del symstack[-plen:]
+                            del statestack[-plen:]
+                            p.callable(pslice)
+                            symstack.append(sym)
+                            state = goto[statestack[-1]][pname]
+                            statestack.append(state)
+                        except SyntaxError:
+                            # If an error was set. Enter error recovery state
+                            lookaheadstack.append(lookahead)
+                            symstack.pop()
+                            statestack.pop()
+                            state = statestack[-1]
+                            sym.type = 'error'
+                            lookahead = sym
+                            errorcount = error_count
+                            self.errorok = 0
+                        continue
+                        # !!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!
+    
+                    else:
+
+                        # --! TRACKING
+                        if tracking:
+                           sym.lineno = lexer.lineno
+                           sym.lexpos = lexer.lexpos
+                        # --! TRACKING
+
+                        targ = [ sym ]
+
+                        # !!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!
+                        # The code enclosed in this section is duplicated 
+                        # above as a performance optimization.  Make sure
+                        # changes get made in both locations.
+
+                        pslice.slice = targ
+
+                        try:
+                            # Call the grammar rule with our special slice object
+                            p.callable(pslice)
+                            symstack.append(sym)
+                            state = goto[statestack[-1]][pname]
+                            statestack.append(state)
+                        except SyntaxError:
+                            # If an error was set. Enter error recovery state
+                            lookaheadstack.append(lookahead)
+                            symstack.pop()
+                            statestack.pop()
+                            state = statestack[-1]
+                            sym.type = 'error'
+                            lookahead = sym
+                            errorcount = error_count
+                            self.errorok = 0
+                        continue
+                        # !!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!
+
+                if t == 0:
+                    n = symstack[-1]
+                    return getattr(n,"value",None)
+
+            if t == None:
+
+                # We have some kind of parsing error here.  To handle
+                # this, we are going to push the current token onto
+                # the tokenstack and replace it with an 'error' token.
+                # If there are any synchronization rules, they may
+                # catch it.
+                #
+                # In addition to pushing the error token, we also call
+                # the user defined p_error() function if this is the
+                # first syntax error.  This function is only called if
+                # errorcount == 0.
+                if errorcount == 0 or self.errorok:
+                    errorcount = error_count
+                    self.errorok = 0
+                    errtoken = lookahead
+                    if errtoken.type == '$end':
+                        errtoken = None               # End of file!
+                    if self.errorfunc:
+                        global errok,token,restart
+                        errok = self.errok        # Set some special functions available in error recovery
+                        token = get_token
+                        restart = self.restart
+                        if errtoken and not hasattr(errtoken,'lexer'):
+                            errtoken.lexer = lexer
+                        tok = self.errorfunc(errtoken)
+                        del errok, token, restart   # Delete special functions
+
+                        if self.errorok:
+                            # User must have done some kind of panic
+                            # mode recovery on their own.  The
+                            # returned token is the next lookahead
+                            lookahead = tok
+                            errtoken = None
+                            continue
+                    else:
+                        if errtoken:
+                            if hasattr(errtoken,"lineno"): lineno = lookahead.lineno
+                            else: lineno = 0
+                            if lineno:
+                                sys.stderr.write("yacc: Syntax error at line %d, token=%s\n" % (lineno, errtoken.type))
+                            else:
+                                sys.stderr.write("yacc: Syntax error, token=%s" % errtoken.type)
+                        else:
+                            sys.stderr.write("yacc: Parse error in input. EOF\n")
+                            return
+
+                else:
+                    errorcount = error_count
+
+                # case 1:  the statestack only has 1 entry on it.  If we're in this state, the
+                # entire parse has been rolled back and we're completely hosed.   The token is
+                # discarded and we just keep going.
+
+                if len(statestack) <= 1 and lookahead.type != '$end':
+                    lookahead = None
+                    errtoken = None
+                    state = 0
+                    # Nuke the pushback stack
+                    del lookaheadstack[:]
+                    continue
+
+                # case 2: the statestack has a couple of entries on it, but we're
+                # at the end of the file. nuke the top entry and generate an error token
+
+                # Start nuking entries on the stack
+                if lookahead.type == '$end':
+                    # Whoa. We're really hosed here. Bail out
+                    return
+
+                if lookahead.type != 'error':
+                    sym = symstack[-1]
+                    if sym.type == 'error':
+                        # Hmmm. Error is on top of stack, we'll just nuke input
+                        # symbol and continue
+                        lookahead = None
+                        continue
+                    t = YaccSymbol()
+                    t.type = 'error'
+                    if hasattr(lookahead,"lineno"):
+                        t.lineno = lookahead.lineno
+                    t.value = lookahead
+                    lookaheadstack.append(lookahead)
+                    lookahead = t
+                else:
+                    symstack.pop()
+                    statestack.pop()
+                    state = statestack[-1]       # Potential bug fix
+
+                continue
+
+            # Call an error function here
+            raise RuntimeError("yacc: internal parser error!!!\n")
+
+    # !!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!
+    # parseopt_notrack().
+    #
+    # Optimized version of parseopt() with line number tracking removed. 
+    # DO NOT EDIT THIS CODE DIRECTLY. Copy the optimized version and remove
+    # code in the #--! TRACKING sections
+    # !!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!
+
    def parseopt_notrack(self,input=None,lexer=None,debug=0,tracking=0,tokenfunc=None):
        """Run the LALR parsing engine with both debug output and position
        tracking stripped out for speed.

        This is mechanically derived from parseopt() by deleting the
        "--! TRACKING" sections (see the banner comment above); do not edit
        it directly.

        input     - optional source string fed to the lexer
        lexer     - lexer object; defaults to the module loaded by load_ply_lex()
        debug     - accepted for interface compatibility; ignored here
        tracking  - accepted for interface compatibility; ignored here
        tokenfunc - optional callable used instead of lexer.token to get tokens

        Returns the semantic value of the start symbol on a successful parse,
        or None if error recovery fails.
        """
        lookahead = None                 # Current lookahead symbol
        lookaheadstack = [ ]             # Stack of lookahead symbols
        actions = self.action            # Local reference to action table (to avoid lookup on self.)
        goto    = self.goto              # Local reference to goto table (to avoid lookup on self.)
        prod    = self.productions       # Local reference to production list (to avoid lookup on self.)
        pslice  = YaccProduction(None)   # Production object passed to grammar rules
        errorcount = 0                   # Used during error recovery

        # If no lexer was given, we will try to use the lex module
        if not lexer:
            lex = load_ply_lex()
            lexer = lex.lexer

        # Set up the lexer and parser objects on pslice
        pslice.lexer = lexer
        pslice.parser = self

        # If input was supplied, pass to lexer
        if input is not None:
            lexer.input(input)

        if tokenfunc is None:
           # Tokenize function
           get_token = lexer.token
        else:
           get_token = tokenfunc

        # Set up the state and symbol stacks

        statestack = [ ]                # Stack of parsing states
        self.statestack = statestack
        symstack   = [ ]                # Stack of grammar symbols
        self.symstack = symstack

        pslice.stack = symstack         # Put in the production
        errtoken   = None               # Err token

        # The start state is assumed to be (0,$end)

        statestack.append(0)
        sym = YaccSymbol()
        sym.type = '$end'
        symstack.append(sym)
        state = 0
        while 1:
            # Get the next symbol on the input.  If a lookahead symbol
            # is already set, we just use that. Otherwise, we'll pull
            # the next token off of the lookaheadstack or from the lexer

            if not lookahead:
                if not lookaheadstack:
                    lookahead = get_token()     # Get the next token
                else:
                    lookahead = lookaheadstack.pop()
                if not lookahead:
                    lookahead = YaccSymbol()
                    lookahead.type = '$end'

            # Check the action table
            ltype = lookahead.type
            t = actions[state].get(ltype)

            if t is not None:
                if t > 0:
                    # shift a symbol on the stack
                    statestack.append(t)
                    state = t

                    symstack.append(lookahead)
                    lookahead = None

                    # Decrease error count on successful shift
                    if errorcount: errorcount -=1
                    continue

                if t < 0:
                    # reduce a symbol on the stack, emit a production
                    p = prod[-t]
                    pname = p.name
                    plen  = p.len

                    # Get production function
                    sym = YaccSymbol()
                    sym.type = pname       # Production name
                    sym.value = None

                    if plen:
                        targ = symstack[-plen-1:]
                        targ[0] = sym

                        # !!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!
                        # The code enclosed in this section is duplicated
                        # below as a performance optimization.  Make sure
                        # changes get made in both locations.

                        pslice.slice = targ

                        try:
                            # Call the grammar rule with our special slice object
                            del symstack[-plen:]
                            del statestack[-plen:]
                            p.callable(pslice)
                            symstack.append(sym)
                            state = goto[statestack[-1]][pname]
                            statestack.append(state)
                        except SyntaxError:
                            # If an error was set. Enter error recovery state
                            lookaheadstack.append(lookahead)
                            symstack.pop()
                            statestack.pop()
                            state = statestack[-1]
                            sym.type = 'error'
                            lookahead = sym
                            errorcount = error_count
                            self.errorok = 0
                        continue
                        # !!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!

                    else:

                        targ = [ sym ]

                        # !!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!
                        # The code enclosed in this section is duplicated
                        # above as a performance optimization.  Make sure
                        # changes get made in both locations.

                        pslice.slice = targ

                        try:
                            # Call the grammar rule with our special slice object
                            p.callable(pslice)
                            symstack.append(sym)
                            state = goto[statestack[-1]][pname]
                            statestack.append(state)
                        except SyntaxError:
                            # If an error was set. Enter error recovery state
                            lookaheadstack.append(lookahead)
                            symstack.pop()
                            statestack.pop()
                            state = statestack[-1]
                            sym.type = 'error'
                            lookahead = sym
                            errorcount = error_count
                            self.errorok = 0
                        continue
                        # !!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!

                if t == 0:
                    # Accept: return the semantic value of the start symbol
                    n = symstack[-1]
                    return getattr(n,"value",None)

            # NOTE(review): upstream uses '== None' here instead of 'is None';
            # kept as-is to stay byte-compatible with PLY.
            if t == None:

                # We have some kind of parsing error here.  To handle
                # this, we are going to push the current token onto
                # the tokenstack and replace it with an 'error' token.
                # If there are any synchronization rules, they may
                # catch it.
                #
                # In addition to pushing the error token, we also call
                # the user defined p_error() function if this is the
                # first syntax error.  This function is only called if
                # errorcount == 0.
                if errorcount == 0 or self.errorok:
                    errorcount = error_count
                    self.errorok = 0
                    errtoken = lookahead
                    if errtoken.type == '$end':
                        errtoken = None               # End of file!
                    if self.errorfunc:
                        global errok,token,restart
                        errok = self.errok        # Set some special functions available in error recovery
                        token = get_token
                        restart = self.restart
                        if errtoken and not hasattr(errtoken,'lexer'):
                            errtoken.lexer = lexer
                        tok = self.errorfunc(errtoken)
                        del errok, token, restart   # Delete special functions

                        if self.errorok:
                            # User must have done some kind of panic
                            # mode recovery on their own.  The
                            # returned token is the next lookahead
                            lookahead = tok
                            errtoken = None
                            continue
                    else:
                        if errtoken:
                            if hasattr(errtoken,"lineno"): lineno = lookahead.lineno
                            else: lineno = 0
                            if lineno:
                                sys.stderr.write("yacc: Syntax error at line %d, token=%s\n" % (lineno, errtoken.type))
                            else:
                                sys.stderr.write("yacc: Syntax error, token=%s" % errtoken.type)
                        else:
                            sys.stderr.write("yacc: Parse error in input. EOF\n")
                            return

                else:
                    errorcount = error_count

                # case 1:  the statestack only has 1 entry on it.  If we're in this state, the
                # entire parse has been rolled back and we're completely hosed.   The token is
                # discarded and we just keep going.

                if len(statestack) <= 1 and lookahead.type != '$end':
                    lookahead = None
                    errtoken = None
                    state = 0
                    # Nuke the pushback stack
                    del lookaheadstack[:]
                    continue

                # case 2: the statestack has a couple of entries on it, but we're
                # at the end of the file. nuke the top entry and generate an error token

                # Start nuking entries on the stack
                if lookahead.type == '$end':
                    # Whoa. We're really hosed here. Bail out
                    return

                if lookahead.type != 'error':
                    sym = symstack[-1]
                    if sym.type == 'error':
                        # Hmmm. Error is on top of stack, we'll just nuke input
                        # symbol and continue
                        lookahead = None
                        continue
                    t = YaccSymbol()
                    t.type = 'error'
                    if hasattr(lookahead,"lineno"):
                        t.lineno = lookahead.lineno
                    t.value = lookahead
                    lookaheadstack.append(lookahead)
                    lookahead = t
                else:
                    symstack.pop()
                    statestack.pop()
                    state = statestack[-1]       # Potential bug fix

                continue

            # Call an error function here
            raise RuntimeError("yacc: internal parser error!!!\n")
+
+# -----------------------------------------------------------------------------
+#                          === Grammar Representation ===
+#
+# The following functions, classes, and variables are used to represent and
+# manipulate the rules that make up a grammar. 
+# -----------------------------------------------------------------------------
+
+import re
+
+# regex matching identifiers
+_is_identifier = re.compile(r'^[a-zA-Z0-9_-]+$')
+
+# -----------------------------------------------------------------------------
+# class Production:
+#
+# This class stores the raw information about a single production or grammar rule.
+# A grammar rule refers to a specification such as this:
+#
+#       expr : expr PLUS term 
+#
+# Here are the basic attributes defined on all productions
+#
+#       name     - Name of the production.  For example 'expr'
+#       prod     - A list of symbols on the right side ['expr','PLUS','term']
+#       prec     - Production precedence level
+#       number   - Production number.
+#       func     - Function that executes on reduce
+#       file     - File where production function is defined
+#       lineno   - Line number where production function is defined
+#
+# The following attributes are defined or optional.
+#
+#       len       - Length of the production (number of symbols on right hand side)
+#       usyms     - Set of unique symbols found in the production
+# -----------------------------------------------------------------------------
+
class Production(object):
    """Raw information about a single grammar rule such as:

        expr : expr PLUS term

    Attributes:
        name     - Name of the production.  For example 'expr'
        prod     - Tuple of right-hand-side symbols, e.g. ('expr','PLUS','term')
        prec     - Production precedence level, a tuple (assoc, level)
        number   - Production number
        func     - Name of the function that executes on reduce
        callable - The actual callable, resolved later by bind()
        file     - File where the production function is defined
        line     - Line number where the production function is defined
        len      - Number of symbols on the right-hand side
        usyms    - Unique symbols appearing in the production
    """
    # Count of how many times this production has been reduced (shared
    # class attribute until first written on an instance).
    reduced = 0
    def __init__(self,number,name,prod,precedence=('right',0),func=None,file='',line=0):
        self.name     = name
        self.prod     = tuple(prod)
        self.number   = number
        self.func     = func
        self.callable = None
        self.file     = file
        self.line     = line
        self.prec     = precedence

        # Internal settings used during table construction

        self.len  = len(self.prod)   # Length of the production

        # Create a list of unique production symbols used in the production
        self.usyms = [ ]
        for s in self.prod:
            if s not in self.usyms:
                self.usyms.append(s)

        # List of all LR items for the production
        self.lr_items = []
        self.lr_next = None

        # Create a string representation
        if self.prod:
            self.str = "%s -> %s" % (self.name," ".join(self.prod))
        else:
            self.str = "%s -> <empty>" % self.name

    def __str__(self):
        return self.str

    def __repr__(self):
        return "Production("+str(self)+")"

    def __len__(self):
        return len(self.prod)

    # Python 2 truth protocol: a Production is always truthy, even when
    # its right-hand side is empty (which would otherwise make len() == 0).
    def __nonzero__(self):
        return 1

    def __getitem__(self,index):
        return self.prod[index]

    # Return the nth lr_item from the production (or None if at the end)
    # NOTE(review): 'Prodnames' below is not defined at module scope in this
    # file; if this method were ever called, that lookup would raise a
    # NameError (not caught by the except clause).  It appears to be unused
    # legacy code -- Grammar.build_lritems() does this work instead.
    # TODO confirm against upstream PLY before relying on it.
    def lr_item(self,n):
        if n > len(self.prod): return None
        p = LRItem(self,n)

        # Precompute the list of productions immediately following.  Hack. Remove later
        try:
            p.lr_after = Prodnames[p.prod[n+1]]
        except (IndexError,KeyError):
            p.lr_after = []
        try:
            p.lr_before = p.prod[n-1]
        except IndexError:
            p.lr_before = None

        return p

    # Bind the production function name to a callable
    def bind(self,pdict):
        if self.func:
            self.callable = pdict[self.func]
+
+# This class serves as a minimal standin for Production objects when
+# reading table data from files.   It only contains information
+# actually used by the LR parsing engine, plus some additional
+# debugging information.
class MiniProduction(object):
    """Lightweight stand-in for Production used when parser tables are
    loaded back from files.  It carries only the fields the LR engine and
    its debugging output actually consult.
    """

    def __init__(self, str, name, len, func, file, line):
        # Parameter names mirror Production's attributes (including the
        # builtin-shadowing 'str' and 'len') to keep the interface identical.
        self.str = str
        self.name = name
        self.len = len
        self.func = func
        self.file = file
        self.line = line
        # Resolved lazily by bind(); None until then.
        self.callable = None

    def __str__(self):
        return self.str

    def __repr__(self):
        return "MiniProduction(%s)" % self.str

    def bind(self, pdict):
        """Resolve the stored function name into an actual callable."""
        if self.func:
            self.callable = pdict[self.func]
+
+
+# -----------------------------------------------------------------------------
+# class LRItem
+#
+# This class represents a specific stage of parsing a production rule.  For
+# example: 
+#
+#       expr : expr . PLUS term 
+#
+# In the above, the "." represents the current location of the parse.  Here
+# basic attributes:
+#
+#       name       - Name of the production.  For example 'expr'
+#       prod       - A list of symbols on the right side ['expr','.', 'PLUS','term']
+#       number     - Production number.
+#
+#       lr_next      Next LR item. Example, if we are ' expr -> expr . PLUS term'
+#                    then lr_next refers to 'expr -> expr PLUS . term'
+#       lr_index   - LR item index (location of the ".") in the prod list.
+#       lookaheads - LALR lookahead symbols for this item
+#       len        - Length of the production (number of symbols on right hand side)
+#       lr_after    - List of all productions that immediately follow
+#       lr_before   - Grammar symbol immediately before
+# -----------------------------------------------------------------------------
+
class LRItem(object):
    """A production with a parse-position marker (the "dot").

    For a production p and position n, the dot is inserted at index n of
    the right-hand side, e.g.  expr -> expr . PLUS term
    """

    def __init__(self, p, n):
        self.name = p.name
        self.number = p.number
        self.lr_index = n          # index of the "." in self.prod
        self.lookaheads = {}       # LALR lookaheads, filled in later
        self.usyms = p.usyms
        # Build the dotted right-hand side as an immutable tuple.
        dotted = list(p.prod)
        dotted.insert(n, ".")
        self.prod = tuple(dotted)
        self.len = len(self.prod)

    def __str__(self):
        if not self.prod:
            return "%s -> <empty>" % self.name
        return "%s -> %s" % (self.name, " ".join(self.prod))

    def __repr__(self):
        return "LRItem(" + str(self) + ")"
+
+# -----------------------------------------------------------------------------
+# rightmost_terminal()
+#
+# Return the rightmost terminal from a list of symbols.  Used in add_production()
+# -----------------------------------------------------------------------------
def rightmost_terminal(symbols, terminals):
    """Return the rightmost symbol in *symbols* that is a terminal.

    Used by add_production() to derive a rule's default precedence.
    Returns None when no terminal appears in *symbols*.
    """
    for sym in reversed(symbols):
        if sym in terminals:
            return sym
    return None
+
+# -----------------------------------------------------------------------------
+#                           === GRAMMAR CLASS ===
+#
+# The following class represents the contents of the specified grammar along
+# with various computed properties such as first sets, follow sets, LR items, etc.
+# This data is used for critical parts of the table generation process later.
+# -----------------------------------------------------------------------------
+
# Raised for problems detected in the grammar specification itself
# (illegal rule names, duplicate rules, bad precedence declarations, ...).
class GrammarError(YaccError): pass
+
+class Grammar(object):
    def __init__(self,terminals):
        """Create an empty grammar over *terminals* (an iterable of token
        names).

        The reserved 'error' terminal used by yacc error recovery is always
        added.  Productions are installed afterwards with add_production()
        and set_start().
        """
        self.Productions  = [None]  # A list of all of the productions.  The first
                                    # entry is always reserved for the purpose of
                                    # building an augmented grammar

        self.Prodnames    = { }     # A dictionary mapping the names of nonterminals to a list of all
                                    # productions of that nonterminal.

        self.Prodmap      = { }     # A dictionary that is only used to detect duplicate
                                    # productions.

        self.Terminals    = { }     # A dictionary mapping the names of terminal symbols to a
                                    # list of the rules where they are used.

        for term in terminals:
            self.Terminals[term] = []

        self.Terminals['error'] = []

        self.Nonterminals = { }     # A dictionary mapping names of nonterminals to a list
                                    # of rule numbers where they are used.

        self.First        = { }     # A dictionary of precomputed FIRST(x) symbols

        self.Follow       = { }     # A dictionary of precomputed FOLLOW(x) symbols

        self.Precedence   = { }     # Precedence rules for each terminal. Contains tuples of the
                                    # form ('right',level) or ('nonassoc', level) or ('left',level)

        self.UsedPrecedence = { }   # Precedence rules that were actually used by the grammar.
                                    # This is only used to provide error checking and to generate
                                    # a warning about unused precedence rules.

        self.Start = None           # Starting symbol for the grammar
+
    def __len__(self):
        # Number of productions, including the reserved augmented slot 0.
        return len(self.Productions)
+
    def __getitem__(self,index):
        # Index directly into the production list (grammar[0] is the
        # augmented production once set_start() has run).
        return self.Productions[index]
+
+    # -----------------------------------------------------------------------------
+    # set_precedence()
+    #
+    # Sets the precedence for a given terminal. assoc is the associativity such as
+    # 'left','right', or 'nonassoc'.  level is a numeric level.
+    #
+    # -----------------------------------------------------------------------------
+
+    def set_precedence(self,term,assoc,level):
+        assert self.Productions == [None],"Must call set_precedence() before add_production()"
+        if term in self.Precedence:
+            raise GrammarError("Precedence already specified for terminal '%s'" % term)
+        if assoc not in ['left','right','nonassoc']:
+            raise GrammarError("Associativity must be one of 'left','right', or 'nonassoc'")
+        self.Precedence[term] = (assoc,level)
+    # -----------------------------------------------------------------------------
+    # add_production()
+    #
+    # Given an action function, this function assembles a production rule and
+    # computes its precedence level.
+    #
+    # The production rule is supplied as a list of symbols.   For example,
+    # a rule such as 'expr : expr PLUS term' has a production name of 'expr' and
+    # symbols ['expr','PLUS','term'].
+    #
+    # Precedence is determined by the precedence of the right-most non-terminal
+    # or the precedence of a terminal specified by %prec.
+    #
+    # A variety of error checks are performed to make sure production symbols
+    # are valid and that %prec is used correctly.
+    # -----------------------------------------------------------------------------
+
    def add_production(self,prodname,syms,func=None,file='',line=0):
        """Add the production  prodname -> syms  to the grammar.

        syms is the list of right-hand-side symbols (mutated in place:
        quoted literals are replaced by their character, and any trailing
        '%prec NAME' pair is stripped).  func is the name of the reduce
        function; file/line locate it for error messages.

        Precedence is taken from a '%prec' override if present, otherwise
        from the rightmost terminal.  Raises GrammarError for illegal
        names, bad literals, misuse of %prec, or duplicate rules.
        Returns 0 on success.
        """

        if prodname in self.Terminals:
            raise GrammarError("%s:%d: Illegal rule name '%s'. Already defined as a token" % (file,line,prodname))
        if prodname == 'error':
            raise GrammarError("%s:%d: Illegal rule name '%s'. error is a reserved word" % (file,line,prodname))
        if not _is_identifier.match(prodname):
            raise GrammarError("%s:%d: Illegal rule name '%s'" % (file,line,prodname))

        # Look for literal tokens
        for n,s in enumerate(syms):
            if s[0] in "'\"":
                 # NOTE(review): eval() of the quoted literal from the grammar
                 # specification -- grammar text is trusted (it comes from the
                 # parser author's own source file), not external input.
                 try:
                     c = eval(s)
                     if (len(c) > 1):
                          raise GrammarError("%s:%d: Literal token %s in rule '%s' may only be a single character" % (file,line,s, prodname))
                     if not c in self.Terminals:
                          self.Terminals[c] = []
                     syms[n] = c
                     continue
                 except SyntaxError:
                     pass
            if not _is_identifier.match(s) and s != '%prec':
                raise GrammarError("%s:%d: Illegal name '%s' in rule '%s'" % (file,line,s, prodname))

        # Determine the precedence level
        if '%prec' in syms:
            if syms[-1] == '%prec':
                raise GrammarError("%s:%d: Syntax error. Nothing follows %%prec" % (file,line))
            if syms[-2] != '%prec':
                raise GrammarError("%s:%d: Syntax error. %%prec can only appear at the end of a grammar rule" % (file,line))
            precname = syms[-1]
            prodprec = self.Precedence.get(precname,None)
            if not prodprec:
                raise GrammarError("%s:%d: Nothing known about the precedence of '%s'" % (file,line,precname))
            else:
                self.UsedPrecedence[precname] = 1
            del syms[-2:]     # Drop %prec from the rule
        else:
            # If no %prec, precedence is determined by the rightmost terminal symbol
            precname = rightmost_terminal(syms,self.Terminals)
            prodprec = self.Precedence.get(precname,('right',0))

        # See if the rule is already in the rulemap
        # (local 'map' shadows the builtin; harmless within this method)
        map = "%s -> %s" % (prodname,syms)
        if map in self.Prodmap:
            m = self.Prodmap[map]
            raise GrammarError("%s:%d: Duplicate rule %s. " % (file,line, m) +
                               "Previous definition at %s:%d" % (m.file, m.line))

        # From this point on, everything is valid.  Create a new Production instance
        pnumber  = len(self.Productions)
        if not prodname in self.Nonterminals:
            self.Nonterminals[prodname] = [ ]

        # Add the production number to Terminals and Nonterminals
        for t in syms:
            if t in self.Terminals:
                self.Terminals[t].append(pnumber)
            else:
                if not t in self.Nonterminals:
                    self.Nonterminals[t] = [ ]
                self.Nonterminals[t].append(pnumber)

        # Create a production and add it to the list of productions
        p = Production(pnumber,prodname,syms,prodprec,func,file,line)
        self.Productions.append(p)
        self.Prodmap[map] = p

        # Add to the global productions list
        try:
            self.Prodnames[prodname].append(p)
        except KeyError:
            self.Prodnames[prodname] = [ p ]
        return 0
+
+    # -----------------------------------------------------------------------------
+    # set_start()
+    #
+    # Sets the starting symbol and creates the augmented grammar.  Production 
+    # rule 0 is S' -> start where start is the start symbol.
+    # -----------------------------------------------------------------------------
+
    def set_start(self,start=None):
        """Set the grammar's start symbol (default: the left-hand side of
        the first rule added) and install the augmented production
        0: S' -> start.  Raises GrammarError for an unknown symbol.
        """
        if not start:
            start = self.Productions[1].name
        if start not in self.Nonterminals:
            raise GrammarError("start symbol %s undefined" % start)
        self.Productions[0] = Production(0,"S'",[start])
        self.Nonterminals[start].append(0)
        self.Start = start
+
+    # -----------------------------------------------------------------------------
+    # find_unreachable()
+    #
+    # Find all of the nonterminal symbols that can't be reached from the starting
+    # symbol.  Returns a list of nonterminals that can't be reached.
+    # -----------------------------------------------------------------------------
+
+    def find_unreachable(self):
+        
+        # Mark all symbols that are reachable from a symbol s
+        def mark_reachable_from(s):
+            if reachable[s]:
+                # We've already reached symbol s.
+                return
+            reachable[s] = 1
+            for p in self.Prodnames.get(s,[]):
+                for r in p.prod:
+                    mark_reachable_from(r)
+
+        reachable   = { }
+        for s in list(self.Terminals) + list(self.Nonterminals):
+            reachable[s] = 0
+
+        mark_reachable_from( self.Productions[0].prod[0] )
+
+        return [s for s in list(self.Nonterminals)
+                        if not reachable[s]]
+    
+    # -----------------------------------------------------------------------------
+    # infinite_cycles()
+    #
+    # This function looks at the various parsing rules and tries to detect
+    # infinite recursion cycles (grammar rules where there is no possible way
+    # to derive a string of only terminals).
+    # -----------------------------------------------------------------------------
+
    def infinite_cycles(self):
        """Return the list of symbols involved in infinite recursion cycles:
        nonterminals from which no string consisting only of terminals can
        ever be derived.

        Works by fixed-point iteration: terminals trivially terminate; a
        production terminates iff every right-hand-side symbol terminates;
        a nonterminal terminates iff any of its productions does.
        """
        terminates = {}

        # Terminals:
        for t in self.Terminals:
            terminates[t] = 1

        terminates['$end'] = 1

        # Nonterminals:

        # Initialize to false:
        for n in self.Nonterminals:
            terminates[n] = 0

        # Then propagate termination until no change:
        while 1:
            some_change = 0
            for (n,pl) in self.Prodnames.items():
                # Nonterminal n terminates iff any of its productions terminates.
                for p in pl:
                    # Production p terminates iff all of its rhs symbols terminate.
                    for s in p.prod:
                        if not terminates[s]:
                            # The symbol s does not terminate,
                            # so production p does not terminate.
                            p_terminates = 0
                            break
                    else:
                        # for/else: we didn't break out of the loop,
                        # so every symbol s terminates
                        # so production p terminates.
                        p_terminates = 1

                    if p_terminates:
                        # symbol n terminates!
                        if not terminates[n]:
                            terminates[n] = 1
                            some_change = 1
                        # Don't need to consider any more productions for this n.
                        break

            if not some_change:
                break

        infinite = []
        for (s,term) in terminates.items():
            if not term:
                if not s in self.Prodnames and not s in self.Terminals and s != 'error':
                    # s is used-but-not-defined, and we've already warned of that,
                    # so it would be overkill to say that it's also non-terminating.
                    pass
                else:
                    infinite.append(s)

        return infinite
+
+
+    # -----------------------------------------------------------------------------
+    # undefined_symbols()
+    #
+    # Find all symbols that were used the grammar, but not defined as tokens or
+    # grammar rules.  Returns a list of tuples (sym, prod) where sym in the symbol
+    # and prod is the production where the symbol was used. 
+    # -----------------------------------------------------------------------------
+    def undefined_symbols(self):
+        result = []
+        for p in self.Productions:
+            if not p: continue
+
+            for s in p.prod:
+                if not s in self.Prodnames and not s in self.Terminals and s != 'error':
+                    result.append((s,p))
+        return result
+
+    # -----------------------------------------------------------------------------
+    # unused_terminals()
+    #
+    # Find all terminals that were defined, but not used by the grammar.  Returns
+    # a list of all symbols.
+    # -----------------------------------------------------------------------------
+    def unused_terminals(self):
+        unused_tok = []
+        for s,v in self.Terminals.items():
+            if s != 'error' and not v:
+                unused_tok.append(s)
+
+        return unused_tok
+
+    # ------------------------------------------------------------------------------
+    # unused_rules()
+    #
+    # Find all grammar rules that were defined,  but not used (maybe not reachable)
+    # Returns a list of productions.
+    # ------------------------------------------------------------------------------
+
+    def unused_rules(self):
+        unused_prod = []
+        for s,v in self.Nonterminals.items():
+            if not v:
+                p = self.Prodnames[s][0]
+                unused_prod.append(p)
+        return unused_prod
+
+    # -----------------------------------------------------------------------------
+    # unused_precedence()
+    #
+    # Returns a list of tuples (term,precedence) corresponding to precedence
+    # rules that were never used by the grammar.  term is the name of the terminal
+    # on which precedence was applied and precedence is a string such as 'left' or
+    # 'right' corresponding to the type of precedence. 
+    # -----------------------------------------------------------------------------
+
+    def unused_precedence(self):
+        unused = []
+        for termname in self.Precedence:
+            if not (termname in self.Terminals or termname in self.UsedPrecedence):
+                unused.append((termname,self.Precedence[termname][0]))
+                
+        return unused
+
+    # -------------------------------------------------------------------------
+    # _first()
+    #
+    # Compute the value of FIRST1(beta) where beta is a tuple of symbols.
+    #
+    # During execution of compute_first1, the result may be incomplete.
+    # Afterward (e.g., when called from compute_follow()), it will be complete.
+    # -------------------------------------------------------------------------
+    def _first(self,beta):
+
+        # We are computing First(x1,x2,x3,...,xn)
+        result = [ ]
+        for x in beta:
+            x_produces_empty = 0
+
+            # Add all the non-<empty> symbols of First[x] to the result.
+            for f in self.First[x]:
+                if f == '<empty>':
+                    x_produces_empty = 1
+                else:
+                    if f not in result: result.append(f)
+
+            if x_produces_empty:
+                # We have to consider the next x in beta,
+                # i.e. stay in the loop.
+                pass
+            else:
+                # We don't have to consider any further symbols in beta.
+                break
+        else:
+            # There was no 'break' from the loop,
+            # so x_produces_empty was true for all x in beta,
+            # so beta produces empty as well.
+            result.append('<empty>')
+
+        return result
+
+    # -------------------------------------------------------------------------
+    # compute_first()
+    #
+    # Compute the value of FIRST1(X) for all symbols
+    # -------------------------------------------------------------------------
+    def compute_first(self):
+        if self.First:
+            return self.First
+
+        # Terminals:
+        for t in self.Terminals:
+            self.First[t] = [t]
+
+        self.First['$end'] = ['$end']
+
+        # Nonterminals:
+
+        # Initialize to the empty set:
+        for n in self.Nonterminals:
+            self.First[n] = []
+
+        # Then propagate symbols until no change:
+        while 1:
+            some_change = 0
+            for n in self.Nonterminals:
+                for p in self.Prodnames[n]:
+                    for f in self._first(p.prod):
+                        if f not in self.First[n]:
+                            self.First[n].append( f )
+                            some_change = 1
+            if not some_change:
+                break
+        
+        return self.First
+
+    # ---------------------------------------------------------------------
+    # compute_follow()
+    #
+    # Computes all of the follow sets for every non-terminal symbol.  The
+    # follow set is the set of all symbols that might follow a given
+    # non-terminal.  See the Dragon book, 2nd Ed. p. 189.
+    # ---------------------------------------------------------------------
    def compute_follow(self,start=None):
        """Compute FOLLOW(B) for every nonterminal B.

        FOLLOW(B) is the set of terminals that can appear immediately
        after B in some sentential form (Dragon book, 2nd Ed. p. 189).
        The result is cached in self.Follow and returned.

        start -- optional start symbol name; defaults to the LHS of the
                 first real production.
        """
        # If already computed, return the result
        if self.Follow:
            return self.Follow

        # If first sets not computed yet, do that first.
        if not self.First:
            self.compute_first()

        # Add '$end' to the follow list of the start symbol
        for k in self.Nonterminals:
            self.Follow[k] = [ ]

        if not start:
            start = self.Productions[1].name

        self.Follow[start] = [ '$end' ]

        # Propagate FOLLOW information until no set changes (fixed point).
        while 1:
            didadd = 0
            for p in self.Productions[1:]:
                # Here is the production set
                for i in range(len(p.prod)):
                    B = p.prod[i]
                    if B in self.Nonterminals:
                        # Okay. We got a non-terminal in a production.
                        # Everything in FIRST(rest-of-production) except
                        # <empty> goes into FOLLOW(B).
                        fst = self._first(p.prod[i+1:])
                        hasempty = 0
                        for f in fst:
                            if f != '<empty>' and f not in self.Follow[B]:
                                self.Follow[B].append(f)
                                didadd = 1
                            if f == '<empty>':
                                hasempty = 1
                        if hasempty or i == (len(p.prod)-1):
                            # B can end the production (or the rest can
                            # vanish): add FOLLOW(lhs) to FOLLOW(B).
                            for f in self.Follow[p.name]:
                                if f not in self.Follow[B]:
                                    self.Follow[B].append(f)
                                    didadd = 1
            if not didadd: break
        return self.Follow
+
+
+    # -----------------------------------------------------------------------------
+    # build_lritems()
+    #
+    # This function walks the list of productions and builds a complete set of the
+    # LR items.  The LR items are stored in two ways:  First, they are uniquely
+    # numbered and placed in the list _lritems.  Second, a linked list of LR items
+    # is built for each production.  For example:
+    #
+    #   E -> E PLUS E
+    #
+    # Creates the list
+    #
+    #  [E -> . E PLUS E, E -> E . PLUS E, E -> E PLUS . E, E -> E PLUS E . ]
+    # -----------------------------------------------------------------------------
+
    def build_lritems(self):
        """Precompute all LR items for every production.

        Each production p gets p.lr_items, the list of LRItem objects
        with the dot at every position (e.g. for E -> E PLUS E:
        [E -> . E PLUS E, E -> E . PLUS E, E -> E PLUS . E,
        E -> E PLUS E .]).  The items are also chained together via
        lr_next, starting from the production object itself and ending
        with None.  Each item caches lr_after (the productions of the
        symbol immediately after the dot) and lr_before (the symbol
        immediately before the dot).
        """
        for p in self.Productions:
            lastlri = p
            i = 0
            lr_items = []
            while 1:
                # One past the last dot position yields the sentinel None.
                if i > len(p):
                    lri = None
                else:
                    lri = LRItem(p,i)
                    # Precompute the list of productions immediately following
                    try:
                        lri.lr_after = self.Prodnames[lri.prod[i+1]]
                    except (IndexError,KeyError):
                        lri.lr_after = []
                    try:
                        lri.lr_before = lri.prod[i-1]
                    except IndexError:
                        lri.lr_before = None

                # Link the previous item (or the production) to this one.
                lastlri.lr_next = lri
                if not lri: break
                lr_items.append(lri)
                lastlri = lri
                i += 1
            p.lr_items = lr_items
+
+# -----------------------------------------------------------------------------
+#                            == Class LRTable ==
+#
+# This basic class represents a basic table of LR parsing information.  
+# Methods for generating the tables are not defined here.  They are defined
+# in the derived class LRGeneratedTable.
+# -----------------------------------------------------------------------------
+
class VersionError(YaccError): pass   # Raised when a stored parse table's version stamp doesn't match __tabversion__
+
class LRTable(object):
    """Container for LR parsing tables (action, goto, productions).

    This class only holds and loads previously generated tables; table
    generation itself lives in the derived LRGeneratedTable class.
    """
    def __init__(self):
        # All four attributes are filled in by read_table()/read_pickle()
        # (or by the generating subclass).
        self.lr_action = None
        self.lr_goto = None
        self.lr_productions = None
        self.lr_method = None

    def read_table(self,module):
        """Load tables from a parsetab module (object or importable name).

        Raises VersionError if the module was generated with a different
        table format version.  Returns the table signature string.
        """
        if isinstance(module,types.ModuleType):
            parsetab = module
        else:
            # Import by name; on Python 3 exec() needs an explicit
            # namespace to retrieve the bound module from.
            if sys.version_info[0] < 3:
                exec("import %s as parsetab" % module)
            else:
                env = { }
                exec("import %s as parsetab" % module, env, env)
                parsetab = env['parsetab']

        if parsetab._tabversion != __tabversion__:
            raise VersionError("yacc table file version is out of date")

        self.lr_action = parsetab._lr_action
        self.lr_goto = parsetab._lr_goto

        # Rehydrate productions from their serialized tuple form.
        self.lr_productions = []
        for p in parsetab._lr_productions:
            self.lr_productions.append(MiniProduction(*p))

        self.lr_method = parsetab._lr_method
        return parsetab._lr_signature

    def read_pickle(self,filename):
        """Load tables from a pickle file previously written by PLY.

        The pickle stream contains, in order: table version, method,
        signature, action table, goto table, productions.  Raises
        VersionError on a version mismatch.  Returns the signature.
        """
        try:
            import cPickle as pickle
        except ImportError:
            import pickle

        in_f = open(filename,"rb")

        tabversion = pickle.load(in_f)
        if tabversion != __tabversion__:
            raise VersionError("yacc table file version is out of date")
        self.lr_method = pickle.load(in_f)
        signature      = pickle.load(in_f)
        self.lr_action = pickle.load(in_f)
        self.lr_goto   = pickle.load(in_f)
        productions    = pickle.load(in_f)

        # Rehydrate productions from their serialized tuple form.
        self.lr_productions = []
        for p in productions:
            self.lr_productions.append(MiniProduction(*p))

        in_f.close()
        return signature

    # Bind all production function names to callable objects in pdict
    def bind_callables(self,pdict):
        for p in self.lr_productions:
            p.bind(pdict)
+    
+# -----------------------------------------------------------------------------
+#                           === LR Generator ===
+#
+# The following classes and functions are used to generate LR parsing tables on 
+# a grammar.
+# -----------------------------------------------------------------------------
+
+# -----------------------------------------------------------------------------
+# digraph()
+# traverse()
+#
+# The following two functions are used to compute set valued functions
+# of the form:
+#
+#     F(x) = F'(x) U U{F(y) | x R y}
+#
+# This is used to compute the values of Read() sets as well as FOLLOW sets
+# in LALR(1) generation.
+#
+# Inputs:  X    - An input set
+#          R    - A relation
+#          FP   - Set-valued function
+# ------------------------------------------------------------------------------
+
def digraph(X,R,FP):
    """Compute the set-valued function F(x) = FP(x) U U{F(y) | x R y}.

    Standard digraph algorithm used for Read() and FOLLOW set
    computation in LALR(1) generation: every not-yet-visited element of
    X is explored with traverse(), which fills in the result table F.
    """
    marks = dict.fromkeys(X, 0)
    stack = []
    F = {}
    for node in X:
        if marks[node] == 0:
            traverse(node, marks, stack, F, X, R, FP)
    return F
+
def traverse(x,N,stack,F,X,R,FP):
    """Visit node x for digraph(): DFS with SCC collapsing.

    N holds traversal numbers (0 = unvisited, MAXINT = finished),
    stack is the active DFS path, and F accumulates the set values.
    """
    stack.append(x)
    d = len(stack)
    N[x] = d
    F[x] = FP(x)             # F(X) <- F'(x)

    rel = R(x)               # Get y's related to x
    for y in rel:
        if N[y] == 0:
             traverse(y,N,stack,F,X,R,FP)
        # Propagate the minimum traversal number and merge F(y) into F(x).
        N[x] = min(N[x],N[y])
        for a in F.get(y,[]):
            if a not in F[x]: F[x].append(a)
    # If x is the root of a strongly connected component, every member
    # of the component gets the same F value and is marked finished.
    if N[x] == d:
       N[stack[-1]] = MAXINT
       F[stack[-1]] = F[x]
       element = stack.pop()
       while element != x:
           N[stack[-1]] = MAXINT
           F[stack[-1]] = F[x]
           element = stack.pop()
+
class LALRError(YaccError): pass   # Raised for errors during LR table generation (e.g. unsupported method)
+
+# -----------------------------------------------------------------------------
+#                             == LRGeneratedTable ==
+#
+# This class implements the LR table generation algorithm.  There are no
+# public methods except for write()
+# -----------------------------------------------------------------------------
+
+class LRGeneratedTable(LRTable):
    def __init__(self,grammar,method='LALR',log=None):
        """Build the LR parse tables for *grammar*.

        method -- 'SLR' or 'LALR'; anything else raises LALRError.
        log    -- optional logger object; a NullLogger is used if omitted.
        """
        if method not in ['SLR','LALR']:
            raise LALRError("Unsupported method %s" % method)

        self.grammar = grammar
        self.lr_method = method

        # Set up the logger
        if not log:
            log = NullLogger()
        self.log = log

        # Internal attributes
        self.lr_action     = {}        # Action table
        self.lr_goto       = {}        # Goto table
        self.lr_productions  = grammar.Productions    # Copy of grammar Production array
        self.lr_goto_cache = {}        # Cache of computed gotos
        self.lr0_cidhash   = {}        # Cache of closures

        self._add_count    = 0         # Internal counter used to detect cycles

        # Diagnostic information filled in by the table generator
        self.sr_conflict   = 0
        self.rr_conflict   = 0
        self.conflicts     = []        # List of conflicts

        self.sr_conflicts  = []
        self.rr_conflicts  = []

        # Build the tables: LR items and FIRST/FOLLOW sets must exist
        # before the parse table can be constructed.
        self.grammar.build_lritems()
        self.grammar.compute_first()
        self.grammar.compute_follow()
        self.lr_parse_table()
+
+    # Compute the LR(0) closure operation on I, where I is a set of LR(0) items.
+
    def lr0_closure(self,I):
        """Return the LR(0) closure of the item set I.

        Repeatedly adds the initial item of every production whose LHS
        appears immediately after a dot (lr_after), until no more items
        can be added.  A per-call _add_count stamp on the productions
        avoids duplicates without a separate membership set.
        """
        self._add_count += 1

        # Add everything in I to J
        J = I[:]
        # Note: J grows while being iterated; newly appended items are
        # picked up by the enclosing while loop's next pass.
        didadd = 1
        while didadd:
            didadd = 0
            for j in J:
                for x in j.lr_after:
                    # Already added during this closure computation?
                    if getattr(x,"lr0_added",0) == self._add_count: continue
                    # Add B --> .G to J
                    J.append(x.lr_next)
                    x.lr0_added = self._add_count
                    didadd = 1

        return J
+
+    # Compute the LR(0) goto function goto(I,X) where I is a set
+    # of LR(0) items and X is a grammar symbol.   This function is written
+    # in a way that guarantees uniqueness of the generated goto sets
+    # (i.e. the same goto set will never be returned as two different Python
+    # objects).  With uniqueness, we can later do fast set comparisons using
+    # id(obj) instead of element-wise comparison.
+
    def lr0_goto(self,I,x):
        """Compute goto(I,x): closure of items in I with the dot moved past x.

        Results are memoized so that identical goto sets are returned as
        the very same list object, which lets later code compare sets
        with id() instead of element-wise.
        """
        # First we look for a previously cached entry
        g = self.lr_goto_cache.get((id(I),x),None)
        if g: return g

        # Now we generate the goto set in a way that guarantees uniqueness
        # of the result

        # s walks a trie (keyed by the identity of each advanced item);
        # the final '$end' slot of the trie holds the canonical goto set
        # for that exact sequence of items.
        s = self.lr_goto_cache.get(x,None)
        if not s:
            s = { }
            self.lr_goto_cache[x] = s

        gs = [ ]
        for p in I:
            n = p.lr_next
            if n and n.lr_before == x:
                s1 = s.get(id(n),None)
                if not s1:
                    s1 = { }
                    s[id(n)] = s1
                gs.append(n)
                s = s1
        g = s.get('$end',None)
        if not g:
            if gs:
                g = self.lr0_closure(gs)
                s['$end'] = g
            else:
                # Empty goto set: cache the empty list itself.
                s['$end'] = gs
        self.lr_goto_cache[(id(I),x)] = g
        return g
+
+    # Compute the LR(0) sets of item function
    def lr0_items(self):
        """Compute the canonical collection C of LR(0) item sets.

        Starts from the closure of the augmented start item and keeps
        taking gotos on every usable symbol until no new sets appear.
        State numbers are recorded in self.lr0_cidhash keyed by the
        id() of each item set.
        """

        C = [ self.lr0_closure([self.grammar.Productions[0].lr_next]) ]
        i = 0
        for I in C:
            self.lr0_cidhash[id(I)] = i
            i += 1

        # Loop over the items in C and each grammar symbols
        # (C grows while we scan it, hence the explicit index.)
        i = 0
        while i < len(C):
            I = C[i]
            i += 1

            # Collect all of the symbols that could possibly be in the goto(I,X) sets
            asyms = { }
            for ii in I:
                for s in ii.usyms:
                    asyms[s] = None

            for x in asyms:
                g = self.lr0_goto(I,x)
                if not g:  continue
                if id(g) in self.lr0_cidhash: continue
                # A new item set: give it the next state number.
                self.lr0_cidhash[id(g)] = len(C)
                C.append(g)

        return C
+
+    # -----------------------------------------------------------------------------
+    #                       ==== LALR(1) Parsing ====
+    #
+    # LALR(1) parsing is almost exactly the same as SLR except that instead of
+    # relying upon Follow() sets when performing reductions, a more selective
+    # lookahead set that incorporates the state of the LR(0) machine is utilized.
+    # Thus, we mainly just have to focus on calculating the lookahead sets.
+    #
+    # The method used here is due to DeRemer and Pennelo (1982).
+    #
+    # DeRemer, F. L., and T. J. Pennelo: "Efficient Computation of LALR(1)
+    #     Lookahead Sets", ACM Transactions on Programming Languages and Systems,
+    #     Vol. 4, No. 4, Oct. 1982, pp. 615-649
+    #
+    # Further details can also be found in:
+    #
+    #  J. Tremblay and P. Sorenson, "The Theory and Practice of Compiler Writing",
+    #      McGraw-Hill Book Company, (1985).
+    #
+    # -----------------------------------------------------------------------------
+
+    # -----------------------------------------------------------------------------
+    # compute_nullable_nonterminals()
+    #
+    # Creates a dictionary containing all of the non-terminals that might produce
+    # an empty production.
+    # -----------------------------------------------------------------------------
+
+    def compute_nullable_nonterminals(self):
+        nullable = {}
+        num_nullable = 0
+        while 1:
+           for p in self.grammar.Productions[1:]:
+               if p.len == 0:
+                    nullable[p.name] = 1
+                    continue
+               for t in p.prod:
+                    if not t in nullable: break
+               else:
+                    nullable[p.name] = 1
+           if len(nullable) == num_nullable: break
+           num_nullable = len(nullable)
+        return nullable
+
+    # -----------------------------------------------------------------------------
+    # find_nonterminal_trans(C)
+    #
+    # Given a set of LR(0) items, this functions finds all of the non-terminal
+    # transitions.    These are transitions in which a dot appears immediately before
+    # a non-terminal.   Returns a list of tuples of the form (state,N) where state
+    # is the state number and N is the nonterminal symbol.
+    #
+    # The input C is the set of LR(0) items.
+    # -----------------------------------------------------------------------------
+
+    def find_nonterminal_transitions(self,C):
+         trans = []
+         for state in range(len(C)):
+             for p in C[state]:
+                 if p.lr_index < p.len - 1:
+                      t = (state,p.prod[p.lr_index+1])
+                      if t[1] in self.grammar.Nonterminals:
+                            if t not in trans: trans.append(t)
+             state = state + 1
+         return trans
+
+    # -----------------------------------------------------------------------------
+    # dr_relation()
+    #
+    # Computes the DR(p,A) relationships for non-terminal transitions.  The input
+    # is a tuple (state,N) where state is a number and N is a nonterminal symbol.
+    #
+    # Returns a list of terminals.
+    # -----------------------------------------------------------------------------
+
+    def dr_relation(self,C,trans,nullable):
+        dr_set = { }
+        state,N = trans
+        terms = []
+
+        g = self.lr0_goto(C[state],N)
+        for p in g:
+           if p.lr_index < p.len - 1:
+               a = p.prod[p.lr_index+1]
+               if a in self.grammar.Terminals:
+                   if a not in terms: terms.append(a)
+
+        # This extra bit is to handle the start state
+        if state == 0 and N == self.grammar.Productions[0].prod[0]:
+           terms.append('$end')
+
+        return terms
+
+    # -----------------------------------------------------------------------------
+    # reads_relation()
+    #
+    # Computes the READS() relation (p,A) READS (t,C).
+    # -----------------------------------------------------------------------------
+
+    def reads_relation(self,C, trans, empty):
+        # Look for empty transitions
+        rel = []
+        state, N = trans
+
+        g = self.lr0_goto(C[state],N)
+        j = self.lr0_cidhash.get(id(g),-1)
+        for p in g:
+            if p.lr_index < p.len - 1:
+                 a = p.prod[p.lr_index + 1]
+                 if a in empty:
+                      rel.append((j,a))
+
+        return rel
+
+    # -----------------------------------------------------------------------------
+    # compute_lookback_includes()
+    #
+    # Determines the lookback and includes relations
+    #
+    # LOOKBACK:
+    #
+    # This relation is determined by running the LR(0) state machine forward.
+    # For example, starting with a production "N : . A B C", we run it forward
+    # to obtain "N : A B C ."   We then build a relationship between this final
+    # state and the starting state.   These relationships are stored in a dictionary
+    # lookdict.
+    #
+    # INCLUDES:
+    #
+    # Computes the INCLUDE() relation (p,A) INCLUDES (p',B).
+    #
+    # This relation is used to determine non-terminal transitions that occur
+    # inside of other non-terminal transition states.   (p,A) INCLUDES (p', B)
+    # if the following holds:
+    #
+    #       B -> LAT, where T -> epsilon and p' -L-> p
+    #
+    # L is essentially a prefix (which may be empty), T is a suffix that must be
+    # able to derive an empty string.  State p' must lead to state p with the string L.
+    #
+    # -----------------------------------------------------------------------------
+
    def compute_lookback_includes(self,C,trans,nullable):
        """Compute the LOOKBACK and INCLUDES relations for all transitions.

        Returns (lookdict, includedict): lookdict maps each (state, N)
        transition to its list of lookback (final_state, production)
        pairs; includedict maps a transition to the list of transitions
        that INCLUDE it.
        """

        lookdict = {}          # Dictionary of lookback relations
        includedict = {}       # Dictionary of include relations

        # Make a dictionary of non-terminal transitions
        dtrans = {}
        for t in trans:
            dtrans[t] = 1

        # Loop over all transitions and compute lookbacks and includes
        for state,N in trans:
            lookb = []
            includes = []
            for p in C[state]:
                if p.name != N: continue

                # Okay, we have a name match.  We now follow the production all the way
                # through the state machine until we get the . on the right hand side

                lr_index = p.lr_index
                j = state
                while lr_index < p.len - 1:
                     lr_index = lr_index + 1
                     t = p.prod[lr_index]

                     # Check to see if this symbol and state are a non-terminal transition
                     if (j,t) in dtrans:
                           # Yes.  Okay, there is some chance that this is an includes relation
                           # the only way to know for certain is whether the rest of the
                           # production derives empty

                           li = lr_index + 1
                           while li < p.len:
                                if p.prod[li] in self.grammar.Terminals: break      # No forget it
                                if not p.prod[li] in nullable: break
                                li = li + 1
                           else:
                                # Appears to be a relation between (j,t) and (state,N)
                                includes.append((j,t))

                     g = self.lr0_goto(C[j],t)               # Go to next set
                     j = self.lr0_cidhash.get(id(g),-1)     # Go to next state

                # When we get here, j is the final state, now we have to locate the production
                for r in C[j]:
                     if r.name != p.name: continue
                     if r.len != p.len:   continue
                     i = 0
                     # This loop is comparing a production ". A B C" with "A B C ."
                     while i < r.lr_index:
                          if r.prod[i] != p.prod[i+1]: break
                          i = i + 1
                     else:
                          lookb.append((j,r))
            # Record the INCLUDES edges (inverted: i is included by (state,N))
            # and the lookbacks for this transition.
            for i in includes:
                 if not i in includedict: includedict[i] = []
                 includedict[i].append((state,N))
            lookdict[(state,N)] = lookb

        return lookdict,includedict
+
+    # -----------------------------------------------------------------------------
+    # compute_read_sets()
+    #
+    # Given a set of LR(0) items, this function computes the read sets.
+    #
+    # Inputs:  C        =  Set of LR(0) items
+    #          ntrans   = Set of nonterminal transitions
+    #          nullable = Set of empty transitions
+    #
+    # Returns a set containing the read sets
+    # -----------------------------------------------------------------------------
+
+    def compute_read_sets(self,C, ntrans, nullable):
+        FP = lambda x: self.dr_relation(C,x,nullable)
+        R =  lambda x: self.reads_relation(C,x,nullable)
+        F = digraph(ntrans,R,FP)
+        return F
+
+    # -----------------------------------------------------------------------------
+    # compute_follow_sets()
+    #
+    # Given a set of LR(0) items, a set of non-terminal transitions, a readset,
+    # and an include set, this function computes the follow sets
+    #
+    # Follow(p,A) = Read(p,A) U U {Follow(p',B) | (p,A) INCLUDES (p',B)}
+    #
+    # Inputs:
+    #            ntrans     = Set of nonterminal transitions
+    #            readsets   = Readset (previously computed)
+    #            inclsets   = Include sets (previously computed)
+    #
+    # Returns a set containing the follow sets
+    # -----------------------------------------------------------------------------
+
+    def compute_follow_sets(self,ntrans,readsets,inclsets):
+         FP = lambda x: readsets[x]
+         R  = lambda x: inclsets.get(x,[])
+         F = digraph(ntrans,R,FP)
+         return F
+
+    # -----------------------------------------------------------------------------
+    # add_lookaheads()
+    #
+    # Attaches the lookahead symbols to grammar rules.
+    #
+    # Inputs:    lookbacks         -  Set of lookback relations
+    #            followset         -  Computed follow set
+    #
+    # This function directly attaches the lookaheads to productions contained
+    # in the lookbacks set
+    # -----------------------------------------------------------------------------
+
+    def add_lookaheads(self,lookbacks,followset):
+        for trans,lb in lookbacks.items():
+            # Loop over productions in lookback
+            for state,p in lb:
+                 if not state in p.lookaheads:
+                      p.lookaheads[state] = []
+                 f = followset.get(trans,[])
+                 for a in f:
+                      if a not in p.lookaheads[state]: p.lookaheads[state].append(a)
+
+    # -----------------------------------------------------------------------------
+    # add_lalr_lookaheads()
+    #
+    # This function does all of the work of adding lookahead information for use
+    # with LALR parsing
+    # -----------------------------------------------------------------------------
+
+    def add_lalr_lookaheads(self,C):
+        # Determine all of the nullable nonterminals
+        nullable = self.compute_nullable_nonterminals()
+
+        # Find all non-terminal transitions
+        trans = self.find_nonterminal_transitions(C)
+
+        # Compute read sets
+        readsets = self.compute_read_sets(C,trans,nullable)
+
+        # Compute lookback/includes relations
+        lookd, included = self.compute_lookback_includes(C,trans,nullable)
+
+        # Compute LALR FOLLOW sets
+        followsets = self.compute_follow_sets(trans,readsets,included)
+
+        # Add all of the lookaheads
+        self.add_lookaheads(lookd,followsets)
+
+    # -----------------------------------------------------------------------------
+    # lr_parse_table()
+    #
+    # This function constructs the parse tables for SLR or LALR
+    # -----------------------------------------------------------------------------
    def lr_parse_table(self):
        """Construct the LR parsing tables for the grammar.

        Builds the collection of LR(0) item sets, adds LALR(1)
        lookaheads when self.lr_method == 'LALR', then fills in
        self.lr_action and self.lr_goto one state at a time, resolving
        shift/reduce and reduce/reduce conflicts with the precedence
        table and recording unresolved conflicts in self.sr_conflicts /
        self.rr_conflicts.

        Action encoding per (state, terminal): positive int = shift to
        that state, negative int = reduce by rule -n, 0 = accept,
        None = error (used for 'nonassoc' conflicts).
        """
        Productions = self.grammar.Productions
        Precedence  = self.grammar.Precedence
        goto   = self.lr_goto         # Goto array
        action = self.lr_action       # Action array
        log    = self.log             # Logger for output

        actionp = { }                 # Action production array (temporary)

        log.info("Parsing method: %s", self.lr_method)

        # Step 1: Construct C = { I0, I1, ... IN}, collection of LR(0) items
        # This determines the number of states

        C = self.lr0_items()

        if self.lr_method == 'LALR':
            self.add_lalr_lookaheads(C)

        # Build the parser table, state by state
        st = 0
        for I in C:
            # Loop over each production in I
            actlist = [ ]              # List of actions
            st_action  = { }
            st_actionp = { }
            st_goto    = { }
            log.info("")
            log.info("state %d", st)
            log.info("")
            for p in I:
                log.info("    (%d) %s", p.number, str(p))
            log.info("")

            # Decide an action for every item in this state.
            for p in I:
                    if p.len == p.lr_index + 1:
                        if p.name == "S'":
                            # Start symbol. Accept!
                            st_action["$end"] = 0
                            st_actionp["$end"] = p
                        else:
                            # We are at the end of a production.  Reduce!
                            # LALR reduces only on the item's computed
                            # lookaheads; SLR reduces on FOLLOW(name).
                            if self.lr_method == 'LALR':
                                laheads = p.lookaheads[st]
                            else:
                                laheads = self.grammar.Follow[p.name]
                            for a in laheads:
                                actlist.append((a,p,"reduce using rule %d (%s)" % (p.number,p)))
                                r = st_action.get(a,None)
                                if r is not None:
                                    # Whoa. Have a shift/reduce or reduce/reduce conflict
                                    if r > 0:
                                        # Need to decide on shift or reduce here
                                        # By default we favor shifting. Need to add
                                        # some precedence rules here.
                                        sprec,slevel = Productions[st_actionp[a].number].prec
                                        rprec,rlevel = Precedence.get(a,('right',0))
                                        if (slevel < rlevel) or ((slevel == rlevel) and (rprec == 'left')):
                                            # We really need to reduce here.
                                            st_action[a] = -p.number
                                            st_actionp[a] = p
                                            if not slevel and not rlevel:
                                                log.info("  ! shift/reduce conflict for %s resolved as reduce",a)
                                                self.sr_conflicts.append((st,a,'reduce'))
                                            Productions[p.number].reduced += 1
                                        elif (slevel == rlevel) and (rprec == 'nonassoc'):
                                            # Equal precedence, nonassoc: error entry.
                                            st_action[a] = None
                                        else:
                                            # Hmmm. Guess we'll keep the shift
                                            if not rlevel:
                                                log.info("  ! shift/reduce conflict for %s resolved as shift",a)
                                                self.sr_conflicts.append((st,a,'shift'))
                                    elif r < 0:
                                        # Reduce/reduce conflict.   In this case, we favor the rule
                                        # that was defined first in the grammar file
                                        oldp = Productions[-r]
                                        pp = Productions[p.number]
                                        if oldp.line > pp.line:
                                            st_action[a] = -p.number
                                            st_actionp[a] = p
                                            chosenp,rejectp = pp,oldp
                                            Productions[p.number].reduced += 1
                                            Productions[oldp.number].reduced -= 1
                                        else:
                                            chosenp,rejectp = oldp,pp
                                        self.rr_conflicts.append((st,chosenp,rejectp))
                                        log.info("  ! reduce/reduce conflict for %s resolved using rule %d (%s)", a,st_actionp[a].number, st_actionp[a])
                                    else:
                                        raise LALRError("Unknown conflict in state %d" % st)
                                else:
                                    st_action[a] = -p.number
                                    st_actionp[a] = p
                                    Productions[p.number].reduced += 1
                    else:
                        i = p.lr_index
                        a = p.prod[i+1]       # Get symbol right after the "."
                        if a in self.grammar.Terminals:
                            g = self.lr0_goto(I,a)
                            j = self.lr0_cidhash.get(id(g),-1)
                            if j >= 0:
                                # We are in a shift state
                                actlist.append((a,p,"shift and go to state %d" % j))
                                r = st_action.get(a,None)
                                if r is not None:
                                    # Whoa have a shift/reduce or shift/shift conflict
                                    if r > 0:
                                        if r != j:
                                            raise LALRError("Shift/shift conflict in state %d" % st)
                                    elif r < 0:
                                        # Do a precedence check.
                                        #   -  if precedence of reduce rule is higher, we reduce.
                                        #   -  if precedence of reduce is same and left assoc, we reduce.
                                        #   -  otherwise we shift
                                        rprec,rlevel = Productions[st_actionp[a].number].prec
                                        sprec,slevel = Precedence.get(a,('right',0))
                                        if (slevel > rlevel) or ((slevel == rlevel) and (rprec == 'right')):
                                            # We decide to shift here... highest precedence to shift
                                            Productions[st_actionp[a].number].reduced -= 1
                                            st_action[a] = j
                                            st_actionp[a] = p
                                            if not rlevel:
                                                log.info("  ! shift/reduce conflict for %s resolved as shift",a)
                                                self.sr_conflicts.append((st,a,'shift'))
                                        elif (slevel == rlevel) and (rprec == 'nonassoc'):
                                            st_action[a] = None
                                        else:
                                            # Hmmm. Guess we'll keep the reduce
                                            if not slevel and not rlevel:
                                                log.info("  ! shift/reduce conflict for %s resolved as reduce",a)
                                                self.sr_conflicts.append((st,a,'reduce'))

                                    else:
                                        raise LALRError("Unknown conflict in state %d" % st)
                                else:
                                    st_action[a] = j
                                    st_actionp[a] = p

            # Print the actions associated with each terminal
            _actprint = { }
            for a,p,m in actlist:
                if a in st_action:
                    if p is st_actionp[a]:
                        log.info("    %-15s %s",a,m)
                        _actprint[(a,m)] = 1
            log.info("")
            # Print the actions that were not used. (debugging)
            not_used = 0
            for a,p,m in actlist:
                if a in st_action:
                    if p is not st_actionp[a]:
                        if not (a,m) in _actprint:
                            log.debug("  ! %-15s [ %s ]",a,m)
                            not_used = 1
                            _actprint[(a,m)] = 1
            if not_used:
                log.debug("")

            # Construct the goto table for this state

            nkeys = { }
            for ii in I:
                for s in ii.usyms:
                    if s in self.grammar.Nonterminals:
                        nkeys[s] = None
            for n in nkeys:
                g = self.lr0_goto(I,n)
                j = self.lr0_cidhash.get(id(g),-1)
                if j >= 0:
                    st_goto[n] = j
                    log.info("    %-30s shift and go to state %d",n,j)

            # Commit this state's rows into the shared tables.
            action[st] = st_action
            actionp[st] = st_actionp
            goto[st] = st_goto
            st += 1
+
+
+    # -----------------------------------------------------------------------------
+    # write()
+    #
+    # This function writes the LR parsing tables to a file
+    # -----------------------------------------------------------------------------
+
+    def write_table(self,modulename,outputdir='',signature=""):
+        basemodulename = modulename.split(".")[-1]
+        filename = os.path.join(outputdir,basemodulename) + ".py"
+        try:
+            f = open(filename,"w")
+
+            f.write("""
+# %s
+# This file is automatically generated. Do not edit.
+_tabversion = %r
+
+_lr_method = %r
+
+_lr_signature = %r
+    """ % (filename, __tabversion__, self.lr_method, signature))
+
+            # Change smaller to 0 to go back to original tables
+            smaller = 1
+
+            # Factor out names to try and make smaller
+            if smaller:
+                items = { }
+
+                for s,nd in self.lr_action.items():
+                   for name,v in nd.items():
+                      i = items.get(name)
+                      if not i:
+                         i = ([],[])
+                         items[name] = i
+                      i[0].append(s)
+                      i[1].append(v)
+
+                f.write("\n_lr_action_items = {")
+                for k,v in items.items():
+                    f.write("%r:([" % k)
+                    for i in v[0]:
+                        f.write("%r," % i)
+                    f.write("],[")
+                    for i in v[1]:
+                        f.write("%r," % i)
+
+                    f.write("]),")
+                f.write("}\n")
+
+                f.write("""
+_lr_action = { }
+for _k, _v in _lr_action_items.items():
+   for _x,_y in zip(_v[0],_v[1]):
+      if not _x in _lr_action:  _lr_action[_x] = { }
+      _lr_action[_x][_k] = _y
+del _lr_action_items
+""")
+
+            else:
+                f.write("\n_lr_action = { ");
+                for k,v in self.lr_action.items():
+                    f.write("(%r,%r):%r," % (k[0],k[1],v))
+                f.write("}\n");
+
+            if smaller:
+                # Factor out names to try and make smaller
+                items = { }
+
+                for s,nd in self.lr_goto.items():
+                   for name,v in nd.items():
+                      i = items.get(name)
+                      if not i:
+                         i = ([],[])
+                         items[name] = i
+                      i[0].append(s)
+                      i[1].append(v)
+
+                f.write("\n_lr_goto_items = {")
+                for k,v in items.items():
+                    f.write("%r:([" % k)
+                    for i in v[0]:
+                        f.write("%r," % i)
+                    f.write("],[")
+                    for i in v[1]:
+                        f.write("%r," % i)
+
+                    f.write("]),")
+                f.write("}\n")
+
+                f.write("""
+_lr_goto = { }
+for _k, _v in _lr_goto_items.items():
+   for _x,_y in zip(_v[0],_v[1]):
+       if not _x in _lr_goto: _lr_goto[_x] = { }
+       _lr_goto[_x][_k] = _y
+del _lr_goto_items
+""")
+            else:
+                f.write("\n_lr_goto = { ");
+                for k,v in self.lr_goto.items():
+                    f.write("(%r,%r):%r," % (k[0],k[1],v))
+                f.write("}\n");
+
+            # Write production table
+            f.write("_lr_productions = [\n")
+            for p in self.lr_productions:
+                if p.func:
+                    f.write("  (%r,%r,%d,%r,%r,%d),\n" % (p.str,p.name, p.len, p.func,p.file,p.line))
+                else:
+                    f.write("  (%r,%r,%d,None,None,None),\n" % (str(p),p.name, p.len))
+            f.write("]\n")
+            f.close()
+
+        except IOError:
+            e = sys.exc_info()[1]
+            sys.stderr.write("Unable to create '%s'\n" % filename)
+            sys.stderr.write(str(e)+"\n")
+            return
+
+
+    # -----------------------------------------------------------------------------
+    # pickle_table()
+    #
+    # This function pickles the LR parsing tables to a supplied file object
+    # -----------------------------------------------------------------------------
+
+    def pickle_table(self,filename,signature=""):
+        try:
+            import cPickle as pickle
+        except ImportError:
+            import pickle
+        outf = open(filename,"wb")
+        pickle.dump(__tabversion__,outf,pickle_protocol)
+        pickle.dump(self.lr_method,outf,pickle_protocol)
+        pickle.dump(signature,outf,pickle_protocol)
+        pickle.dump(self.lr_action,outf,pickle_protocol)
+        pickle.dump(self.lr_goto,outf,pickle_protocol)
+
+        outp = []
+        for p in self.lr_productions:
+            if p.func:
+                outp.append((p.str,p.name, p.len, p.func,p.file,p.line))
+            else:
+                outp.append((str(p),p.name,p.len,None,None,None))
+        pickle.dump(outp,outf,pickle_protocol)
+        outf.close()
+
+# -----------------------------------------------------------------------------
+#                            === INTROSPECTION ===
+#
+# The following functions and classes are used to implement the PLY
+# introspection features followed by the yacc() function itself.
+# -----------------------------------------------------------------------------
+
+# -----------------------------------------------------------------------------
+# get_caller_module_dict()
+#
+# This function returns a dictionary containing all of the symbols defined within
+# a caller further down the call stack.  This is used to get the environment
+# associated with the yacc() call if none was provided.
+# -----------------------------------------------------------------------------
+
def get_caller_module_dict(levels):
    """Return the symbol table of the frame *levels* calls up the stack.

    The result is that frame's globals, updated with its locals (so
    names defined locally shadow module-level ones).  yacc() uses this
    to discover grammar definitions when no module object is supplied.
    """
    # sys._getframe(0) is this function's own frame, so an offset of
    # `levels` reaches exactly the frame the original raise/except +
    # traceback walk (tb_frame followed by `levels` f_back hops) did,
    # without the cost and noise of raising an exception.
    f = sys._getframe(levels)
    ldict = f.f_globals.copy()
    if f.f_globals != f.f_locals:
        ldict.update(f.f_locals)
    return ldict
+
+# -----------------------------------------------------------------------------
+# parse_grammar()
+#
+# This takes a raw grammar rule string and parses it into production data
+# -----------------------------------------------------------------------------
def parse_grammar(doc, file, line):
    """Parse a raw grammar docstring into production tuples.

    Each non-blank line of *doc* is either ``name : sym sym ...``
    (``::=`` is also accepted) or a ``| sym ...`` continuation of the
    previous rule.  Returns a list of (file, lineno, prodname, syms)
    tuples; *line* is the line number of the docstring itself, so the
    first grammar line is reported as line + 1.  Raises SyntaxError on
    malformed input.
    """
    grammar = []
    lastp = None        # Name of the most recent rule, for '|' continuations
    for offset, raw in enumerate(doc.splitlines(), start=1):
        dline = line + offset
        parts = raw.split()
        if not parts:
            continue
        try:
            if parts[0] == '|':
                # Continuation of the previous rule
                if not lastp:
                    raise SyntaxError("%s:%d: Misplaced '|'" % (file,dline))
                prodname = lastp
                syms = parts[1:]
            else:
                prodname = parts[0]
                lastp = prodname
                syms = parts[2:]
                # Missing separator token raises IndexError here, which
                # is converted to a SyntaxError below.
                if parts[1] != ':' and parts[1] != '::=':
                    raise SyntaxError("%s:%d: Syntax error. Expected ':'" % (file,dline))

            grammar.append((file, dline, prodname, syms))
        except SyntaxError:
            raise
        except Exception:
            raise SyntaxError("%s:%d: Syntax error in rule '%s'" % (file,dline,raw.strip()))

    return grammar
+
+# -----------------------------------------------------------------------------
+# ParserReflect()
+#
+# This class represents information extracted for building a parser including
+# start symbol, error function, tokens, precedence list, action functions,
+# etc.
+# -----------------------------------------------------------------------------
class ParserReflect(object):
    """Extracts and validates a parser specification from a symbol table.

    Pulls the start symbol, p_error handler, token list, precedence
    table and all p_* rule functions out of *pdict* (typically a
    module's or the caller's dictionary), validates them, and exposes
    the collected grammar for yacc() to build tables from.
    """
    def __init__(self,pdict,log=None):
        self.pdict      = pdict       # symbol table being inspected
        self.start      = None        # start symbol ('start' entry), if any
        self.error_func = None        # p_error handler, if any
        self.tokens     = None        # token name list
        self.files      = {}          # source files containing rule functions
        self.grammar    = []          # collected (funcname, production) entries
        self.error      = 0           # set to 1 when any validation fails

        # Default to logging on stderr when no logger is supplied.
        if log is None:
            self.log = PlyLogger(sys.stderr)
        else:
            self.log = log

    # Get all of the basic information
    def get_all(self):
        """Collect start symbol, error handler, tokens, precedence and rules."""
        self.get_start()
        self.get_error_func()
        self.get_tokens()
        self.get_precedence()
        self.get_pfunctions()

    # Validate all of the information
    def validate_all(self):
        """Run every validation step; return nonzero if any failed."""
        self.validate_start()
        self.validate_error_func()
        self.validate_tokens()
        self.validate_precedence()
        self.validate_pfunctions()
        self.validate_files()
        return self.error

    # Compute a signature over the grammar
    def signature(self):
        """Return an MD5 digest of the grammar specification.

        Used by yacc() to decide whether a cached table file is still
        valid.  NOTE(review): reads self.prec/self.tokens/self.pfuncs,
        which are set by get_all() -- call that first.
        """
        try:
            from hashlib import md5
        except ImportError:
            from md5 import md5       # fallback for very old Pythons
        try:
            sig = md5()
            if self.start:
                sig.update(self.start.encode('latin-1'))
            if self.prec:
                sig.update("".join(["".join(p) for p in self.prec]).encode('latin-1'))
            if self.tokens:
                sig.update(" ".join(self.tokens).encode('latin-1'))
            for f in self.pfuncs:
                if f[3]:
                    sig.update(f[3].encode('latin-1'))
        except (TypeError,ValueError):
            # Malformed entries are reported by the validators; the
            # signature is simply computed from whatever hashed so far.
            pass
        return sig.digest()

    # -----------------------------------------------------------------------------
    # validate_file()
    #
    # This method checks to see if there are duplicated p_rulename() functions
    # in the parser module file.  Without this function, it is really easy for
    # users to make mistakes by cutting and pasting code fragments (and it's a real
    # bugger to try and figure out why the resulting parser doesn't work).  Therefore,
    # we just do a little regular expression pattern matching of def statements
    # to try and detect duplicates.
    # -----------------------------------------------------------------------------

    def validate_files(self):
        """Warn about p_rulename() functions defined more than once per file."""
        # Match def p_funcname(
        fre = re.compile(r'\s*def\s+(p_[a-zA-Z_0-9]*)\(')

        for filename in self.files.keys():
            base,ext = os.path.splitext(filename)
            if ext != '.py': return 1          # No idea. Assume it's okay.
            # NOTE(review): the early return above aborts the whole scan,
            # skipping any remaining files -- confirm this is intended.

            try:
                f = open(filename)
                lines = f.readlines()
                f.close()
            except IOError:
                # Source not readable (e.g. running from a zip); skip it.
                continue

            counthash = { }
            for linen,l in enumerate(lines):
                linen += 1
                m = fre.match(l)
                if m:
                    name = m.group(1)
                    prev = counthash.get(name)
                    if not prev:
                        counthash[name] = linen
                    else:
                        self.log.warning("%s:%d: Function %s redefined. Previously defined on line %d", filename,linen,name,prev)

    # Get the start symbol
    def get_start(self):
        """Fetch the optional module-level 'start' symbol."""
        self.start = self.pdict.get('start')

    # Validate the start symbol
    def validate_start(self):
        """Check that 'start', when given, is a string."""
        if self.start is not None:
            if not isinstance(self.start,str):
                self.log.error("'start' must be a string")

    # Look for error handler
    def get_error_func(self):
        """Fetch the optional p_error() handler."""
        self.error_func = self.pdict.get('p_error')

    # Validate the error function
    def validate_error_func(self):
        """Check that p_error(), when given, is a 1-argument function/method."""
        if self.error_func:
            if isinstance(self.error_func,types.FunctionType):
                ismethod = 0
            elif isinstance(self.error_func, types.MethodType):
                # Methods take an extra 'self' argument.
                ismethod = 1
            else:
                self.log.error("'p_error' defined, but is not a function or method")
                self.error = 1
                return

            # func_code() is presumably a py2/py3 compatibility accessor
            # defined earlier in this module -- it yields the code object.
            eline = func_code(self.error_func).co_firstlineno
            efile = func_code(self.error_func).co_filename
            self.files[efile] = 1

            if (func_code(self.error_func).co_argcount != 1+ismethod):
                self.log.error("%s:%d: p_error() requires 1 argument",efile,eline)
                self.error = 1

    # Get the tokens map
    def get_tokens(self):
        """Fetch and sanity-check the module-level 'tokens' list."""
        tokens = self.pdict.get("tokens",None)
        if not tokens:
            self.log.error("No token list is defined")
            self.error = 1
            return

        if not isinstance(tokens,(list, tuple)):
            self.log.error("tokens must be a list or tuple")
            self.error = 1
            return

        if not tokens:
            self.log.error("tokens is empty")
            self.error = 1
            return

        self.tokens = tokens

    # Validate the tokens
    def validate_tokens(self):
        # Validate the tokens.
        if 'error' in self.tokens:
            # 'error' is reserved for PLY's error-recovery token.
            self.log.error("Illegal token name 'error'. Is a reserved word")
            self.error = 1
            return

        terminals = {}
        for n in self.tokens:
            if n in terminals:
                self.log.warning("Token '%s' multiply defined", n)
            terminals[n] = 1

    # Get the precedence map (if any)
    def get_precedence(self):
        """Fetch the optional module-level 'precedence' table."""
        self.prec = self.pdict.get("precedence",None)

    # Validate and parse the precedence map
    def validate_precedence(self):
        """Flatten the precedence table into (term, assoc, level) tuples.

        Level numbering starts at 1 and follows the order of entries in
        the 'precedence' list.  The result is stored in self.preclist.
        """
        preclist = []
        if self.prec:
            if not isinstance(self.prec,(list,tuple)):
                self.log.error("precedence must be a list or tuple")
                self.error = 1
                return
            for level,p in enumerate(self.prec):
                if not isinstance(p,(list,tuple)):
                    self.log.error("Bad precedence table")
                    self.error = 1
                    return

                if len(p) < 2:
                    self.log.error("Malformed precedence entry %s. Must be (assoc, term, ..., term)",p)
                    self.error = 1
                    return
                assoc = p[0]
                if not isinstance(assoc,str):
                    self.log.error("precedence associativity must be a string")
                    self.error = 1
                    return
                for term in p[1:]:
                    if not isinstance(term,str):
                        self.log.error("precedence items must be strings")
                        self.error = 1
                        return
                    preclist.append((term,assoc,level+1))
        self.preclist = preclist

    # Get all p_functions from the grammar
    def get_pfunctions(self):
        """Collect all p_* rule callables as (line, file, name, docstring)."""
        p_functions = []
        for name, item in self.pdict.items():
            if name[:2] != 'p_': continue
            if name == 'p_error': continue
            if isinstance(item,(types.FunctionType,types.MethodType)):
                line = func_code(item).co_firstlineno
                file = func_code(item).co_filename
                p_functions.append((line,file,name,item.__doc__))

        # Sort all of the actions by line number
        p_functions.sort()
        self.pfuncs = p_functions


    # Validate all of the p_functions
    def validate_pfunctions(self):
        """Check every p_* function's arity and parse its grammar docstring.

        Valid rules are accumulated into self.grammar as
        (funcname, (file, line, prodname, syms)) entries.
        """
        grammar = []
        # Check for non-empty symbols
        if len(self.pfuncs) == 0:
            self.log.error("no rules of the form p_rulename are defined")
            self.error = 1
            return

        for line, file, name, doc in self.pfuncs:
            func = self.pdict[name]
            if isinstance(func, types.MethodType):
                reqargs = 2
            else:
                reqargs = 1
            if func_code(func).co_argcount > reqargs:
                self.log.error("%s:%d: Rule '%s' has too many arguments",file,line,func.__name__)
                self.error = 1
            elif func_code(func).co_argcount < reqargs:
                self.log.error("%s:%d: Rule '%s' requires an argument",file,line,func.__name__)
                self.error = 1
            elif not func.__doc__:
                self.log.warning("%s:%d: No documentation string specified in function '%s' (ignored)",file,line,func.__name__)
            else:
                try:
                    parsed_g = parse_grammar(doc,file,line)
                    for g in parsed_g:
                        grammar.append((name, g))
                except SyntaxError:
                    e = sys.exc_info()[1]
                    self.log.error(str(e))
                    self.error = 1

                # Looks like a valid grammar rule
                # Mark the file in which defined.
                self.files[file] = 1

        # Secondary validation step that looks for p_ definitions that are not functions
        # or functions that look like they might be grammar rules.

        for n,v in self.pdict.items():
            if n[0:2] == 'p_' and isinstance(v, (types.FunctionType, types.MethodType)): continue
            if n[0:2] == 't_': continue
            if n[0:2] == 'p_' and n != 'p_error':
                self.log.warning("'%s' not defined as a function", n)
            if ((isinstance(v,types.FunctionType) and func_code(v).co_argcount == 1) or
                (isinstance(v,types.MethodType) and func_code(v).co_argcount == 2)):
                try:
                    # A docstring of the form "name : ..." suggests the
                    # author forgot the p_ prefix on a rule function.
                    doc = v.__doc__.split(" ")
                    if doc[1] == ':':
                        self.log.warning("%s:%d: Possible grammar rule '%s' defined without p_ prefix",
                                         func_code(v).co_filename, func_code(v).co_firstlineno,n)
                except Exception:
                    pass

        self.grammar = grammar
+
+# -----------------------------------------------------------------------------
+# yacc(module)
+#
+# Build a parser
+# -----------------------------------------------------------------------------
+
def yacc(method='LALR', debug=yaccdebug, module=None, tabmodule=tab_module, start=None, 
         check_recursion=1, optimize=0, write_tables=1, debugfile=debug_file,outputdir='',
         debuglog=None, errorlog = None, picklefile=None):
    """Build and return an LRParser from grammar rules.

    Rules are discovered in *module* (or, by default, in the caller's
    namespace) via ParserReflect.  Previously generated tables are
    reused when their signature matches the current grammar; otherwise
    new tables are constructed with *method* ('LALR' or 'SLR') and,
    depending on *write_tables*/*picklefile*, cached as a module named
    *tabmodule* or pickled to *picklefile*.  Raises YaccError when the
    grammar is invalid.  Also rebinds the module-global `parse` to the
    new parser's parse method.
    """

    global parse                 # Reference to the parsing method of the last built parser

    # If pickling is enabled, table files are not created

    if picklefile:
        write_tables = 0

    if errorlog is None:
        errorlog = PlyLogger(sys.stderr)

    # Get the module dictionary used for the parser
    if module:
        _items = [(k,getattr(module,k)) for k in dir(module)]
        pdict = dict(_items)
    else:
        pdict = get_caller_module_dict(2)

    # Collect parser information from the dictionary
    pinfo = ParserReflect(pdict,log=errorlog)
    pinfo.get_all()

    if pinfo.error:
        raise YaccError("Unable to build parser")

    # Check signature against table files (if any)
    signature = pinfo.signature()

    # Read the tables
    try:
        lr = LRTable()
        if picklefile:
            read_signature = lr.read_pickle(picklefile)
        else:
            read_signature = lr.read_table(tabmodule)
        if optimize or (read_signature == signature):
            try:
                lr.bind_callables(pinfo.pdict)
                parser = LRParser(lr,pinfo.error_func)
                parse = parser.parse
                return parser
            except Exception:
                e = sys.exc_info()[1]
                errorlog.warning("There was a problem loading the table file: %s", repr(e))
    except VersionError:
        # Fix: take the exception instance (index 1), not the whole
        # (type, value, traceback) tuple, so the warning is readable.
        e = sys.exc_info()[1]
        errorlog.warning(str(e))
    except Exception:
        # No cached tables (or unreadable ones): fall through and build.
        pass

    if debuglog is None:
        if debug:
            debuglog = PlyLogger(open(debugfile,"w"))
        else:
            debuglog = NullLogger()

    debuglog.info("Created by PLY version %s (http://www.dabeaz.com/ply)", __version__)


    errors = 0

    # Validate the parser information
    if pinfo.validate_all():
        raise YaccError("Unable to build parser")

    if not pinfo.error_func:
        errorlog.warning("no p_error() function is defined")

    # Create a grammar object
    grammar = Grammar(pinfo.tokens)

    # Set precedence level for terminals
    for term, assoc, level in pinfo.preclist:
        try:
            grammar.set_precedence(term,assoc,level)
        except GrammarError:
            e = sys.exc_info()[1]
            errorlog.warning("%s",str(e))

    # Add productions to the grammar
    for funcname, gram in pinfo.grammar:
        file, line, prodname, syms = gram
        try:
            grammar.add_production(prodname,syms,funcname,file,line)
        except GrammarError:
            e = sys.exc_info()[1]
            errorlog.error("%s",str(e))
            errors = 1

    # Set the grammar start symbols
    try:
        if start is None:
            grammar.set_start(pinfo.start)
        else:
            grammar.set_start(start)
    except GrammarError:
        e = sys.exc_info()[1]
        errorlog.error(str(e))
        errors = 1

    if errors:
        raise YaccError("Unable to build parser")

    # Verify the grammar structure
    undefined_symbols = grammar.undefined_symbols()
    for sym, prod in undefined_symbols:
        errorlog.error("%s:%d: Symbol '%s' used, but not defined as a token or a rule",prod.file,prod.line,sym)
        errors = 1

    unused_terminals = grammar.unused_terminals()
    if unused_terminals:
        debuglog.info("")
        debuglog.info("Unused terminals:")
        debuglog.info("")
        for term in unused_terminals:
            errorlog.warning("Token '%s' defined, but not used", term)
            debuglog.info("    %s", term)

    # Print out all productions to the debug log
    if debug:
        debuglog.info("")
        debuglog.info("Grammar")
        debuglog.info("")
        for n,p in enumerate(grammar.Productions):
            debuglog.info("Rule %-5d %s", n, p)

    # Find unused non-terminals
    unused_rules = grammar.unused_rules()
    for prod in unused_rules:
        errorlog.warning("%s:%d: Rule '%s' defined, but not used", prod.file, prod.line, prod.name)

    if len(unused_terminals) == 1:
        errorlog.warning("There is 1 unused token")
    if len(unused_terminals) > 1:
        errorlog.warning("There are %d unused tokens", len(unused_terminals))

    if len(unused_rules) == 1:
        errorlog.warning("There is 1 unused rule")
    if len(unused_rules) > 1:
        errorlog.warning("There are %d unused rules", len(unused_rules))

    if debug:
        debuglog.info("")
        debuglog.info("Terminals, with rules where they appear")
        debuglog.info("")
        terms = list(grammar.Terminals)
        terms.sort()
        for term in terms:
            debuglog.info("%-20s : %s", term, " ".join([str(s) for s in grammar.Terminals[term]]))

        debuglog.info("")
        debuglog.info("Nonterminals, with rules where they appear")
        debuglog.info("")
        nonterms = list(grammar.Nonterminals)
        nonterms.sort()
        for nonterm in nonterms:
            debuglog.info("%-20s : %s", nonterm, " ".join([str(s) for s in grammar.Nonterminals[nonterm]]))
        debuglog.info("")

    if check_recursion:
        unreachable = grammar.find_unreachable()
        for u in unreachable:
            errorlog.warning("Symbol '%s' is unreachable",u)

        infinite = grammar.infinite_cycles()
        for inf in infinite:
            errorlog.error("Infinite recursion detected for symbol '%s'", inf)
            errors = 1

    unused_prec = grammar.unused_precedence()
    for term, assoc in unused_prec:
        errorlog.error("Precedence rule '%s' defined for unknown symbol '%s'", assoc, term)
        errors = 1

    if errors:
        raise YaccError("Unable to build parser")

    # Run the LRGeneratedTable on the grammar
    if debug:
        errorlog.debug("Generating %s tables", method)

    lr = LRGeneratedTable(grammar,method,debuglog)

    if debug:
        num_sr = len(lr.sr_conflicts)

        # Report shift/reduce and reduce/reduce conflicts
        if num_sr == 1:
            errorlog.warning("1 shift/reduce conflict")
        elif num_sr > 1:
            errorlog.warning("%d shift/reduce conflicts", num_sr)

        num_rr = len(lr.rr_conflicts)
        if num_rr == 1:
            errorlog.warning("1 reduce/reduce conflict")
        elif num_rr > 1:
            errorlog.warning("%d reduce/reduce conflicts", num_rr)

    # Write out conflicts to the output file
    if debug and (lr.sr_conflicts or lr.rr_conflicts):
        debuglog.warning("")
        debuglog.warning("Conflicts:")
        debuglog.warning("")

        for state, tok, resolution in lr.sr_conflicts:
            debuglog.warning("shift/reduce conflict for %s in state %d resolved as %s",  tok, state, resolution)

        # Report each distinct reduce/reduce conflict only once.
        already_reported = {}
        for state, rule, rejected in lr.rr_conflicts:
            if (state,id(rule),id(rejected)) in already_reported:
                continue
            debuglog.warning("reduce/reduce conflict in state %d resolved using rule (%s)", state, rule)
            debuglog.warning("rejected rule (%s) in state %d", rejected,state)
            errorlog.warning("reduce/reduce conflict in state %d resolved using rule (%s)", state, rule)
            errorlog.warning("rejected rule (%s) in state %d", rejected, state)
            already_reported[state,id(rule),id(rejected)] = 1

        warned_never = []
        for state, rule, rejected in lr.rr_conflicts:
            if not rejected.reduced and (rejected not in warned_never):
                debuglog.warning("Rule (%s) is never reduced", rejected)
                errorlog.warning("Rule (%s) is never reduced", rejected)
                warned_never.append(rejected)

    # Write the table file if requested
    if write_tables:
        lr.write_table(tabmodule,outputdir,signature)

    # Write a pickled version of the tables
    if picklefile:
        lr.pickle_table(picklefile,signature)

    # Build the parser
    lr.bind_callables(pinfo.pdict)
    parser = LRParser(lr,pinfo.error_func)

    parse = parser.parse
    return parser
diff --git a/tools/slimit/scope.py b/tools/slimit/scope.py
new file mode 100644 (file)
index 0000000..4cac367
--- /dev/null
@@ -0,0 +1,185 @@
+###############################################################################
+#
+# Copyright (c) 2011 Ruslan Spivak
+#
+# Permission is hereby granted, free of charge, to any person obtaining a copy
+# of this software and associated documentation files (the "Software"), to deal
+# in the Software without restriction, including without limitation the rights
+# to use, copy, modify, merge, publish, distribute, sublicense, and/or sell
+# copies of the Software, and to permit persons to whom the Software is
+# furnished to do so, subject to the following conditions:
+#
+# The above copyright notice and this permission notice shall be included in
+# all copies or substantial portions of the Software.
+#
+# THE SOFTWARE IS PROVIDED "AS IS", WITHOUT WARRANTY OF ANY KIND, EXPRESS OR
+# IMPLIED, INCLUDING BUT NOT LIMITED TO THE WARRANTIES OF MERCHANTABILITY,
+# FITNESS FOR A PARTICULAR PURPOSE AND NONINFRINGEMENT. IN NO EVENT SHALL THE
+# AUTHORS OR COPYRIGHT HOLDERS BE LIABLE FOR ANY CLAIM, DAMAGES OR OTHER
+# LIABILITY, WHETHER IN AN ACTION OF CONTRACT, TORT OR OTHERWISE, ARISING FROM,
+# OUT OF OR IN CONNECTION WITH THE SOFTWARE OR THE USE OR OTHER DEALINGS IN
+# THE SOFTWARE.
+#
+###############################################################################
+
+__author__ = 'Ruslan Spivak <ruslan.spivak@gmail.com>'
+
+import itertools
+
+try:
+    from collections import OrderedDict
+except ImportError:
+    from odict import odict as OrderedDict
+
+from slimit.lexer import Lexer
+
+
+ID_CHARS = 'abcdefghijklmnopqrstuvwxyzABCDEFGHIJKLMNOPQRSTUVWXYZ'
+
+def powerset(iterable):
+    """powerset('abc') -> a b c ab ac bc abc"""
+    s = list(iterable)
+    for chars in itertools.chain.from_iterable(
+        itertools.combinations(s, r) for r in range(1, len(s)+1)
+        ):
+        yield ''.join(chars)
+
+
+class SymbolTable(object):
+    def __init__(self):
+        self.globals = GlobalScope()
+
+
+class Scope(object):
+
+    def __init__(self, enclosing_scope=None):
+        self.symbols = OrderedDict()
+        # {symbol.name: mangled_name}
+        self.mangled = {}
+        # {mangled_name: symbol.name}
+        self.rev_mangled = {}
+        # names referenced from this scope and all sub-scopes
+        # {name: scope} key is the name, value is the scope that
+        # contains referenced name
+        self.refs = {}
+        # set to True if this scope or any subscope contains 'eval'
+        self.has_eval = False
+        # set to True if this scope or any subscope contains 'with'
+        self.has_with = False
+        self.enclosing_scope = enclosing_scope
+        # sub-scopes
+        self.children = []
+        # add ourselves as a child to the enclosing scope
+        if enclosing_scope is not None:
+            self.enclosing_scope.add_child(self)
+        self.base54 = powerset(ID_CHARS)
+
+    def __contains__(self, sym):
+        return sym.name in self.symbols
+
+    def add_child(self, scope):
+        self.children.append(scope)
+
+    def define(self, sym):
+        self.symbols[sym.name] = sym
+        # track scope for every symbol
+        sym.scope = self
+
+    def resolve(self, name):
+        sym = self.symbols.get(name)
+        if sym is not None:
+            return sym
+        elif self.enclosing_scope is not None:
+            return self.enclosing_scope.resolve(name)
+
+    def get_enclosing_scope(self):
+        return self.enclosing_scope
+
+    def _get_scope_with_mangled(self, name):
+        """Return a scope containing passed mangled name."""
+        scope = self
+        while True:
+            parent = scope.get_enclosing_scope()
+            if parent is None:
+                return
+
+            if name in parent.rev_mangled:
+                return parent
+
+            scope = parent
+
+    def _get_scope_with_symbol(self, name):
+        """Return a scope containing passed name as a symbol name."""
+        scope = self
+        while True:
+            parent = scope.get_enclosing_scope()
+            if parent is None:
+                return
+
+            if name in parent.symbols:
+                return parent
+
+            scope = parent
+
+    def get_next_mangled_name(self):
+        """
+        1. Do not shadow a mangled name from a parent scope
+           if we reference the original name from that scope
+           in this scope or any sub-scope.
+
+        2. Do not shadow an original name from a parent scope
+           if it's not mangled and we reference it in this scope
+           or any sub-scope.
+
+        """
+        while True:
+            mangled = self.base54.next()
+
+            # case 1
+            ancestor = self._get_scope_with_mangled(mangled)
+            if (ancestor is not None
+                and self.refs.get(ancestor.rev_mangled[mangled]) is ancestor
+                ):
+                continue
+
+            # case 2
+            ancestor = self._get_scope_with_symbol(mangled)
+            if (ancestor is not None
+                and self.refs.get(mangled) is ancestor
+                and mangled not in ancestor.mangled
+                ):
+                continue
+
+            # make sure a new mangled name is not a reserved word
+            if mangled.upper() in Lexer.keywords:
+                continue
+
+            return mangled
+
+
+class GlobalScope(Scope):
+    pass
+
+
+class LocalScope(Scope):
+    pass
+
+
+class Symbol(object):
+    def __init__(self, name):
+        self.name = name
+        self.scope = None
+
+
+class VarSymbol(Symbol):
+    pass
+
+
+class FuncSymbol(Symbol, Scope):
+    """Function symbol is both a symbol and a scope for arguments."""
+
+    def __init__(self, name, enclosing_scope):
+        Symbol.__init__(self, name)
+        Scope.__init__(self, enclosing_scope)
+
+
diff --git a/tools/slimit/tests/__init__.py b/tools/slimit/tests/__init__.py
new file mode 100644 (file)
index 0000000..e69de29
diff --git a/tools/slimit/tests/test_cmd.py b/tools/slimit/tests/test_cmd.py
new file mode 100644 (file)
index 0000000..ebb8ef5
--- /dev/null
@@ -0,0 +1,113 @@
+###############################################################################
+#
+# Copyright (c) 2011 Ruslan Spivak
+#
+# Permission is hereby granted, free of charge, to any person obtaining a copy
+# of this software and associated documentation files (the "Software"), to deal
+# in the Software without restriction, including without limitation the rights
+# to use, copy, modify, merge, publish, distribute, sublicense, and/or sell
+# copies of the Software, and to permit persons to whom the Software is
+# furnished to do so, subject to the following conditions:
+#
+# The above copyright notice and this permission notice shall be included in
+# all copies or substantial portions of the Software.
+#
+# THE SOFTWARE IS PROVIDED "AS IS", WITHOUT WARRANTY OF ANY KIND, EXPRESS OR
+# IMPLIED, INCLUDING BUT NOT LIMITED TO THE WARRANTIES OF MERCHANTABILITY,
+# FITNESS FOR A PARTICULAR PURPOSE AND NONINFRINGEMENT. IN NO EVENT SHALL THE
+# AUTHORS OR COPYRIGHT HOLDERS BE LIABLE FOR ANY CLAIM, DAMAGES OR OTHER
+# LIABILITY, WHETHER IN AN ACTION OF CONTRACT, TORT OR OTHERWISE, ARISING FROM,
+# OUT OF OR IN CONNECTION WITH THE SOFTWARE OR THE USE OR OTHER DEALINGS IN
+# THE SOFTWARE.
+#
+###############################################################################
+
+__author__ = 'Ruslan Spivak <ruslan.spivak@gmail.com>'
+
+import os
+import sys
+import StringIO
+import tempfile
+import unittest
+
+from contextlib import contextmanager
+
+
+@contextmanager
+def redirected_input_output(input=''):
+    old_inp, old_out = sys.stdin, sys.stdout
+    inp, out = StringIO.StringIO(input), StringIO.StringIO()
+    sys.stdin, sys.stdout = inp, out
+    try:
+        yield out
+    finally:
+        sys.stdin, sys.stdout = old_inp, old_out
+
+
+@contextmanager
+def redirected_sys_argv(argv):
+    old = sys.argv
+    sys.argv = argv
+    try:
+        yield argv
+    finally:
+        sys.argv = old
+
+
+class CmdTestCase(unittest.TestCase):
+
+    def setUp(self):
+        fd, path = tempfile.mkstemp()
+        self.path = path
+        with os.fdopen(fd, 'w') as fout:
+            fout.write('var global = 5;')
+
+    def tearDown(self):
+        os.remove(self.path)
+
+    def test_main_dash_m_with_input_file(self):
+        from slimit.minifier import main
+        out = StringIO.StringIO()
+        main(['-m', '-t', self.path], out=out)
+        self.assertEqual('var a=5;', out.getvalue())
+
+    def test_main_dash_dash_mangle_with_input_file(self):
+        from slimit.minifier import main
+        out = StringIO.StringIO()
+        main(['--mangle', '--mangle-toplevel', self.path], out=out)
+        self.assertEqual('var a=5;', out.getvalue())
+
+    def test_main_dash_m_with_mock_stdin(self):
+        from slimit.minifier import main
+        out = StringIO.StringIO()
+        inp = StringIO.StringIO('function foo() { var local = 5; }')
+        main(['-m'], inp=inp, out=out)
+        self.assertEqual('function foo(){var a=5;}', out.getvalue())
+
+    def test_main_stdin_stdout(self):
+        # slimit.minifier should be deleted from sys.modules in order
+        # to have a proper reference to sys.stdin and sys.stdout when
+        # 'main' definition is evaluated during module import
+        old_module = None
+        try:
+            old_module = sys.modules.pop('slimit.minifier')
+        except KeyError:
+            pass
+
+        with redirected_input_output(
+            input='function foo() { var local = 5; }') as out:
+            from slimit.minifier import main
+            main(['-m'])
+
+        self.assertEqual('function foo(){var a=5;}', out.getvalue())
+        if old_module is not None:
+            sys.modules['slimit.minifier'] = old_module
+
+    def test_main_sys_argv(self):
+        out = StringIO.StringIO()
+        inp = StringIO.StringIO('var global = 5;')
+        with redirected_sys_argv(['slimit', '-m', '-t']):
+            from slimit.minifier import main
+            main(inp=inp, out=out)
+
+        self.assertEqual('var a=5;', out.getvalue())
diff --git a/tools/slimit/tests/test_ecmavisitor.py b/tools/slimit/tests/test_ecmavisitor.py
new file mode 100644 (file)
index 0000000..65dfce2
--- /dev/null
@@ -0,0 +1,505 @@
+###############################################################################
+#
+# Copyright (c) 2011 Ruslan Spivak
+#
+# Permission is hereby granted, free of charge, to any person obtaining a copy
+# of this software and associated documentation files (the "Software"), to deal
+# in the Software without restriction, including without limitation the rights
+# to use, copy, modify, merge, publish, distribute, sublicense, and/or sell
+# copies of the Software, and to permit persons to whom the Software is
+# furnished to do so, subject to the following conditions:
+#
+# The above copyright notice and this permission notice shall be included in
+# all copies or substantial portions of the Software.
+#
+# THE SOFTWARE IS PROVIDED "AS IS", WITHOUT WARRANTY OF ANY KIND, EXPRESS OR
+# IMPLIED, INCLUDING BUT NOT LIMITED TO THE WARRANTIES OF MERCHANTABILITY,
+# FITNESS FOR A PARTICULAR PURPOSE AND NONINFRINGEMENT. IN NO EVENT SHALL THE
+# AUTHORS OR COPYRIGHT HOLDERS BE LIABLE FOR ANY CLAIM, DAMAGES OR OTHER
+# LIABILITY, WHETHER IN AN ACTION OF CONTRACT, TORT OR OTHERWISE, ARISING FROM,
+# OUT OF OR IN CONNECTION WITH THE SOFTWARE OR THE USE OR OTHER DEALINGS IN
+# THE SOFTWARE.
+#
+###############################################################################
+
+__author__ = 'Ruslan Spivak <ruslan.spivak@gmail.com>'
+
+import textwrap
+import unittest
+
+from slimit.parser import Parser
+
+
+def decorator(cls):
+    def make_test_function(input, expected):
+
+        def test_func(self):
+            parser = Parser()
+            result = parser.parse(input).to_ecma()
+            self.assertMultiLineEqual(result, expected)
+
+        return test_func
+
+    for index, input in enumerate(cls.TEST_CASES):
+        input = textwrap.dedent(input).strip()
+        func = make_test_function(input, input)
+        setattr(cls, 'test_case_%d' % index, func)
+
+    return cls
+
+
+@decorator
+class ECMAVisitorTestCase(unittest.TestCase):
+
+    def setUp(self):
+        self.maxDiff = 2000
+
+    TEST_CASES = [
+        ################################
+        # block
+        ################################
+        """
+        {
+          var a = 5;
+        }
+        """,
+
+        ################################
+        # variable statement
+        ################################
+        """
+        var a;
+        var b;
+        var a, b = 3;
+        var a = 1, b;
+        var a = 5, b = 7;
+        """,
+
+        # empty statement
+        """
+        ;
+        ;
+        ;
+        """,
+
+        # test 3
+        ################################
+        # if
+        ################################
+        'if (true) var x = 100;',
+
+        """
+        if (true) {
+          var x = 100;
+          var y = 200;
+        }
+        """,
+
+        'if (true) if (true) var x = 100; else var y = 200;',
+
+        # test 6
+        """
+        if (true) {
+          var x = 100;
+        } else {
+          var y = 200;
+        }
+        """,
+        ################################
+        # iteration
+        ################################
+        """
+        for (i = 0; i < 10; i++) {
+          x = 10 * i;
+        }
+        """,
+
+        """
+        for (var i = 0; i < 10; i++) {
+          x = 10 * i;
+        }
+        """,
+
+        # test 9
+        """
+        for (i = 0, j = 10; i < j && j < 15; i++, j++) {
+          x = i * j;
+        }
+        """,
+
+        """
+        for (var i = 0, j = 10; i < j && j < 15; i++, j++) {
+          x = i * j;
+        }
+        """,
+
+        """
+        for (p in obj) {
+
+        }
+        """,
+        # retain the semicolon in the initialiser part of a 'for' statement
+        """
+        for (Q || (Q = []); d < b; ) {
+          d = 1;
+        }
+        """,
+
+        """
+        for (new Foo(); d < b; ) {
+          d = 1;
+        }
+        """,
+
+        """
+        for (2 >> (foo ? 32 : 43) && 54; 21; ) {
+          a = c;
+        }
+        """,
+
+        """
+        for (/^.+/g; cond(); ++z) {
+          ev();
+        }
+        """,
+
+        # test 12
+        """
+        for (var p in obj) {
+          p = 1;
+        }
+        """,
+
+        """
+        do {
+          x += 1;
+        } while (true);
+        """,
+
+        """
+        while (false) {
+          x = null;
+        }
+        """,
+
+        # test 15
+        ################################
+        # continue statement
+        ################################
+        """
+        while (true) {
+          continue;
+          s = 'I am not reachable';
+        }
+        """,
+
+        """
+        while (true) {
+          continue label1;
+          s = 'I am not reachable';
+        }
+        """,
+
+        ################################
+        # break statement
+        ################################
+        """
+        while (true) {
+          break;
+          s = 'I am not reachable';
+        }
+        """,
+        # test 18
+        """
+        while (true) {
+          break label1;
+          s = 'I am not reachable';
+        }
+        """,
+
+        ################################
+        # return statement
+        ################################
+        """
+        {
+          return;
+        }
+        """,
+
+        """
+        {
+          return 1;
+        }
+        """,
+
+        # test21
+        ################################
+        # with statement
+        ################################
+        """
+        with (x) {
+          var y = x * 2;
+        }
+        """,
+
+        ################################
+        # labelled statement
+        ################################
+        """
+        label: while (true) {
+          x *= 3;
+        }
+        """,
+
+        ################################
+        # switch statement
+        ################################
+        """
+        switch (day_of_week) {
+          case 6:
+          case 7:
+            x = 'Weekend';
+            break;
+          case 1:
+            x = 'Monday';
+            break;
+          default:
+            break;
+        }
+        """,
+
+        # test 24
+        ################################
+        # throw statement
+        ################################
+        """
+        throw 'exc';
+        """,
+
+        ################################
+        # debugger statement
+        ################################
+        'debugger;',
+
+        ################################
+        # expression statement
+        ################################
+        """
+        5 + 7 - 20 * 10;
+        ++x;
+        --x;
+        x++;
+        x--;
+        x = 17 /= 3;
+        s = mot ? z : /x:3;x<5;y</g / i;
+        """,
+
+        # test 27
+        ################################
+        # try statement
+        ################################
+        """
+        try {
+          x = 3;
+        } catch (exc) {
+          x = exc;
+        }
+        """,
+
+        """
+        try {
+          x = 3;
+        } finally {
+          x = null;
+        }
+        """,
+
+        """
+        try {
+          x = 5;
+        } catch (exc) {
+          x = exc;
+        } finally {
+          y = null;
+        }
+        """,
+
+        # test 30
+        ################################
+        # function
+        ################################
+        """
+        function foo(x, y) {
+          z = 10;
+          return x + y + z;
+        }
+        """,
+
+        """
+        function foo() {
+          return 10;
+        }
+        """,
+
+        """
+        var a = function() {
+          return 10;
+        };
+        """,
+        # test 33
+        """
+        var a = function foo(x, y) {
+          return x + y;
+        };
+        """,
+        # nested function declaration
+        """
+        function foo() {
+          function bar() {
+
+          }
+        }
+        """,
+
+        """
+        var mult = function(x) {
+          return x * 10;
+        }();
+        """,
+
+        # function call
+        # test 36
+        'foo();',
+        'foo(x, 7);',
+        'foo()[10];',
+        # test 39
+        'foo().foo;',
+
+        ################################
+        # misc
+        ################################
+
+        # new
+        'var foo = new Foo();',
+        # dot accessor
+        'var bar = new Foo.Bar();',
+
+        # test 42
+        # bracket accessor
+        'var bar = new Foo.Bar()[7];',
+
+        # object literal
+        """
+        var obj = {
+          foo: 10,
+          bar: 20
+        };
+        """,
+        """
+        var obj = {
+          1: 'a',
+          2: 'b'
+        };
+        """,
+        # test 45
+        """
+        var obj = {
+          'a': 100,
+          'b': 200
+        };
+        """,
+        """
+        var obj = {
+        };
+        """,
+
+        # array
+        """
+        var a = [1,2,3,4,5];
+        var res = a[3];
+        """,
+        # test 48
+        # elision
+        'var a = [,,,];',
+        'var a = [1,,,4];',
+        'var a = [1,,3,,5];',
+
+        # test 51
+        """
+        String.prototype.foo = function(data) {
+          var tmpl = this.toString();
+          return tmpl.replace(/{{\s*(.*?)\s*}}/g, function(a, b) {
+            var node = data;
+            if (true) {
+              var value = true;
+            } else {
+              var value = false;
+            }
+            $.each(n.split('.'), function(i, sym) {
+              node = node[sym];
+            });
+            return node;
+          });
+        };
+        """,
+
+        #######################################
+        # Make sure parentheses are not removed
+        #######################################
+
+        # ... Expected an identifier and instead saw '/'
+        'Expr.match[type].source + (/(?![^\[]*\])(?![^\(]*\))/.source);',
+
+        '(options = arguments[i]) != null;',
+
+        # test 54
+        'return (/h\d/i).test(elem.nodeName);',
+
+        # https://github.com/rspivak/slimit/issues/42
+        """
+        e.b(d) ? (a = [c.f(j[1])], e.fn.attr.call(a, d, !0)) : a = [k.f(j[1])];
+        """,
+
+        """
+        (function() {
+          x = 5;
+        }());
+        """,
+
+        """
+        (function() {
+          x = 5;
+        })();
+        """,
+
+        'return !(match === true || elem.getAttribute("classid") !== match);',
+
+        # test 57
+        'var el = (elem ? elem.ownerDocument || elem : 0).documentElement;',
+
+        # typeof
+        'typeof second.length === "number";',
+
+        # function call in FOR init
+        """
+        for (o(); i < 3; i++) {
+
+        }
+        """,
+
+        # https://github.com/rspivak/slimit/issues/32
+        """
+        Name.prototype = {
+          get fullName() {
+            return this.first + " " + this.last;
+          },
+          set fullName(name) {
+            var names = name.split(" ");
+            this.first = names[0];
+            this.last = names[1];
+          }
+        };
+        """,
+        ]
+
+
diff --git a/tools/slimit/tests/test_lexer.py b/tools/slimit/tests/test_lexer.py
new file mode 100644 (file)
index 0000000..922d628
--- /dev/null
@@ -0,0 +1,297 @@
+###############################################################################
+# encoding: utf-8
+#
+# Copyright (c) 2011 Ruslan Spivak
+#
+# Permission is hereby granted, free of charge, to any person obtaining a copy
+# of this software and associated documentation files (the "Software"), to deal
+# in the Software without restriction, including without limitation the rights
+# to use, copy, modify, merge, publish, distribute, sublicense, and/or sell
+# copies of the Software, and to permit persons to whom the Software is
+# furnished to do so, subject to the following conditions:
+#
+# The above copyright notice and this permission notice shall be included in
+# all copies or substantial portions of the Software.
+#
+# THE SOFTWARE IS PROVIDED "AS IS", WITHOUT WARRANTY OF ANY KIND, EXPRESS OR
+# IMPLIED, INCLUDING BUT NOT LIMITED TO THE WARRANTIES OF MERCHANTABILITY,
+# FITNESS FOR A PARTICULAR PURPOSE AND NONINFRINGEMENT. IN NO EVENT SHALL THE
+# AUTHORS OR COPYRIGHT HOLDERS BE LIABLE FOR ANY CLAIM, DAMAGES OR OTHER
+# LIABILITY, WHETHER IN AN ACTION OF CONTRACT, TORT OR OTHERWISE, ARISING FROM,
+# OUT OF OR IN CONNECTION WITH THE SOFTWARE OR THE USE OR OTHER DEALINGS IN
+# THE SOFTWARE.
+#
+###############################################################################
+
+__author__ = 'Ruslan Spivak <ruslan.spivak@gmail.com>'
+
+import doctest
+import unittest
+import difflib
+import pprint
+
+from slimit.lexer import Lexer
+
+
+def decorator(cls):
+    def make_test_function(input, expected):
+
+        def test_func(self):
+            lexer = self._get_lexer()
+            lexer.input(input)
+            result = ['%s %s' % (token.type, token.value) for token in lexer]
+            self.assertListEqual(result, expected)
+
+        return test_func
+
+    for index, (input, expected) in enumerate(cls.TEST_CASES):
+        func = make_test_function(input, expected)
+        setattr(cls, 'test_case_%d' % index, func)
+
+    return cls
+
+# The structure and some test cases are taken
+# from https://bitbucket.org/ned/jslex
+@decorator
+class LexerTestCase(unittest.TestCase):
+
+    def _get_lexer(self):
+        lexer = Lexer()
+        return lexer
+
+    def assertListEqual(self, first, second):
+        """Assert that two lists are equal.
+
+        Prints differences on error.
+        This method is similar to that of Python 2.7 'assertListEqual'
+        """
+        if first != second:
+            message = '\n'.join(
+                difflib.ndiff(pprint.pformat(first).splitlines(),
+                              pprint.pformat(second).splitlines())
+                )
+            self.fail('Lists differ:\n' + message)
+
+    def test_illegal_unicode_char_in_identifier(self):
+        lexer = self._get_lexer()
+        lexer.input(u'\u0036_tail')
+        token = lexer.token()
+        # \u0036_tail is the same as 6_tail and that's not a correct ID
+        # Check that the token is NUMBER and not an ID
+        self.assertEqual(token.type, 'NUMBER')
+        self.assertEqual(token.value, '6')
+
+    TEST_CASES = [
+        # Identifiers
+        ('i my_variable_name c17 _dummy $str $ _ CamelCase class2type',
+         ['ID i', 'ID my_variable_name', 'ID c17', 'ID _dummy',
+          'ID $str', 'ID $', 'ID _', 'ID CamelCase', 'ID class2type']
+         ),
+        (ur'\u03c0 \u03c0_tail var\ua67c',
+         [ur'ID \u03c0', ur'ID \u03c0_tail', ur'ID var\ua67c']),
+        # https://github.com/rspivak/slimit/issues/2
+        ('nullify truelie falsepositive',
+         ['ID nullify', 'ID truelie', 'ID falsepositive']),
+
+        # Keywords
+        # ('break case ...', ['BREAK break', 'CASE case', ...])
+        (' '.join(kw.lower() for kw in Lexer.keywords),
+         ['%s %s' % (kw, kw.lower()) for kw in Lexer.keywords]
+         ),
+        ('break Break BREAK', ['BREAK break', 'ID Break', 'ID BREAK']),
+
+        # Literals
+        ('null true false Null True False',
+         ['NULL null', 'TRUE true', 'FALSE false',
+          'ID Null', 'ID True', 'ID False']
+         ),
+
+        # Punctuators
+        ('a /= b', ['ID a', 'DIVEQUAL /=', 'ID b']),
+        (('= == != === !== < > <= >= || && ++ -- << >> '
+          '>>> += -= *= <<= >>= >>>= &= %= ^= |='),
+         ['EQ =', 'EQEQ ==', 'NE !=', 'STREQ ===', 'STRNEQ !==', 'LT <',
+          'GT >', 'LE <=', 'GE >=', 'OR ||', 'AND &&', 'PLUSPLUS ++',
+          'MINUSMINUS --', 'LSHIFT <<', 'RSHIFT >>', 'URSHIFT >>>',
+          'PLUSEQUAL +=', 'MINUSEQUAL -=', 'MULTEQUAL *=', 'LSHIFTEQUAL <<=',
+          'RSHIFTEQUAL >>=', 'URSHIFTEQUAL >>>=', 'ANDEQUAL &=', 'MODEQUAL %=',
+          'XOREQUAL ^=', 'OREQUAL |=',
+          ]
+         ),
+        ('. , ; : + - * % & | ^ ~ ? ! ( ) { } [ ]',
+         ['PERIOD .', 'COMMA ,', 'SEMI ;', 'COLON :', 'PLUS +', 'MINUS -',
+          'MULT *', 'MOD %', 'BAND &', 'BOR |', 'BXOR ^', 'BNOT ~',
+          'CONDOP ?', 'NOT !', 'LPAREN (', 'RPAREN )', 'LBRACE {', 'RBRACE }',
+          'LBRACKET [', 'RBRACKET ]']
+         ),
+        ('a / b', ['ID a', 'DIV /', 'ID b']),
+
+        # Numbers
+        (('3 3.3 0 0. 0.0 0.001 010 3.e2 3.e-2 3.e+2 3E2 3E+2 3E-2 '
+          '0.5e2 0.5e+2 0.5e-2 33 128.15 0x001 0X12ABCDEF 0xabcdef'),
+         ['NUMBER 3', 'NUMBER 3.3', 'NUMBER 0', 'NUMBER 0.', 'NUMBER 0.0',
+          'NUMBER 0.001', 'NUMBER 010', 'NUMBER 3.e2', 'NUMBER 3.e-2',
+          'NUMBER 3.e+2', 'NUMBER 3E2', 'NUMBER 3E+2', 'NUMBER 3E-2',
+          'NUMBER 0.5e2', 'NUMBER 0.5e+2', 'NUMBER 0.5e-2', 'NUMBER 33',
+          'NUMBER 128.15', 'NUMBER 0x001', 'NUMBER 0X12ABCDEF',
+          'NUMBER 0xabcdef']
+         ),
+
+        # Strings
+        (""" '"' """, ["""STRING '"'"""]),
+        (r'''"foo" 'foo' "x\";" 'x\';' "foo\tbar"''',
+         ['STRING "foo"', """STRING 'foo'""", r'STRING "x\";"',
+          r"STRING 'x\';'", r'STRING "foo\tbar"']
+         ),
+        (r"""'\x55' "\x12ABCDEF" '!@#$%^&*()_+{}[]\";?'""",
+         [r"STRING '\x55'", r'STRING "\x12ABCDEF"',
+          r"STRING '!@#$%^&*()_+{}[]\";?'"]
+         ),
+        (r"""'\u0001' "\uFCEF" 'a\\\b\n'""",
+         [r"STRING '\u0001'", r'STRING "\uFCEF"', r"STRING 'a\\\b\n'"]
+         ),
+        (ur'"тест строки\""', [ur'STRING "тест строки\""']),
+        # Bug - https://github.com/rspivak/slimit/issues/5
+        (r"var tagRegExp = new RegExp('<(\/*)(FooBar)', 'gi');",
+         ['VAR var', 'ID tagRegExp', 'EQ =',
+          'NEW new', 'ID RegExp', 'LPAREN (',
+          r"STRING '<(\/*)(FooBar)'", 'COMMA ,', "STRING 'gi'",
+          'RPAREN )', 'SEMI ;']
+        ),
+        # same as above but inside double quotes
+        (r'"<(\/*)(FooBar)"', [r'STRING "<(\/*)(FooBar)"']),
+        # multiline string (string written across multiple lines
+        # of code) https://github.com/rspivak/slimit/issues/24
+        (r"""var a = 'hello \
+world'""",
+         ['VAR var', 'ID a', 'EQ =', "STRING 'hello world'"]),
+        (r'''var a = "hello \
+world"''',
+         ['VAR var', 'ID a', 'EQ =', 'STRING "hello world"']),
+
+        # # Comments
+        # ("""
+        # //comment
+        # a = 5;
+        # """, ['LINE_COMMENT //comment', 'ID a', 'EQ =', 'NUMBER 5', 'SEMI ;']
+        #  ),
+        # ('a//comment', ['ID a', 'LINE_COMMENT //comment']),
+        # ('/***/b/=3//line',
+        #  ['BLOCK_COMMENT /***/', 'ID b', 'DIVEQUAL /=',
+        #   'NUMBER 3', 'LINE_COMMENT //line']
+        #  ),
+        # ('/*\n * Copyright LGPL 2011 \n*/\na = 1;',
+        #  ['BLOCK_COMMENT /*\n * Copyright LGPL 2011 \n*/',
+        #   'ID a', 'EQ =', 'NUMBER 1', 'SEMI ;']
+        #  ),
+
+        # regex
+        (r'a=/a*/,1', ['ID a', 'EQ =', 'REGEX /a*/', 'COMMA ,', 'NUMBER 1']),
+        (r'a=/a*[^/]+/,1',
+         ['ID a', 'EQ =', 'REGEX /a*[^/]+/', 'COMMA ,', 'NUMBER 1']
+         ),
+        (r'a=/a*\[^/,1',
+         ['ID a', 'EQ =', r'REGEX /a*\[^/', 'COMMA ,', 'NUMBER 1']
+         ),
+        (r'a=/\//,1', ['ID a', 'EQ =', r'REGEX /\//', 'COMMA ,', 'NUMBER 1']),
+        # not a regex, just a division
+        # https://github.com/rspivak/slimit/issues/6
+        (r'x = this / y;',
+         ['ID x', 'EQ =', 'THIS this', r'DIV /', r'ID y', r'SEMI ;']),
+
+        # next two are from
+        # http://www.mozilla.org/js/language/js20-2002-04/rationale/syntax.html#regular-expressions
+        ("""for (var x = a in foo && "</x>" || mot ? z:/x:3;x<5;y</g/i) {xyz(x++);}""",
+         ["FOR for", "LPAREN (", "VAR var", "ID x", "EQ =", "ID a", "IN in",
+          "ID foo", "AND &&", 'STRING "</x>"', "OR ||", "ID mot", "CONDOP ?",
+          "ID z", "COLON :", "REGEX /x:3;x<5;y</g", "DIV /", "ID i", "RPAREN )",
+          "LBRACE {",  "ID xyz", "LPAREN (", "ID x", "PLUSPLUS ++", "RPAREN )",
+          "SEMI ;", "RBRACE }"]
+         ),
+
+        ("""for (var x = a in foo && "</x>" || mot ? z/x:3;x<5;y</g/i) {xyz(x++);}""",
+         ["FOR for", "LPAREN (", "VAR var", "ID x", "EQ =", "ID a", "IN in",
+          "ID foo", "AND &&", 'STRING "</x>"', "OR ||", "ID mot", "CONDOP ?",
+          "ID z", "DIV /", "ID x", "COLON :", "NUMBER 3", "SEMI ;", "ID x",
+          "LT <", "NUMBER 5", "SEMI ;", "ID y", "LT <", "REGEX /g/i",
+          "RPAREN )", "LBRACE {", "ID xyz", "LPAREN (", "ID x", "PLUSPLUS ++",
+          "RPAREN )", "SEMI ;", "RBRACE }"]
+         ),
+
+        # Various "illegal" regexes that are valid according to the std.
+        (r"""/????/, /++++/, /[----]/ """,
+         ['REGEX /????/', 'COMMA ,',
+          'REGEX /++++/', 'COMMA ,', 'REGEX /[----]/']
+         ),
+
+        # Stress cases from http://stackoverflow.com/questions/5533925/what-javascript-constructs-does-jslex-incorrectly-lex/5573409#5573409
+        (r"""/\[/""", [r"""REGEX /\[/"""]),
+        (r"""/[i]/""", [r"""REGEX /[i]/"""]),
+        (r"""/[\]]/""", [r"""REGEX /[\]]/"""]),
+        (r"""/a[\]]/""", [r"""REGEX /a[\]]/"""]),
+        (r"""/a[\]]b/""", [r"""REGEX /a[\]]b/"""]),
+        (r"""/[\]/]/gi""", [r"""REGEX /[\]/]/gi"""]),
+        (r"""/\[[^\]]+\]/gi""", [r"""REGEX /\[[^\]]+\]/gi"""]),
+        ("""
+            rexl.re = {
+            NAME: /^(?!\d)(?:\w)+|^"(?:[^"]|"")+"/,
+            UNQUOTED_LITERAL: /^@(?:(?!\d)(?:\w|\:)+|^"(?:[^"]|"")+")\[[^\]]+\]/,
+            QUOTED_LITERAL: /^'(?:[^']|'')*'/,
+            NUMERIC_LITERAL: /^[0-9]+(?:\.[0-9]*(?:[eE][-+][0-9]+)?)?/,
+            SYMBOL: /^(?:==|=|<>|<=|<|>=|>|!~~|!~|~~|~|!==|!=|!~=|!~|!|&|\||\.|\:|,|\(|\)|\[|\]|\{|\}|\?|\:|;|@|\^|\/\+|\/|\*|\+|-)/
+            };
+            """,
+         ["ID rexl", "PERIOD .", "ID re", "EQ =", "LBRACE {",
+          "ID NAME", "COLON :",
+          r"""REGEX /^(?!\d)(?:\w)+|^"(?:[^"]|"")+"/""", "COMMA ,",
+          "ID UNQUOTED_LITERAL", "COLON :",
+          r"""REGEX /^@(?:(?!\d)(?:\w|\:)+|^"(?:[^"]|"")+")\[[^\]]+\]/""",
+          "COMMA ,", "ID QUOTED_LITERAL", "COLON :",
+          r"""REGEX /^'(?:[^']|'')*'/""", "COMMA ,", "ID NUMERIC_LITERAL",
+          "COLON :",
+          r"""REGEX /^[0-9]+(?:\.[0-9]*(?:[eE][-+][0-9]+)?)?/""", "COMMA ,",
+          "ID SYMBOL", "COLON :",
+          r"""REGEX /^(?:==|=|<>|<=|<|>=|>|!~~|!~|~~|~|!==|!=|!~=|!~|!|&|\||\.|\:|,|\(|\)|\[|\]|\{|\}|\?|\:|;|@|\^|\/\+|\/|\*|\+|-)/""",
+         "RBRACE }", "SEMI ;"]
+          ),
+        ("""
+            rexl.re = {
+            NAME: /^(?!\d)(?:\w)+|^"(?:[^"]|"")+"/,
+            UNQUOTED_LITERAL: /^@(?:(?!\d)(?:\w|\:)+|^"(?:[^"]|"")+")\[[^\]]+\]/,
+            QUOTED_LITERAL: /^'(?:[^']|'')*'/,
+            NUMERIC_LITERAL: /^[0-9]+(?:\.[0-9]*(?:[eE][-+][0-9]+)?)?/,
+            SYMBOL: /^(?:==|=|<>|<=|<|>=|>|!~~|!~|~~|~|!==|!=|!~=|!~|!|&|\||\.|\:|,|\(|\)|\[|\]|\{|\}|\?|\:|;|@|\^|\/\+|\/|\*|\+|-)/
+            };
+            str = '"';
+        """,
+        ["ID rexl", "PERIOD .", "ID re", "EQ =", "LBRACE {",
+         "ID NAME", "COLON :", r"""REGEX /^(?!\d)(?:\w)+|^"(?:[^"]|"")+"/""",
+         "COMMA ,", "ID UNQUOTED_LITERAL", "COLON :",
+         r"""REGEX /^@(?:(?!\d)(?:\w|\:)+|^"(?:[^"]|"")+")\[[^\]]+\]/""",
+         "COMMA ,", "ID QUOTED_LITERAL", "COLON :",
+         r"""REGEX /^'(?:[^']|'')*'/""", "COMMA ,",
+         "ID NUMERIC_LITERAL", "COLON :",
+         r"""REGEX /^[0-9]+(?:\.[0-9]*(?:[eE][-+][0-9]+)?)?/""", "COMMA ,",
+         "ID SYMBOL", "COLON :",
+         r"""REGEX /^(?:==|=|<>|<=|<|>=|>|!~~|!~|~~|~|!==|!=|!~=|!~|!|&|\||\.|\:|,|\(|\)|\[|\]|\{|\}|\?|\:|;|@|\^|\/\+|\/|\*|\+|-)/""",
+         "RBRACE }", "SEMI ;",
+         "ID str", "EQ =", """STRING '"'""", "SEMI ;",
+         ]),
+        (r""" this._js = "e.str(\"" + this.value.replace(/\\/g, "\\\\").replace(/"/g, "\\\"") + "\")"; """,
+         ["THIS this", "PERIOD .", "ID _js", "EQ =",
+          r'''STRING "e.str(\""''', "PLUS +", "THIS this", "PERIOD .",
+          "ID value", "PERIOD .", "ID replace", "LPAREN (", r"REGEX /\\/g",
+          "COMMA ,", r'STRING "\\\\"', "RPAREN )", "PERIOD .", "ID replace",
+          "LPAREN (", r'REGEX /"/g', "COMMA ,", r'STRING "\\\""', "RPAREN )",
+          "PLUS +", r'STRING "\")"', "SEMI ;"]),
+        ]
+
+
+def test_suite():
+    return unittest.TestSuite((
+        unittest.makeSuite(LexerTestCase),
+        doctest.DocFileSuite(
+            '../lexer.py',
+            optionflags=doctest.NORMALIZE_WHITESPACE|doctest.ELLIPSIS
+            ),
+        ))
diff --git a/tools/slimit/tests/test_mangler.py b/tools/slimit/tests/test_mangler.py
new file mode 100644 (file)
index 0000000..111429e
--- /dev/null
@@ -0,0 +1,159 @@
+###############################################################################
+#
+# Copyright (c) 2011 Ruslan Spivak
+#
+# Permission is hereby granted, free of charge, to any person obtaining a copy
+# of this software and associated documentation files (the "Software"), to deal
+# in the Software without restriction, including without limitation the rights
+# to use, copy, modify, merge, publish, distribute, sublicense, and/or sell
+# copies of the Software, and to permit persons to whom the Software is
+# furnished to do so, subject to the following conditions:
+#
+# The above copyright notice and this permission notice shall be included in
+# all copies or substantial portions of the Software.
+#
+# THE SOFTWARE IS PROVIDED "AS IS", WITHOUT WARRANTY OF ANY KIND, EXPRESS OR
+# IMPLIED, INCLUDING BUT NOT LIMITED TO THE WARRANTIES OF MERCHANTABILITY,
+# FITNESS FOR A PARTICULAR PURPOSE AND NONINFRINGEMENT. IN NO EVENT SHALL THE
+# AUTHORS OR COPYRIGHT HOLDERS BE LIABLE FOR ANY CLAIM, DAMAGES OR OTHER
+# LIABILITY, WHETHER IN AN ACTION OF CONTRACT, TORT OR OTHERWISE, ARISING FROM,
+# OUT OF OR IN CONNECTION WITH THE SOFTWARE OR THE USE OR OTHER DEALINGS IN
+# THE SOFTWARE.
+#
+###############################################################################
+
+__author__ = 'Ruslan Spivak <ruslan.spivak@gmail.com>'
+
+import textwrap
+import unittest
+
+from slimit.parser import Parser
+from slimit.mangler import mangle
+
+
+def decorator(cls):
+    def make_test_function(input, expected):
+        def test_func(self):
+            parser = Parser()
+            tree = parser.parse(input)
+            mangle(tree, toplevel=True)
+            self.assertMultiLineEqual(
+                textwrap.dedent(tree.to_ecma()).strip(),
+                textwrap.dedent(expected).strip()
+                )
+
+        return test_func
+
+    for index, (input, expected) in enumerate(cls.TEST_CASES):
+        func = make_test_function(input, expected)
+        setattr(cls, 'test_case_%d' % index, func)
+
+    return cls
+
+
+@decorator
+class ManglerTestCase(unittest.TestCase):
+
+    TEST_CASES = [
+        # test nested function declaration
+        # test that object property ids are not changed
+        ("""
+        function test() {
+          function is_false() {
+            var xpos = 5;
+            var point = {
+              xpos: 17,
+              ypos: 10
+            };
+            return true;
+          }
+        }
+        """,
+         """
+         function a() {
+           function a() {
+             var a = 5;
+             var b = {
+               xpos: 17,
+               ypos: 10
+             };
+             return true;
+           }
+         }
+         """),
+
+        # test that mangled names are not shadowed when we reference
+        # original names from any sub-scope
+        ("""
+        var result = function() {
+          var long_name = 'long name';
+          var not_so_long = 'indeed', log = 5;
+          global_x = 56;
+          console.log(long_name + not_so_long);
+          new_result = function(arg1, arg2) {
+            var arg2 = 'qwerty';
+            console.log(long_name + not_so_long + arg1 + arg2 + global_x);
+          };
+        };
+        """,
+         """
+         var a = function() {
+           var a = 'long name';
+           var b = 'indeed', c = 5;
+           global_x = 56;
+           console.log(a + b);
+           new_result = function(c, d) {
+             var d = 'qwerty';
+             console.log(a + b + c + d + global_x);
+           };
+         };
+         """),
+
+        # https://github.com/rspivak/slimit/issues/7
+        ("""
+        function a() {
+          var $exc1 = null;
+          try {
+            lala();
+          } catch($exc) {
+            if ($exc.__name__ == 'hi') {
+              return 'bam';
+            }
+          }
+          return 'bum';
+        }
+        """,
+         """
+         function a() {
+           var a = null;
+           try {
+             lala();
+           } catch (b) {
+             if (b.__name__ == 'hi') {
+               return 'bam';
+             }
+           }
+           return 'bum';
+         }
+         """),
+
+        # Handle the case when function arguments are redefined;
+        # in the example below, the statement arg = 9; doesn't create
+        # a global variable - it changes the value of arguments[0].
+        # The same applies to the statement var arg = 0;
+        # http://spin.atomicobject.com/2011/04/10/javascript-don-t-reassign-your-function-arguments/
+        ("""
+        function a(arg) {
+          arg = 9;
+          var arg = 0;
+          return arg;
+        }
+        """,
+         """
+         function a(a) {
+           a = 9;
+           var a = 0;
+           return a;
+         }
+         """),
+        ]
diff --git a/tools/slimit/tests/test_minifier.py b/tools/slimit/tests/test_minifier.py
new file mode 100644 (file)
index 0000000..e999335
--- /dev/null
@@ -0,0 +1,470 @@
+###############################################################################
+#
+# Copyright (c) 2011 Ruslan Spivak
+#
+# Permission is hereby granted, free of charge, to any person obtaining a copy
+# of this software and associated documentation files (the "Software"), to deal
+# in the Software without restriction, including without limitation the rights
+# to use, copy, modify, merge, publish, distribute, sublicense, and/or sell
+# copies of the Software, and to permit persons to whom the Software is
+# furnished to do so, subject to the following conditions:
+#
+# The above copyright notice and this permission notice shall be included in
+# all copies or substantial portions of the Software.
+#
+# THE SOFTWARE IS PROVIDED "AS IS", WITHOUT WARRANTY OF ANY KIND, EXPRESS OR
+# IMPLIED, INCLUDING BUT NOT LIMITED TO THE WARRANTIES OF MERCHANTABILITY,
+# FITNESS FOR A PARTICULAR PURPOSE AND NONINFRINGEMENT. IN NO EVENT SHALL THE
+# AUTHORS OR COPYRIGHT HOLDERS BE LIABLE FOR ANY CLAIM, DAMAGES OR OTHER
+# LIABILITY, WHETHER IN AN ACTION OF CONTRACT, TORT OR OTHERWISE, ARISING FROM,
+# OUT OF OR IN CONNECTION WITH THE SOFTWARE OR THE USE OR OTHER DEALINGS IN
+# THE SOFTWARE.
+#
+###############################################################################
+
+__author__ = 'Ruslan Spivak <ruslan.spivak@gmail.com>'
+
+import unittest
+
+from slimit import minify
+
+
+def decorator(cls):
+    def make_test_function(input, expected):
+
+        def test_func(self):
+            self.assertMinified(input, expected)
+
+        return test_func
+
+    for index, (input, expected) in enumerate(cls.TEST_CASES):
+        func = make_test_function(input, expected)
+        setattr(cls, 'test_case_%d' % index, func)
+
+    return cls
+
+
+@decorator
+class MinifierTestCase(unittest.TestCase):
+
+    def assertMinified(self, source, expected):
+        minified = minify(source)
+        self.maxDiff = None
+        self.assertSequenceEqual(minified, expected)
+
+    TEST_CASES = [
+        ("""
+        jQuery.fn = jQuery.prototype = {
+                // For internal use only.
+                _data: function( elem, name, data ) {
+                        return jQuery.data( elem, name, data, true );
+                }
+        };
+        """,
+         'jQuery.fn=jQuery.prototype={_data:function(elem,name,data){return jQuery.data(elem,name,data,true);}};'),
+
+        ('context = context instanceof jQuery ? context[0] : context;',
+         'context=context instanceof jQuery?context[0]:context;'
+         ),
+
+        ("""
+        /*
+        * A number of helper functions used for managing events.
+        * Many of the ideas behind this code originated from
+        * Dean Edwards' addEvent library.
+        */
+        if ( elem && elem.parentNode ) {
+                // Handle the case where IE and Opera return items
+                // by name instead of ID
+                if ( elem.id !== match[2] ) {
+                        return rootjQuery.find( selector );
+                }
+
+                // Otherwise, we inject the element directly into the jQuery object
+                this.length = 1;
+                this[0] = elem;
+        }
+        """,
+
+         'if(elem&&elem.parentNode){if(elem.id!==match[2])return rootjQuery.find(selector);this.length=1;this[0]=elem;}'
+         ),
+
+        ("""
+        var a = function( obj ) {
+                for ( var name in obj ) {
+                        return false;
+                }
+                return true;
+        };
+        """,
+         'var a=function(obj){for(var name in obj)return false;return true;};'
+         ),
+
+        ("""
+        x = "string", y = 5;
+
+        (x = 5) ? true : false;
+
+        for (p in obj)
+        ;
+
+        if (true)
+          val = null;
+        else
+          val = false;
+
+        """,
+         'x="string",y=5;(x=5)?true:false;for(p in obj);if(true)val=null;else val=false;'
+         ),
+
+        # for loops + empty statement in loop body
+        ("""
+        for (x = 0; true; x++)
+        ;
+        for (; true; x++)
+        ;
+        for (x = 0, y = 5; true; x++)
+        ;
+
+        y = (x + 5) * 20;
+
+        """,
+         'for(x=0;true;x++);for(;true;x++);for(x=0,y=5;true;x++);y=(x+5)*20;'),
+
+
+        # unary expressions
+        ("""
+        delete x;
+        typeof x;
+        void x;
+        x += (!y)++;
+        """,
+         'delete x;typeof x;void x;x+=(!y)++;'),
+
+        # label + break label + continue label
+        ("""
+        label:
+        if ( i == 0 )
+          continue label;
+        switch (day) {
+        case 5:
+          break ;
+        default:
+          break label;
+        }
+        """,
+         'label:if(i==0)continue label;switch(day){case 5:break;default:break label;}'),
+
+        # break + continue: no labels
+        ("""
+        while (i <= 7) {
+          if ( i == 3 )
+              continue;
+          if ( i == 0 )
+              break;
+        }
+        """,
+         'while(i<=7){if(i==3)continue;if(i==0)break;}'),
+
+        # regex + one line statements in if and if .. else
+        ("""
+        function a(x, y) {
+         var re = /ab+c/;
+         if (x == 1)
+           return x + y;
+         if (x == 3)
+           return {x: 1};
+         else
+           return;
+        }
+        """,
+         'function a(x,y){var re=/ab+c/;if(x==1)return x+y;if(x==3)return{x:1};else return;}'),
+
+        # new
+        ('return new jQuery.fn.init( selector, context, rootjQuery );',
+         'return new jQuery.fn.init(selector,context,rootjQuery);'
+         ),
+
+        # no space after 'else' when the next token is (, {
+        ("""
+        if (true) {
+          x = true;
+          y = 3;
+        } else {
+          x = false
+          y = 5
+        }
+        """,
+         'if(true){x=true;y=3;}else{x=false;y=5;}'),
+
+        ("""
+        if (true) {
+          x = true;
+          y = 3;
+        } else
+          (x + ' qw').split(' ');
+        """,
+         "if(true){x=true;y=3;}else(x+' qw').split(' ');"),
+
+
+        ##############################################################
+        # Block braces removal
+        ##############################################################
+
+        # do while
+        ('do { x += 1; } while(true);', 'do x+=1;while(true);'),
+        # do while: multiple statements
+        ('do { x += 1; y += 1;} while(true);', 'do{x+=1;y+=1;}while(true);'),
+
+        # elision
+        ('var a = [1, 2, 3, ,,,5];', 'var a=[1,2,3,,,,5];'),
+
+        # with
+        ("""
+        with (obj) {
+          a = b;
+        }
+        """,
+         'with(obj)a=b;'),
+
+        # with: multiple statements
+        ("""
+        with (obj) {
+          a = b;
+          c = d;
+        }
+        """,
+         'with(obj){a=b;c=d;}'),
+
+        # if else
+        ("""
+        if (true) {
+          x = true;
+        } else {
+          x = false
+        }
+        """,
+         'if(true)x=true;else x=false;'),
+
+        # if: multiple statements
+        ("""
+        if (true) {
+          x = true;
+          y = false;
+        } else {
+          x = false;
+          y = true;
+        }
+        """,
+         'if(true){x=true;y=false;}else{x=false;y=true;}'),
+
+        # try catch finally: one statement
+        ("""
+        try {
+          throw "my_exception"; // generates an exception
+        }
+        catch (e) {
+          // statements to handle any exceptions
+          log(e); // pass exception object to error handler
+        }
+        finally {
+          closefiles(); // always close the resource
+        }
+        """,
+         'try{throw "my_exception";}catch(e){log(e);}finally{closefiles();}'
+         ),
+
+        # try catch finally: no statements
+        ("""
+        try {
+        }
+        catch (e) {
+        }
+        finally {
+        }
+        """,
+         'try{}catch(e){}finally{}'
+         ),
+
+        # try catch finally: multiple statements
+        ("""
+        try {
+          x = 3;
+          y = 5;
+        }
+        catch (e) {
+          log(e);
+          log('e');
+        }
+        finally {
+          z = 7;
+          log('z');
+        }
+        """,
+         "try{x=3;y=5;}catch(e){log(e);log('e');}finally{z=7;log('z');}"
+         ),
+
+        # tricky case with an 'if' nested in 'if .. else'
+        # We need to preserve braces in the first 'if' otherwise
+        # 'else' might get associated with nested 'if' instead
+        ("""
+        if ( obj ) {
+                for ( n in obj ) {
+                        if ( v === false) {
+                                break;
+                        }
+                }
+        } else {
+                for ( ; i < l; ) {
+                        if ( nv === false ) {
+                                break;
+                        }
+                }
+        }
+        """,
+         'if(obj){for(n in obj)if(v===false)break;}else for(;i<l;)if(nv===false)break;'),
+
+        # We don't care about nested 'if' when enclosing 'if' block
+        # contains multiple statements because braces won't be removed
+        # by visit_Block when there are multiple statements in the block
+        ("""
+        if ( obj ) {
+                for ( n in obj ) {
+                        if ( v === false) {
+                                break;
+                        }
+                }
+                x = 5;
+        } else {
+                for ( ; i < l; ) {
+                        if ( nv === false ) {
+                                break;
+                        }
+                }
+        }
+        """,
+         'if(obj){for(n in obj)if(v===false)break;x=5;}else for(;i<l;)if(nv===false)break;'),
+
+
+        # No dangling 'else' - remove braces
+        ("""
+        if ( obj ) {
+                for ( n in obj ) {
+                        if ( v === false) {
+                                break;
+                        } else {
+                                n = 3;
+                        }
+                }
+        } else {
+                for ( ; i < l; ) {
+                        if ( nv === false ) {
+                                break;
+                        }
+                }
+        }
+        """,
+         'if(obj)for(n in obj)if(v===false)break;else n=3;else for(;i<l;)if(nv===false)break;'),
+
+        # foo["bar"] --> foo.bar
+        ('foo["bar"];', 'foo.bar;'),
+        ("foo['bar'];", 'foo.bar;'),
+        ("""foo['bar"']=42;""", """foo['bar"']=42;"""),
+        ("""foo["bar'"]=42;""", """foo["bar'"]=42;"""),
+        ('foo["bar bar"];', 'foo["bar bar"];'),
+        ('foo["bar"+"bar"];', 'foo["bar"+"bar"];'),
+        # https://github.com/rspivak/slimit/issues/34
+        # test some reserved keywords
+        ('foo["for"];', 'foo["for"];'),
+        ('foo["class"];', 'foo["class"];'),
+
+
+        # https://github.com/rspivak/slimit/issues/21
+        # c||(c=393,a=323,b=2321); --> c||c=393,a=323,b=2321; ERROR
+        ('c||(c=393);', 'c||(c=393);'),
+        ('c||(c=393,a=323,b=2321);', 'c||(c=393,a=323,b=2321);'),
+
+        # https://github.com/rspivak/slimit/issues/25
+        ('for(a?b:c;d;)e=1;', 'for(a?b:c;d;)e=1;'),
+
+        # https://github.com/rspivak/slimit/issues/26
+        ('"begin"+ ++a+"end";', '"begin"+ ++a+"end";'),
+
+        # https://github.com/rspivak/slimit/issues/28
+        ("""
+         (function($) {
+             $.hello = 'world';
+         }(jQuery));
+         """,
+         "(function($){$.hello='world';}(jQuery));"),
+
+        # function call in FOR init
+        ('for(o(); i < 3; i++) {}', 'for(o();i<3;i++){}'),
+
+        # unary increment operator in FOR init
+        ('for(i++; i < 3; i++) {}', 'for(i++;i<3;i++){}'),
+
+        # unary decrement operator in FOR init
+        ('for(i--; i < 3; i++) {}', 'for(i--;i<3;i++){}'),
+
+        # issue-37, simple identifier in FOR init
+        ('for(i; i < 3; i++) {}', 'for(i;i<3;i++){}'),
+
+        # https://github.com/rspivak/slimit/issues/32
+        ("""
+         Name.prototype = {
+           getPageProp: function Page_getPageProp(key) {
+             return this.pageDict.get(key);
+           },
+
+           get fullName() {
+             return this.first + " " + this.last;
+           },
+
+           set fullName(name) {
+             var names = name.split(" ");
+             this.first = names[0];
+             this.last = names[1];
+           }
+         };
+         """,
+         ('Name.prototype={getPageProp:function Page_getPageProp(key){'
+          'return this.pageDict.get(key);},'
+          'get fullName(){return this.first+" "+this.last;},'
+          'set fullName(name){var names=name.split(" ");this.first=names[0];'
+          'this.last=names[1];}};')
+        ),
+
+        # https://github.com/rspivak/slimit/issues/47 - might be a Python 3
+        # related issue
+        ('testObj[":"] = undefined; // Breaks', 'testObj[":"]=undefined;'),
+        ('testObj["::"] = undefined; // Breaks', 'testObj["::"]=undefined;'),
+        ('testObj["a:"] = undefined; // Breaks', 'testObj["a:"]=undefined;'),
+        ('testObj["."] = undefined; // OK', 'testObj["."]=undefined;'),
+        ('testObj["{"] = undefined; // OK', 'testObj["{"]=undefined;'),
+        ('testObj["}"] = undefined; // OK', 'testObj["}"]=undefined;'),
+        ('testObj["["] = undefined; // Breaks', 'testObj["["]=undefined;'),
+        ('testObj["]"] = undefined; // Breaks', 'testObj["]"]=undefined;'),
+        ('testObj["("] = undefined; // OK', 'testObj["("]=undefined;'),
+        ('testObj[")"] = undefined; // OK', 'testObj[")"]=undefined;'),
+        ('testObj["="] = undefined; // Breaks', 'testObj["="]=undefined;'),
+        ('testObj["-"] = undefined; // OK', 'testObj["-"]=undefined;'),
+        ('testObj["+"] = undefined; // OK', 'testObj["+"]=undefined;'),
+        ('testObj["*"] = undefined; // OK', 'testObj["*"]=undefined;'),
+        ('testObj["/"] = undefined; // OK', 'testObj["/"]=undefined;'),
+        (r'testObj["\\"] = undefined; // Breaks', r'testObj["\\"]=undefined;'),
+        ('testObj["%"] = undefined; // OK', 'testObj["%"]=undefined;'),
+        ('testObj["<"] = undefined; // Breaks', 'testObj["<"]=undefined;'),
+        ('testObj[">"] = undefined; // Breaks', 'testObj[">"]=undefined;'),
+        ('testObj["!"] = undefined; // OK', 'testObj["!"]=undefined;'),
+        ('testObj["?"] = undefined; // Breaks', 'testObj["?"]=undefined;'),
+        ('testObj[","] = undefined; // OK', 'testObj[","]=undefined;'),
+        ('testObj["@"] = undefined; // Breaks', 'testObj["@"]=undefined;'),
+        ('testObj["#"] = undefined; // OK', 'testObj["#"]=undefined;'),
+        ('testObj["&"] = undefined; // OK', 'testObj["&"]=undefined;'),
+        ('testObj["|"] = undefined; // OK', 'testObj["|"]=undefined;'),
+        ('testObj["~"] = undefined; // OK', 'testObj["~"]=undefined;'),
+        ('testObj["`"] = undefined; // Breaks', 'testObj["`"]=undefined;'),
+        ('testObj["."] = undefined; // OK', 'testObj["."]=undefined;'),
+        ]
+
diff --git a/tools/slimit/tests/test_nodevisitor.py b/tools/slimit/tests/test_nodevisitor.py
new file mode 100644 (file)
index 0000000..a6a5c7e
--- /dev/null
@@ -0,0 +1,37 @@
+###############################################################################
+#
+# Copyright (c) 2011 Ruslan Spivak
+#
+# Permission is hereby granted, free of charge, to any person obtaining a copy
+# of this software and associated documentation files (the "Software"), to deal
+# in the Software without restriction, including without limitation the rights
+# to use, copy, modify, merge, publish, distribute, sublicense, and/or sell
+# copies of the Software, and to permit persons to whom the Software is
+# furnished to do so, subject to the following conditions:
+#
+# The above copyright notice and this permission notice shall be included in
+# all copies or substantial portions of the Software.
+#
+# THE SOFTWARE IS PROVIDED "AS IS", WITHOUT WARRANTY OF ANY KIND, EXPRESS OR
+# IMPLIED, INCLUDING BUT NOT LIMITED TO THE WARRANTIES OF MERCHANTABILITY,
+# FITNESS FOR A PARTICULAR PURPOSE AND NONINFRINGEMENT. IN NO EVENT SHALL THE
+# AUTHORS OR COPYRIGHT HOLDERS BE LIABLE FOR ANY CLAIM, DAMAGES OR OTHER
+# LIABILITY, WHETHER IN AN ACTION OF CONTRACT, TORT OR OTHERWISE, ARISING FROM,
+# OUT OF OR IN CONNECTION WITH THE SOFTWARE OR THE USE OR OTHER DEALINGS IN
+# THE SOFTWARE.
+#
+###############################################################################
+
+__author__ = 'Ruslan Spivak <ruslan.spivak@gmail.com>'
+
+import doctest
+import unittest
+
+
+def test_suite():
+    return unittest.TestSuite((
+        doctest.DocFileSuite(
+            '../visitors/nodevisitor.py',
+            optionflags=doctest.NORMALIZE_WHITESPACE|doctest.ELLIPSIS
+            ),
+        ))
diff --git a/tools/slimit/tests/test_parser.py b/tools/slimit/tests/test_parser.py
new file mode 100644 (file)
index 0000000..f0e98fe
--- /dev/null
@@ -0,0 +1,243 @@
+###############################################################################
+#
+# Copyright (c) 2011-2012 Ruslan Spivak
+#
+# Permission is hereby granted, free of charge, to any person obtaining a copy
+# of this software and associated documentation files (the "Software"), to deal
+# in the Software without restriction, including without limitation the rights
+# to use, copy, modify, merge, publish, distribute, sublicense, and/or sell
+# copies of the Software, and to permit persons to whom the Software is
+# furnished to do so, subject to the following conditions:
+#
+# The above copyright notice and this permission notice shall be included in
+# all copies or substantial portions of the Software.
+#
+# THE SOFTWARE IS PROVIDED "AS IS", WITHOUT WARRANTY OF ANY KIND, EXPRESS OR
+# IMPLIED, INCLUDING BUT NOT LIMITED TO THE WARRANTIES OF MERCHANTABILITY,
+# FITNESS FOR A PARTICULAR PURPOSE AND NONINFRINGEMENT. IN NO EVENT SHALL THE
+# AUTHORS OR COPYRIGHT HOLDERS BE LIABLE FOR ANY CLAIM, DAMAGES OR OTHER
+# LIABILITY, WHETHER IN AN ACTION OF CONTRACT, TORT OR OTHERWISE, ARISING FROM,
+# OUT OF OR IN CONNECTION WITH THE SOFTWARE OR THE USE OR OTHER DEALINGS IN
+# THE SOFTWARE.
+#
+###############################################################################
+
+__author__ = 'Ruslan Spivak <ruslan.spivak@gmail.com>'
+
+import textwrap
+import unittest
+
+from slimit import ast
+from slimit.parser import Parser
+from slimit.visitors import nodevisitor
+
+
+def decorator(cls):
+    def make_test_function(input, expected):
+
+        def test_func(self):
+            parser = Parser()
+            result = parser.parse(input).to_ecma()
+            self.assertMultiLineEqual(result, expected)
+
+        return test_func
+
+    for index, (input, expected) in enumerate(cls.TEST_CASES):
+        input = textwrap.dedent(input).strip()
+        expected = textwrap.dedent(expected).strip()
+        func = make_test_function(input, expected)
+        setattr(cls, 'test_case_%d' % index, func)
+
+    return cls
+
+
+class ParserTestCase(unittest.TestCase):
+
+    def test_line_terminator_at_the_end_of_file(self):
+        parser = Parser()
+        parser.parse('var $_ = function(x){}(window);\n')
+
+    # XXX: function expression ?
+    def _test_function_expression(self):
+        text = """
+        if (true) {
+          function() {
+            foo;
+            location = 'http://anywhere.com';
+          }
+        }
+        """
+        parser = Parser()
+        parser.parse(text)
+
+    def test_modify_tree(self):
+        text = """
+        for (var i = 0; i < 10; i++) {
+          var x = 5 + i;
+        }
+        """
+        parser = Parser()
+        tree = parser.parse(text)
+        for node in nodevisitor.visit(tree):
+            if isinstance(node, ast.Identifier) and node.value == 'i':
+                node.value = 'hello'
+        self.assertMultiLineEqual(
+            tree.to_ecma(),
+            textwrap.dedent("""
+            for (var hello = 0; hello < 10; hello++) {
+              var x = 5 + hello;
+            }
+            """).strip()
+            )
+
+    def test_bug_no_semicolon_at_the_end_of_block_plus_newline_at_eof(self):
+        # https://github.com/rspivak/slimit/issues/3
+        text = textwrap.dedent("""
+        function add(x, y) {
+          return x + y;
+        }
+        """)
+        parser = Parser()
+        tree = parser.parse(text)
+        self.assertTrue(bool(tree.children()))
+
+    def test_function_expression_is_part_of_member_expr_nobf(self):
+        # https://github.com/rspivak/slimit/issues/22
+        # The problem happened to be that function_expr was not
+        # part of member_expr_nobf rule
+        text = 'window.done_already || function () { return "slimit!" ; }();'
+        self.assertTrue(bool(Parser().parse(text).children()))
+
+    # https://github.com/rspivak/slimit/issues/29
+    def test_that_parsing_eventually_stops(self):
+        text = """var a;
+        , b;"""
+        parser = Parser()
+        self.assertRaises(SyntaxError, parser.parse, text)
+
+
+@decorator
+class ASITestCase(unittest.TestCase):
+    TEST_CASES = [
+        ("""
+        switch (day) {
+          case 1:
+            result = 'Mon';
+            break
+          case 2:
+            break
+        }
+        """,
+         """
+         switch (day) {
+           case 1:
+             result = 'Mon';
+             break;
+           case 2:
+             break;
+         }
+         """),
+
+        ("""
+        while (true)
+          continue
+        a = 1;
+        """,
+         """
+         while (true) continue;
+         a = 1;
+         """),
+
+        ("""
+        return
+        a;
+        """,
+        """
+         return;
+         a;
+        """),
+        # test 3
+        ("""
+        x = 5
+        """,
+         """
+         x = 5;
+         """),
+
+        ("""
+        var a, b
+        var x
+        """,
+         """
+         var a, b;
+         var x;
+         """),
+
+        ("""
+        var a, b
+        var x
+        """,
+         """
+         var a, b;
+         var x;
+         """),
+
+        # test 6
+        ("""
+        return
+        a + b
+        """,
+         """
+         return;
+         a + b;
+         """),
+
+        ('while (true) ;', 'while (true) ;'),
+
+        ("""
+        if (x) {
+          y()
+        }
+        """,
+         """
+         if (x) {
+           y();
+         }
+         """),
+
+        # test 9
+        ("""
+        for ( ; i < length; i++) {
+        }
+        """,
+         """
+         for ( ; i < length; i++) {
+
+         }
+         """),
+
+        ("""
+        var i;
+        for (i; i < length; i++) {
+        }
+        """,
+         """
+         var i;
+         for (i; i < length; i++) {
+
+         }
+         """),
+        ]
+
+    def test_throw_statement(self):
+        # expression is not optional in throw statement
+        input = textwrap.dedent("""
+        throw
+          'exc';
+        """)
+        parser = Parser()
+        # ASI at lexer level should insert ';' after throw
+        self.assertRaises(SyntaxError, parser.parse, input)
+
+
+
diff --git a/tools/slimit/unicode_chars.py b/tools/slimit/unicode_chars.py
new file mode 100644 (file)
index 0000000..eec4411
--- /dev/null
@@ -0,0 +1,156 @@
+###############################################################################
+#
+# Copyright (c) 2011 Ruslan Spivak
+#
+# Permission is hereby granted, free of charge, to any person obtaining a copy
+# of this software and associated documentation files (the "Software"), to deal
+# in the Software without restriction, including without limitation the rights
+# to use, copy, modify, merge, publish, distribute, sublicense, and/or sell
+# copies of the Software, and to permit persons to whom the Software is
+# furnished to do so, subject to the following conditions:
+#
+# The above copyright notice and this permission notice shall be included in
+# all copies or substantial portions of the Software.
+#
+# THE SOFTWARE IS PROVIDED "AS IS", WITHOUT WARRANTY OF ANY KIND, EXPRESS OR
+# IMPLIED, INCLUDING BUT NOT LIMITED TO THE WARRANTIES OF MERCHANTABILITY,
+# FITNESS FOR A PARTICULAR PURPOSE AND NONINFRINGEMENT. IN NO EVENT SHALL THE
+# AUTHORS OR COPYRIGHT HOLDERS BE LIABLE FOR ANY CLAIM, DAMAGES OR OTHER
+# LIABILITY, WHETHER IN AN ACTION OF CONTRACT, TORT OR OTHERWISE, ARISING FROM,
+# OUT OF OR IN CONNECTION WITH THE SOFTWARE OR THE USE OR OTHER DEALINGS IN
+# THE SOFTWARE.
+#
+###############################################################################
+
+__author__ = 'Ruslan Spivak <ruslan.spivak@gmail.com>'
+
+# Reference - http://xregexp.com/plugins/#unicode
+# Adapted from https://github.com/mishoo/UglifyJS/blob/master/lib/parse-js.js
+
+# NOTE(review): the ``ur''`` raw-unicode string prefix below is Python 2
+# only syntax (a SyntaxError under Python 3), so this vendored tree must
+# be run with a Python 2 interpreter.
+
+# Character classes used by the lexer to recognise the Unicode portions
+# of ECMAScript IdentifierStart / IdentifierPart.  Each constant is a
+# single bracketed regex character class.
+
+# 'Uppercase letter (Lu)', 'Lowercase letter (Ll)',
+# 'Titlecase letter(Lt)', 'Modifier letter (Lm)', 'Other letter (Lo)'
+LETTER = (
+    ur'[\u0041-\u005A\u0061-\u007A\u00AA\u00B5\u00BA\u00C0-\u00D6\u00D8-\u00F6'
+    ur'\u00F8-\u02C1\u02C6-\u02D1\u02E0-\u02E4\u02EC\u02EE\u0370-\u0374\u0376'
+    ur'\u0377\u037A-\u037D\u0386\u0388-\u038A\u038C\u038E-\u03A1\u03A3-\u03F5'
+    ur'\u03F7-\u0481\u048A-\u0523\u0531-\u0556\u0559\u0561-\u0587\u05D0-\u05EA'
+    ur'\u05F0-\u05F2\u0621-\u064A\u066E\u066F\u0671-\u06D3\u06D5\u06E5\u06E6'
+    ur'\u06EE\u06EF\u06FA-\u06FC\u06FF\u0710\u0712-\u072F\u074D-\u07A5\u07B1'
+    ur'\u07CA-\u07EA\u07F4\u07F5\u07FA\u0904-\u0939\u093D\u0950\u0958-\u0961'
+    ur'\u0971\u0972\u097B-\u097F\u0985-\u098C\u098F\u0990\u0993-\u09A8'
+    ur'\u09AA-\u09B0\u09B2\u09B6-\u09B9\u09BD\u09CE\u09DC\u09DD\u09DF-\u09E1'
+    ur'\u09F0\u09F1\u0A05-\u0A0A\u0A0F\u0A10\u0A13-\u0A28\u0A2A-\u0A30\u0A32'
+    ur'\u0A33\u0A35\u0A36\u0A38\u0A39\u0A59-\u0A5C\u0A5E\u0A72-\u0A74'
+    ur'\u0A85-\u0A8D\u0A8F-\u0A91\u0A93-\u0AA8\u0AAA-\u0AB0\u0AB2\u0AB3'
+    ur'\u0AB5-\u0AB9\u0ABD\u0AD0\u0AE0\u0AE1\u0B05-\u0B0C\u0B0F\u0B10'
+    ur'\u0B13-\u0B28\u0B2A-\u0B30\u0B32\u0B33\u0B35-\u0B39\u0B3D\u0B5C\u0B5D'
+    ur'\u0B5F-\u0B61\u0B71\u0B83\u0B85-\u0B8A\u0B8E-\u0B90\u0B92-\u0B95\u0B99'
+    ur'\u0B9A\u0B9C\u0B9E\u0B9F\u0BA3\u0BA4\u0BA8-\u0BAA\u0BAE-\u0BB9\u0BD0'
+    ur'\u0C05-\u0C0C\u0C0E-\u0C10\u0C12-\u0C28\u0C2A-\u0C33\u0C35-\u0C39\u0C3D'
+    ur'\u0C58\u0C59\u0C60\u0C61\u0C85-\u0C8C\u0C8E-\u0C90\u0C92-\u0CA8'
+    ur'\u0CAA-\u0CB3\u0CB5-\u0CB9\u0CBD\u0CDE\u0CE0\u0CE1\u0D05-\u0D0C'
+    ur'\u0D0E-\u0D10\u0D12-\u0D28\u0D2A-\u0D39\u0D3D\u0D60\u0D61\u0D7A-\u0D7F'
+    ur'\u0D85-\u0D96\u0D9A-\u0DB1\u0DB3-\u0DBB\u0DBD\u0DC0-\u0DC6\u0E01-\u0E30'
+    ur'\u0E32\u0E33\u0E40-\u0E46\u0E81\u0E82\u0E84\u0E87\u0E88\u0E8A\u0E8D'
+    ur'\u0E94-\u0E97\u0E99-\u0E9F\u0EA1-\u0EA3\u0EA5\u0EA7\u0EAA\u0EAB'
+    ur'\u0EAD-\u0EB0\u0EB2\u0EB3\u0EBD\u0EC0-\u0EC4\u0EC6\u0EDC\u0EDD\u0F00'
+    ur'\u0F40-\u0F47\u0F49-\u0F6C\u0F88-\u0F8B\u1000-\u102A\u103F\u1050-\u1055'
+    ur'\u105A-\u105D\u1061\u1065\u1066\u106E-\u1070\u1075-\u1081\u108E'
+    ur'\u10A0-\u10C5\u10D0-\u10FA\u10FC\u1100-\u1159\u115F-\u11A2\u11A8-\u11F9'
+    ur'\u1200-\u1248\u124A-\u124D\u1250-\u1256\u1258\u125A-\u125D\u1260-\u1288'
+    ur'\u128A-\u128D\u1290-\u12B0\u12B2-\u12B5\u12B8-\u12BE\u12C0\u12C2-\u12C5'
+    ur'\u12C8-\u12D6\u12D8-\u1310\u1312-\u1315\u1318-\u135A\u1380-\u138F'
+    ur'\u13A0-\u13F4\u1401-\u166C\u166F-\u1676\u1681-\u169A\u16A0-\u16EA'
+    ur'\u1700-\u170C\u170E-\u1711\u1720-\u1731\u1740-\u1751\u1760-\u176C'
+    ur'\u176E-\u1770\u1780-\u17B3\u17D7\u17DC\u1820-\u1877\u1880-\u18A8\u18AA'
+    ur'\u1900-\u191C\u1950-\u196D\u1970-\u1974\u1980-\u19A9\u19C1-\u19C7'
+    ur'\u1A00-\u1A16\u1B05-\u1B33\u1B45-\u1B4B\u1B83-\u1BA0\u1BAE\u1BAF'
+    ur'\u1C00-\u1C23\u1C4D-\u1C4F\u1C5A-\u1C7D\u1D00-\u1DBF\u1E00-\u1F15'
+    ur'\u1F18-\u1F1D\u1F20-\u1F45\u1F48-\u1F4D\u1F50-\u1F57\u1F59\u1F5B\u1F5D'
+    ur'\u1F5F-\u1F7D\u1F80-\u1FB4\u1FB6-\u1FBC\u1FBE\u1FC2-\u1FC4\u1FC6-\u1FCC'
+    ur'\u1FD0-\u1FD3\u1FD6-\u1FDB\u1FE0-\u1FEC\u1FF2-\u1FF4\u1FF6-\u1FFC\u2071'
+    ur'\u207F\u2090-\u2094\u2102\u2107\u210A-\u2113\u2115\u2119-\u211D\u2124'
+    ur'\u2126\u2128\u212A-\u212D\u212F-\u2139\u213C-\u213F\u2145-\u2149\u214E'
+    ur'\u2183\u2184\u2C00-\u2C2E\u2C30-\u2C5E\u2C60-\u2C6F\u2C71-\u2C7D'
+    ur'\u2C80-\u2CE4\u2D00-\u2D25\u2D30-\u2D65\u2D6F\u2D80-\u2D96\u2DA0-\u2DA6'
+    ur'\u2DA8-\u2DAE\u2DB0-\u2DB6\u2DB8-\u2DBE\u2DC0-\u2DC6\u2DC8-\u2DCE'
+    ur'\u2DD0-\u2DD6\u2DD8-\u2DDE\u2E2F\u3005\u3006\u3031-\u3035\u303B\u303C'
+    ur'\u3041-\u3096\u309D-\u309F\u30A1-\u30FA\u30FC-\u30FF\u3105-\u312D'
+    ur'\u3131-\u318E\u31A0-\u31B7\u31F0-\u31FF\u3400\u4DB5\u4E00\u9FC3'
+    ur'\uA000-\uA48C\uA500-\uA60C\uA610-\uA61F\uA62A\uA62B\uA640-\uA65F'
+    ur'\uA662-\uA66E\uA67F-\uA697\uA717-\uA71F\uA722-\uA788\uA78B\uA78C'
+    ur'\uA7FB-\uA801\uA803-\uA805\uA807-\uA80A\uA80C-\uA822\uA840-\uA873'
+    ur'\uA882-\uA8B3\uA90A-\uA925\uA930-\uA946\uAA00-\uAA28\uAA40-\uAA42'
+    ur'\uAA44-\uAA4B\uAC00\uD7A3\uF900-\uFA2D\uFA30-\uFA6A\uFA70-\uFAD9'
+    ur'\uFB00-\uFB06\uFB13-\uFB17\uFB1D\uFB1F-\uFB28\uFB2A-\uFB36\uFB38-\uFB3C'
+    ur'\uFB3E\uFB40\uFB41\uFB43\uFB44\uFB46-\uFBB1\uFBD3-\uFD3D\uFD50-\uFD8F'
+    ur'\uFD92-\uFDC7\uFDF0-\uFDFB\uFE70-\uFE74\uFE76-\uFEFC\uFF21-\uFF3A'
+    ur'\uFF41-\uFF5A\uFF66-\uFFBE\uFFC2-\uFFC7\uFFCA-\uFFCF\uFFD2-\uFFD7'
+    ur'\uFFDA-\uFFDC]'
+    )
+
+# Unicode category Mn, 'Non-spacing mark'.
+NON_SPACING_MARK = (
+    ur'[\u0300-\u036F\u0483-\u0487\u0591-\u05BD\u05BF\u05C1\u05C2\u05C4\u05C5'
+    ur'\u05C7\u0610-\u061A\u064B-\u065E\u0670\u06D6-\u06DC\u06DF-\u06E4\u06E7'
+    ur'\u06E8\u06EA-\u06ED\u0711\u0730-\u074A\u07A6-\u07B0\u07EB-\u07F3'
+    ur'\u0816-\u0819\u081B-\u0823\u0825-\u0827\u0829-\u082D\u0900-\u0902\u093C'
+    ur'\u0941-\u0948\u094D\u0951-\u0955\u0962\u0963\u0981\u09BC\u09C1-\u09C4'
+    ur'\u09CD\u09E2\u09E3\u0A01\u0A02\u0A3C\u0A41\u0A42\u0A47\u0A48'
+    ur'\u0A4B-\u0A4D\u0A51\u0A70\u0A71\u0A75\u0A81\u0A82\u0ABC\u0AC1-\u0AC5'
+    ur'\u0AC7\u0AC8\u0ACD\u0AE2\u0AE3\u0B01\u0B3C\u0B3F\u0B41-\u0B44\u0B4D'
+    ur'\u0B56\u0B62\u0B63\u0B82\u0BC0\u0BCD\u0C3E-\u0C40\u0C46-\u0C48'
+    ur'\u0C4A-\u0C4D\u0C55\u0C56\u0C62\u0C63\u0CBC\u0CBF\u0CC6\u0CCC\u0CCD'
+    ur'\u0CE2\u0CE3\u0D41-\u0D44\u0D4D\u0D62\u0D63\u0DCA\u0DD2-\u0DD4\u0DD6'
+    ur'\u0E31\u0E34-\u0E3A\u0E47-\u0E4E\u0EB1\u0EB4-\u0EB9\u0EBB\u0EBC'
+    ur'\u0EC8-\u0ECD\u0F18\u0F19\u0F35\u0F37\u0F39\u0F71-\u0F7E\u0F80-\u0F84'
+    ur'\u0F86\u0F87\u0F90-\u0F97\u0F99-\u0FBC\u0FC6\u102D-\u1030\u1032-\u1037'
+    ur'\u1039\u103A\u103D\u103E\u1058\u1059\u105E-\u1060\u1071-\u1074\u1082'
+    ur'\u1085\u1086\u108D\u109D\u135F\u1712-\u1714\u1732-\u1734\u1752\u1753'
+    ur'\u1772\u1773\u17B7-\u17BD\u17C6\u17C9-\u17D3\u17DD\u180B-\u180D\u18A9'
+    ur'\u1920-\u1922\u1927\u1928\u1932\u1939-\u193B\u1A17\u1A18\u1A56'
+    ur'\u1A58-\u1A5E\u1A60\u1A62\u1A65-\u1A6C\u1A73-\u1A7C\u1A7F\u1B00-\u1B03'
+    ur'\u1B34\u1B36-\u1B3A\u1B3C\u1B42\u1B6B-\u1B73\u1B80\u1B81\u1BA2-\u1BA5'
+    ur'\u1BA8\u1BA9\u1C2C-\u1C33\u1C36\u1C37\u1CD0-\u1CD2\u1CD4-\u1CE0'
+    ur'\u1CE2-\u1CE8\u1CED\u1DC0-\u1DE6\u1DFD-\u1DFF\u20D0-\u20DC\u20E1'
+    ur'\u20E5-\u20F0\u2CEF-\u2CF1\u2DE0-\u2DFF\u302A-\u302F\u3099\u309A\uA66F'
+    ur'\uA67C\uA67D\uA6F0\uA6F1\uA802\uA806\uA80B\uA825\uA826\uA8C4'
+    ur'\uA8E0-\uA8F1\uA926-\uA92D\uA947-\uA951\uA980-\uA982\uA9B3\uA9B6-\uA9B9'
+    ur'\uA9BC\uAA29-\uAA2E\uAA31\uAA32\uAA35\uAA36\uAA43\uAA4C\uAAB0'
+    ur'\uAAB2-\uAAB4\uAAB7\uAAB8\uAABE\uAABF\uAAC1\uABE5\uABE8\uABED\uFB1E'
+    ur'\uFE00-\uFE0F\uFE20-\uFE26]'
+    )
+
+# Unicode category Mc, 'Spacing combining mark'.
+COMBINING_SPACING_MARK = (
+    ur'[\u0903\u093E-\u0940\u0949-\u094C\u094E\u0982\u0983\u09BE-\u09C0\u09C7'
+    ur'\u09C8\u09CB\u09CC\u09D7\u0A03\u0A3E-\u0A40\u0A83\u0ABE-\u0AC0\u0AC9'
+    ur'\u0ACB\u0ACC\u0B02\u0B03\u0B3E\u0B40\u0B47\u0B48\u0B4B\u0B4C\u0B57'
+    ur'\u0BBE\u0BBF\u0BC1\u0BC2\u0BC6-\u0BC8\u0BCA-\u0BCC\u0BD7\u0C01-\u0C03'
+    ur'\u0C41-\u0C44\u0C82\u0C83\u0CBE\u0CC0-\u0CC4\u0CC7\u0CC8\u0CCA\u0CCB'
+    ur'\u0CD5\u0CD6\u0D02\u0D03\u0D3E-\u0D40\u0D46-\u0D48\u0D4A-\u0D4C\u0D57'
+    ur'\u0D82\u0D83\u0DCF-\u0DD1\u0DD8-\u0DDF\u0DF2\u0DF3\u0F3E\u0F3F\u0F7F'
+    ur'\u102B\u102C\u1031\u1038\u103B\u103C\u1056\u1057\u1062-\u1064'
+    ur'\u1067-\u106D\u1083\u1084\u1087-\u108C\u108F\u109A-\u109C\u17B6'
+    ur'\u17BE-\u17C5\u17C7\u17C8\u1923-\u1926\u1929-\u192B\u1930\u1931'
+    ur'\u1933-\u1938\u19B0-\u19C0\u19C8\u19C9\u1A19-\u1A1B\u1A55\u1A57\u1A61'
+    ur'\u1A63\u1A64\u1A6D-\u1A72\u1B04\u1B35\u1B3B\u1B3D-\u1B41\u1B43\u1B44'
+    ur'\u1B82\u1BA1\u1BA6\u1BA7\u1BAA\u1C24-\u1C2B\u1C34\u1C35\u1CE1\u1CF2'
+    ur'\uA823\uA824\uA827\uA880\uA881\uA8B4-\uA8C3\uA952\uA953\uA983\uA9B4'
+    ur'\uA9B5\uA9BA\uA9BB\uA9BD-\uA9C0\uAA2F\uAA30\uAA33\uAA34\uAA4D\uAA7B'
+    ur'\uABE3\uABE4\uABE6\uABE7\uABE9\uABEA\uABEC]'
+    )
+
+# Union of Mn and Mc: the 'combining mark' portion of IdentifierPart.
+COMBINING_MARK = ur'%s|%s' % (NON_SPACING_MARK, COMBINING_SPACING_MARK)
+
+# Unicode category Pc, 'Connector punctuation' (includes '_').
+CONNECTOR_PUNCTUATION = (
+        ur'[\u005F\u203F\u2040\u2054\uFE33\uFE34\uFE4D-\uFE4F\uFF3F]'
+        )
+
+# Unicode category Nd, 'Decimal digit number'.
+DIGIT = (
+    ur'[\u0030-\u0039\u0660-\u0669\u06F0-\u06F9\u07C0-\u07C9\u0966-\u096F'
+    ur'\u09E6-\u09EF\u0A66-\u0A6F\u0AE6-\u0AEF\u0B66-\u0B6F\u0BE6-\u0BEF'
+    ur'\u0C66-\u0C6F\u0CE6-\u0CEF\u0D66-\u0D6F\u0E50-\u0E59\u0ED0-\u0ED9'
+    ur'\u0F20-\u0F29\u1040-\u1049\u1090-\u1099\u17E0-\u17E9\u1810-\u1819'
+    ur'\u1946-\u194F\u19D0-\u19DA\u1A80-\u1A89\u1A90-\u1A99\u1B50-\u1B59'
+    ur'\u1BB0-\u1BB9\u1C40-\u1C49\u1C50-\u1C59\uA620-\uA629\uA8D0-\uA8D9'
+    ur'\uA900-\uA909\uA9D0-\uA9D9\uAA50-\uAA59\uABF0-\uABF9\uFF10-\uFF19]'
+    )
diff --git a/tools/slimit/visitors/__init__.py b/tools/slimit/visitors/__init__.py
new file mode 100644 (file)
index 0000000..8b13789
--- /dev/null
@@ -0,0 +1 @@
+
diff --git a/tools/slimit/visitors/ecmavisitor.py b/tools/slimit/visitors/ecmavisitor.py
new file mode 100644 (file)
index 0000000..1c98598
--- /dev/null
@@ -0,0 +1,397 @@
+###############################################################################
+#
+# Copyright (c) 2011 Ruslan Spivak
+#
+# Permission is hereby granted, free of charge, to any person obtaining a copy
+# of this software and associated documentation files (the "Software"), to deal
+# in the Software without restriction, including without limitation the rights
+# to use, copy, modify, merge, publish, distribute, sublicense, and/or sell
+# copies of the Software, and to permit persons to whom the Software is
+# furnished to do so, subject to the following conditions:
+#
+# The above copyright notice and this permission notice shall be included in
+# all copies or substantial portions of the Software.
+#
+# THE SOFTWARE IS PROVIDED "AS IS", WITHOUT WARRANTY OF ANY KIND, EXPRESS OR
+# IMPLIED, INCLUDING BUT NOT LIMITED TO THE WARRANTIES OF MERCHANTABILITY,
+# FITNESS FOR A PARTICULAR PURPOSE AND NONINFRINGEMENT. IN NO EVENT SHALL THE
+# AUTHORS OR COPYRIGHT HOLDERS BE LIABLE FOR ANY CLAIM, DAMAGES OR OTHER
+# LIABILITY, WHETHER IN AN ACTION OF CONTRACT, TORT OR OTHERWISE, ARISING FROM,
+# OUT OF OR IN CONNECTION WITH THE SOFTWARE OR THE USE OR OTHER DEALINGS IN
+# THE SOFTWARE.
+#
+###############################################################################
+
+__author__ = 'Ruslan Spivak <ruslan.spivak@gmail.com>'
+
+from slimit import ast
+
+
+class ECMAVisitor(object):
+    """Pretty-printer: walks a slimit AST and emits formatted ECMAScript.
+
+    Nested constructs (blocks, function bodies, switch cases, object
+    literals) are indented by two spaces per level, tracked in
+    ``indent_level``.  Nodes carrying a truthy ``_parens`` attribute
+    are re-wrapped in parentheses (presumably set by the parser for
+    expressions that were parenthesized in the source -- confirm
+    against parser.py).
+    """
+
+    def __init__(self):
+        # Current indentation depth in spaces (bumped by 2 per level).
+        self.indent_level = 0
+
+    def _make_indent(self):
+        # Leading whitespace for the current nesting depth.
+        return ' ' * self.indent_level
+
+    def visit(self, node):
+        # Dynamic dispatch on the node's class name:
+        # visit_<ClassName>, falling back to generic_visit.
+        method = 'visit_%s' % node.__class__.__name__
+        return getattr(self, method, self.generic_visit)(node)
+
+    def generic_visit(self, node):
+        # Marker output for node types with no dedicated handler.
+        return 'GEN: %r' % node
+
+    def visit_Program(self, node):
+        return '\n'.join(self.visit(child) for child in node)
+
+    def visit_Block(self, node):
+        s = '{\n'
+        self.indent_level += 2
+        s += '\n'.join(
+            self._make_indent() + self.visit(child) for child in node)
+        self.indent_level -= 2
+        s += '\n' + self._make_indent() + '}'
+        return s
+
+    def visit_VarStatement(self, node):
+        # Note: emits its own trailing ';' (relied on by visit_For).
+        s = 'var %s;' % ', '.join(self.visit(child) for child in node)
+        return s
+
+    def visit_VarDecl(self, node):
+        output = []
+        output.append(self.visit(node.identifier))
+        if node.initializer is not None:
+            output.append(' = %s' % self.visit(node.initializer))
+        return ''.join(output)
+
+    def visit_Identifier(self, node):
+        return node.value
+
+    def visit_Assign(self, node):
+        # ':' is the property-assignment operator inside object
+        # literals ("key: value"); no space before the colon.
+        if node.op == ':':
+            template = '%s%s %s'
+        else:
+            template = '%s %s %s'
+        if getattr(node, '_parens', False):
+            template = '(%s)' % template
+        return template % (
+            self.visit(node.left), node.op, self.visit(node.right))
+
+    def visit_GetPropAssign(self, node):
+        # Object-literal getter: get name() { ... }
+        template = 'get %s() {\n%s\n%s}'
+        if getattr(node, '_parens', False):
+            template = '(%s)' % template
+        self.indent_level += 2
+        body = '\n'.join(
+            (self._make_indent() + self.visit(el))
+            for el in node.elements
+            )
+        self.indent_level -= 2
+        tail = self._make_indent()
+        return template % (self.visit(node.prop_name), body, tail)
+
+    def visit_SetPropAssign(self, node):
+        # Object-literal setter: set name(param) { ... }
+        template = 'set %s(%s) {\n%s\n%s}'
+        if getattr(node, '_parens', False):
+            template = '(%s)' % template
+        # ECMAScript setters take exactly one formal parameter.
+        if len(node.parameters) > 1:
+            raise SyntaxError(
+                'Setter functions must have one argument: %s' % node)
+        params = ','.join(self.visit(param) for param in node.parameters)
+        self.indent_level += 2
+        body = '\n'.join(
+            (self._make_indent() + self.visit(el))
+            for el in node.elements
+            )
+        self.indent_level -= 2
+        tail = self._make_indent()
+        return template % (self.visit(node.prop_name), params, body, tail)
+
+    def visit_Number(self, node):
+        return node.value
+
+    def visit_Comma(self, node):
+        s = '%s, %s' % (self.visit(node.left), self.visit(node.right))
+        if getattr(node, '_parens', False):
+            s = '(' + s + ')'
+        return s
+
+    def visit_EmptyStatement(self, node):
+        return node.value
+
+    def visit_If(self, node):
+        s = 'if ('
+        if node.predicate is not None:
+            s += self.visit(node.predicate)
+        s += ') '
+        s += self.visit(node.consequent)
+        if node.alternative is not None:
+            s += ' else '
+            s += self.visit(node.alternative)
+        return s
+
+    def visit_Boolean(self, node):
+        return node.value
+
+    def visit_For(self, node):
+        s = 'for ('
+        if node.init is not None:
+            s += self.visit(node.init)
+        # The separator after the init part depends on what was
+        # emitted: bare expressions carry no trailing ';' so one is
+        # added here, while statement-type inits (e.g. VarStatement)
+        # already end with ';' and only need a space.
+        if node.init is None:
+            s += ' ; '
+        elif isinstance(node.init, (ast.Assign, ast.Comma, ast.FunctionCall,
+                                    ast.UnaryOp, ast.Identifier, ast.BinOp,
+                                    ast.Conditional, ast.Regex, ast.NewExpr)):
+            s += '; '
+        else:
+            s += ' '
+        if node.cond is not None:
+            s += self.visit(node.cond)
+        s += '; '
+        if node.count is not None:
+            s += self.visit(node.count)
+        s += ') ' + self.visit(node.statement)
+        return s
+
+    def visit_ForIn(self, node):
+        if isinstance(node.item, ast.VarDecl):
+            template = 'for (var %s in %s) '
+        else:
+            template = 'for (%s in %s) '
+        s = template % (self.visit(node.item), self.visit(node.iterable))
+        s += self.visit(node.statement)
+        return s
+
+    def visit_BinOp(self, node):
+        if getattr(node, '_parens', False):
+            template = '(%s %s %s)'
+        else:
+            template = '%s %s %s'
+        return template % (
+            self.visit(node.left), node.op, self.visit(node.right))
+
+    def visit_UnaryOp(self, node):
+        s = self.visit(node.value)
+        if node.postfix:
+            s += node.op
+        elif node.op in ('delete', 'void', 'typeof'):
+            # Keyword operators need a separating space.
+            s = '%s %s' % (node.op, s)
+        else:
+            s = '%s%s' % (node.op, s)
+        if getattr(node, '_parens', False):
+            s = '(%s)' % s
+        return s
+
+    def visit_ExprStatement(self, node):
+        return '%s;' % self.visit(node.expr)
+
+    def visit_DoWhile(self, node):
+        s = 'do '
+        s += self.visit(node.statement)
+        s += ' while (%s);' % self.visit(node.predicate)
+        return s
+
+    def visit_While(self, node):
+        s = 'while (%s) ' % self.visit(node.predicate)
+        s += self.visit(node.statement)
+        return s
+
+    def visit_Null(self, node):
+        return 'null'
+
+    def visit_String(self, node):
+        return node.value
+
+    def visit_Continue(self, node):
+        if node.identifier is not None:
+            s = 'continue %s;' % self.visit_Identifier(node.identifier)
+        else:
+            s = 'continue;'
+        return s
+
+    def visit_Break(self, node):
+        if node.identifier is not None:
+            s = 'break %s;' % self.visit_Identifier(node.identifier)
+        else:
+            s = 'break;'
+        return s
+
+    def visit_Return(self, node):
+        if node.expr is None:
+            return 'return;'
+        else:
+            return 'return %s;' % self.visit(node.expr)
+
+    def visit_With(self, node):
+        s = 'with (%s) ' % self.visit(node.expr)
+        s += self.visit(node.statement)
+        return s
+
+    def visit_Label(self, node):
+        s = '%s: %s' % (
+            self.visit(node.identifier), self.visit(node.statement))
+        return s
+
+    def visit_Switch(self, node):
+        # Cases are indented here; the default clause indents itself
+        # inside visit_Default.
+        s = 'switch (%s) {\n' % self.visit(node.expr)
+        self.indent_level += 2
+        for case in node.cases:
+            s += self._make_indent() + self.visit_Case(case)
+        if node.default is not None:
+            s += self.visit_Default(node.default)
+        self.indent_level -= 2
+        s += self._make_indent() + '}'
+        return s
+
+    def visit_Case(self, node):
+        s = 'case %s:\n' % self.visit(node.expr)
+        self.indent_level += 2
+        elements = '\n'.join(self._make_indent() + self.visit(element)
+                             for element in node.elements)
+        if elements:
+            s += elements + '\n'
+        self.indent_level -= 2
+        return s
+
+    def visit_Default(self, node):
+        s = self._make_indent() + 'default:\n'
+        self.indent_level += 2
+        s += '\n'.join(self._make_indent() + self.visit(element)
+                       for element in node.elements)
+        if node.elements is not None:
+            s += '\n'
+        self.indent_level -= 2
+        return s
+
+    def visit_Throw(self, node):
+        s = 'throw %s;' % self.visit(node.expr)
+        return s
+
+    def visit_Debugger(self, node):
+        return '%s;' % node.value
+
+    def visit_Try(self, node):
+        s = 'try '
+        s += self.visit(node.statements)
+        if node.catch is not None:
+            s += ' ' + self.visit(node.catch)
+        if node.fin is not None:
+            s += ' ' + self.visit(node.fin)
+        return s
+
+    def visit_Catch(self, node):
+        s = 'catch (%s) %s' % (
+            self.visit(node.identifier), self.visit(node.elements))
+        return s
+
+    def visit_Finally(self, node):
+        s = 'finally %s' % self.visit(node.elements)
+        return s
+
+    def visit_FuncDecl(self, node):
+        self.indent_level += 2
+        elements = '\n'.join(self._make_indent() + self.visit(element)
+                             for element in node.elements)
+        self.indent_level -= 2
+
+        s = 'function %s(%s) {\n%s' % (
+            self.visit(node.identifier),
+            ', '.join(self.visit(param) for param in node.parameters),
+            elements,
+            )
+        s += '\n' + self._make_indent() + '}'
+        return s
+
+    def visit_FuncExpr(self, node):
+        self.indent_level += 2
+        elements = '\n'.join(self._make_indent() + self.visit(element)
+                             for element in node.elements)
+        self.indent_level -= 2
+
+        # Anonymous function expressions have no identifier.
+        ident = node.identifier
+        ident = '' if ident is None else ' %s' % self.visit(ident)
+
+        # A parenthesized function expression opens '(' before the
+        # header and closes ')' after the body's closing brace.
+        header = 'function%s(%s)'
+        if getattr(node, '_parens', False):
+            header = '(' + header
+        s = (header + ' {\n%s') % (
+            ident,
+            ', '.join(self.visit(param) for param in node.parameters),
+            elements,
+            )
+        s += '\n' + self._make_indent() + '}'
+        if getattr(node, '_parens', False):
+            s += ')'
+        return s
+
+    def visit_Conditional(self, node):
+        # Ternary: predicate ? consequent : alternative
+        if getattr(node, '_parens', False):
+            template = '(%s ? %s : %s)'
+        else:
+            template = '%s ? %s : %s'
+
+        s = template % (
+            self.visit(node.predicate),
+            self.visit(node.consequent), self.visit(node.alternative))
+        return s
+
+    def visit_Regex(self, node):
+        if getattr(node, '_parens', False):
+            return '(%s)' % node.value
+        else:
+            return node.value
+
+    def visit_NewExpr(self, node):
+        s = 'new %s(%s)' % (
+            self.visit(node.identifier),
+            ', '.join(self.visit(arg) for arg in node.args)
+            )
+        return s
+
+    def visit_DotAccessor(self, node):
+        if getattr(node, '_parens', False):
+            template = '(%s.%s)'
+        else:
+            template = '%s.%s'
+        s = template % (self.visit(node.node), self.visit(node.identifier))
+        return s
+
+    def visit_BracketAccessor(self, node):
+        s = '%s[%s]' % (self.visit(node.node), self.visit(node.expr))
+        return s
+
+    def visit_FunctionCall(self, node):
+        s = '%s(%s)' % (self.visit(node.identifier),
+                        ', '.join(self.visit(arg) for arg in node.args))
+        if getattr(node, '_parens', False):
+            s = '(' + s + ')'
+        return s
+
+    def visit_Object(self, node):
+        s = '{\n'
+        self.indent_level += 2
+        s += ',\n'.join(self._make_indent() + self.visit(prop)
+                        for prop in node.properties)
+        self.indent_level -= 2
+        if node.properties:
+            s += '\n'
+        s += self._make_indent() + '}'
+        return s
+
+    def visit_Array(self, node):
+        s = '['
+        length = len(node.items) - 1
+        for index, item in enumerate(node.items):
+            # An Elision node (hole in an array literal) prints as a
+            # bare comma; other items are comma-separated normally.
+            if isinstance(item, ast.Elision):
+                s += ','
+            elif index != length:
+                s += self.visit(item) + ','
+            else:
+                s += self.visit(item)
+        s += ']'
+        return s
+
+    def visit_This(self, node):
+        return 'this'
+
diff --git a/tools/slimit/visitors/minvisitor.py b/tools/slimit/visitors/minvisitor.py
new file mode 100644 (file)
index 0000000..1c6fd15
--- /dev/null
@@ -0,0 +1,437 @@
+###############################################################################
+#
+# Copyright (c) 2011 Ruslan Spivak
+#
+# Permission is hereby granted, free of charge, to any person obtaining a copy
+# of this software and associated documentation files (the "Software"), to deal
+# in the Software without restriction, including without limitation the rights
+# to use, copy, modify, merge, publish, distribute, sublicense, and/or sell
+# copies of the Software, and to permit persons to whom the Software is
+# furnished to do so, subject to the following conditions:
+#
+# The above copyright notice and this permission notice shall be included in
+# all copies or substantial portions of the Software.
+#
+# THE SOFTWARE IS PROVIDED "AS IS", WITHOUT WARRANTY OF ANY KIND, EXPRESS OR
+# IMPLIED, INCLUDING BUT NOT LIMITED TO THE WARRANTIES OF MERCHANTABILITY,
+# FITNESS FOR A PARTICULAR PURPOSE AND NONINFRINGEMENT. IN NO EVENT SHALL THE
+# AUTHORS OR COPYRIGHT HOLDERS BE LIABLE FOR ANY CLAIM, DAMAGES OR OTHER
+# LIABILITY, WHETHER IN AN ACTION OF CONTRACT, TORT OR OTHERWISE, ARISING FROM,
+# OUT OF OR IN CONNECTION WITH THE SOFTWARE OR THE USE OR OTHER DEALINGS IN
+# THE SOFTWARE.
+#
+###############################################################################
+
+__author__ = 'Ruslan Spivak <ruslan.spivak@gmail.com>'
+
+import re
+
+from slimit import ast
+from slimit.lexer import Lexer
+
+# Pre-compiled full-string match against the lexer's identifier regex.
+_HAS_ID_MATCH = re.compile('^%s$' % Lexer.identifier).match
+
+def _is_identifier(value):
+    # True-ish when *value* lexes as an identifier and is not a
+    # reserved word (returns the match object, not a strict bool).
+    return _HAS_ID_MATCH(value) and value not in Lexer.keywords_dict
+
+
+class ECMAMinifier(object):
+    """Minifying printer: emits the AST as compact ECMAScript.
+
+    Counterpart of ECMAVisitor that drops all optional whitespace and
+    the braces around single-statement blocks.  ``ifelse_stack`` lets
+    an enclosing ``if..else`` re-add braces around a nested brace-less
+    ``if`` so the ``else`` keeps binding to the outer ``if`` (dangling
+    else).  (``in_block`` is initialized but not used in the portion
+    of the class visible here -- confirm against the rest of the
+    file.)
+    """
+
+    def __init__(self):
+        self.in_block = 0
+        # Stack of {'if_in_ifelse': bool} records, one per in-flight
+        # 'if..else' whose consequent is being rendered.
+        self.ifelse_stack = []
+
+    def visit(self, node):
+        # Dynamic dispatch on the node's class name:
+        # visit_<ClassName>, falling back to generic_visit.
+        method = 'visit_%s' % node.__class__.__name__
+        return getattr(self, method, self.generic_visit)(node)
+
+    def generic_visit(self, node):
+        return 'GEN: %r' % node
+
+    def visit_Program(self, node):
+        return ''.join(self.visit(child) for child in node)
+
+    def visit_Block(self, node):
+        # Single-statement blocks lose their braces.
+        children = [self.visit(child) for child in node]
+        if len(children) == 1:
+            return children[0]
+        else:
+            return '{%s}' % ''.join(children)
+
+    def visit_VarStatement(self, node):
+        s = 'var %s;' % ','.join(self.visit(child) for child in node)
+        return s
+
+    def visit_VarDecl(self, node):
+        output = []
+        output.append(self.visit(node.identifier))
+        if node.initializer is not None:
+            output.append('=%s' % self.visit(node.initializer))
+        return ''.join(output)
+
+    def visit_Identifier(self, node):
+        return node.value
+
+    def visit_Assign(self, node):
+        template = '%s%s%s'
+        if getattr(node, '_parens', False):
+            template = '(%s)' % template
+        return template % (
+            self.visit(node.left), node.op, self.visit(node.right))
+
+    def visit_GetPropAssign(self, node):
+        template = 'get %s(){%s}'
+        if getattr(node, '_parens', False):
+            template = '(%s)' % template
+        return template % (
+            self.visit(node.prop_name),
+            ''.join(self.visit(element) for element in node.elements)
+            )
+
+    def visit_SetPropAssign(self, node):
+        template = 'set %s(%s){%s}'
+        if getattr(node, '_parens', False):
+            template = '(%s)' % template
+        # ECMAScript setters take exactly one formal parameter.
+        if len(node.parameters) > 1:
+            raise SyntaxError(
+                'Setter functions must have one argument: %s' % node)
+        return template % (
+            self.visit(node.prop_name),
+            ''.join(self.visit(param) for param in node.parameters),
+            ''.join(self.visit(element) for element in node.elements)
+            )
+
+    def visit_Number(self, node):
+        return node.value
+
+    def visit_Comma(self, node):
+        template = '%s,%s'
+        if getattr(node, '_parens', False):
+            template = '(%s)' % template
+        return template % (self.visit(node.left), self.visit(node.right))
+
+    def visit_EmptyStatement(self, node):
+        return node.value
+
+    def visit_If(self, node):
+        has_alternative = node.alternative is not None
+
+        def _is_singleline_block(n):
+            return isinstance(n, ast.Block) and (len(n.children()) == 1)
+
+        s = 'if('
+        if node.predicate is not None:
+            s += self.visit(node.predicate)
+        s += ')'
+
+        # if we are an 'if..else' statement and 'if' part contains only
+        # one statement
+        if has_alternative and _is_singleline_block(node.consequent):
+            self.ifelse_stack.append({'if_in_ifelse': False})
+            consequent = self.visit(node.consequent)
+            record = self.ifelse_stack.pop()
+            if record['if_in_ifelse']:
+                # A brace-less 'if' inside our consequent would steal
+                # the 'else'; keep the braces.
+                s += '{%s}' % consequent
+            else:
+                s += consequent
+        elif has_alternative:
+            # we are an 'if..else' statement and 'if' part contains
+            # multiple statements
+            s += self.visit(node.consequent)
+        else:
+            # 'if' without alternative - mark it so that an enclosing
+            # 'if..else' can act on it and add braces around 'if' part
+            if self.ifelse_stack:
+                self.ifelse_stack[-1]['if_in_ifelse'] = True
+            s += self.visit(node.consequent)
+
+        if has_alternative:
+            alternative = self.visit(node.alternative)
+            # No space needed when the alternative starts with a
+            # delimiter.
+            if alternative.startswith(('(', '{')):
+                s += 'else%s' % alternative
+            else:
+                s += 'else %s' % alternative
+        return s
+
+    def visit_Boolean(self, node):
+        return node.value
+
+    def visit_For(self, node):
+        s = 'for('
+        if node.init is not None:
+            s += self.visit(node.init)
+        # Expression-type inits carry no trailing ';' of their own;
+        # statement-type inits (e.g. VarStatement) already end in ';'.
+        if node.init is None:
+            s += ';'
+        elif isinstance(node.init, (ast.Assign, ast.Comma, ast.Conditional,
+                                    ast.FunctionCall, ast.UnaryOp,
+                                    ast.Identifier)):
+            s += ';'
+        else:
+            # NOTE(review): appending '' is a no-op; branch kept for
+            # symmetry with ECMAVisitor.visit_For.
+            s += ''
+        if node.cond is not None:
+            s += self.visit(node.cond)
+        s += ';'
+        if node.count is not None:
+            s += self.visit(node.count)
+        s += ')' + self.visit(node.statement)
+        return s
+
+    def visit_ForIn(self, node):
+        if isinstance(node.item, ast.VarDecl):
+            template = 'for(var %s in %s)'
+        else:
+            template = 'for(%s in %s)'
+        s = template % (self.visit(node.item), self.visit(node.iterable))
+        s += self.visit(node.statement)
+        return s
+
+    def visit_BinOp(self, node):
+        # Keyword operators keep their surrounding spaces.
+        if node.op in ('instanceof', 'in'):
+            template = '%s %s %s'
+        elif (node.op == '+' and
+              isinstance(node.right, ast.UnaryOp) and
+              node.right.op == '++' and not node.right.postfix
+              ):
+            # make a space between + and ++
+            # https://github.com/rspivak/slimit/issues/26
+            template = '%s%s %s'
+        else:
+            template = '%s%s%s'
+        if getattr(node, '_parens', False):
+            template = '(%s)' % template
+        return template % (
+            self.visit(node.left), node.op, self.visit(node.right))
+
+    def visit_UnaryOp(self, node):
+        s = self.visit(node.value)
+        if node.postfix:
+            s += node.op
+        elif node.op in ('delete', 'void', 'typeof'):
+            s = '%s %s' % (node.op, s)
+        else:
+            s = '%s%s' % (node.op, s)
+        if getattr(node, '_parens', False):
+            s = '(%s)' % s
+        return s
+
+    def visit_ExprStatement(self, node):
+        return '%s;' % self.visit(node.expr)
+
+    def visit_DoWhile(self, node):
+        statement = self.visit(node.statement)
+        # 'do' needs a space only before an identifier-like statement.
+        if statement.startswith(('{', '(')):
+            s = 'do%s' % statement
+        else:
+            s = 'do %s' % statement
+        s += 'while(%s);' % self.visit(node.predicate)
+        return s
+
+    def visit_While(self, node):
+        s = 'while(%s)' % self.visit(node.predicate)
+        s += self.visit(node.statement)
+        return s
+
+    def visit_Null(self, node):
+        return 'null'
+
+    def visit_String(self, node):
+        return node.value
+
+    def visit_Continue(self, node):
+        if node.identifier is not None:
+            s = 'continue %s;' % self.visit_Identifier(node.identifier)
+        else:
+            s = 'continue;'
+        return s
+
+    def visit_Break(self, node):
+        if node.identifier is not None:
+            s = 'break %s;' % self.visit_Identifier(node.identifier)
+        else:
+            s = 'break;'
+        return s
+
+    def visit_Return(self, node):
+        if node.expr is None:
+            return 'return;'
+
+        expr_text = self.visit(node.expr)
+        # No space needed when the expression starts with a delimiter.
+        if expr_text.startswith(('(', '{')):
+            return 'return%s;' % expr_text
+        else:
+            return 'return %s;' % expr_text
+
+    def visit_With(self, node):
+        s = 'with(%s)' % self.visit(node.expr)
+        s += self.visit(node.statement)
+        return s
+
+    def visit_Label(self, node):
+        s = '%s:%s' % (
+            self.visit(node.identifier), self.visit(node.statement))
+        return s
+
+    def visit_Switch(self, node):
+        s = 'switch(%s){' % self.visit(node.expr)
+        for case in node.cases:
+            s += self.visit_Case(case)
+        if node.default is not None:
+            s += self.visit_Default(node.default)
+        s += '}'
+        return s
+
+    def visit_Case(self, node):
+        s = 'case %s:' % self.visit(node.expr)
+        elements = ''.join(self.visit(element) for element in node.elements)
+        if elements:
+            s += elements
+        return s
+
+    def visit_Default(self, node):
+        s = 'default:'
+        s += ''.join(self.visit(element) for element in node.elements)
+        # NOTE(review): dead statement -- appends the empty string;
+        # kept for symmetry with ECMAVisitor.visit_Default.
+        if node.elements is not None:
+            s += ''
+        return s
+
+    def visit_Throw(self, node):
+        s = 'throw %s;' % self.visit(node.expr)
+        return s
+
+    def visit_Debugger(self, node):
+        return '%s;' % node.value
+
+    def visit_Try(self, node):
+        result = self.visit(node.statements)
+        if result.startswith('{'):
+            s = 'try%s' % result
+        else:
+            s = 'try{%s}' % result
+        if node.catch is not None:
+            s += self.visit(node.catch)
+        if node.fin is not None:
+            s += self.visit(node.fin)
+        return s
+
+    def visit_Catch(self, node):
+        ident = self.visit(node.identifier)
+        result = self.visit(node.elements)
+        if result.startswith('{'):
+            s = 'catch(%s)%s' % (ident, result)
+        else:
+            s = 'catch(%s){%s}' % (ident, result)
+        return s
+
+    def visit_Finally(self, node):
+        result = self.visit(node.elements)
+        if result.startswith('{'):
+            s = 'finally%s' % result
+        else:
+            s = 'finally{%s}' % result
+        return s
+
+    def visit_FuncDecl(self, node):
+        elements = ''.join(self.visit(element) for element in node.elements)
+        s = 'function %s(%s){%s' % (
+            self.visit(node.identifier),
+            ','.join(self.visit(param) for param in node.parameters),
+            elements,
+            )
+        s += '}'
+        return s
+
+    def visit_FuncExpr(self, node):
+        elements = ''.join(self.visit(element) for element in node.elements)
+
+        ident = node.identifier
+        ident = '' if ident is None else ' %s' % self.visit(ident)
+
+        header = 'function%s(%s)'
+        if getattr(node, '_parens', False):
+            header = '(' + header
+        s = (header + '{%s') % (
+            ident,
+            ','.join(self.visit(param) for param in node.parameters),
+            elements,
+            )
+        s += '}'
+        if getattr(node, '_parens', False):
+            s += ')'
+        return s
+
+    def visit_Conditional(self, node):
+        if getattr(node, '_parens', False):
+            template = '(%s?%s:%s)'
+        else:
+            template = '%s?%s:%s'
+
+        s = template % (
+            self.visit(node.predicate),
+            self.visit(node.consequent), self.visit(node.alternative))
+        return s
+
+    def visit_Regex(self, node):
+        if getattr(node, '_parens', False):
+            return '(%s)' % node.value
+        else:
+            return node.value
+
+    def visit_NewExpr(self, node):
+        s = 'new %s(%s)' % (
+            self.visit(node.identifier),
+            ','.join(self.visit(arg) for arg in node.args)
+            )
+        return s
+
+    def visit_DotAccessor(self, node):
+        if getattr(node, '_parens', False):
+            template = '(%s.%s)'
+        else:
+            template = '%s.%s'
+        s = template % (self.visit(node.node), self.visit(node.identifier))
+        return s
+
+    def visit_BracketAccessor(self, node):
+        if isinstance(node.expr, ast.String):
+            value = node.expr.value
+            # remove single or double quotes around the value, but not both
+            if value.startswith("'"):
+                value = value.strip("'")
+            elif value.startswith('"'):
+                value = value.strip('"')
+            if _is_identifier(value):
+                s = '%s.%s' % (self.visit(node.node), value)
+                return s
+
+        s = '%s[%s]' % (self.visit(node.node), self.visit(node.expr))
+        return s
+
+    def visit_FunctionCall(self, node):
+        template = '%s(%s)'
+        if getattr(node, '_parens', False):
+            template = '(%s)' % template
+
+        s = template % (self.visit(node.identifier),
+                        ','.join(self.visit(arg) for arg in node.args))
+        return s
+
+    def visit_Object(self, node):
+        s = '{%s}' % ','.join(self.visit(prop) for prop in node.properties)
+        return s
+
+    def visit_Array(self, node):
+        s = '['
+        length = len(node.items) - 1
+        for index, item in enumerate(node.items):
+            if isinstance(item, ast.Elision):
+                s += ','
+            elif index != length:
+                s += self.visit(item) + ','
+            else:
+                s += self.visit(item)
+        s += ']'
+        return s
+
+    def visit_This(self, node):
+        return 'this'
+
diff --git a/tools/slimit/visitors/nodevisitor.py b/tools/slimit/visitors/nodevisitor.py
new file mode 100644 (file)
index 0000000..d3559d7
--- /dev/null
@@ -0,0 +1,85 @@
+###############################################################################
+#
+# Copyright (c) 2011 Ruslan Spivak
+#
+# Permission is hereby granted, free of charge, to any person obtaining a copy
+# of this software and associated documentation files (the "Software"), to deal
+# in the Software without restriction, including without limitation the rights
+# to use, copy, modify, merge, publish, distribute, sublicense, and/or sell
+# copies of the Software, and to permit persons to whom the Software is
+# furnished to do so, subject to the following conditions:
+#
+# The above copyright notice and this permission notice shall be included in
+# all copies or substantial portions of the Software.
+#
+# THE SOFTWARE IS PROVIDED "AS IS", WITHOUT WARRANTY OF ANY KIND, EXPRESS OR
+# IMPLIED, INCLUDING BUT NOT LIMITED TO THE WARRANTIES OF MERCHANTABILITY,
+# FITNESS FOR A PARTICULAR PURPOSE AND NONINFRINGEMENT. IN NO EVENT SHALL THE
+# AUTHORS OR COPYRIGHT HOLDERS BE LIABLE FOR ANY CLAIM, DAMAGES OR OTHER
+# LIABILITY, WHETHER IN AN ACTION OF CONTRACT, TORT OR OTHERWISE, ARISING FROM,
+# OUT OF OR IN CONNECTION WITH THE SOFTWARE OR THE USE OR OTHER DEALINGS IN
+# THE SOFTWARE.
+#
+###############################################################################
+
+__author__ = 'Ruslan Spivak <ruslan.spivak@gmail.com>'
+
+
class ASTVisitor(object):
    """Base class for custom AST node visitors.

    Subclasses override ``visit_<NodeName>`` for the node types they care
    about; any other node type falls back to :meth:`generic_visit`, which
    simply recurses into all children.

    Example::

        from slimit.parser import Parser
        from slimit.visitors.nodevisitor import ASTVisitor

        class MyVisitor(ASTVisitor):
            def visit_Object(self, node):
                # visit object literal
                for prop in node:
                    left, right = prop.left, prop.right
                    print 'Property value: %s' % right.value
                    # visit all children in turn
                    self.visit(prop)

        parser = Parser()
        tree = parser.parse(text)
        MyVisitor().visit(tree)
    """

    def visit(self, node):
        """Dispatch to ``visit_<ClassName>`` or to :meth:`generic_visit`."""
        handler = getattr(
            self, 'visit_%s' % node.__class__.__name__, self.generic_visit)
        return handler(node)

    def generic_visit(self, node):
        """Default handler: visit every child of *node*."""
        for child in node:
            self.visit(child)
+
+
class NodeVisitor(object):
    """Simple node visitor."""

    def visit(self, node):
        """Yield every descendant of *node*, depth-first, pre-order."""
        for child in node:
            yield child
            for grandchild in self.visit(child):
                yield grandchild
+
+
def visit(node):
    """Yield every descendant of *node* via a throwaway NodeVisitor."""
    for child in NodeVisitor().visit(node):
        yield child
diff --git a/tools/slimit/visitors/scopevisitor.py b/tools/slimit/visitors/scopevisitor.py
new file mode 100644 (file)
index 0000000..10e55cb
--- /dev/null
@@ -0,0 +1,199 @@
+###############################################################################
+#
+# Copyright (c) 2011 Ruslan Spivak
+#
+# Permission is hereby granted, free of charge, to any person obtaining a copy
+# of this software and associated documentation files (the "Software"), to deal
+# in the Software without restriction, including without limitation the rights
+# to use, copy, modify, merge, publish, distribute, sublicense, and/or sell
+# copies of the Software, and to permit persons to whom the Software is
+# furnished to do so, subject to the following conditions:
+#
+# The above copyright notice and this permission notice shall be included in
+# all copies or substantial portions of the Software.
+#
+# THE SOFTWARE IS PROVIDED "AS IS", WITHOUT WARRANTY OF ANY KIND, EXPRESS OR
+# IMPLIED, INCLUDING BUT NOT LIMITED TO THE WARRANTIES OF MERCHANTABILITY,
+# FITNESS FOR A PARTICULAR PURPOSE AND NONINFRINGEMENT. IN NO EVENT SHALL THE
+# AUTHORS OR COPYRIGHT HOLDERS BE LIABLE FOR ANY CLAIM, DAMAGES OR OTHER
+# LIABILITY, WHETHER IN AN ACTION OF CONTRACT, TORT OR OTHERWISE, ARISING FROM,
+# OUT OF OR IN CONNECTION WITH THE SOFTWARE OR THE USE OR OTHER DEALINGS IN
+# THE SOFTWARE.
+#
+###############################################################################
+
+__author__ = 'Ruslan Spivak <ruslan.spivak@gmail.com>'
+
+from slimit import ast
+from slimit.scope import VarSymbol, FuncSymbol, LocalScope, SymbolTable
+
+
class Visitor(object):
    """Dispatching visitor: routes nodes to ``visit_<ClassName>`` methods."""

    def visit(self, node):
        handler = getattr(
            self, 'visit_%s' % node.__class__.__name__, self.generic_visit)
        return handler(node)

    def generic_visit(self, node):
        """Recurse into lists and into a node's children(); ignore None."""
        if node is None:
            return
        if isinstance(node, list):
            for child in node:
                self.visit(child)
        else:
            for child in node.children():
                self.visit(child)
+
+
class ScopeTreeVisitor(Visitor):
    """Builds scope tree."""

    def __init__(self, sym_table):
        # sym_table: symbol table whose .globals attribute is the root scope
        self.sym_table = sym_table
        self.current_scope = sym_table.globals

    def visit_VarDecl(self, node):
        # Define the declared name in the current scope (only once) and tag
        # the identifier with the scope it was declared in.
        ident = node.identifier
        symbol = VarSymbol(name=ident.value)
        if symbol not in self.current_scope:
            self.current_scope.define(symbol)
        ident.scope = self.current_scope
        self.visit(node.initializer)

    def visit_Identifier(self, node):
        # Record the scope every identifier is seen in.
        node.scope = self.current_scope

    def visit_FuncDecl(self, node):
        # A function introduces a new scope; FuncSymbol doubles as that
        # scope object. A named function is also defined in the enclosing
        # scope so it can be referenced by name.
        if node.identifier is not None:
            name = node.identifier.value
            self.visit_Identifier(node.identifier)
        else:
            name = None

        func_sym = FuncSymbol(
            name=name, enclosing_scope=self.current_scope)
        if name is not None:
            self.current_scope.define(func_sym)
            node.scope = self.current_scope

        # push function scope
        self.current_scope = func_sym
        for ident in node.parameters:
            # parameters live in the function's own scope
            self.current_scope.define(VarSymbol(ident.value))
            ident.scope = self.current_scope

        for element in node.elements:
            self.visit(element)

        # pop the function scope
        self.current_scope = self.current_scope.get_enclosing_scope()

    # alias: function expressions are scoped exactly like declarations
    visit_FuncExpr = visit_FuncDecl

    def visit_Catch(self, node):
        # The catch identifier actually lives in a new scope, but additional
        # variables defined in the catch statement belong to the outer scope.
        # For the sake of simplicity we just reuse any existing variables
        # from the outer scope if they exist.
        ident = node.identifier
        existing_symbol = self.current_scope.symbols.get(ident.value)
        if existing_symbol is None:
            self.current_scope.define(VarSymbol(ident.value))
        ident.scope = self.current_scope

        for element in node.elements:
            self.visit(element)
+
class RefVisitor(Visitor):
    """Fill 'ref' attribute in scopes."""

    def visit_Identifier(self, node):
        if self._is_id_in_expr(node):
            self._fill_scope_refs(node.value, node.scope)

    @staticmethod
    def _is_id_in_expr(node):
        """Return True if Identifier node is part of an expression."""
        has_scope = getattr(node, 'scope', None) is not None
        return has_scope and getattr(node, '_in_expression', False)

    @staticmethod
    def _fill_scope_refs(name, scope):
        """Put referenced name in 'ref' dictionary of a scope.

        Walks up the scope tree and adds the name to 'ref' of every scope
        up in the tree until a scope that defines referenced name is reached.
        """
        symbol = scope.resolve(name)
        if symbol is None:
            return

        defining_scope = symbol.scope
        current = scope
        current.refs[name] = defining_scope
        while current is not defining_scope:
            current = current.get_enclosing_scope()
            current.refs[name] = defining_scope
+
+
def mangle_scope_tree(root, toplevel):
    """Walk over a scope tree and mangle symbol names.

    Args:
        root: root scope of the tree to walk.
        toplevel: Defines if global scope should be mangled or not.
    """
    def mangle_one(scope):
        # the global scope has no enclosing scope; skip it unless requested
        if scope.get_enclosing_scope() is None and not toplevel:
            return
        for name in scope.symbols:
            new_name = scope.get_next_mangled_name()
            scope.mangled[name] = new_name
            scope.rev_mangled[new_name] = name

    def walk(scope):
        # pre-order: a scope is mangled before its children
        mangle_one(scope)
        for child in scope.children:
            walk(child)

    walk(root)
+
+
def fill_scope_references(tree):
    """Fill 'ref' scope attribute with values."""
    RefVisitor().visit(tree)
+
+
class NameManglerVisitor(Visitor):
    """Mangles names.

    Walks over a parsed tree and changes ID values to corresponding
    mangled names.
    """

    @staticmethod
    def _is_mangle_candidate(id_node):
        """Return True if Identifier node is a candidate for mangling.

        There are 5 cases when Identifier is a mangling candidate:
        1. Function declaration identifier
        2. Function expression identifier
        3. Function declaration/expression parameter
        4. Variable declaration identifier
        5. Identifier is a part of an expression (primary_expr_no_brace rule)
        """
        return getattr(id_node, '_mangle_candidate', False)

    def visit_Identifier(self, node):
        """Replace the identifier's value with its mangled counterpart."""
        if not self._is_mangle_candidate(node):
            return
        symbol = node.scope.resolve(node.value)
        if symbol is None:
            return
        # look up the replacement in the scope that actually defines the name
        replacement = symbol.scope.mangled.get(node.value)
        if replacement is not None:
            node.value = replacement
diff --git a/tools/slimit/yacctab.py b/tools/slimit/yacctab.py
new file mode 100644 (file)
index 0000000..9106390
--- /dev/null
@@ -0,0 +1,330 @@
+
+# yacctab.py
+# This file is automatically generated. Do not edit.
+_tabversion = '3.2'
+
+_lr_method = 'LALR'
+
+_lr_signature = ':\xbe\xd7 \xc4\xd1\xd4\x7f\xef\xac_JV{\x19\xa8'
+    
+_lr_action_items = {'DO':([0,2,5,7,13,19,21,28,29,31,36,43,44,45,50,58,62,65,67,68,72,75,77,111,114,115,116,126,133,143,147,148,163,164,176,177,179,240,241,250,270,271,310,311,312,313,314,322,323,340,341,383,386,414,416,418,421,435,436,441,471,472,475,477,483,492,501,502,504,508,512,514,516,519,522,523,524,528,529,531,532,533,534,536,540,541,542,544,547,],[68,-22,-15,68,-23,-21,-13,-19,-17,-20,-16,-11,68,-9,-10,-8,-24,-12,-6,68,-244,-18,-14,-7,-292,-291,-2,68,-270,-269,-245,-246,-261,-262,-265,-266,-25,-232,-233,-283,-271,-272,-287,-286,-26,-263,-264,-268,-267,-284,-285,68,68,-290,-288,68,68,-273,68,68,-251,-274,-247,68,68,68,68,68,68,-293,68,-254,-289,-275,-249,-250,-248,68,-294,68,-255,68,68,68,68,-256,-252,-276,-253,]),'OREQUAL':([3,8,12,15,16,20,24,38,41,49,61,63,64,70,71,78,79,80,81,82,85,87,101,102,103,105,107,108,109,110,142,159,169,172,173,216,218,221,222,227,229,239,301,303,305,315,319,338,345,346,350,353,411,413,417,424,427,429,433,464,482,506,508,526,529,537,],[-28,-72,-94,-71,-27,-42,-41,-30,-95,-78,-31,-32,-33,-35,-34,-37,-36,-43,-44,206,-38,-29,-92,-39,-93,-40,-67,-66,-76,-41,206,-85,-84,-79,-76,-47,-46,-77,-76,-81,-55,-80,206,-87,-88,-74,-75,-45,-70,-83,-56,-69,-86,-89,-73,-48,-82,-57,-68,206,-295,-296,-297,-297,-298,-298,]),'DIVEQUAL':([3,8,12,15,16,20,24,38,41,49,61,63,64,70,71,78,79,80,81,82,85,87,101,102,103,105,107,108,109,110,142,159,169,172,173,216,218,221,222,227,229,239,301,303,305,315,319,338,345,346,350,353,411,413,417,424,427,429,433,464,482,506,508,526,529,537,],[-28,-72,-94,-71,-27,-42,-41,-30,-95,-78,-31,-32,-33,-35,-34,-37,-36,-43,-44,193,-38,-29,-92,-39,-93,-40,-67,-66,-76,-41,193,-85,-84,-79,-76,-47,-46,-77,-76,-81,-55,-80,193,-87,-88,-74,-75,-45,-70,-83,-56,-69,-86,-89,-73,-48,-82,-57,-68,193,-295,-296,-297,-297,-298,-298,]),'RETURN':([0,2,5,7,13,19,21,28,29,31,36,43,44,45,50,58,62,65,67,68,72,75,77,111,114,115,116,126,133,143,147,148,163,164,176,177,179,240,241,250,270,271,310,311,312,313,314,322,323,340,341,383,386,414,416,418,42
1,435,436,441,471,472,475,477,483,492,501,502,504,508,512,514,516,519,522,523,524,528,529,531,532,533,534,536,540,541,542,544,547,],[26,-22,-15,26,-23,-21,-13,-19,-17,-20,-16,-11,26,-9,-10,-8,-24,-12,-6,26,-244,-18,-14,-7,-292,-291,-2,26,-270,-269,-245,-246,-261,-262,-265,-266,-25,-232,-233,-283,-271,-272,-287,-286,-26,-263,-264,-268,-267,-284,-285,26,26,-290,-288,26,26,-273,26,26,-251,-274,-247,26,26,26,26,26,26,-293,26,-254,-289,-275,-249,-250,-248,26,-294,26,-255,26,26,26,26,-256,-252,-276,-253,]),'RSHIFTEQUAL':([3,8,12,15,16,20,24,38,41,49,61,63,64,70,71,78,79,80,81,82,85,87,101,102,103,105,107,108,109,110,142,159,169,172,173,216,218,221,222,227,229,239,301,303,305,315,319,338,345,346,350,353,411,413,417,424,427,429,433,464,482,506,508,526,529,537,],[-28,-72,-94,-71,-27,-42,-41,-30,-95,-78,-31,-32,-33,-35,-34,-37,-36,-43,-44,194,-38,-29,-92,-39,-93,-40,-67,-66,-76,-41,194,-85,-84,-79,-76,-47,-46,-77,-76,-81,-55,-80,194,-87,-88,-74,-75,-45,-70,-83,-56,-69,-86,-89,-73,-48,-82,-57,-68,194,-295,-296,-297,-297,-298,-298,]),'DEFAULT':([2,5,7,13,19,21,28,29,31,36,43,45,50,58,59,62,65,67,72,75,77,111,114,115,116,133,143,147,148,163,164,176,177,179,240,241,250,270,271,310,311,312,313,314,322,323,340,341,414,416,435,471,472,473,475,496,497,498,499,508,514,516,518,519,522,523,524,529,532,534,541,542,543,544,547,],[-22,-15,-5,-23,-21,-13,-19,-17,-20,-16,-11,-9,-10,-8,-4,-24,-12,-6,-244,-18,-14,-7,-292,-291,-2,-270,-269,-245,-246,-261,-262,-265,-266,-25,-232,-233,-283,-271,-272,-287,-286,-26,-263,-264,-268,-267,-284,-285,-290,-288,-273,-251,-274,-1,-247,-278,521,-279,-277,-293,-254,-289,-280,-275,-249,-250,-248,-294,-255,-1,-256,-252,-281,-276,-253,]),'VOID':([0,2,4,5,6,7,10,11,13,19,21,23,25,26,28,29,30,31,36,40,43,44,45,48,50,58,62,65,67,68,72,75,77,83,84,88,89,90,92,93,94,95,111,114,115,116,120,121,122,123,124,126,133,143,146,147,148,149,150,151,152,155,158,160,163,164,168,170,171,174,175,176,177,179,181,182,183,184,185,186,187,188,189,190,191,192,193,194,195,197,198,199,
200,201,202,203,205,206,207,210,215,219,226,238,240,241,243,250,251,252,253,254,255,256,257,258,259,260,261,262,263,264,265,266,267,268,269,270,271,272,273,274,275,276,277,278,310,311,312,313,314,322,323,340,341,342,343,352,383,386,389,393,394,395,396,397,398,399,400,401,402,403,404,405,406,407,408,409,410,412,414,416,418,420,421,434,435,436,441,443,446,447,471,472,475,477,483,488,491,492,493,495,501,502,504,508,509,512,514,516,519,522,523,524,528,529,531,532,533,534,536,540,541,542,544,547,],[11,-22,-1,-15,11,11,11,11,-23,-21,-13,11,11,11,-19,-17,11,-20,-16,11,-11,11,-9,11,-10,-8,-24,-12,-6,11,-244,-18,-14,11,11,11,11,11,11,-53,-52,-51,-7,-292,-291,-2,11,11,11,11,11,11,-270,-269,11,-245,-246,11,11,11,11,11,11,11,-261,-262,11,11,11,11,11,-265,-266,-25,11,11,11,11,11,11,11,11,11,11,11,11,-216,-221,-222,-219,-217,-224,-215,-218,-220,-223,-214,-225,11,11,-1,-54,11,11,-232,-233,11,-283,11,11,11,11,11,11,11,11,11,11,11,11,11,11,11,11,11,11,11,-271,-272,11,11,11,11,11,11,11,-287,-286,-26,-263,-264,-268,-267,-284,-285,11,11,11,11,11,11,11,11,11,11,11,11,11,11,11,11,11,11,11,11,11,11,11,11,11,-290,-288,11,11,11,11,-273,11,11,11,11,11,-251,-274,-247,11,11,11,11,11,11,11,11,11,11,-293,11,11,-254,-289,-275,-249,-250,-248,11,-294,11,-255,11,11,11,11,-256,-252,-276,-253,]),'SETPROP':([104,349,],[231,231,]),'NUMBER':([0,2,4,5,6,7,10,11,13,19,21,23,25,26,28,29,30,31,36,40,43,44,45,48,50,54,58,62,65,67,68,72,75,77,83,84,88,89,90,92,93,94,95,98,104,111,114,115,116,120,121,122,123,124,126,133,143,146,147,148,149,150,151,152,155,158,160,163,164,168,170,171,174,175,176,177,179,181,182,183,184,185,186,187,188,189,190,191,192,193,194,195,197,198,199,200,201,202,203,205,206,207,210,215,219,226,228,231,238,240,241,243,250,251,252,253,254,255,256,257,258,259,260,261,262,263,264,265,266,267,268,269,270,271,272,273,274,275,276,277,278,310,311,312,313,314,322,323,340,341,342,343,349,352,383,386,389,393,394,395,396,397,398,399,400,401,402,403,404,405,406,407,408,409,410,412,414,416,418,420,421,
434,435,436,441,443,446,447,471,472,475,477,483,488,491,492,493,495,501,502,504,508,509,512,514,516,519,522,523,524,528,529,531,532,533,534,536,540,541,542,544,547,],[70,-22,-1,-15,70,70,70,70,-23,-21,-13,70,70,70,-19,-17,70,-20,-16,70,-11,70,-9,70,-10,70,-8,-24,-12,-6,70,-244,-18,-14,70,70,70,70,70,70,-53,-52,-51,70,70,-7,-292,-291,-2,70,70,70,70,70,70,-270,-269,70,-245,-246,70,70,70,70,70,70,70,-261,-262,70,70,70,70,70,-265,-266,-25,70,70,70,70,70,70,70,70,70,70,70,70,-216,-221,-222,-219,-217,-224,-215,-218,-220,-223,-214,-225,70,70,-1,-54,70,70,70,70,-232,-233,70,-283,70,70,70,70,70,70,70,70,70,70,70,70,70,70,70,70,70,70,70,-271,-272,70,70,70,70,70,70,70,-287,-286,-26,-263,-264,-268,-267,-284,-285,70,70,70,70,70,70,70,70,70,70,70,70,70,70,70,70,70,70,70,70,70,70,70,70,70,70,-290,-288,70,70,70,70,-273,70,70,70,70,70,-251,-274,-247,70,70,70,70,70,70,70,70,70,70,-293,70,70,-254,-289,-275,-249,-250,-248,70,-294,70,-255,70,70,70,70,-256,-252,-276,-253,]),'LBRACKET':([0,2,3,4,5,6,7,8,10,11,13,15,16,19,20,21,23,24,25,26,28,29,30,31,36,38,40,41,43,44,45,48,49,50,54,58,61,62,63,64,65,67,68,70,71,72,75,77,78,79,80,81,83,84,85,87,88,89,90,92,93,94,95,98,102,103,105,107,108,109,110,111,114,115,116,120,121,122,123,124,126,133,143,146,147,148,149,150,151,152,155,158,159,160,163,164,168,169,170,171,173,174,175,176,177,179,181,182,183,184,185,186,187,188,189,190,191,192,193,194,195,197,198,199,200,201,202,203,205,206,207,210,215,216,218,219,222,226,227,229,238,239,240,241,243,250,251,252,253,254,255,256,257,258,259,260,261,262,263,264,265,266,267,268,269,270,271,272,273,274,275,276,277,278,303,305,310,311,312,313,314,315,319,322,323,338,340,341,342,343,345,346,350,352,353,383,386,389,393,394,395,396,397,398,399,400,401,402,403,404,405,406,407,408,409,410,411,412,413,414,416,417,418,420,421,424,427,429,433,434,435,436,441,443,446,447,471,472,475,477,482,483,488,491,492,493,495,501,502,504,506,508,509,512,514,516,519,522,523,524,526,528,529,531,532,533,534,536,537,540,541,542,544,
547,],[4,-22,-28,-1,-15,4,4,-72,4,4,-23,-71,-27,-21,-42,-13,4,-41,4,4,-19,-17,4,-20,-16,-30,4,158,-11,4,-9,4,168,-10,4,-8,-31,-24,-32,-33,-12,-6,4,-35,-34,-244,-18,-14,-37,-36,-43,-44,4,4,-38,-29,4,4,4,4,-53,-52,-51,4,-39,226,-40,-67,-66,238,-41,-7,-292,-291,-2,4,4,4,4,4,4,-270,-269,4,-245,-246,4,4,4,4,4,4,-85,4,-261,-262,4,-84,4,4,238,4,4,-265,-266,-25,4,4,4,4,4,4,4,4,4,4,4,4,-216,-221,-222,-219,-217,-224,-215,-218,-220,-223,-214,-225,4,4,-1,-47,-46,-54,238,4,-81,-55,4,-80,-232,-233,4,-283,4,4,4,4,4,4,4,4,4,4,4,4,4,4,4,4,4,4,4,-271,-272,4,4,4,4,4,4,4,-87,-88,-287,-286,-26,-263,-264,-74,-75,-268,-267,-45,-284,-285,4,4,-70,-83,-56,4,-69,4,4,4,4,4,4,4,4,4,4,4,4,4,4,4,4,4,4,4,4,4,-86,4,-89,-290,-288,-73,4,4,4,-48,-82,-57,-68,4,-273,4,4,4,4,4,-251,-274,-247,4,-295,4,4,4,4,4,4,4,4,4,-296,-293,4,4,-254,-289,-275,-249,-250,-248,-297,4,-294,4,-255,4,4,4,-298,4,-256,-252,-276,-253,]),'BXOR':([3,8,12,15,16,18,20,22,24,27,35,38,41,46,49,53,60,61,63,64,70,71,73,74,76,78,79,80,81,82,85,87,97,99,100,101,102,103,105,106,107,108,109,110,112,113,125,127,128,129,132,134,135,136,140,142,144,145,156,159,166,169,172,173,196,204,209,213,216,218,221,222,223,224,227,229,239,245,246,247,248,249,281,282,283,290,291,292,293,294,301,303,305,315,318,319,325,326,327,328,329,330,331,332,333,334,335,336,338,345,346,350,353,357,358,359,360,361,362,363,364,365,366,368,369,370,371,372,373,374,375,377,378,382,411,413,417,424,427,429,433,448,449,450,451,452,453,454,455,456,457,458,462,464,482,506,508,526,529,537,],[-28,-72,-94,-71,-27,-150,-42,-133,-41,-119,-126,-30,-95,-105,-78,171,-104,-31,-32,-33,-35,-34,-176,-167,-182,-37,-36,-43,-44,-99,-38,-29,-112,-96,-102,-92,-39,-93,-40,-103,-67,-66,-76,-41,-111,-107,-108,-106,-123,-157,-137,-115,265,-172,-129,-96,-178,-113,-109,-85,-114,-84,-79,-76,-100,-101,-110,-99,-47,-46,-77,-76,-97,-98,-81,-55,-80,-136,-135,-134,-127,-128,-122,-121,-120,-162,-180,-144,399,-174,-96,-87,-88,-74,-183,-75,171,-168,-169,-171,-170,-155,-152,-151,-154,-153,-156,-177,-45,-70,-83
,-56,-69,-117,-118,-116,-142,-139,-138,-141,-140,-143,265,-132,-131,-130,-179,-158,-159,-161,-160,-124,-125,-173,-86,-89,-73,-48,-82,-57,-68,-149,-146,-145,-148,-147,-175,-181,-163,-164,-166,-165,399,-96,-295,-296,-297,-297,-298,-298,]),'WHILE':([0,2,5,7,13,19,21,28,29,31,36,43,44,45,50,58,62,65,67,68,72,75,77,111,114,115,116,126,133,143,147,148,163,164,176,177,179,180,240,241,250,270,271,310,311,312,313,314,322,323,340,341,383,386,414,416,418,421,435,436,441,471,472,475,477,483,492,501,502,504,508,512,514,516,519,522,523,524,528,529,531,532,533,534,536,540,541,542,544,547,],[52,-22,-15,52,-23,-21,-13,-19,-17,-20,-16,-11,52,-9,-10,-8,-24,-12,-6,52,-244,-18,-14,-7,-292,-291,-2,52,-270,-269,-245,-246,-261,-262,-265,-266,-25,324,-232,-233,-283,-271,-272,-287,-286,-26,-263,-264,-268,-267,-284,-285,52,52,-290,-288,52,52,-273,52,52,-251,-274,-247,52,52,52,52,52,52,-293,52,-254,-289,-275,-249,-250,-248,52,-294,52,-255,52,52,52,52,-256,-252,-276,-253,]),'COLON':([3,16,20,24,38,61,63,64,70,71,78,79,80,81,85,87,97,99,100,101,102,103,105,106,107,108,109,110,112,113,125,127,128,129,130,131,132,134,135,136,137,139,140,141,142,144,145,156,166,209,212,216,218,221,222,223,224,227,229,232,234,235,236,239,288,290,291,292,293,294,297,298,302,305,338,345,346,350,353,357,358,359,360,361,362,363,364,365,366,367,368,369,370,371,372,373,374,375,376,377,378,379,380,381,382,413,424,427,429,433,442,448,449,450,451,452,453,454,455,456,457,458,462,464,466,467,468,480,482,506,515,517,521,526,537,],[-28,-27,-42,126,-30,-31,-32,-33,-35,-34,-37,-36,-43,-44,-38,-29,-112,-96,-102,-92,-39,-93,-40,-103,-67,-66,-76,-41,-111,-107,-108,-106,-123,-157,-190,-196,-137,-115,-184,-172,-208,-226,-129,-202,-96,-178,-113,-109,-114,-110,342,-47,-46,-77,-76,-97,-98,-81,-55,-65,-63,352,-64,-80,-198,-162,-180,-144,-186,-174,-192,-210,-204,-88,-45,-70,-83,-56,-69,-117,-118,-116,-142,-139,-138,-141,-140,-143,-185,-191,-132,-131,-130,-179,-158,-159,-161,-160,-227,-124,-125,434,-197,-209,-173,-89,-48,-82,-57,-68,-193,-14
9,-146,-145,-148,-147,-175,-181,-163,-164,-166,-165,-187,-96,-211,493,-199,-203,-295,-296,-205,534,536,-297,-298,]),'BNOT':([0,2,4,5,6,7,10,11,13,19,21,23,25,26,28,29,30,31,36,40,43,44,45,48,50,58,62,65,67,68,72,75,77,83,84,88,89,90,92,93,94,95,111,114,115,116,120,121,122,123,124,126,133,143,146,147,148,149,150,151,152,155,158,160,163,164,168,170,171,174,175,176,177,179,181,182,183,184,185,186,187,188,189,190,191,192,193,194,195,197,198,199,200,201,202,203,205,206,207,210,215,219,226,238,240,241,243,250,251,252,253,254,255,256,257,258,259,260,261,262,263,264,265,266,267,268,269,270,271,272,273,274,275,276,277,278,310,311,312,313,314,322,323,340,341,342,343,352,383,386,389,393,394,395,396,397,398,399,400,401,402,403,404,405,406,407,408,409,410,412,414,416,418,420,421,434,435,436,441,443,446,447,471,472,475,477,483,488,491,492,493,495,501,502,504,508,509,512,514,516,519,522,523,524,528,529,531,532,533,534,536,540,541,542,544,547,],[30,-22,-1,-15,30,30,30,30,-23,-21,-13,30,30,30,-19,-17,30,-20,-16,30,-11,30,-9,30,-10,-8,-24,-12,-6,30,-244,-18,-14,30,30,30,30,30,30,-53,-52,-51,-7,-292,-291,-2,30,30,30,30,30,30,-270,-269,30,-245,-246,30,30,30,30,30,30,30,-261,-262,30,30,30,30,30,-265,-266,-25,30,30,30,30,30,30,30,30,30,30,30,30,-216,-221,-222,-219,-217,-224,-215,-218,-220,-223,-214,-225,30,30,-1,-54,30,30,-232,-233,30,-283,30,30,30,30,30,30,30,30,30,30,30,30,30,30,30,30,30,30,30,-271,-272,30,30,30,30,30,30,30,-287,-286,-26,-263,-264,-268,-267,-284,-285,30,30,30,30,30,30,30,30,30,30,30,30,30,30,30,30,30,30,30,30,30,30,30,30,30,-290,-288,30,30,30,30,-273,30,30,30,30,30,-251,-274,-247,30,30,30,30,30,30,30,30,30,30,-293,30,30,-254,-289,-275,-249,-250,-248,30,-294,30,-255,30,30,30,30,-256,-252,-276,-253,]),'LSHIFT':([3,8,12,15,16,18,20,22,24,27,35,38,41,46,49,60,61,63,64,70,71,78,79,80,81,82,85,87,97,99,100,101,102,103,105,106,107,108,109,110,112,113,125,127,128,132,134,140,142,145,156,159,166,169,172,173,196,204,209,213,216,218,221,222,223,224,227,229,239,245,246,247,248,249
,281,282,283,292,301,303,305,315,319,330,331,332,333,334,335,338,345,346,350,353,357,358,359,360,361,362,363,364,365,368,369,370,377,378,411,413,417,424,427,429,433,448,449,450,451,452,464,482,506,508,526,529,537,],[-28,-72,-94,-71,-27,122,-42,-133,-41,-119,-126,-30,-95,-105,-78,-104,-31,-32,-33,-35,-34,-37,-36,-43,-44,-99,-38,-29,-112,-96,-102,-92,-39,-93,-40,-103,-67,-66,-76,-41,-111,-107,-108,-106,-123,264,-115,-129,-96,-113,-109,-85,-114,-84,-79,-76,-100,-101,-110,-99,-47,-46,-77,-76,-97,-98,-81,-55,-80,-136,-135,-134,-127,-128,-122,-121,-120,264,-96,-87,-88,-74,-75,264,264,264,264,264,264,-45,-70,-83,-56,-69,-117,-118,-116,264,264,264,264,264,264,-132,-131,-130,-124,-125,-86,-89,-73,-48,-82,-57,-68,264,264,264,264,264,-96,-295,-296,-297,-297,-298,-298,]),'NEW':([0,2,4,5,6,7,10,11,13,19,21,23,25,26,28,29,30,31,36,40,43,44,45,48,50,54,58,62,65,67,68,72,75,77,83,84,88,89,90,92,93,94,95,98,111,114,115,116,120,121,122,123,124,126,133,143,146,147,148,149,150,151,152,155,158,160,163,164,168,170,171,174,175,176,177,179,181,182,183,184,185,186,187,188,189,190,191,192,193,194,195,197,198,199,200,201,202,203,205,206,207,210,215,219,226,238,240,241,243,250,251,252,253,254,255,256,257,258,259,260,261,262,263,264,265,266,267,268,269,270,271,272,273,274,275,276,277,278,310,311,312,313,314,322,323,340,341,342,343,352,383,386,389,393,394,395,396,397,398,399,400,401,402,403,404,405,406,407,408,409,410,412,414,416,418,420,421,434,435,436,441,443,446,447,471,472,475,477,483,488,491,492,493,495,501,502,504,508,509,512,514,516,519,522,523,524,528,529,531,532,533,534,536,540,541,542,544,547,],[54,-22,-1,-15,98,54,98,98,-23,-21,-13,98,98,98,-19,-17,98,-20,-16,98,-11,54,-9,98,-10,98,-8,-24,-12,-6,54,-244,-18,-14,98,98,98,98,54,98,-53,-52,-51,98,-7,-292,-291,-2,98,98,98,98,98,54,-270,-269,98,-245,-246,98,98,98,98,98,98,98,-261,-262,98,98,54,54,98,-265,-266,-25,54,98,98,98,98,98,98,98,98,98,98,54,-216,-221,-222,-219,-217,-224,-215,-218,-220,-223,-214,-225,98,98,-1,-54,98,98,-232,-233,98,
-283,98,98,98,98,98,98,98,98,98,98,98,98,98,98,98,98,98,98,98,-271,-272,98,98,98,98,98,98,98,-287,-286,-26,-263,-264,-268,-267,-284,-285,98,98,98,54,54,98,98,98,98,98,98,98,98,98,98,98,98,98,98,98,98,98,98,98,98,-290,-288,54,98,54,98,-273,54,54,98,98,98,-251,-274,-247,54,54,98,98,54,98,98,54,54,54,-293,98,54,-254,-289,-275,-249,-250,-248,54,-294,54,-255,54,54,54,54,-256,-252,-276,-253,]),'DIV':([3,8,12,15,16,20,24,27,35,38,41,46,49,60,61,63,64,70,71,78,79,80,81,82,85,87,97,99,100,101,102,103,105,106,107,108,109,110,112,113,125,127,128,134,142,145,156,159,166,169,172,173,196,204,209,213,216,218,221,222,223,224,227,229,239,248,249,281,282,283,301,303,305,315,319,338,345,346,350,353,357,358,359,377,378,411,413,417,424,427,429,433,464,482,506,508,526,529,537,],[-28,-72,-94,-71,-27,-42,-41,-119,151,-30,-95,-105,-78,-104,-31,-32,-33,-35,-34,-37,-36,-43,-44,-99,-38,-29,-112,-96,-102,-92,-39,-93,-40,-103,-67,-66,-76,-41,-111,-107,-108,-106,251,-115,-96,-113,-109,-85,-114,-84,-79,-76,-100,-101,-110,-99,-47,-46,-77,-76,-97,-98,-81,-55,-80,251,251,-122,-121,-120,-96,-87,-88,-74,-75,-45,-70,-83,-56,-69,-117,-118,-116,251,251,-86,-89,-73,-48,-82,-57,-68,-96,-295,-296,-297,-297,-298,-298,]),'NULL':([0,2,4,5,6,7,10,11,13,19,21,23,25,26,28,29,30,31,36,40,43,44,45,48,50,54,58,62,65,67,68,72,75,77,83,84,88,89,90,92,93,94,95,98,111,114,115,116,120,121,122,123,124,126,133,143,146,147,148,149,150,151,152,155,158,160,163,164,168,170,171,174,175,176,177,179,181,182,183,184,185,186,187,188,189,190,191,192,193,194,195,197,198,199,200,201,202,203,205,206,207,210,215,219,226,238,240,241,243,250,251,252,253,254,255,256,257,258,259,260,261,262,263,264,265,266,267,268,269,270,271,272,273,274,275,276,277,278,310,311,312,313,314,322,323,340,341,342,343,352,383,386,389,393,394,395,396,397,398,399,400,401,402,403,404,405,406,407,408,409,410,412,414,416,418,420,421,434,435,436,441,443,446,447,471,472,475,477,483,488,491,492,493,495,501,502,504,508,509,512,514,516,519,522,523,524,528,529,531,532,533,5
34,536,540,541,542,544,547,],[71,-22,-1,-15,71,71,71,71,-23,-21,-13,71,71,71,-19,-17,71,-20,-16,71,-11,71,-9,71,-10,71,-8,-24,-12,-6,71,-244,-18,-14,71,71,71,71,71,71,-53,-52,-51,71,-7,-292,-291,-2,71,71,71,71,71,71,-270,-269,71,-245,-246,71,71,71,71,71,71,71,-261,-262,71,71,71,71,71,-265,-266,-25,71,71,71,71,71,71,71,71,71,71,71,71,-216,-221,-222,-219,-217,-224,-215,-218,-220,-223,-214,-225,71,71,-1,-54,71,71,-232,-233,71,-283,71,71,71,71,71,71,71,71,71,71,71,71,71,71,71,71,71,71,71,-271,-272,71,71,71,71,71,71,71,-287,-286,-26,-263,-264,-268,-267,-284,-285,71,71,71,71,71,71,71,71,71,71,71,71,71,71,71,71,71,71,71,71,71,71,71,71,71,-290,-288,71,71,71,71,-273,71,71,71,71,71,-251,-274,-247,71,71,71,71,71,71,71,71,71,71,-293,71,71,-254,-289,-275,-249,-250,-248,71,-294,71,-255,71,71,71,71,-256,-252,-276,-253,]),'TRUE':([0,2,4,5,6,7,10,11,13,19,21,23,25,26,28,29,30,31,36,40,43,44,45,48,50,54,58,62,65,67,68,72,75,77,83,84,88,89,90,92,93,94,95,98,111,114,115,116,120,121,122,123,124,126,133,143,146,147,148,149,150,151,152,155,158,160,163,164,168,170,171,174,175,176,177,179,181,182,183,184,185,186,187,188,189,190,191,192,193,194,195,197,198,199,200,201,202,203,205,206,207,210,215,219,226,238,240,241,243,250,251,252,253,254,255,256,257,258,259,260,261,262,263,264,265,266,267,268,269,270,271,272,273,274,275,276,277,278,310,311,312,313,314,322,323,340,341,342,343,352,383,386,389,393,394,395,396,397,398,399,400,401,402,403,404,405,406,407,408,409,410,412,414,416,418,420,421,434,435,436,441,443,446,447,471,472,475,477,483,488,491,492,493,495,501,502,504,508,509,512,514,516,519,522,523,524,528,529,531,532,533,534,536,540,541,542,544,547,],[63,-22,-1,-15,63,63,63,63,-23,-21,-13,63,63,63,-19,-17,63,-20,-16,63,-11,63,-9,63,-10,63,-8,-24,-12,-6,63,-244,-18,-14,63,63,63,63,63,63,-53,-52,-51,63,-7,-292,-291,-2,63,63,63,63,63,63,-270,-269,63,-245,-246,63,63,63,63,63,63,63,-261,-262,63,63,63,63,63,-265,-266,-25,63,63,63,63,63,63,63,63,63,63,63,63,-216,-221,-222,-219,-217,-224,-215,-218,-22
0,-223,-214,-225,63,63,-1,-54,63,63,-232,-233,63,-283,63,63,63,63,63,63,63,63,63,63,63,63,63,63,63,63,63,63,63,-271,-272,63,63,63,63,63,63,63,-287,-286,-26,-263,-264,-268,-267,-284,-285,63,63,63,63,63,63,63,63,63,63,63,63,63,63,63,63,63,63,63,63,63,63,63,63,63,-290,-288,63,63,63,63,-273,63,63,63,63,63,-251,-274,-247,63,63,63,63,63,63,63,63,63,63,-293,63,63,-254,-289,-275,-249,-250,-248,63,-294,63,-255,63,63,63,63,-256,-252,-276,-253,]),'MINUS':([0,2,3,4,5,6,7,8,10,11,12,13,15,16,19,20,21,22,23,24,25,26,27,28,29,30,31,35,36,38,40,41,43,44,45,46,48,49,50,58,60,61,62,63,64,65,67,68,70,71,72,75,77,78,79,80,81,82,83,84,85,87,88,89,90,92,93,94,95,97,99,100,101,102,103,105,106,107,108,109,110,111,112,113,114,115,116,120,121,122,123,124,125,126,127,128,133,134,140,142,143,145,146,147,148,149,150,151,152,155,156,158,159,160,163,164,166,168,169,170,171,172,173,174,175,176,177,179,181,182,183,184,185,186,187,188,189,190,191,192,193,194,195,196,197,198,199,200,201,202,203,204,205,206,207,209,210,213,215,216,218,219,221,222,223,224,226,227,229,238,239,240,241,243,245,246,247,248,249,250,251,252,253,254,255,256,257,258,259,260,261,262,263,264,265,266,267,268,269,270,271,272,273,274,275,276,277,278,281,282,283,301,303,305,310,311,312,313,314,315,319,322,323,338,340,341,342,343,345,346,350,352,353,357,358,359,368,369,370,377,378,383,386,389,393,394,395,396,397,398,399,400,401,402,403,404,405,406,407,408,409,410,411,412,413,414,416,417,418,420,421,424,427,429,433,434,435,436,441,443,446,447,464,471,472,475,477,482,483,488,491,492,493,495,501,502,504,506,508,509,512,514,516,519,522,523,524,526,528,529,531,532,533,534,536,537,540,541,542,544,547,],[6,-22,-28,-1,-15,6,6,-72,6,6,-94,-23,-71,-27,-21,-42,-13,124,6,-41,6,6,-119,-19,-17,6,-20,-126,-16,-30,6,-95,-11,6,-9,-105,6,-78,-10,-8,-104,-31,-24,-32,-33,-12,-6,6,-35,-34,-244,-18,-14,-37,-36,-43,-44,-99,6,6,-38,-29,6,6,6,6,-53,-52,-51,-112,-96,-102,-92,-39,-93,-40,-103,-67,-66,-76,-41,-7,-111,-107,-292,-291,-2,6,6,6,6,6,-108,6,-106,-123
,-270,-115,274,-96,-269,-113,6,-245,-246,6,6,6,6,6,-109,6,-85,6,-261,-262,-114,6,-84,6,6,-79,-76,6,6,-265,-266,-25,6,6,6,6,6,6,6,6,6,6,6,6,-216,-221,-222,-100,-219,-217,-224,-215,-218,-220,-223,-101,-214,-225,6,-110,6,-99,-1,-47,-46,-54,-77,-76,-97,-98,6,-81,-55,6,-80,-232,-233,6,274,274,274,-127,-128,-283,6,6,6,6,6,6,6,6,6,6,6,6,6,6,6,6,6,6,6,-271,-272,6,6,6,6,6,6,6,-122,-121,-120,-96,-87,-88,-287,-286,-26,-263,-264,-74,-75,-268,-267,-45,-284,-285,6,6,-70,-83,-56,6,-69,-117,-118,-116,274,274,274,-124,-125,6,6,6,6,6,6,6,6,6,6,6,6,6,6,6,6,6,6,6,6,6,-86,6,-89,-290,-288,-73,6,6,6,-48,-82,-57,-68,6,-273,6,6,6,6,6,-96,-251,-274,-247,6,-295,6,6,6,6,6,6,6,6,6,-296,-293,6,6,-254,-289,-275,-249,-250,-248,-297,6,-294,6,-255,6,6,6,-298,6,-256,-252,-276,-253,]),'MULT':([3,8,12,15,16,20,24,27,35,38,41,46,49,60,61,63,64,70,71,78,79,80,81,82,85,87,97,99,100,101,102,103,105,106,107,108,109,110,112,113,125,127,128,134,142,145,156,159,166,169,172,173,196,204,209,213,216,218,221,222,223,224,227,229,239,248,249,281,282,283,301,303,305,315,319,338,345,346,350,353,357,358,359,377,378,411,413,417,424,427,429,433,464,482,506,508,526,529,537,],[-28,-72,-94,-71,-27,-42,-41,-119,152,-30,-95,-105,-78,-104,-31,-32,-33,-35,-34,-37,-36,-43,-44,-99,-38,-29,-112,-96,-102,-92,-39,-93,-40,-103,-67,-66,-76,-41,-111,-107,-108,-106,253,-115,-96,-113,-109,-85,-114,-84,-79,-76,-100,-101,-110,-99,-47,-46,-77,-76,-97,-98,-81,-55,-80,253,253,-122,-121,-120,-96,-87,-88,-74,-75,-45,-70,-83,-56,-69,-117,-118,-116,253,253,-86,-89,-73,-48,-82,-57,-68,-96,-295,-296,-297,-297,-298,-298,]),'DEBUGGER':([0,2,5,7,13,19,21,28,29,31,36,43,44,45,50,58,62,65,67,68,72,75,77,111,114,115,116,126,133,143,147,148,163,164,176,177,179,240,241,250,270,271,310,311,312,313,314,322,323,340,341,383,386,414,416,418,421,435,436,441,471,472,475,477,483,492,501,502,504,508,512,514,516,519,522,523,524,528,529,531,532,533,534,536,540,541,542,544,547,],[14,-22,-15,14,-23,-21,-13,-19,-17,-20,-16,-11,14,-9,-10,-8,-24,-12,-6,14,-244,-18,-14,-7,
-292,-291,-2,14,-270,-269,-245,-246,-261,-262,-265,-266,-25,-232,-233,-283,-271,-272,-287,-286,-26,-263,-264,-268,-267,-284,-285,14,14,-290,-288,14,14,-273,14,14,-251,-274,-247,14,14,14,14,14,14,-293,14,-254,-289,-275,-249,-250,-248,14,-294,14,-255,14,14,14,14,-256,-252,-276,-253,]),'CASE':([2,5,7,13,19,21,28,29,31,36,43,45,50,58,59,62,65,67,72,75,77,111,114,115,116,133,143,147,148,163,164,176,177,179,240,241,250,270,271,310,311,312,313,314,322,323,340,341,414,416,435,471,472,473,475,496,498,508,514,516,518,519,520,522,523,524,529,532,534,536,541,542,543,544,545,547,],[-22,-15,-5,-23,-21,-13,-19,-17,-20,-16,-11,-9,-10,-8,-4,-24,-12,-6,-244,-18,-14,-7,-292,-291,-2,-270,-269,-245,-246,-261,-262,-265,-266,-25,-232,-233,-283,-271,-272,-287,-286,-26,-263,-264,-268,-267,-284,-285,-290,-288,-273,-251,-274,495,-247,495,-279,-293,-254,-289,-280,-275,495,-249,-250,-248,-294,-255,-1,-1,-256,-252,-281,-276,-282,-253,]),'LE':([3,8,12,15,16,18,20,22,24,27,35,38,41,46,49,60,61,63,64,70,71,74,78,79,80,81,82,85,87,97,99,100,101,102,103,105,106,107,108,109,110,112,113,125,127,128,129,132,134,140,142,145,156,159,166,169,172,173,196,204,209,213,216,218,221,222,223,224,227,229,239,245,246,247,248,249,281,282,283,290,292,301,303,305,315,319,326,327,328,329,330,331,332,333,334,335,338,345,346,350,353,357,358,359,360,361,362,363,364,365,368,369,370,372,373,374,375,377,378,411,413,417,424,427,429,433,448,449,450,451,452,455,456,457,458,464,482,506,508,526,529,537,],[-28,-72,-94,-71,-27,-150,-42,-133,-41,-119,-126,-30,-95,-105,-78,-104,-31,-32,-33,-35,-34,190,-37,-36,-43,-44,-99,-38,-29,-112,-96,-102,-92,-39,-93,-40,-103,-67,-66,-76,-41,-111,-107,-108,-106,-123,258,-137,-115,-129,-96,-113,-109,-85,-114,-84,-79,-76,-100,-101,-110,-99,-47,-46,-77,-76,-97,-98,-81,-55,-80,-136,-135,-134,-127,-128,-122,-121,-120,397,-144,-96,-87,-88,-74,-75,258,258,258,258,-155,-152,-151,-154,-153,-156,-45,-70,-83,-56,-69,-117,-118,-116,-142,-139,-138,-141,-140,-143,-132,-131,-130,258,258,258,258,-124,-125,-86,-8
9,-73,-48,-82,-57,-68,-149,-146,-145,-148,-147,258,258,258,258,-96,-295,-296,-297,-297,-298,-298,]),'RPAREN':([3,16,20,38,61,63,64,70,71,78,79,80,81,85,87,97,99,100,101,102,103,105,106,107,108,109,110,112,113,125,127,128,129,130,131,132,134,135,136,137,139,140,141,142,144,145,153,156,160,166,208,209,216,218,221,222,223,224,227,229,239,279,284,286,287,305,306,307,317,321,338,339,344,345,346,350,353,357,358,359,360,361,362,363,364,365,366,367,368,369,370,371,372,373,374,375,376,377,378,380,381,382,387,413,424,425,427,428,429,433,437,460,461,465,469,470,474,479,480,482,489,491,506,509,511,513,526,530,537,],[-28,-27,-42,-30,-31,-32,-33,-35,-34,-37,-36,-43,-44,-38,-29,-112,-96,-102,-92,-39,-93,-40,-103,-67,-66,-76,-41,-111,-107,-108,-106,-123,-157,-190,-196,-137,-115,-184,-172,-208,-226,-129,-202,-96,-178,-113,285,-109,305,-114,338,-110,-47,-46,-77,-76,-97,-98,-81,-55,-80,383,384,-299,388,-88,413,-90,418,419,-45,421,426,-70,-83,-56,-69,-117,-118,-116,-142,-139,-138,-141,-140,-143,-185,-191,-132,-131,-130,-179,-158,-159,-161,-160,-227,-124,-125,-197,-209,-173,440,-89,-48,476,-82,478,-57,-68,-300,-257,-258,492,-91,494,500,505,-203,-295,512,-1,-296,-1,531,533,-297,540,-298,]),'URSHIFT':([3,8,12,15,16,18,20,22,24,27,35,38,41,46,49,60,61,63,64,70,71,78,79,80,81,82,85,87,97,99,100,101,102,103,105,106,107,108,109,110,112,113,125,127,128,132,134,140,142,145,156,159,166,169,172,173,196,204,209,213,216,218,221,222,223,224,227,229,239,245,246,247,248,249,281,282,283,292,301,303,305,315,319,330,331,332,333,334,335,338,345,346,350,353,357,358,359,360,361,362,363,364,365,368,369,370,377,378,411,413,417,424,427,429,433,448,449,450,451,452,464,482,506,508,526,529,537,],[-28,-72,-94,-71,-27,120,-42,-133,-41,-119,-126,-30,-95,-105,-78,-104,-31,-32,-33,-35,-34,-37,-36,-43,-44,-99,-38,-29,-112,-96,-102,-92,-39,-93,-40,-103,-67,-66,-76,-41,-111,-107,-108,-106,-123,262,-115,-129,-96,-113,-109,-85,-114,-84,-79,-76,-100,-101,-110,-99,-47,-46,-77,-76,-97,-98,-81,-55,-80,-136,-135,-134,-127,-128,
-122,-121,-120,262,-96,-87,-88,-74,-75,262,262,262,262,262,262,-45,-70,-83,-56,-69,-117,-118,-116,262,262,262,262,262,262,-132,-131,-130,-124,-125,-86,-89,-73,-48,-82,-57,-68,262,262,262,262,262,-96,-295,-296,-297,-297,-298,-298,]),'SEMI':([0,1,2,3,5,7,8,12,13,14,15,16,18,19,20,21,22,24,26,27,28,29,31,34,35,36,38,41,43,44,45,46,47,49,50,51,53,55,56,58,60,61,62,63,64,65,66,67,68,69,70,71,72,73,74,75,76,77,78,79,80,81,82,85,87,97,99,100,101,102,103,105,106,107,108,109,110,111,112,113,114,115,116,117,118,119,125,126,127,128,129,130,131,132,133,134,135,136,137,138,139,140,141,142,143,144,145,147,148,155,156,159,163,164,165,166,169,172,173,176,177,178,179,196,204,209,211,213,214,216,218,221,222,223,224,227,229,239,240,241,244,245,246,247,248,249,250,270,271,280,281,282,283,288,290,291,292,293,294,295,296,297,298,299,300,301,302,303,305,310,311,312,313,314,315,318,319,320,322,323,325,326,327,328,329,330,331,332,333,334,335,336,337,338,340,341,345,346,350,353,355,356,357,358,359,360,361,362,363,364,365,366,367,368,369,370,371,372,373,374,375,376,377,378,380,381,382,383,386,390,391,392,404,411,413,414,416,417,418,421,422,424,427,429,433,435,436,441,442,443,445,448,449,450,451,452,453,454,455,456,457,458,459,460,461,462,463,464,466,468,471,472,475,477,480,482,483,485,486,487,490,492,500,501,502,504,506,508,510,512,514,515,516,519,522,523,524,526,528,529,531,532,533,534,536,537,540,541,542,544,547,],[72,-206,-22,-28,-15,72,-72,-94,-23,115,-71,-27,-150,-21,-42,-13,-133,-41,143,-119,-19,-17,-20,147,-126,-16,-30,-95,-11,72,-9,-105,163,-78,-10,-230,-188,-212,-200,-8,-104,-31,-24,-32,-33,-12,176,-6,72,-194,-35,-34,-244,-176,-167,-18,-182,-14,-37,-36,-43,-44,-99,-38,-29,-112,-96,-102,-92,-39,-93,-40,-103,-67,-66,-76,-41,-7,-111,-107,-292,-291,-2,240,-238,-234,-108,72,-106,-123,-157,-190,-196,-137,-270,-115,-184,-172,-208,270,-226,-129,-202,-96,-269,-178,-113,-245,-246,-1,-109,-85,-261,-262,313,-114,-84,-79,-76,-265,-266,323,-25,-100,-101,-110,340,-99,-201,-47,-46,-77,-76,-97,-98,-8
1,-55,-80,-232,-233,-239,-136,-135,-134,-127,-128,-283,-271,-272,-231,-122,-121,-120,-198,-162,-180,-144,-186,-174,-228,404,-192,-210,-260,-259,-96,-204,-87,-88,-287,-286,-26,-263,-264,-74,-183,-75,-195,-268,-267,-189,-168,-169,-171,-170,-155,-152,-151,-154,-153,-156,-177,-213,-45,-284,-285,-70,-83,-56,-69,-235,-242,-117,-118,-116,-142,-139,-138,-141,-140,-143,-185,-191,-132,-131,-130,-179,-158,-159,-161,-160,-227,-124,-125,-197,-209,-173,72,72,-236,443,-240,-1,-86,-89,-290,-288,-73,72,72,-207,-48,-82,-57,-68,-273,72,72,-193,-1,-241,-149,-146,-145,-148,-147,-175,-181,-163,-164,-166,-165,491,-257,-258,-187,-229,-96,-211,-199,-251,-274,-247,72,-203,-295,72,509,-237,-240,-243,72,522,72,72,72,-296,-293,-241,72,-254,-205,-289,-275,-249,-250,-248,-297,72,-294,72,-255,72,72,72,-298,72,-256,-252,-276,-253,]),'WITH':([0,2,5,7,13,19,21,28,29,31,36,43,44,45,50,58,62,65,67,68,72,75,77,111,114,115,116,126,133,143,147,148,163,164,176,177,179,240,241,250,270,271,310,311,312,313,314,322,323,340,341,383,386,414,416,418,421,435,436,441,471,472,475,477,483,492,501,502,504,508,512,514,516,519,522,523,524,528,529,531,532,533,534,536,540,541,542,544,547,],[32,-22,-15,32,-23,-21,-13,-19,-17,-20,-16,-11,32,-9,-10,-8,-24,-12,-6,32,-244,-18,-14,-7,-292,-291,-2,32,-270,-269,-245,-246,-261,-262,-265,-266,-25,-232,-233,-283,-271,-272,-287,-286,-26,-263,-264,-268,-267,-284,-285,32,32,-290,-288,32,32,-273,32,32,-251,-274,-247,32,32,32,32,32,32,-293,32,-254,-289,-275,-249,-250,-248,32,-294,32,-255,32,32,32,32,-256,-252,-276,-253,]),'MODEQUAL':([3,8,12,15,16,20,24,38,41,49,61,63,64,70,71,78,79,80,81,82,85,87,101,102,103,105,107,108,109,110,142,159,169,172,173,216,218,221,222,227,229,239,301,303,305,315,319,338,345,346,350,353,411,413,417,424,427,429,433,464,482,506,508,526,529,537,],[-28,-72,-94,-71,-27,-42,-41,-30,-95,-78,-31,-32,-33,-35,-34,-37,-36,-43,-44,198,-38,-29,-92,-39,-93,-40,-67,-66,-76,-41,198,-85,-84,-79,-76,-47,-46,-77,-76,-81,-55,-80,198,-87,-88,-74,-75,-45,-70,-83,-56,-69,-86,-89,-7
3,-48,-82,-57,-68,198,-295,-296,-297,-297,-298,-298,]),'NE':([3,8,12,15,16,18,20,22,24,27,35,38,41,46,49,60,61,63,64,70,71,73,74,78,79,80,81,82,85,87,97,99,100,101,102,103,105,106,107,108,109,110,112,113,125,127,128,129,132,134,136,140,142,145,156,159,166,169,172,173,196,204,209,213,216,218,221,222,223,224,227,229,239,245,246,247,248,249,281,282,283,290,292,294,301,303,305,315,319,326,327,328,329,330,331,332,333,334,335,336,338,345,346,350,353,357,358,359,360,361,362,363,364,365,368,369,370,372,373,374,375,377,378,382,411,413,417,424,427,429,433,448,449,450,451,452,453,455,456,457,458,464,482,506,508,526,529,537,],[-28,-72,-94,-71,-27,-150,-42,-133,-41,-119,-126,-30,-95,-105,-78,-104,-31,-32,-33,-35,-34,183,-167,-37,-36,-43,-44,-99,-38,-29,-112,-96,-102,-92,-39,-93,-40,-103,-67,-66,-76,-41,-111,-107,-108,-106,-123,-157,-137,-115,267,-129,-96,-113,-109,-85,-114,-84,-79,-76,-100,-101,-110,-99,-47,-46,-77,-76,-97,-98,-81,-55,-80,-136,-135,-134,-127,-128,-122,-121,-120,-162,-144,401,-96,-87,-88,-74,-75,-168,-169,-171,-170,-155,-152,-151,-154,-153,-156,183,-45,-70,-83,-56,-69,-117,-118,-116,-142,-139,-138,-141,-140,-143,-132,-131,-130,-158,-159,-161,-160,-124,-125,267,-86,-89,-73,-48,-82,-57,-68,-149,-146,-145,-148,-147,401,-163,-164,-166,-165,-96,-295,-296,-297,-297,-298,-298,]),'MULTEQUAL':([3,8,12,15,16,20,24,38,41,49,61,63,64,70,71,78,79,80,81,82,85,87,101,102,103,105,107,108,109,110,142,159,169,172,173,216,218,221,222,227,229,239,301,303,305,315,319,338,345,346,350,353,411,413,417,424,427,429,433,464,482,506,508,526,529,537,],[-28,-72,-94,-71,-27,-42,-41,-30,-95,-78,-31,-32,-33,-35,-34,-37,-36,-43,-44,200,-38,-29,-92,-39,-93,-40,-67,-66,-76,-41,200,-85,-84,-79,-76,-47,-46,-77,-76,-81,-55,-80,200,-87,-88,-74,-75,-45,-70,-83,-56,-69,-86,-89,-73,-48,-82,-57,-68,200,-295,-296,-297,-297,-298,-298,]),'EQEQ':([3,8,12,15,16,18,20,22,24,27,35,38,41,46,49,60,61,63,64,70,71,73,74,78,79,80,81,82,85,87,97,99,100,101,102,103,105,106,107,108,109,110,112,113,125,127,128,129,132,134
,136,140,142,145,156,159,166,169,172,173,196,204,209,213,216,218,221,222,223,224,227,229,239,245,246,247,248,249,281,282,283,290,292,294,301,303,305,315,319,326,327,328,329,330,331,332,333,334,335,336,338,345,346,350,353,357,358,359,360,361,362,363,364,365,368,369,370,372,373,374,375,377,378,382,411,413,417,424,427,429,433,448,449,450,451,452,453,455,456,457,458,464,482,506,508,526,529,537,],[-28,-72,-94,-71,-27,-150,-42,-133,-41,-119,-126,-30,-95,-105,-78,-104,-31,-32,-33,-35,-34,182,-167,-37,-36,-43,-44,-99,-38,-29,-112,-96,-102,-92,-39,-93,-40,-103,-67,-66,-76,-41,-111,-107,-108,-106,-123,-157,-137,-115,266,-129,-96,-113,-109,-85,-114,-84,-79,-76,-100,-101,-110,-99,-47,-46,-77,-76,-97,-98,-81,-55,-80,-136,-135,-134,-127,-128,-122,-121,-120,-162,-144,400,-96,-87,-88,-74,-75,-168,-169,-171,-170,-155,-152,-151,-154,-153,-156,182,-45,-70,-83,-56,-69,-117,-118,-116,-142,-139,-138,-141,-140,-143,-132,-131,-130,-158,-159,-161,-160,-124,-125,266,-86,-89,-73,-48,-82,-57,-68,-149,-146,-145,-148,-147,400,-163,-164,-166,-165,-96,-295,-296,-297,-297,-298,-298,]),'SWITCH':([0,2,5,7,13,19,21,28,29,31,36,43,44,45,50,58,62,65,67,68,72,75,77,111,114,115,116,126,133,143,147,148,163,164,176,177,179,240,241,250,270,271,310,311,312,313,314,322,323,340,341,383,386,414,416,418,421,435,436,441,471,472,475,477,483,492,501,502,504,508,512,514,516,519,522,523,524,528,529,531,532,533,534,536,540,541,542,544,547,],[57,-22,-15,57,-23,-21,-13,-19,-17,-20,-16,-11,57,-9,-10,-8,-24,-12,-6,57,-244,-18,-14,-7,-292,-291,-2,57,-270,-269,-245,-246,-261,-262,-265,-266,-25,-232,-233,-283,-271,-272,-287,-286,-26,-263,-264,-268,-267,-284,-285,57,57,-290,-288,57,57,-273,57,57,-251,-274,-247,57,57,57,57,57,57,-293,57,-254,-289,-275,-249,-250,-248,57,-294,57,-255,57,57,57,57,-256,-252,-276,-253,]),'LSHIFTEQUAL':([3,8,12,15,16,20,24,38,41,49,61,63,64,70,71,78,79,80,81,82,85,87,101,102,103,105,107,108,109,110,142,159,169,172,173,216,218,221,222,227,229,239,301,303,305,315,319,338,345,346,350,353,411,413,417,424
,427,429,433,464,482,506,508,526,529,537,],[-28,-72,-94,-71,-27,-42,-41,-30,-95,-78,-31,-32,-33,-35,-34,-37,-36,-43,-44,202,-38,-29,-92,-39,-93,-40,-67,-66,-76,-41,202,-85,-84,-79,-76,-47,-46,-77,-76,-81,-55,-80,202,-87,-88,-74,-75,-45,-70,-83,-56,-69,-86,-89,-73,-48,-82,-57,-68,202,-295,-296,-297,-297,-298,-298,]),'PLUS':([0,2,3,4,5,6,7,8,10,11,12,13,15,16,19,20,21,22,23,24,25,26,27,28,29,30,31,35,36,38,40,41,43,44,45,46,48,49,50,58,60,61,62,63,64,65,67,68,70,71,72,75,77,78,79,80,81,82,83,84,85,87,88,89,90,92,93,94,95,97,99,100,101,102,103,105,106,107,108,109,110,111,112,113,114,115,116,120,121,122,123,124,125,126,127,128,133,134,140,142,143,145,146,147,148,149,150,151,152,155,156,158,159,160,163,164,166,168,169,170,171,172,173,174,175,176,177,179,181,182,183,184,185,186,187,188,189,190,191,192,193,194,195,196,197,198,199,200,201,202,203,204,205,206,207,209,210,213,215,216,218,219,221,222,223,224,226,227,229,238,239,240,241,243,245,246,247,248,249,250,251,252,253,254,255,256,257,258,259,260,261,262,263,264,265,266,267,268,269,270,271,272,273,274,275,276,277,278,281,282,283,301,303,305,310,311,312,313,314,315,319,322,323,338,340,341,342,343,345,346,350,352,353,357,358,359,368,369,370,377,378,383,386,389,393,394,395,396,397,398,399,400,401,402,403,404,405,406,407,408,409,410,411,412,413,414,416,417,418,420,421,424,427,429,433,434,435,436,441,443,446,447,464,471,472,475,477,482,483,488,491,492,493,495,501,502,504,506,508,509,512,514,516,519,522,523,524,526,528,529,531,532,533,534,536,537,540,541,542,544,547,],[10,-22,-28,-1,-15,10,10,-72,10,10,-94,-23,-71,-27,-21,-42,-13,123,10,-41,10,10,-119,-19,-17,10,-20,-126,-16,-30,10,-95,-11,10,-9,-105,10,-78,-10,-8,-104,-31,-24,-32,-33,-12,-6,10,-35,-34,-244,-18,-14,-37,-36,-43,-44,-99,10,10,-38,-29,10,10,10,10,-53,-52,-51,-112,-96,-102,-92,-39,-93,-40,-103,-67,-66,-76,-41,-7,-111,-107,-292,-291,-2,10,10,10,10,10,-108,10,-106,-123,-270,-115,273,-96,-269,-113,10,-245,-246,10,10,10,10,10,-109,10,-85,10,-261,-262,-114,10,-84,10,10
,-79,-76,10,10,-265,-266,-25,10,10,10,10,10,10,10,10,10,10,10,10,-216,-221,-222,-100,-219,-217,-224,-215,-218,-220,-223,-101,-214,-225,10,-110,10,-99,-1,-47,-46,-54,-77,-76,-97,-98,10,-81,-55,10,-80,-232,-233,10,273,273,273,-127,-128,-283,10,10,10,10,10,10,10,10,10,10,10,10,10,10,10,10,10,10,10,-271,-272,10,10,10,10,10,10,10,-122,-121,-120,-96,-87,-88,-287,-286,-26,-263,-264,-74,-75,-268,-267,-45,-284,-285,10,10,-70,-83,-56,10,-69,-117,-118,-116,273,273,273,-124,-125,10,10,10,10,10,10,10,10,10,10,10,10,10,10,10,10,10,10,10,10,10,-86,10,-89,-290,-288,-73,10,10,10,-48,-82,-57,-68,10,-273,10,10,10,10,10,-96,-251,-274,-247,10,-295,10,10,10,10,10,10,10,10,10,-296,-293,10,10,-254,-289,-275,-249,-250,-248,-297,10,-294,10,-255,10,10,10,-298,10,-256,-252,-276,-253,]),'CATCH':([161,312,],[309,-26,]),'COMMA':([1,3,4,8,12,15,16,18,20,22,24,27,34,35,38,41,46,49,51,53,55,56,60,61,63,64,69,70,71,73,74,76,78,79,80,81,82,85,87,91,93,94,97,99,100,101,102,103,105,106,107,108,109,110,112,113,117,118,119,125,127,128,129,130,131,132,134,135,136,137,138,139,140,141,142,144,145,156,159,166,169,172,173,196,204,208,209,211,213,214,215,216,217,218,219,221,222,223,224,227,229,230,233,239,244,245,246,247,248,249,279,280,281,282,283,284,286,288,290,291,292,293,294,295,297,298,299,301,302,303,304,305,306,307,315,316,317,318,319,320,321,325,326,327,328,329,330,331,332,333,334,335,336,337,338,339,345,346,347,350,353,354,355,356,357,358,359,360,361,362,363,364,365,366,367,368,369,370,371,372,373,374,375,376,377,378,380,381,382,387,390,391,392,411,413,417,422,423,424,425,427,429,430,432,433,437,442,445,448,449,450,451,452,453,454,455,456,457,458,461,462,463,464,465,466,468,469,474,479,480,482,486,487,489,490,506,508,510,511,515,517,526,529,537,538,546,],[-206,-28,93,-72,-94,-71,-27,-150,-42,-133,-41,-119,149,-126,-30,-95,-105,-78,-230,-188,-212,-200,-104,-31,-32,-33,-194,-35,-34,-176,-167,-182,-37,-36,-43,-44,-99,-38,-29,215,-53,219,-112,-96,-102,-92,-39,-93,-40,-103,-67,-66,-76,-41,-111,-107,242,-23
8,-234,-108,-106,-123,-157,-190,-196,-137,-115,-184,-172,-208,272,-226,-129,-202,-96,-178,-113,-109,-85,-114,-84,-79,-76,-100,-101,272,-110,272,-99,-201,93,-47,-49,-46,-54,-77,-76,-97,-98,-81,-55,349,-58,-80,-239,-136,-135,-134,-127,-128,272,-231,-122,-121,-120,385,-299,-198,-162,-180,-144,-186,-174,-228,-192,-210,406,-96,-204,-87,272,-88,412,-90,-74,272,272,-183,-75,-195,272,-189,-168,-169,-171,-170,-155,-152,-151,-154,-153,-156,-177,-213,-45,272,-70,-83,272,-56,-69,272,-235,-242,-117,-118,-116,-142,-139,-138,-141,-140,-143,-185,-191,-132,-131,-130,-179,-158,-159,-161,-160,-227,-124,-125,-197,-209,-173,385,-236,444,-240,-86,-89,-73,-207,-50,-48,385,-82,-57,-59,-60,-68,-300,-193,-241,-149,-146,-145,-148,-147,-175,-181,-163,-164,-166,-165,272,-187,-229,-96,272,-211,-199,-91,272,385,-203,-295,-237,-240,272,-243,-296,-297,-241,272,-205,272,-297,-298,-298,-61,-62,]),'STREQ':([3,8,12,15,16,18,20,22,24,27,35,38,41,46,49,60,61,63,64,70,71,73,74,78,79,80,81,82,85,87,97,99,100,101,102,103,105,106,107,108,109,110,112,113,125,127,128,129,132,134,136,140,142,145,156,159,166,169,172,173,196,204,209,213,216,218,221,222,223,224,227,229,239,245,246,247,248,249,281,282,283,290,292,294,301,303,305,315,319,326,327,328,329,330,331,332,333,334,335,336,338,345,346,350,353,357,358,359,360,361,362,363,364,365,368,369,370,372,373,374,375,377,378,382,411,413,417,424,427,429,433,448,449,450,451,452,453,455,456,457,458,464,482,506,508,526,529,537,],[-28,-72,-94,-71,-27,-150,-42,-133,-41,-119,-126,-30,-95,-105,-78,-104,-31,-32,-33,-35,-34,185,-167,-37,-36,-43,-44,-99,-38,-29,-112,-96,-102,-92,-39,-93,-40,-103,-67,-66,-76,-41,-111,-107,-108,-106,-123,-157,-137,-115,269,-129,-96,-113,-109,-85,-114,-84,-79,-76,-100,-101,-110,-99,-47,-46,-77,-76,-97,-98,-81,-55,-80,-136,-135,-134,-127,-128,-122,-121,-120,-162,-144,403,-96,-87,-88,-74,-75,-168,-169,-171,-170,-155,-152,-151,-154,-153,-156,185,-45,-70,-83,-56,-69,-117,-118,-116,-142,-139,-138,-141,-140,-143,-132,-131,-130,-158,-159,-161,-160,-124,-125
,269,-86,-89,-73,-48,-82,-57,-68,-149,-146,-145,-148,-147,403,-163,-164,-166,-165,-96,-295,-296,-297,-297,-298,-298,]),'BOR':([3,8,12,15,16,18,20,22,24,27,35,38,41,46,49,53,60,61,63,64,69,70,71,73,74,76,78,79,80,81,82,85,87,97,99,100,101,102,103,105,106,107,108,109,110,112,113,125,127,128,129,130,132,134,135,136,140,142,144,145,156,159,166,169,172,173,196,204,209,213,216,218,221,222,223,224,227,229,239,245,246,247,248,249,281,282,283,290,291,292,293,294,297,301,303,305,315,318,319,320,325,326,327,328,329,330,331,332,333,334,335,336,338,345,346,350,353,357,358,359,360,361,362,363,364,365,366,367,368,369,370,371,372,373,374,375,377,378,382,411,413,417,424,427,429,433,442,448,449,450,451,452,453,454,455,456,457,458,462,464,482,506,508,526,529,537,],[-28,-72,-94,-71,-27,-150,-42,-133,-41,-119,-126,-30,-95,-105,-78,-188,-104,-31,-32,-33,181,-35,-34,-176,-167,-182,-37,-36,-43,-44,-99,-38,-29,-112,-96,-102,-92,-39,-93,-40,-103,-67,-66,-76,-41,-111,-107,-108,-106,-123,-157,260,-137,-115,-184,-172,-129,-96,-178,-113,-109,-85,-114,-84,-79,-76,-100,-101,-110,-99,-47,-46,-77,-76,-97,-98,-81,-55,-80,-136,-135,-134,-127,-128,-122,-121,-120,-162,-180,-144,-186,-174,405,-96,-87,-88,-74,-183,-75,181,-189,-168,-169,-171,-170,-155,-152,-151,-154,-153,-156,-177,-45,-70,-83,-56,-69,-117,-118,-116,-142,-139,-138,-141,-140,-143,-185,260,-132,-131,-130,-179,-158,-159,-161,-160,-124,-125,-173,-86,-89,-73,-48,-82,-57,-68,405,-149,-146,-145,-148,-147,-175,-181,-163,-164,-166,-165,-187,-96,-295,-296,-297,-297,-298,-298,]),'$end':([0,2,5,7,9,13,19,21,28,29,31,33,36,43,45,50,58,59,62,65,67,72,75,77,111,114,115,116,133,143,147,148,163,164,176,177,179,240,241,250,270,271,310,311,312,313,314,322,323,340,341,414,416,435,471,472,475,508,514,516,519,522,523,524,529,532,541,542,544,547,],[-1,-22,-15,-5,0,-23,-21,-13,-19,-17,-20,-3,-16,-11,-9,-10,-8,-4,-24,-12,-6,-244,-18,-14,-7,-292,-291,-2,-270,-269,-245,-246,-261,-262,-265,-266,-25,-232,-233,-283,-271,-272,-287,-286,-26,-263,-264,-268,-267,-284,-285,
-290,-288,-273,-251,-274,-247,-293,-254,-289,-275,-249,-250,-248,-294,-255,-256,-252,-276,-253,]),'FUNCTION':([0,2,4,5,6,7,10,11,13,19,21,23,25,26,28,29,30,31,36,40,43,44,45,48,50,54,58,62,65,67,68,72,75,77,83,84,88,89,90,92,93,94,95,98,111,114,115,116,120,121,122,123,124,126,133,143,146,147,148,149,150,151,152,155,158,160,163,164,168,170,171,174,175,176,177,179,181,182,183,184,185,186,187,188,189,190,191,192,193,194,195,197,198,199,200,201,202,203,205,206,207,210,215,219,226,238,240,241,243,250,251,252,253,254,255,256,257,258,259,260,261,262,263,264,265,266,267,268,269,270,271,272,273,274,275,276,277,278,310,311,312,313,314,322,323,340,341,342,343,352,383,386,389,393,394,395,396,397,398,399,400,401,402,403,404,405,406,407,408,409,410,412,414,416,418,420,421,434,435,436,441,443,446,447,471,472,475,477,483,488,491,492,493,495,501,502,504,508,509,512,514,516,519,522,523,524,528,529,531,532,533,534,536,540,541,542,544,547,],[37,-22,-1,-15,96,37,96,96,-23,-21,-13,96,96,96,-19,-17,96,-20,-16,96,-11,37,-9,96,-10,96,-8,-24,-12,-6,37,-244,-18,-14,96,96,96,96,96,96,-53,-52,-51,96,-7,-292,-291,-2,96,96,96,96,96,37,-270,-269,96,-245,-246,96,96,96,96,96,96,96,-261,-262,96,96,96,96,96,-265,-266,-25,96,96,96,96,96,96,96,96,96,96,96,96,-216,-221,-222,-219,-217,-224,-215,-218,-220,-223,-214,-225,96,96,-1,-54,96,96,-232,-233,96,-283,96,96,96,96,96,96,96,96,96,96,96,96,96,96,96,96,96,96,96,-271,-272,96,96,96,96,96,96,96,-287,-286,-26,-263,-264,-268,-267,-284,-285,96,96,96,37,37,96,96,96,96,96,96,96,96,96,96,96,96,96,96,96,96,96,96,96,96,-290,-288,37,96,37,96,-273,37,37,96,96,96,-251,-274,-247,37,37,96,96,37,96,96,37,37,37,-293,96,37,-254,-289,-275,-249,-250,-248,37,-294,37,-255,37,37,37,37,-256,-252,-276,-253,]),'INSTANCEOF':([3,8,12,15,16,18,20,22,24,27,35,38,41,46,49,60,61,63,64,70,71,74,78,79,80,81,82,85,87,97,99,100,101,102,103,105,106,107,108,109,110,112,113,125,127,128,129,132,134,140,142,145,156,159,166,169,172,173,196,204,209,213,216,218,221,222,223,224,227,229,239,245,246,24
7,248,249,281,282,283,290,292,301,303,305,315,319,326,327,328,329,330,331,332,333,334,335,338,345,346,350,353,357,358,359,360,361,362,363,364,365,368,369,370,372,373,374,375,377,378,411,413,417,424,427,429,433,448,449,450,451,452,455,456,457,458,464,482,506,508,526,529,537,],[-28,-72,-94,-71,-27,-150,-42,-133,-41,-119,-126,-30,-95,-105,-78,-104,-31,-32,-33,-35,-34,186,-37,-36,-43,-44,-99,-38,-29,-112,-96,-102,-92,-39,-93,-40,-103,-67,-66,-76,-41,-111,-107,-108,-106,-123,254,-137,-115,-129,-96,-113,-109,-85,-114,-84,-79,-76,-100,-101,-110,-99,-47,-46,-77,-76,-97,-98,-81,-55,-80,-136,-135,-134,-127,-128,-122,-121,-120,393,-144,-96,-87,-88,-74,-75,254,254,254,254,-155,-152,-151,-154,-153,-156,-45,-70,-83,-56,-69,-117,-118,-116,-142,-139,-138,-141,-140,-143,-132,-131,-130,254,254,254,254,-124,-125,-86,-89,-73,-48,-82,-57,-68,-149,-146,-145,-148,-147,254,254,254,254,-96,-295,-296,-297,-297,-298,-298,]),'GT':([3,8,12,15,16,18,20,22,24,27,35,38,41,46,49,60,61,63,64,70,71,74,78,79,80,81,82,85,87,97,99,100,101,102,103,105,106,107,108,109,110,112,113,125,127,128,129,132,134,140,142,145,156,159,166,169,172,173,196,204,209,213,216,218,221,222,223,224,227,229,239,245,246,247,248,249,281,282,283,290,292,301,303,305,315,319,326,327,328,329,330,331,332,333,334,335,338,345,346,350,353,357,358,359,360,361,362,363,364,365,368,369,370,372,373,374,375,377,378,411,413,417,424,427,429,433,448,449,450,451,452,455,456,457,458,464,482,506,508,526,529,537,],[-28,-72,-94,-71,-27,-150,-42,-133,-41,-119,-126,-30,-95,-105,-78,-104,-31,-32,-33,-35,-34,187,-37,-36,-43,-44,-99,-38,-29,-112,-96,-102,-92,-39,-93,-40,-103,-67,-66,-76,-41,-111,-107,-108,-106,-123,255,-137,-115,-129,-96,-113,-109,-85,-114,-84,-79,-76,-100,-101,-110,-99,-47,-46,-77,-76,-97,-98,-81,-55,-80,-136,-135,-134,-127,-128,-122,-121,-120,394,-144,-96,-87,-88,-74,-75,255,255,255,255,-155,-152,-151,-154,-153,-156,-45,-70,-83,-56,-69,-117,-118,-116,-142,-139,-138,-141,-140,-143,-132,-131,-130,255,255,255,255,-124,-125,-86,-89,-73,-48,
-82,-57,-68,-149,-146,-145,-148,-147,255,255,255,255,-96,-295,-296,-297,-297,-298,-298,]),'STRING':([0,2,4,5,6,7,10,11,13,19,21,23,25,26,28,29,30,31,36,40,43,44,45,48,50,54,58,62,65,67,68,72,75,77,83,84,88,89,90,92,93,94,95,98,104,111,114,115,116,120,121,122,123,124,126,133,143,146,147,148,149,150,151,152,155,158,160,163,164,168,170,171,174,175,176,177,179,181,182,183,184,185,186,187,188,189,190,191,192,193,194,195,197,198,199,200,201,202,203,205,206,207,210,215,219,226,228,231,238,240,241,243,250,251,252,253,254,255,256,257,258,259,260,261,262,263,264,265,266,267,268,269,270,271,272,273,274,275,276,277,278,310,311,312,313,314,322,323,340,341,342,343,349,352,383,386,389,393,394,395,396,397,398,399,400,401,402,403,404,405,406,407,408,409,410,412,414,416,418,420,421,434,435,436,441,443,446,447,471,472,475,477,483,488,491,492,493,495,501,502,504,508,509,512,514,516,519,522,523,524,528,529,531,532,533,534,536,540,541,542,544,547,],[79,-22,-1,-15,79,79,79,79,-23,-21,-13,79,79,79,-19,-17,79,-20,-16,79,-11,79,-9,79,-10,79,-8,-24,-12,-6,79,-244,-18,-14,79,79,79,79,79,79,-53,-52,-51,79,79,-7,-292,-291,-2,79,79,79,79,79,79,-270,-269,79,-245,-246,79,79,79,79,79,79,79,-261,-262,79,79,79,79,79,-265,-266,-25,79,79,79,79,79,79,79,79,79,79,79,79,-216,-221,-222,-219,-217,-224,-215,-218,-220,-223,-214,-225,79,79,-1,-54,79,79,79,79,-232,-233,79,-283,79,79,79,79,79,79,79,79,79,79,79,79,79,79,79,79,79,79,79,-271,-272,79,79,79,79,79,79,79,-287,-286,-26,-263,-264,-268,-267,-284,-285,79,79,79,79,79,79,79,79,79,79,79,79,79,79,79,79,79,79,79,79,79,79,79,79,79,79,-290,-288,79,79,79,79,-273,79,79,79,79,79,-251,-274,-247,79,79,79,79,79,79,79,79,79,79,-293,79,79,-254,-289,-275,-249,-250,-248,79,-294,79,-255,79,79,79,79,-256,-252,-276,-253,]),'FOR':([0,2,5,7,13,19,21,28,29,31,36,43,44,45,50,58,62,65,67,68,72,75,77,111,114,115,116,126,133,143,147,148,163,164,176,177,179,240,241,250,270,271,310,311,312,313,314,322,323,340,341,383,386,414,416,418,421,435,436,441,471,472,475,477,483,492,501,502,504,5
08,512,514,516,519,522,523,524,528,529,531,532,533,534,536,540,541,542,544,547,],[39,-22,-15,39,-23,-21,-13,-19,-17,-20,-16,-11,39,-9,-10,-8,-24,-12,-6,39,-244,-18,-14,-7,-292,-291,-2,39,-270,-269,-245,-246,-261,-262,-265,-266,-25,-232,-233,-283,-271,-272,-287,-286,-26,-263,-264,-268,-267,-284,-285,39,39,-290,-288,39,39,-273,39,39,-251,-274,-247,39,39,39,39,39,39,-293,39,-254,-289,-275,-249,-250,-248,39,-294,39,-255,39,39,39,39,-256,-252,-276,-253,]),'PLUSPLUS':([0,2,3,4,5,6,7,8,10,11,12,13,15,16,19,20,21,23,24,25,26,28,29,30,31,36,38,40,41,43,44,45,48,49,50,58,61,62,63,64,65,67,68,70,71,72,75,77,78,79,80,81,82,83,84,85,87,88,89,90,92,93,94,95,99,101,102,103,105,107,108,109,110,111,114,115,116,120,121,122,123,124,126,133,142,143,146,147,148,149,150,151,152,155,158,159,160,163,164,168,169,170,171,172,173,174,175,176,177,179,181,182,183,184,185,186,187,188,189,190,191,192,193,194,195,197,198,199,200,201,202,203,205,206,207,210,213,215,216,218,219,221,222,226,227,229,238,239,240,241,243,250,251,252,253,254,255,256,257,258,259,260,261,262,263,264,265,266,267,268,269,270,271,272,273,274,275,276,277,278,301,303,305,310,311,312,313,314,315,319,322,323,338,340,341,342,343,345,346,350,352,353,383,386,389,393,394,395,396,397,398,399,400,401,402,403,404,405,406,407,408,409,410,411,412,413,414,416,417,418,420,421,424,427,429,433,434,435,436,441,443,446,447,464,471,472,475,477,482,483,488,491,492,493,495,501,502,504,506,508,509,512,514,516,519,522,523,524,526,528,529,531,532,533,534,536,537,540,541,542,544,547,],[40,-22,-28,-1,-15,40,40,-72,40,40,-94,-23,-71,-27,-21,-42,-13,40,-41,40,40,-19,-17,40,-20,-16,-30,40,-95,-11,40,-9,40,-78,-10,-8,-31,-24,-32,-33,-12,-6,40,-35,-34,-244,-18,-14,-37,-36,-43,-44,196,40,40,-38,-29,40,40,40,40,-53,-52,-51,223,-92,-39,-93,-40,-67,-66,-76,-41,-7,-292,-291,-2,40,40,40,40,40,40,-270,223,-269,40,-245,-246,40,40,40,40,40,40,-85,40,-261,-262,40,-84,40,40,-79,-76,40,40,-265,-266,-25,40,40,40,40,40,40,40,40,40,40,40,40,-216,-221,-222,-219,-217,-224,-
215,-218,-220,-223,-214,-225,40,40,196,-1,-47,-46,-54,-77,-76,40,-81,-55,40,-80,-232,-233,40,-283,40,40,40,40,40,40,40,40,40,40,40,40,40,40,40,40,40,40,40,-271,-272,40,40,40,40,40,40,40,223,-87,-88,-287,-286,-26,-263,-264,-74,-75,-268,-267,-45,-284,-285,40,40,-70,-83,-56,40,-69,40,40,40,40,40,40,40,40,40,40,40,40,40,40,40,40,40,40,40,40,40,-86,40,-89,-290,-288,-73,40,40,40,-48,-82,-57,-68,40,-273,40,40,40,40,40,223,-251,-274,-247,40,-295,40,40,40,40,40,40,40,40,40,-296,-293,40,40,-254,-289,-275,-249,-250,-248,-297,40,-294,40,-255,40,40,40,-298,40,-256,-252,-276,-253,]),'PERIOD':([3,8,15,16,20,24,38,41,49,61,63,64,70,71,78,79,80,81,85,87,102,103,105,107,108,109,110,159,169,173,216,218,222,227,229,239,303,305,315,319,338,345,346,350,353,411,413,417,424,427,429,433,482,506,508,526,529,537,],[-28,-72,-71,-27,-42,-41,-30,157,167,-31,-32,-33,-35,-34,-37,-36,-43,-44,-38,-29,-39,225,-40,-67,-66,237,-41,-85,-84,237,-47,-46,237,-81,-55,-80,-87,-88,-74,-75,-45,-70,-83,-56,-69,-86,-89,-73,-48,-82,-57,-68,-295,-296,-297,-297,-298,-298,]),'RBRACE':([2,3,5,7,13,16,19,20,21,28,29,31,36,38,43,44,45,50,58,59,61,62,63,64,65,67,70,71,72,75,77,78,79,80,81,85,87,97,99,100,101,102,103,104,105,106,107,108,109,110,111,112,113,114,115,116,125,127,128,129,130,131,132,133,134,135,136,137,140,141,142,143,144,145,147,148,156,162,163,164,166,176,177,179,209,216,218,221,222,223,224,227,229,230,233,239,240,241,250,270,271,305,310,311,312,313,314,322,323,338,340,341,345,346,349,350,353,357,358,359,360,361,362,363,364,365,366,367,368,369,370,371,372,373,374,375,377,378,380,381,382,386,413,414,416,424,427,429,430,432,433,435,436,438,439,441,471,472,473,475,477,480,481,482,483,484,496,497,498,499,502,503,504,506,507,508,514,516,518,519,520,522,523,524,525,526,527,528,529,532,534,535,536,537,538,539,541,542,543,544,545,546,547,],[-22,-28,-15,-5,-23,-27,-21,-42,-13,-19,-17,-20,-16,-30,-11,-1,-9,-10,-8,-4,-31,-24,-32,-33,-12,-6,-35,-34,-244,-18,-14,-37,-36,-43,-44,-38,-29,-112,-96,-102,-92,-39,-93,229,-40
,-103,-67,-66,-76,-41,-7,-111,-107,-292,-291,-2,-108,-106,-123,-157,-190,-196,-137,-270,-115,-184,-172,-208,-129,-202,-96,-269,-178,-113,-245,-246,-109,312,-261,-262,-114,-265,-266,-25,-110,-47,-46,-77,-76,-97,-98,-81,-55,350,-58,-80,-232,-233,-283,-271,-272,-88,-287,-286,-26,-263,-264,-268,-267,-45,-284,-285,-70,-83,429,-56,-69,-117,-118,-116,-142,-139,-138,-141,-140,-143,-185,-191,-132,-131,-130,-179,-158,-159,-161,-160,-124,-125,-197,-209,-173,-1,-89,-290,-288,-48,-82,-57,-59,-60,-68,-273,-1,-301,482,-1,-251,-274,-1,-247,-1,-203,506,-295,-1,508,-278,519,-279,-277,-1,526,-1,-296,529,-293,-254,-289,-280,-275,-1,-249,-250,-248,537,-297,538,-1,-294,-255,-1,544,-1,-298,-61,546,-256,-252,-281,-276,-282,-62,-253,]),'ELSE':([2,5,13,19,21,28,29,31,36,43,50,62,65,72,75,77,114,115,116,133,143,147,148,163,164,176,177,179,240,241,250,270,271,310,311,312,313,314,322,323,340,341,414,416,435,471,472,475,508,514,516,519,522,523,524,529,532,541,542,544,547,],[-22,-15,-23,-21,-13,-19,-17,-20,-16,-11,-10,-24,-12,-244,-18,-14,-292,-291,-2,-270,-269,-245,-246,-261,-262,-265,-266,-25,-232,-233,-283,-271,-272,-287,-286,-26,-263,-264,-268,-267,-284,-285,-290,-288,-273,-251,-274,501,-293,-254,-289,-275,-249,-250,-248,-294,-255,-256,-252,-276,-253,]),'TRY':([0,2,5,7,13,19,21,28,29,31,36,43,44,45,50,58,62,65,67,68,72,75,77,111,114,115,116,126,133,143,147,148,163,164,176,177,179,240,241,250,270,271,310,311,312,313,314,322,323,340,341,383,386,414,416,418,421,435,436,441,471,472,475,477,483,492,501,502,504,508,512,514,516,519,522,523,524,528,529,531,532,533,534,536,540,541,542,544,547,],[42,-22,-15,42,-23,-21,-13,-19,-17,-20,-16,-11,42,-9,-10,-8,-24,-12,-6,42,-244,-18,-14,-7,-292,-291,-2,42,-270,-269,-245,-246,-261,-262,-265,-266,-25,-232,-233,-283,-271,-272,-287,-286,-26,-263,-264,-268,-267,-284,-285,42,42,-290,-288,42,42,-273,42,42,-251,-274,-247,42,42,42,42,42,42,-293,42,-254,-289,-275,-249,-250,-248,42,-294,42,-255,42,42,42,42,-256,-252,-276,-253,]),'BAND':([3,8,12,15,16,18,20,22,24,27,35,
38,41,46,49,60,61,63,64,70,71,73,74,76,78,79,80,81,82,85,87,97,99,100,101,102,103,105,106,107,108,109,110,112,113,125,127,128,129,132,134,136,140,142,144,145,156,159,166,169,172,173,196,204,209,213,216,218,221,222,223,224,227,229,239,245,246,247,248,249,281,282,283,290,291,292,294,301,303,305,315,318,319,326,327,328,329,330,331,332,333,334,335,336,338,345,346,350,353,357,358,359,360,361,362,363,364,365,368,369,370,371,372,373,374,375,377,378,382,411,413,417,424,427,429,433,448,449,450,451,452,453,454,455,456,457,458,464,482,506,508,526,529,537,],[-28,-72,-94,-71,-27,-150,-42,-133,-41,-119,-126,-30,-95,-105,-78,-104,-31,-32,-33,-35,-34,-176,-167,192,-37,-36,-43,-44,-99,-38,-29,-112,-96,-102,-92,-39,-93,-40,-103,-67,-66,-76,-41,-111,-107,-108,-106,-123,-157,-137,-115,-172,-129,-96,278,-113,-109,-85,-114,-84,-79,-76,-100,-101,-110,-99,-47,-46,-77,-76,-97,-98,-81,-55,-80,-136,-135,-134,-127,-128,-122,-121,-120,-162,398,-144,-174,-96,-87,-88,-74,192,-75,-168,-169,-171,-170,-155,-152,-151,-154,-153,-156,-177,-45,-70,-83,-56,-69,-117,-118,-116,-142,-139,-138,-141,-140,-143,-132,-131,-130,278,-158,-159,-161,-160,-124,-125,-173,-86,-89,-73,-48,-82,-57,-68,-149,-146,-145,-148,-147,-175,398,-163,-164,-166,-165,-96,-295,-296,-297,-297,-298,-298,]),'GE':([3,8,12,15,16,18,20,22,24,27,35,38,41,46,49,60,61,63,64,70,71,74,78,79,80,81,82,85,87,97,99,100,101,102,103,105,106,107,108,109,110,112,113,125,127,128,129,132,134,140,142,145,156,159,166,169,172,173,196,204,209,213,216,218,221,222,223,224,227,229,239,245,246,247,248,249,281,282,283,290,292,301,303,305,315,319,326,327,328,329,330,331,332,333,334,335,338,345,346,350,353,357,358,359,360,361,362,363,364,365,368,369,370,372,373,374,375,377,378,411,413,417,424,427,429,433,448,449,450,451,452,455,456,457,458,464,482,506,508,526,529,537,],[-28,-72,-94,-71,-27,-150,-42,-133,-41,-119,-126,-30,-95,-105,-78,-104,-31,-32,-33,-35,-34,189,-37,-36,-43,-44,-99,-38,-29,-112,-96,-102,-92,-39,-93,-40,-103,-67,-66,-76,-41,-111,-107,-108,-106,-123,2
57,-137,-115,-129,-96,-113,-109,-85,-114,-84,-79,-76,-100,-101,-110,-99,-47,-46,-77,-76,-97,-98,-81,-55,-80,-136,-135,-134,-127,-128,-122,-121,-120,396,-144,-96,-87,-88,-74,-75,257,257,257,257,-155,-152,-151,-154,-153,-156,-45,-70,-83,-56,-69,-117,-118,-116,-142,-139,-138,-141,-140,-143,-132,-131,-130,257,257,257,257,-124,-125,-86,-89,-73,-48,-82,-57,-68,-149,-146,-145,-148,-147,257,257,257,257,-96,-295,-296,-297,-297,-298,-298,]),'LT':([3,8,12,15,16,18,20,22,24,27,35,38,41,46,49,60,61,63,64,70,71,74,78,79,80,81,82,85,87,97,99,100,101,102,103,105,106,107,108,109,110,112,113,125,127,128,129,132,134,140,142,145,156,159,166,169,172,173,196,204,209,213,216,218,221,222,223,224,227,229,239,245,246,247,248,249,281,282,283,290,292,301,303,305,315,319,326,327,328,329,330,331,332,333,334,335,338,345,346,350,353,357,358,359,360,361,362,363,364,365,368,369,370,372,373,374,375,377,378,411,413,417,424,427,429,433,448,449,450,451,452,455,456,457,458,464,482,506,508,526,529,537,],[-28,-72,-94,-71,-27,-150,-42,-133,-41,-119,-126,-30,-95,-105,-78,-104,-31,-32,-33,-35,-34,188,-37,-36,-43,-44,-99,-38,-29,-112,-96,-102,-92,-39,-93,-40,-103,-67,-66,-76,-41,-111,-107,-108,-106,-123,256,-137,-115,-129,-96,-113,-109,-85,-114,-84,-79,-76,-100,-101,-110,-99,-47,-46,-77,-76,-97,-98,-81,-55,-80,-136,-135,-134,-127,-128,-122,-121,-120,395,-144,-96,-87,-88,-74,-75,256,256,256,256,-155,-152,-151,-154,-153,-156,-45,-70,-83,-56,-69,-117,-118,-116,-142,-139,-138,-141,-140,-143,-132,-131,-130,256,256,256,256,-124,-125,-86,-89,-73,-48,-82,-57,-68,-149,-146,-145,-148,-147,256,256,256,256,-96,-295,-296,-297,-297,-298,-298,]),'REGEX':([0,2,4,5,6,7,10,11,13,19,21,23,25,26,28,29,30,31,36,40,43,44,45,48,50,54,58,62,65,67,68,72,75,77,83,84,88,89,90,92,93,94,95,98,111,114,115,116,120,121,122,123,124,126,133,143,146,147,148,149,150,151,152,155,158,160,163,164,168,170,171,174,175,176,177,179,181,182,183,184,185,186,187,188,189,190,191,192,193,194,195,197,198,199,200,201,202,203,205,206,207,210,215,219,226,238,24
0,241,243,250,251,252,253,254,255,256,257,258,259,260,261,262,263,264,265,266,267,268,269,270,271,272,273,274,275,276,277,278,310,311,312,313,314,322,323,340,341,342,343,352,383,386,389,393,394,395,396,397,398,399,400,401,402,403,404,405,406,407,408,409,410,412,414,416,418,420,421,434,435,436,441,443,446,447,471,472,475,477,483,488,491,492,493,495,501,502,504,508,509,512,514,516,519,522,523,524,528,529,531,532,533,534,536,540,541,542,544,547,],[78,-22,-1,-15,78,78,78,78,-23,-21,-13,78,78,78,-19,-17,78,-20,-16,78,-11,78,-9,78,-10,78,-8,-24,-12,-6,78,-244,-18,-14,78,78,78,78,78,78,-53,-52,-51,78,-7,-292,-291,-2,78,78,78,78,78,78,-270,-269,78,-245,-246,78,78,78,78,78,78,78,-261,-262,78,78,78,78,78,-265,-266,-25,78,78,78,78,78,78,78,78,78,78,78,78,-216,-221,-222,-219,-217,-224,-215,-218,-220,-223,-214,-225,78,78,-1,-54,78,78,-232,-233,78,-283,78,78,78,78,78,78,78,78,78,78,78,78,78,78,78,78,78,78,78,-271,-272,78,78,78,78,78,78,78,-287,-286,-26,-263,-264,-268,-267,-284,-285,78,78,78,78,78,78,78,78,78,78,78,78,78,78,78,78,78,78,78,78,78,78,78,78,78,-290,-288,78,78,78,78,-273,78,78,78,78,78,-251,-274,-247,78,78,78,78,78,78,78,78,78,78,-293,78,78,-254,-289,-275,-249,-250,-248,78,-294,78,-255,78,78,78,78,-256,-252,-276,-253,]),'STRNEQ':([3,8,12,15,16,18,20,22,24,27,35,38,41,46,49,60,61,63,64,70,71,73,74,78,79,80,81,82,85,87,97,99,100,101,102,103,105,106,107,108,109,110,112,113,125,127,128,129,132,134,136,140,142,145,156,159,166,169,172,173,196,204,209,213,216,218,221,222,223,224,227,229,239,245,246,247,248,249,281,282,283,290,292,294,301,303,305,315,319,326,327,328,329,330,331,332,333,334,335,336,338,345,346,350,353,357,358,359,360,361,362,363,364,365,368,369,370,372,373,374,375,377,378,382,411,413,417,424,427,429,433,448,449,450,451,452,453,455,456,457,458,464,482,506,508,526,529,537,],[-28,-72,-94,-71,-27,-150,-42,-133,-41,-119,-126,-30,-95,-105,-78,-104,-31,-32,-33,-35,-34,184,-167,-37,-36,-43,-44,-99,-38,-29,-112,-96,-102,-92,-39,-93,-40,-103,-67,-66,-76,-41,-111,-107,-10
8,-106,-123,-157,-137,-115,268,-129,-96,-113,-109,-85,-114,-84,-79,-76,-100,-101,-110,-99,-47,-46,-77,-76,-97,-98,-81,-55,-80,-136,-135,-134,-127,-128,-122,-121,-120,-162,-144,402,-96,-87,-88,-74,-75,-168,-169,-171,-170,-155,-152,-151,-154,-153,-156,184,-45,-70,-83,-56,-69,-117,-118,-116,-142,-139,-138,-141,-140,-143,-132,-131,-130,-158,-159,-161,-160,-124,-125,268,-86,-89,-73,-48,-82,-57,-68,-149,-146,-145,-148,-147,402,-163,-164,-166,-165,-96,-295,-296,-297,-297,-298,-298,]),'LPAREN':([0,2,3,4,5,6,7,8,10,11,13,15,16,19,20,21,23,24,25,26,28,29,30,31,32,36,37,38,39,40,41,43,44,45,48,49,50,52,54,57,58,61,62,63,64,65,67,68,70,71,72,75,77,78,79,80,81,83,84,85,86,87,88,89,90,92,93,94,95,96,98,102,103,105,107,108,109,110,111,114,115,116,120,121,122,123,124,126,133,143,146,147,148,149,150,151,152,154,155,158,159,160,163,164,168,169,170,171,173,174,175,176,177,179,181,182,183,184,185,186,187,188,189,190,191,192,193,194,195,197,198,199,200,201,202,203,205,206,207,210,215,216,218,219,220,222,226,227,229,232,234,236,238,239,240,241,243,250,251,252,253,254,255,256,257,258,259,260,261,262,263,264,265,266,267,268,269,270,271,272,273,274,275,276,277,278,303,305,309,310,311,312,313,314,315,319,322,323,324,338,340,341,342,343,345,346,348,350,351,352,353,383,386,389,393,394,395,396,397,398,399,400,401,402,403,404,405,406,407,408,409,410,411,412,413,414,416,417,418,420,421,424,427,429,433,434,435,436,441,443,446,447,471,472,475,477,482,483,488,491,492,493,495,501,502,504,506,508,509,512,514,516,519,522,523,524,526,528,529,531,532,533,534,536,537,540,541,542,544,547,],[83,-22,-28,-1,-15,83,83,-72,83,83,-23,-71,-27,-21,-42,-13,83,-41,83,83,-19,-17,83,-20,146,-16,153,-30,155,83,160,-11,83,-9,83,160,-10,170,83,175,-8,-31,-24,-32,-33,-12,-6,83,-35,-34,-244,-18,-14,-37,-36,-43,-44,83,83,-38,210,-29,83,83,83,83,-53,-52,-51,153,83,-39,160,-40,-67,-66,160,-41,-7,-292,-291,-2,83,83,83,83,83,83,-270,-269,83,-245,-246,83,83,83,83,287,83,83,-85,83,-261,-262,83,-84,83,83,160,83,83,-265,-266,-25,83
,83,83,83,83,83,83,83,83,83,83,83,-216,-221,-222,-219,-217,-224,-215,-218,-220,-223,-214,-225,83,83,-1,-47,-46,-54,344,160,83,-81,-55,-65,-63,-64,83,-80,-232,-233,83,-283,83,83,83,83,83,83,83,83,83,83,83,83,83,83,83,83,83,83,83,-271,-272,83,83,83,83,83,83,83,-87,-88,415,-287,-286,-26,-263,-264,-74,-75,-268,-267,420,-45,-284,-285,83,83,-70,-83,428,-56,431,83,-69,83,83,83,83,83,83,83,83,83,83,83,83,83,83,83,83,83,83,83,83,83,-86,83,-89,-290,-288,-73,83,83,83,-48,-82,-57,-68,83,-273,83,83,83,83,83,-251,-274,-247,83,-295,83,83,83,83,83,83,83,83,83,-296,-293,83,83,-254,-289,-275,-249,-250,-248,-297,83,-294,83,-255,83,83,83,-298,83,-256,-252,-276,-253,]),'IN':([3,8,12,15,16,18,20,22,24,27,35,38,41,46,49,60,61,63,64,70,71,74,78,79,80,81,82,85,87,97,99,100,101,102,103,105,106,107,108,109,110,112,113,125,127,128,129,132,134,140,142,145,156,159,166,169,172,173,196,204,209,213,216,218,221,222,223,224,227,229,239,245,246,247,248,249,281,282,283,288,290,291,292,293,294,297,298,301,302,303,305,315,319,326,327,328,329,330,331,332,333,334,335,338,345,346,350,353,357,358,359,360,361,362,363,364,365,368,369,370,372,373,374,375,377,378,392,411,413,417,424,427,429,433,442,445,448,449,450,451,452,453,454,455,456,457,458,462,464,466,468,482,490,506,508,515,526,529,537,],[-28,-72,-94,-71,-27,-150,-42,-133,-41,-119,-126,-30,-95,-105,-78,-104,-31,-32,-33,-35,-34,191,-37,-36,-43,-44,-99,-38,-29,-112,-96,-102,-92,-39,-93,-40,-103,-67,-66,-76,-41,-111,-107,-108,-106,-123,259,-137,-115,-129,-96,-113,-109,-85,-114,-84,-79,-76,-100,-101,-110,-99,-47,-46,-77,-76,-97,-98,-81,-55,-80,-136,-135,-134,-127,-128,-122,-121,-120,-198,-162,-180,-144,-186,-174,-192,-210,407,-204,-87,-88,-74,-75,259,259,259,259,-155,-152,-151,-154,-153,-156,-45,-70,-83,-56,-69,-117,-118,-116,-142,-139,-138,-141,-140,-143,-132,-131,-130,259,259,259,259,-124,-125,446,-86,-89,-73,-48,-82,-57,-68,-193,488,-149,-146,-145,-148,-147,-175,-181,259,259,259,259,-187,-96,-211,-199,-295,-243,-296,-297,-205,-297,-298,-298,]),'VAR':([0,2,
5,7,13,19,21,28,29,31,36,43,44,45,50,58,62,65,67,68,72,75,77,111,114,115,116,126,133,143,147,148,155,163,164,176,177,179,240,241,250,270,271,310,311,312,313,314,322,323,340,341,383,386,414,416,418,421,435,436,441,471,472,475,477,483,492,501,502,504,508,512,514,516,519,522,523,524,528,529,531,532,533,534,536,540,541,542,544,547,],[17,-22,-15,17,-23,-21,-13,-19,-17,-20,-16,-11,17,-9,-10,-8,-24,-12,-6,17,-244,-18,-14,-7,-292,-291,-2,17,-270,-269,-245,-246,289,-261,-262,-265,-266,-25,-232,-233,-283,-271,-272,-287,-286,-26,-263,-264,-268,-267,-284,-285,17,17,-290,-288,17,17,-273,17,17,-251,-274,-247,17,17,17,17,17,17,-293,17,-254,-289,-275,-249,-250,-248,17,-294,17,-255,17,17,17,17,-256,-252,-276,-253,]),'MINUSMINUS':([0,2,3,4,5,6,7,8,10,11,12,13,15,16,19,20,21,23,24,25,26,28,29,30,31,36,38,40,41,43,44,45,48,49,50,58,61,62,63,64,65,67,68,70,71,72,75,77,78,79,80,81,82,83,84,85,87,88,89,90,92,93,94,95,99,101,102,103,105,107,108,109,110,111,114,115,116,120,121,122,123,124,126,133,142,143,146,147,148,149,150,151,152,155,158,159,160,163,164,168,169,170,171,172,173,174,175,176,177,179,181,182,183,184,185,186,187,188,189,190,191,192,193,194,195,197,198,199,200,201,202,203,205,206,207,210,213,215,216,218,219,221,222,226,227,229,238,239,240,241,243,250,251,252,253,254,255,256,257,258,259,260,261,262,263,264,265,266,267,268,269,270,271,272,273,274,275,276,277,278,301,303,305,310,311,312,313,314,315,319,322,323,338,340,341,342,343,345,346,350,352,353,383,386,389,393,394,395,396,397,398,399,400,401,402,403,404,405,406,407,408,409,410,411,412,413,414,416,417,418,420,421,424,427,429,433,434,435,436,441,443,446,447,464,471,472,475,477,482,483,488,491,492,493,495,501,502,504,506,508,509,512,514,516,519,522,523,524,526,528,529,531,532,533,534,536,537,540,541,542,544,547,],[84,-22,-28,-1,-15,84,84,-72,84,84,-94,-23,-71,-27,-21,-42,-13,84,-41,84,84,-19,-17,84,-20,-16,-30,84,-95,-11,84,-9,84,-78,-10,-8,-31,-24,-32,-33,-12,-6,84,-35,-34,-244,-18,-14,-37,-36,-43,-44,204,84,84,-38,-29,84,84,84
,84,-53,-52,-51,224,-92,-39,-93,-40,-67,-66,-76,-41,-7,-292,-291,-2,84,84,84,84,84,84,-270,224,-269,84,-245,-246,84,84,84,84,84,84,-85,84,-261,-262,84,-84,84,84,-79,-76,84,84,-265,-266,-25,84,84,84,84,84,84,84,84,84,84,84,84,-216,-221,-222,-219,-217,-224,-215,-218,-220,-223,-214,-225,84,84,204,-1,-47,-46,-54,-77,-76,84,-81,-55,84,-80,-232,-233,84,-283,84,84,84,84,84,84,84,84,84,84,84,84,84,84,84,84,84,84,84,-271,-272,84,84,84,84,84,84,84,224,-87,-88,-287,-286,-26,-263,-264,-74,-75,-268,-267,-45,-284,-285,84,84,-70,-83,-56,84,-69,84,84,84,84,84,84,84,84,84,84,84,84,84,84,84,84,84,84,84,84,84,-86,84,-89,-290,-288,-73,84,84,84,-48,-82,-57,-68,84,-273,84,84,84,84,84,224,-251,-274,-247,84,-295,84,84,84,84,84,84,84,84,84,-296,-293,84,84,-254,-289,-275,-249,-250,-248,-297,84,-294,84,-255,84,84,84,-298,84,-256,-252,-276,-253,]),'EQ':([3,8,12,15,16,20,24,38,41,49,61,63,64,70,71,78,79,80,81,82,85,87,101,102,103,105,107,108,109,110,118,142,159,169,172,173,216,218,221,222,227,229,239,301,303,305,315,319,338,345,346,350,353,392,411,413,417,424,427,429,433,464,482,487,506,508,526,529,537,],[-28,-72,-94,-71,-27,-42,-41,-30,-95,-78,-31,-32,-33,-35,-34,-37,-36,-43,-44,205,-38,-29,-92,-39,-93,-40,-67,-66,-76,-41,243,205,-85,-84,-79,-76,-47,-46,-77,-76,-81,-55,-80,205,-87,-88,-74,-75,-45,-70,-83,-56,-69,447,-86,-89,-73,-48,-82,-57,-68,205,-295,447,-296,-297,-297,-298,-298,]),'ID':([0,2,4,5,6,7,10,11,13,17,19,21,23,25,26,28,29,30,31,36,37,40,43,44,45,47,48,50,54,58,62,65,66,67,68,72,75,77,83,84,88,89,90,92,93,94,95,96,98,104,111,114,115,116,120,121,122,123,124,126,133,143,146,147,148,149,150,151,152,153,155,157,158,160,163,164,167,168,170,171,174,175,176,177,179,181,182,183,184,185,186,187,188,189,190,191,192,193,194,195,197,198,199,200,201,202,203,205,206,207,210,215,219,225,226,228,231,237,238,240,241,242,243,250,251,252,253,254,255,256,257,258,259,260,261,262,263,264,265,266,267,268,269,270,271,272,273,274,275,276,277,278,287,289,310,311,312,313,314,322,323,340,341,342,343,344,349,3
52,383,385,386,389,393,394,395,396,397,398,399,400,401,402,403,404,405,406,407,408,409,410,412,414,415,416,418,420,421,431,434,435,436,441,443,444,446,447,471,472,475,477,483,488,491,492,493,495,501,502,504,508,509,512,514,516,519,522,523,524,528,529,531,532,533,534,536,540,541,542,544,547,],[85,-22,-1,-15,85,85,85,85,-23,85,-21,-13,85,85,85,-19,-17,85,-20,-16,85,85,-11,85,-9,85,85,-10,85,-8,-24,-12,85,-6,85,-244,-18,-14,85,85,85,85,85,85,-53,-52,-51,85,85,85,-7,-292,-291,-2,85,85,85,85,85,85,-270,-269,85,-245,-246,85,85,85,85,85,85,85,85,85,-261,-262,85,85,85,85,85,85,-265,-266,-25,85,85,85,85,85,85,85,85,85,85,85,85,-216,-221,-222,-219,-217,-224,-215,-218,-220,-223,-214,-225,85,85,-1,-54,85,85,85,85,85,85,-232,-233,85,85,-283,85,85,85,85,85,85,85,85,85,85,85,85,85,85,85,85,85,85,85,-271,-272,85,85,85,85,85,85,85,85,85,-287,-286,-26,-263,-264,-268,-267,-284,-285,85,85,85,85,85,85,85,85,85,85,85,85,85,85,85,85,85,85,85,85,85,85,85,85,85,85,85,85,-290,85,-288,85,85,85,85,85,-273,85,85,85,85,85,85,-251,-274,-247,85,85,85,85,85,85,85,85,85,85,-293,85,85,-254,-289,-275,-249,-250,-248,85,-294,85,-255,85,85,85,85,-256,-252,-276,-253,]),'IF':([0,2,5,7,13,19,21,28,29,31,36,43,44,45,50,58,62,65,67,68,72,75,77,111,114,115,116,126,133,143,147,148,163,164,176,177,179,240,241,250,270,271,310,311,312,313,314,322,323,340,341,383,386,414,416,418,421,435,436,441,471,472,475,477,483,492,501,502,504,508,512,514,516,519,522,523,524,528,529,531,532,533,534,536,540,541,542,544,547,],[86,-22,-15,86,-23,-21,-13,-19,-17,-20,-16,-11,86,-9,-10,-8,-24,-12,-6,86,-244,-18,-14,-7,-292,-291,-2,86,-270,-269,-245,-246,-261,-262,-265,-266,-25,-232,-233,-283,-271,-272,-287,-286,-26,-263,-264,-268,-267,-284,-285,86,86,-290,-288,86,86,-273,86,86,-251,-274,-247,86,86,86,86,86,86,-293,86,-254,-289,-275,-249,-250,-248,86,-294,86,-255,86,86,86,86,-256,-252,-276,-253,]),'AND':([3,8,12,15,16,18,20,22,24,27,35,38,41,46,49,53,56,60,61,63,64,69,70,71,73,74,76,78,79,80,81,82,85,87,97,99,100,101,102,103,105,106,10
7,108,109,110,112,113,125,127,128,129,130,131,132,134,135,136,140,142,144,145,156,159,166,169,172,173,196,204,209,213,214,216,218,221,222,223,224,227,229,239,245,246,247,248,249,281,282,283,288,290,291,292,293,294,297,301,303,305,315,318,319,320,325,326,327,328,329,330,331,332,333,334,335,336,338,345,346,350,353,357,358,359,360,361,362,363,364,365,366,367,368,369,370,371,372,373,374,375,377,378,380,382,411,413,417,424,427,429,433,442,448,449,450,451,452,453,454,455,456,457,458,462,464,468,482,506,508,526,529,537,],[-28,-72,-94,-71,-27,-150,-42,-133,-41,-119,-126,-30,-95,-105,-78,-188,174,-104,-31,-32,-33,-194,-35,-34,-176,-167,-182,-37,-36,-43,-44,-99,-38,-29,-112,-96,-102,-92,-39,-93,-40,-103,-67,-66,-76,-41,-111,-107,-108,-106,-123,-157,-190,261,-137,-115,-184,-172,-129,-96,-178,-113,-109,-85,-114,-84,-79,-76,-100,-101,-110,-99,174,-47,-46,-77,-76,-97,-98,-81,-55,-80,-136,-135,-134,-127,-128,-122,-121,-120,389,-162,-180,-144,-186,-174,-192,-96,-87,-88,-74,-183,-75,-195,-189,-168,-169,-171,-170,-155,-152,-151,-154,-153,-156,-177,-45,-70,-83,-56,-69,-117,-118,-116,-142,-139,-138,-141,-140,-143,-185,-191,-132,-131,-130,-179,-158,-159,-161,-160,-124,-125,261,-173,-86,-89,-73,-48,-82,-57,-68,-193,-149,-146,-145,-148,-147,-175,-181,-163,-164,-166,-165,-187,-96,389,-295,-296,-297,-297,-298,-298,]),'LBRACE':([0,2,4,5,6,7,10,11,13,19,21,23,25,26,28,29,30,31,36,40,42,43,44,45,48,50,54,58,62,65,67,68,72,75,77,83,84,88,89,92,93,94,95,98,111,114,115,116,120,121,122,123,124,126,133,143,146,147,148,149,150,151,152,155,158,160,163,164,168,170,175,176,177,179,182,183,184,185,186,187,188,189,190,191,193,194,195,197,198,199,200,201,202,203,205,206,207,210,215,219,226,238,240,241,243,250,251,252,253,254,255,256,257,258,259,260,261,262,263,264,265,266,267,268,269,270,271,272,273,274,275,276,277,278,285,308,310,311,312,313,314,322,323,340,341,342,343,352,383,384,386,388,389,393,394,395,396,397,398,399,400,401,402,403,404,405,406,407,408,409,410,412,414,416,418,419,420,421,426,434,435,4
36,440,441,443,446,447,471,472,475,476,477,478,483,488,491,492,493,494,495,501,502,504,505,508,509,512,514,516,519,522,523,524,528,529,531,532,533,534,536,540,541,542,544,547,],[44,-22,-1,-15,104,44,104,104,-23,-21,-13,104,104,104,-19,-17,104,-20,-16,104,44,-11,44,-9,104,-10,104,-8,-24,-12,-6,44,-244,-18,-14,104,104,104,104,104,-53,-52,-51,104,-7,-292,-291,-2,104,104,104,104,104,44,-270,-269,104,-245,-246,104,104,104,104,104,104,104,-261,-262,104,104,104,-265,-266,-25,104,104,104,104,104,104,104,104,104,104,-216,-221,-222,-219,-217,-224,-215,-218,-220,-223,-214,-225,104,104,-1,-54,104,104,-232,-233,104,-283,104,104,104,104,104,104,104,104,104,104,104,104,104,104,104,104,104,104,104,-271,-272,104,104,104,104,104,104,104,386,44,-287,-286,-26,-263,-264,-268,-267,-284,-285,104,104,104,44,436,44,441,104,104,104,104,104,104,104,104,104,104,104,104,104,104,104,104,104,104,104,104,-290,-288,44,473,104,44,477,104,-273,44,483,44,104,104,104,-251,-274,-247,502,44,504,44,104,104,44,104,44,104,44,44,44,528,-293,104,44,-254,-289,-275,-249,-250,-248,44,-294,44,-255,44,44,44,44,-256,-252,-276,-253,]),'FALSE':([0,2,4,5,6,7,10,11,13,19,21,23,25,26,28,29,30,31,36,40,43,44,45,48,50,54,58,62,65,67,68,72,75,77,83,84,88,89,90,92,93,94,95,98,111,114,115,116,120,121,122,123,124,126,133,143,146,147,148,149,150,151,152,155,158,160,163,164,168,170,171,174,175,176,177,179,181,182,183,184,185,186,187,188,189,190,191,192,193,194,195,197,198,199,200,201,202,203,205,206,207,210,215,219,226,238,240,241,243,250,251,252,253,254,255,256,257,258,259,260,261,262,263,264,265,266,267,268,269,270,271,272,273,274,275,276,277,278,310,311,312,313,314,322,323,340,341,342,343,352,383,386,389,393,394,395,396,397,398,399,400,401,402,403,404,405,406,407,408,409,410,412,414,416,418,420,421,434,435,436,441,443,446,447,471,472,475,477,483,488,491,492,493,495,501,502,504,508,509,512,514,516,519,522,523,524,528,529,531,532,533,534,536,540,541,542,544,547,],[64,-22,-1,-15,64,64,64,64,-23,-21,-13,64,64,64,-19,-17,64,-20,-
16,64,-11,64,-9,64,-10,64,-8,-24,-12,-6,64,-244,-18,-14,64,64,64,64,64,64,-53,-52,-51,64,-7,-292,-291,-2,64,64,64,64,64,64,-270,-269,64,-245,-246,64,64,64,64,64,64,64,-261,-262,64,64,64,64,64,-265,-266,-25,64,64,64,64,64,64,64,64,64,64,64,64,-216,-221,-222,-219,-217,-224,-215,-218,-220,-223,-214,-225,64,64,-1,-54,64,64,-232,-233,64,-283,64,64,64,64,64,64,64,64,64,64,64,64,64,64,64,64,64,64,64,-271,-272,64,64,64,64,64,64,64,-287,-286,-26,-263,-264,-268,-267,-284,-285,64,64,64,64,64,64,64,64,64,64,64,64,64,64,64,64,64,64,64,64,64,64,64,64,64,-290,-288,64,64,64,64,-273,64,64,64,64,64,-251,-274,-247,64,64,64,64,64,64,64,64,64,64,-293,64,64,-254,-289,-275,-249,-250,-248,64,-294,64,-255,64,64,64,64,-256,-252,-276,-253,]),'RSHIFT':([3,8,12,15,16,18,20,22,24,27,35,38,41,46,49,60,61,63,64,70,71,78,79,80,81,82,85,87,97,99,100,101,102,103,105,106,107,108,109,110,112,113,125,127,128,132,134,140,142,145,156,159,166,169,172,173,196,204,209,213,216,218,221,222,223,224,227,229,239,245,246,247,248,249,281,282,283,292,301,303,305,315,319,330,331,332,333,334,335,338,345,346,350,353,357,358,359,360,361,362,363,364,365,368,369,370,377,378,411,413,417,424,427,429,433,448,449,450,451,452,464,482,506,508,526,529,537,],[-28,-72,-94,-71,-27,121,-42,-133,-41,-119,-126,-30,-95,-105,-78,-104,-31,-32,-33,-35,-34,-37,-36,-43,-44,-99,-38,-29,-112,-96,-102,-92,-39,-93,-40,-103,-67,-66,-76,-41,-111,-107,-108,-106,-123,263,-115,-129,-96,-113,-109,-85,-114,-84,-79,-76,-100,-101,-110,-99,-47,-46,-77,-76,-97,-98,-81,-55,-80,-136,-135,-134,-127,-128,-122,-121,-120,263,-96,-87,-88,-74,-75,263,263,263,263,263,263,-45,-70,-83,-56,-69,-117,-118,-116,263,263,263,263,263,263,-132,-131,-130,-124,-125,-86,-89,-73,-48,-82,-57,-68,263,263,263,263,263,-96,-295,-296,-297,-297,-298,-298,]),'PLUSEQUAL':([3,8,12,15,16,20,24,38,41,49,61,63,64,70,71,78,79,80,81,82,85,87,101,102,103,105,107,108,109,110,142,159,169,172,173,216,218,221,222,227,229,239,301,303,305,315,319,338,345,346,350,353,411,413,417,424,427,429,433,464,4
82,506,508,526,529,537,],[-28,-72,-94,-71,-27,-42,-41,-30,-95,-78,-31,-32,-33,-35,-34,-37,-36,-43,-44,201,-38,-29,-92,-39,-93,-40,-67,-66,-76,-41,201,-85,-84,-79,-76,-47,-46,-77,-76,-81,-55,-80,201,-87,-88,-74,-75,-45,-70,-83,-56,-69,-86,-89,-73,-48,-82,-57,-68,201,-295,-296,-297,-297,-298,-298,]),'THIS':([0,2,4,5,6,7,10,11,13,19,21,23,25,26,28,29,30,31,36,40,43,44,45,48,50,54,58,62,65,67,68,72,75,77,83,84,88,89,90,92,93,94,95,98,111,114,115,116,120,121,122,123,124,126,133,143,146,147,148,149,150,151,152,155,158,160,163,164,168,170,171,174,175,176,177,179,181,182,183,184,185,186,187,188,189,190,191,192,193,194,195,197,198,199,200,201,202,203,205,206,207,210,215,219,226,238,240,241,243,250,251,252,253,254,255,256,257,258,259,260,261,262,263,264,265,266,267,268,269,270,271,272,273,274,275,276,277,278,310,311,312,313,314,322,323,340,341,342,343,352,383,386,389,393,394,395,396,397,398,399,400,401,402,403,404,405,406,407,408,409,410,412,414,416,418,420,421,434,435,436,441,443,446,447,471,472,475,477,483,488,491,492,493,495,501,502,504,508,509,512,514,516,519,522,523,524,528,529,531,532,533,534,536,540,541,542,544,547,],[20,-22,-1,-15,20,20,20,20,-23,-21,-13,20,20,20,-19,-17,20,-20,-16,20,-11,20,-9,20,-10,20,-8,-24,-12,-6,20,-244,-18,-14,20,20,20,20,20,20,-53,-52,-51,20,-7,-292,-291,-2,20,20,20,20,20,20,-270,-269,20,-245,-246,20,20,20,20,20,20,20,-261,-262,20,20,20,20,20,-265,-266,-25,20,20,20,20,20,20,20,20,20,20,20,20,-216,-221,-222,-219,-217,-224,-215,-218,-220,-223,-214,-225,20,20,-1,-54,20,20,-232,-233,20,-283,20,20,20,20,20,20,20,20,20,20,20,20,20,20,20,20,20,20,20,-271,-272,20,20,20,20,20,20,20,-287,-286,-26,-263,-264,-268,-267,-284,-285,20,20,20,20,20,20,20,20,20,20,20,20,20,20,20,20,20,20,20,20,20,20,20,20,20,-290,-288,20,20,20,20,-273,20,20,20,20,20,-251,-274,-247,20,20,20,20,20,20,20,20,20,20,-293,20,20,-254,-289,-275,-249,-250,-248,20,-294,20,-255,20,20,20,20,-256,-252,-276,-253,]),'MINUSEQUAL':([3,8,12,15,16,20,24,38,41,49,61,63,64,70,71,78,79,80,81,82,85,87,
101,102,103,105,107,108,109,110,142,159,169,172,173,216,218,221,222,227,229,239,301,303,305,315,319,338,345,346,350,353,411,413,417,424,427,429,433,464,482,506,508,526,529,537,],[-28,-72,-94,-71,-27,-42,-41,-30,-95,-78,-31,-32,-33,-35,-34,-37,-36,-43,-44,197,-38,-29,-92,-39,-93,-40,-67,-66,-76,-41,197,-85,-84,-79,-76,-47,-46,-77,-76,-81,-55,-80,197,-87,-88,-74,-75,-45,-70,-83,-56,-69,-86,-89,-73,-48,-82,-57,-68,197,-295,-296,-297,-297,-298,-298,]),'CONDOP':([1,3,8,12,15,16,18,20,22,24,27,35,38,41,46,49,53,56,60,61,63,64,69,70,71,73,74,76,78,79,80,81,82,85,87,97,99,100,101,102,103,105,106,107,108,109,110,112,113,125,127,128,129,130,131,132,134,135,136,140,141,142,144,145,156,159,166,169,172,173,196,204,209,213,214,216,218,221,222,223,224,227,229,239,245,246,247,248,249,281,282,283,288,290,291,292,293,294,297,301,302,303,305,315,318,319,320,325,326,327,328,329,330,331,332,333,334,335,336,338,345,346,350,353,357,358,359,360,361,362,363,364,365,366,367,368,369,370,371,372,373,374,375,377,378,380,382,411,413,417,424,427,429,433,442,448,449,450,451,452,453,454,455,456,457,458,462,464,468,482,506,508,526,529,537,],[89,-28,-72,-94,-71,-27,-150,-42,-133,-41,-119,-126,-30,-95,-105,-78,-188,-200,-104,-31,-32,-33,-194,-35,-34,-176,-167,-182,-37,-36,-43,-44,-99,-38,-29,-112,-96,-102,-92,-39,-93,-40,-103,-67,-66,-76,-41,-111,-107,-108,-106,-123,-157,-190,-196,-137,-115,-184,-172,-129,275,-96,-178,-113,-109,-85,-114,-84,-79,-76,-100,-101,-110,-99,-201,-47,-46,-77,-76,-97,-98,-81,-55,-80,-136,-135,-134,-127,-128,-122,-121,-120,-198,-162,-180,-144,-186,-174,-192,-96,409,-87,-88,-74,-183,-75,-195,-189,-168,-169,-171,-170,-155,-152,-151,-154,-153,-156,-177,-45,-70,-83,-56,-69,-117,-118,-116,-142,-139,-138,-141,-140,-143,-185,-191,-132,-131,-130,-179,-158,-159,-161,-160,-124,-125,-197,-173,-86,-89,-73,-48,-82,-57,-68,-193,-149,-146,-145,-148,-147,-175,-181,-163,-164,-166,-165,-187,-96,-199,-295,-296,-297,-297,-298,-298,]),'XOREQUAL':([3,8,12,15,16,20,24,38,41,49,61,63,64,70,71,78,79,80
,81,82,85,87,101,102,103,105,107,108,109,110,142,159,169,172,173,216,218,221,222,227,229,239,301,303,305,315,319,338,345,346,350,353,411,413,417,424,427,429,433,464,482,506,508,526,529,537,],[-28,-72,-94,-71,-27,-42,-41,-30,-95,-78,-31,-32,-33,-35,-34,-37,-36,-43,-44,199,-38,-29,-92,-39,-93,-40,-67,-66,-76,-41,199,-85,-84,-79,-76,-47,-46,-77,-76,-81,-55,-80,199,-87,-88,-74,-75,-45,-70,-83,-56,-69,-86,-89,-73,-48,-82,-57,-68,199,-295,-296,-297,-297,-298,-298,]),'OR':([1,3,8,12,15,16,18,20,22,24,27,35,38,41,46,49,53,56,60,61,63,64,69,70,71,73,74,76,78,79,80,81,82,85,87,97,99,100,101,102,103,105,106,107,108,109,110,112,113,125,127,128,129,130,131,132,134,135,136,140,141,142,144,145,156,159,166,169,172,173,196,204,209,213,214,216,218,221,222,223,224,227,229,239,245,246,247,248,249,281,282,283,288,290,291,292,293,294,297,301,302,303,305,315,318,319,320,325,326,327,328,329,330,331,332,333,334,335,336,338,345,346,350,353,357,358,359,360,361,362,363,364,365,366,367,368,369,370,371,372,373,374,375,377,378,380,382,411,413,417,424,427,429,433,442,448,449,450,451,452,453,454,455,456,457,458,462,464,468,482,506,508,526,529,537,],[90,-28,-72,-94,-71,-27,-150,-42,-133,-41,-119,-126,-30,-95,-105,-78,-188,-200,-104,-31,-32,-33,-194,-35,-34,-176,-167,-182,-37,-36,-43,-44,-99,-38,-29,-112,-96,-102,-92,-39,-93,-40,-103,-67,-66,-76,-41,-111,-107,-108,-106,-123,-157,-190,-196,-137,-115,-184,-172,-129,276,-96,-178,-113,-109,-85,-114,-84,-79,-76,-100,-101,-110,-99,-201,-47,-46,-77,-76,-97,-98,-81,-55,-80,-136,-135,-134,-127,-128,-122,-121,-120,-198,-162,-180,-144,-186,-174,-192,-96,410,-87,-88,-74,-183,-75,-195,-189,-168,-169,-171,-170,-155,-152,-151,-154,-153,-156,-177,-45,-70,-83,-56,-69,-117,-118,-116,-142,-139,-138,-141,-140,-143,-185,-191,-132,-131,-130,-179,-158,-159,-161,-160,-124,-125,-197,-173,-86,-89,-73,-48,-82,-57,-68,-193,-149,-146,-145,-148,-147,-175,-181,-163,-164,-166,-165,-187,-96,-199,-295,-296,-297,-297,-298,-298,]),'BREAK':([0,2,5,7,13,19,21,28,29,31,36,43,44,45,50,58,6
2,65,67,68,72,75,77,111,114,115,116,126,133,143,147,148,163,164,176,177,179,240,241,250,270,271,310,311,312,313,314,322,323,340,341,383,386,414,416,418,421,435,436,441,471,472,475,477,483,492,501,502,504,508,512,514,516,519,522,523,524,528,529,531,532,533,534,536,540,541,542,544,547,],[66,-22,-15,66,-23,-21,-13,-19,-17,-20,-16,-11,66,-9,-10,-8,-24,-12,-6,66,-244,-18,-14,-7,-292,-291,-2,66,-270,-269,-245,-246,-261,-262,-265,-266,-25,-232,-233,-283,-271,-272,-287,-286,-26,-263,-264,-268,-267,-284,-285,66,66,-290,-288,66,66,-273,66,66,-251,-274,-247,66,66,66,66,66,66,-293,66,-254,-289,-275,-249,-250,-248,66,-294,66,-255,66,66,66,66,-256,-252,-276,-253,]),'URSHIFTEQUAL':([3,8,12,15,16,20,24,38,41,49,61,63,64,70,71,78,79,80,81,82,85,87,101,102,103,105,107,108,109,110,142,159,169,172,173,216,218,221,222,227,229,239,301,303,305,315,319,338,345,346,350,353,411,413,417,424,427,429,433,464,482,506,508,526,529,537,],[-28,-72,-94,-71,-27,-42,-41,-30,-95,-78,-31,-32,-33,-35,-34,-37,-36,-43,-44,195,-38,-29,-92,-39,-93,-40,-67,-66,-76,-41,195,-85,-84,-79,-76,-47,-46,-77,-76,-81,-55,-80,195,-87,-88,-74,-75,-45,-70,-83,-56,-69,-86,-89,-73,-48,-82,-57,-68,195,-295,-296,-297,-297,-298,-298,]),'CONTINUE':([0,2,5,7,13,19,21,28,29,31,36,43,44,45,50,58,62,65,67,68,72,75,77,111,114,115,116,126,133,143,147,148,163,164,176,177,179,240,241,250,270,271,310,311,312,313,314,322,323,340,341,383,386,414,416,418,421,435,436,441,471,472,475,477,483,492,501,502,504,508,512,514,516,519,522,523,524,528,529,531,532,533,534,536,540,541,542,544,547,],[47,-22,-15,47,-23,-21,-13,-19,-17,-20,-16,-11,47,-9,-10,-8,-24,-12,-6,47,-244,-18,-14,-7,-292,-291,-2,47,-270,-269,-245,-246,-261,-262,-265,-266,-25,-232,-233,-283,-271,-272,-287,-286,-26,-263,-264,-268,-267,-284,-285,47,47,-290,-288,47,47,-273,47,47,-251,-274,-247,47,47,47,47,47,47,-293,47,-254,-289,-275,-249,-250,-248,47,-294,47,-255,47,47,47,47,-256,-252,-276,-253,]),'FINALLY':([161,311,312,516,],[308,308,-26,-289,]),'TYPEOF':([0,2,4,5,6,7,10,11,13,19,21,
23,25,26,28,29,30,31,36,40,43,44,45,48,50,58,62,65,67,68,72,75,77,83,84,88,89,90,92,93,94,95,111,114,115,116,120,121,122,123,124,126,133,143,146,147,148,149,150,151,152,155,158,160,163,164,168,170,171,174,175,176,177,179,181,182,183,184,185,186,187,188,189,190,191,192,193,194,195,197,198,199,200,201,202,203,205,206,207,210,215,219,226,238,240,241,243,250,251,252,253,254,255,256,257,258,259,260,261,262,263,264,265,266,267,268,269,270,271,272,273,274,275,276,277,278,310,311,312,313,314,322,323,340,341,342,343,352,383,386,389,393,394,395,396,397,398,399,400,401,402,403,404,405,406,407,408,409,410,412,414,416,418,420,421,434,435,436,441,443,446,447,471,472,475,477,483,488,491,492,493,495,501,502,504,508,509,512,514,516,519,522,523,524,528,529,531,532,533,534,536,540,541,542,544,547,],[23,-22,-1,-15,23,23,23,23,-23,-21,-13,23,23,23,-19,-17,23,-20,-16,23,-11,23,-9,23,-10,-8,-24,-12,-6,23,-244,-18,-14,23,23,23,23,23,23,-53,-52,-51,-7,-292,-291,-2,23,23,23,23,23,23,-270,-269,23,-245,-246,23,23,23,23,23,23,23,-261,-262,23,23,23,23,23,-265,-266,-25,23,23,23,23,23,23,23,23,23,23,23,23,-216,-221,-222,-219,-217,-224,-215,-218,-220,-223,-214,-225,23,23,-1,-54,23,23,-232,-233,23,-283,23,23,23,23,23,23,23,23,23,23,23,23,23,23,23,23,23,23,23,-271,-272,23,23,23,23,23,23,23,-287,-286,-26,-263,-264,-268,-267,-284,-285,23,23,23,23,23,23,23,23,23,23,23,23,23,23,23,23,23,23,23,23,23,23,23,23,23,-290,-288,23,23,23,23,-273,23,23,23,23,23,-251,-274,-247,23,23,23,23,23,23,23,23,23,23,-293,23,23,-254,-289,-275,-249,-250,-248,23,-294,23,-255,23,23,23,23,-256,-252,-276,-253,]),'error':([1,3,8,12,14,15,16,18,20,22,24,26,27,34,35,38,41,46,47,49,51,53,55,56,60,61,63,64,66,69,70,71,73,74,76,78,79,80,81,82,85,87,97,99,100,101,102,103,105,106,107,108,109,110,112,113,117,118,119,125,127,128,129,130,131,132,134,135,136,137,138,139,140,141,142,144,145,156,159,165,166,169,172,173,178,196,204,209,211,213,214,216,218,221,222,223,224,227,229,239,244,245,246,247,248,249,280,281,282,283,303,305,315,318,319,320
,325,326,327,328,329,330,331,332,333,334,335,336,337,338,345,346,350,353,355,356,357,358,359,360,361,362,363,364,365,366,367,368,369,370,371,372,373,374,375,376,377,378,380,381,382,411,413,417,422,424,427,429,433,480,482,500,506,508,526,529,537,],[-206,-28,-72,-94,116,-71,-27,-150,-42,-133,-41,116,-119,116,-126,-30,-95,-105,116,-78,-230,-188,-212,-200,-104,-31,-32,-33,116,-194,-35,-34,-176,-167,-182,-37,-36,-43,-44,-99,-38,-29,-112,-96,-102,-92,-39,-93,-40,-103,-67,-66,-76,-41,-111,-107,116,-238,-234,-108,-106,-123,-157,-190,-196,-137,-115,-184,-172,-208,116,-226,-129,-202,-96,-178,-113,-109,-85,116,-114,-84,-79,-76,116,-100,-101,-110,116,-99,-201,-47,-46,-77,-76,-97,-98,-81,-55,-80,-239,-136,-135,-134,-127,-128,-231,-122,-121,-120,-87,-88,-74,-183,-75,-195,-189,-168,-169,-171,-170,-155,-152,-151,-154,-153,-156,-177,-213,-45,-70,-83,-56,-69,-235,-242,-117,-118,-116,-142,-139,-138,-141,-140,-143,-185,-191,-132,-131,-130,-179,-158,-159,-161,-160,-227,-124,-125,-197,-209,-173,-86,-89,-73,-207,-48,-82,-57,-68,-203,-295,116,-296,-297,-297,-298,-298,]),'NOT':([0,2,4,5,6,7,10,11,13,19,21,23,25,26,28,29,30,31,36,40,43,44,45,48,50,58,62,65,67,68,72,75,77,83,84,88,89,90,92,93,94,95,111,114,115,116,120,121,122,123,124,126,133,143,146,147,148,149,150,151,152,155,158,160,163,164,168,170,171,174,175,176,177,179,181,182,183,184,185,186,187,188,189,190,191,192,193,194,195,197,198,199,200,201,202,203,205,206,207,210,215,219,226,238,240,241,243,250,251,252,253,254,255,256,257,258,259,260,261,262,263,264,265,266,267,268,269,270,271,272,273,274,275,276,277,278,310,311,312,313,314,322,323,340,341,342,343,352,383,386,389,393,394,395,396,397,398,399,400,401,402,403,404,405,406,407,408,409,410,412,414,416,418,420,421,434,435,436,441,443,446,447,471,472,475,477,483,488,491,492,493,495,501,502,504,508,509,512,514,516,519,522,523,524,528,529,531,532,533,534,536,540,541,542,544,547,],[48,-22,-1,-15,48,48,48,48,-23,-21,-13,48,48,48,-19,-17,48,-20,-16,48,-11,48,-9,48,-10,-8,-24,-12,-6,48,-244,-1
8,-14,48,48,48,48,48,48,-53,-52,-51,-7,-292,-291,-2,48,48,48,48,48,48,-270,-269,48,-245,-246,48,48,48,48,48,48,48,-261,-262,48,48,48,48,48,-265,-266,-25,48,48,48,48,48,48,48,48,48,48,48,48,-216,-221,-222,-219,-217,-224,-215,-218,-220,-223,-214,-225,48,48,-1,-54,48,48,-232,-233,48,-283,48,48,48,48,48,48,48,48,48,48,48,48,48,48,48,48,48,48,48,-271,-272,48,48,48,48,48,48,48,-287,-286,-26,-263,-264,-268,-267,-284,-285,48,48,48,48,48,48,48,48,48,48,48,48,48,48,48,48,48,48,48,48,48,48,48,48,48,-290,-288,48,48,48,48,-273,48,48,48,48,48,-251,-274,-247,48,48,48,48,48,48,48,48,48,48,-293,48,48,-254,-289,-275,-249,-250,-248,48,-294,48,-255,48,48,48,48,-256,-252,-276,-253,]),'ANDEQUAL':([3,8,12,15,16,20,24,38,41,49,61,63,64,70,71,78,79,80,81,82,85,87,101,102,103,105,107,108,109,110,142,159,169,172,173,216,218,221,222,227,229,239,301,303,305,315,319,338,345,346,350,353,411,413,417,424,427,429,433,464,482,506,508,526,529,537,],[-28,-72,-94,-71,-27,-42,-41,-30,-95,-78,-31,-32,-33,-35,-34,-37,-36,-43,-44,203,-38,-29,-92,-39,-93,-40,-67,-66,-76,-41,203,-85,-84,-79,-76,-47,-46,-77,-76,-81,-55,-80,203,-87,-88,-74,-75,-45,-70,-83,-56,-69,-86,-89,-73,-48,-82,-57,-68,203,-295,-296,-297,-297,-298,-298,]),'RBRACKET':([3,4,16,20,38,61,63,64,70,71,78,79,80,81,85,87,91,92,93,94,95,97,99,100,101,102,103,105,106,107,108,109,110,112,113,125,127,128,129,130,131,132,134,135,136,137,139,140,141,142,144,145,156,166,209,215,216,217,218,219,221,222,223,224,227,229,239,304,305,316,338,343,345,346,347,350,353,354,357,358,359,360,361,362,363,364,365,366,367,368,369,370,371,372,373,374,375,376,377,378,380,381,382,413,423,424,427,429,433,480,482,506,526,537,],[-28,-1,-27,-42,-30,-31,-32,-33,-35,-34,-37,-36,-43,-44,-38,-29,216,218,-53,-52,-51,-112,-96,-102,-92,-39,-93,-40,-103,-67,-66,-76,-41,-111,-107,-108,-106,-123,-157,-190,-196,-137,-115,-184,-172,-208,-226,-129,-202,-96,-178,-113,-109,-114,-110,-1,-47,-49,-46,-54,-77,-76,-97,-98,-81,-55,-80,411,-88,417,-45,424,-70,-83,427,-56,-69,433,-117,-118,-116,-14
2,-139,-138,-141,-140,-143,-185,-191,-132,-131,-130,-179,-158,-159,-161,-160,-227,-124,-125,-197,-209,-173,-89,-50,-48,-82,-57,-68,-203,-295,-296,-297,-298,]),'MOD':([3,8,12,15,16,20,24,27,35,38,41,46,49,60,61,63,64,70,71,78,79,80,81,82,85,87,97,99,100,101,102,103,105,106,107,108,109,110,112,113,125,127,128,134,142,145,156,159,166,169,172,173,196,204,209,213,216,218,221,222,223,224,227,229,239,248,249,281,282,283,301,303,305,315,319,338,345,346,350,353,357,358,359,377,378,411,413,417,424,427,429,433,464,482,506,508,526,529,537,],[-28,-72,-94,-71,-27,-42,-41,-119,150,-30,-95,-105,-78,-104,-31,-32,-33,-35,-34,-37,-36,-43,-44,-99,-38,-29,-112,-96,-102,-92,-39,-93,-40,-103,-67,-66,-76,-41,-111,-107,-108,-106,252,-115,-96,-113,-109,-85,-114,-84,-79,-76,-100,-101,-110,-99,-47,-46,-77,-76,-97,-98,-81,-55,-80,252,252,-122,-121,-120,-96,-87,-88,-74,-75,-45,-70,-83,-56,-69,-117,-118,-116,252,252,-86,-89,-73,-48,-82,-57,-68,-96,-295,-296,-297,-297,-298,-298,]),'THROW':([0,2,5,7,13,19,21,28,29,31,36,43,44,45,50,58,62,65,67,68,72,75,77,111,114,115,116,126,133,143,147,148,163,164,176,177,179,240,241,250,270,271,310,311,312,313,314,322,323,340,341,383,386,414,416,418,421,435,436,441,471,472,475,477,483,492,501,502,504,508,512,514,516,519,522,523,524,528,529,531,532,533,534,536,540,541,542,544,547,],[88,-22,-15,88,-23,-21,-13,-19,-17,-20,-16,-11,88,-9,-10,-8,-24,-12,-6,88,-244,-18,-14,-7,-292,-291,-2,88,-270,-269,-245,-246,-261,-262,-265,-266,-25,-232,-233,-283,-271,-272,-287,-286,-26,-263,-264,-268,-267,-284,-285,88,88,-290,-288,88,88,-273,88,88,-251,-274,-247,88,88,88,88,88,88,-293,88,-254,-289,-275,-249,-250,-248,88,-294,88,-255,88,88,88,88,-256,-252,-276,-253,]),'GETPROP':([104,349,],[228,228,]),'DELETE':([0,2,4,5,6,7,10,11,13,19,21,23,25,26,28,29,30,31,36,40,43,44,45,48,50,58,62,65,67,68,72,75,77,83,84,88,89,90,92,93,94,95,111,114,115,116,120,121,122,123,124,126,133,143,146,147,148,149,150,151,152,155,158,160,163,164,168,170,171,174,175,176,177,179,181,182,183,184,185,186,187,
188,189,190,191,192,193,194,195,197,198,199,200,201,202,203,205,206,207,210,215,219,226,238,240,241,243,250,251,252,253,254,255,256,257,258,259,260,261,262,263,264,265,266,267,268,269,270,271,272,273,274,275,276,277,278,310,311,312,313,314,322,323,340,341,342,343,352,383,386,389,393,394,395,396,397,398,399,400,401,402,403,404,405,406,407,408,409,410,412,414,416,418,420,421,434,435,436,441,443,446,447,471,472,475,477,483,488,491,492,493,495,501,502,504,508,509,512,514,516,519,522,523,524,528,529,531,532,533,534,536,540,541,542,544,547,],[25,-22,-1,-15,25,25,25,25,-23,-21,-13,25,25,25,-19,-17,25,-20,-16,25,-11,25,-9,25,-10,-8,-24,-12,-6,25,-244,-18,-14,25,25,25,25,25,25,-53,-52,-51,-7,-292,-291,-2,25,25,25,25,25,25,-270,-269,25,-245,-246,25,25,25,25,25,25,25,-261,-262,25,25,25,25,25,-265,-266,-25,25,25,25,25,25,25,25,25,25,25,25,25,-216,-221,-222,-219,-217,-224,-215,-218,-220,-223,-214,-225,25,25,-1,-54,25,25,-232,-233,25,-283,25,25,25,25,25,25,25,25,25,25,25,25,25,25,25,25,25,25,25,-271,-272,25,25,25,25,25,25,25,-287,-286,-26,-263,-264,-268,-267,-284,-285,25,25,25,25,25,25,25,25,25,25,25,25,25,25,25,25,25,25,25,25,25,25,25,25,25,-290,-288,25,25,25,25,-273,25,25,25,25,25,-251,-274,-247,25,25,25,25,25,25,25,25,25,25,-293,25,25,-254,-289,-275,-249,-250,-248,25,-294,25,-255,25,25,25,25,-256,-252,-276,-253,]),}
+
+# Decompress the LR action table (auto-generated by PLY's yacc).
+# _lr_action_items maps each terminal symbol to a pair of parallel lists:
+# _v[0] is a list of parser state numbers, _v[1] the matching list of
+# actions (positive = shift to state, negative = reduce by rule number).
+# Rebuild the state -> {terminal: action} mapping the parser consumes.
+_lr_action = { }
+for _k, _v in _lr_action_items.items():
+   # Pair each state with its action for terminal _k.
+   for _x,_y in zip(_v[0],_v[1]):
+      if not _x in _lr_action:  _lr_action[_x] = { }
+      _lr_action[_x][_k] = _y
+# Free the compressed form; only the expanded _lr_action table is kept.
+del _lr_action_items
+
+_lr_goto_items = {'logical_or_expr_nobf':([0,7,44,68,126,383,386,418,421,436,441,477,483,492,501,502,504,512,528,531,533,534,536,540,],[1,1,1,1,1,1,1,1,1,1,1,1,1,1,1,1,1,1,1,1,1,1,1,1,]),'throw_statement':([0,7,44,68,126,383,386,418,421,436,441,477,483,492,501,502,504,512,528,531,533,534,536,540,],[2,2,2,2,2,2,2,2,2,2,2,2,2,2,2,2,2,2,2,2,2,2,2,2,]),'boolean_literal':([0,6,7,10,11,23,25,26,30,40,44,48,54,68,83,84,88,89,90,92,98,120,121,122,123,124,126,146,149,150,151,152,155,158,160,168,170,171,174,175,181,182,183,184,185,186,187,188,189,190,191,192,207,210,226,238,243,251,252,253,254,255,256,257,258,259,260,261,262,263,264,265,266,267,268,269,272,273,274,275,276,277,278,342,343,352,383,386,389,393,394,395,396,397,398,399,400,401,402,403,404,405,406,407,408,409,410,412,418,420,421,434,436,441,443,446,447,477,483,488,491,492,493,495,501,502,504,509,512,528,531,533,534,536,540,],[3,3,3,3,3,3,3,3,3,3,3,3,3,3,3,3,3,3,3,3,3,3,3,3,3,3,3,3,3,3,3,3,3,3,3,3,3,3,3,3,3,3,3,3,3,3,3,3,3,3,3,3,3,3,3,3,3,3,3,3,3,3,3,3,3,3,3,3,3,3,3,3,3,3,3,3,3,3,3,3,3,3,3,3,3,3,3,3,3,3,3,3,3,3,3,3,3,3,3,3,3,3,3,3,3,3,3,3,3,3,3,3,3,3,3,3,3,3,3,3,3,3,3,3,3,3,3,3,3,3,3,3,3,3,3,]),'bitwise_or_expr':([26,83,88,89,92,146,149,158,160,168,170,175,207,210,226,238,243,261,272,275,276,277,342,343,352,404,407,412,420,434,443,446,488,491,495,509,],[130,130,130,130,130,130,130,130,130,130,130,130,130,130,130,130,130,367,130,130,130,130,130,130,130,130,130,130,130,130,130,130,130,130,130,130,]),'property_assignment':([104,349,],[233,430,]),'logical_and_expr_noin':([155,406,408,409,410,447,493,],[288,288,288,288,468,288,288,]),'iteration_statement':([0,7,44,68,126,383,386,418,421,436,441,477,483,492,501,502,504,512,528,531,533,534,536,540,],[5,5,5,5,5,5,5,5,5,5,5,5,5,5,5,5,5,5,5,5,5,5,5,5,]),'variable_declaration_noin':([289,444,],[390,486,]),'source_element_list':([0,44,386,436,441,477,483,502,504,528,534,536,],[7,7,7,7,7,7,7,7,7,7,7,7,]),'function_expr':([0,6,7,10,11,23,25,26,30,40,44,48,54,68,83,84,88,89,90,92
,98,120,121,122,123,124,126,146,149,150,151,152,155,158,160,168,170,171,174,175,181,182,183,184,185,186,187,188,189,190,191,192,207,210,226,238,243,251,252,253,254,255,256,257,258,259,260,261,262,263,264,265,266,267,268,269,272,273,274,275,276,277,278,342,343,352,383,386,389,393,394,395,396,397,398,399,400,401,402,403,404,405,406,407,408,409,410,412,418,420,421,434,436,441,443,446,447,477,483,488,491,492,493,495,501,502,504,509,512,528,531,533,534,536,540,],[8,107,8,107,107,107,107,107,107,107,8,107,107,8,107,107,107,107,8,107,107,107,107,107,107,107,8,107,107,107,107,107,107,107,107,107,107,8,8,107,8,107,107,107,107,107,107,107,107,107,107,8,107,107,107,107,107,107,107,107,107,107,107,107,107,107,107,107,107,107,107,107,107,107,107,107,107,107,107,107,107,107,107,107,107,107,8,8,107,107,107,107,107,107,107,107,107,107,107,107,107,107,107,107,107,107,107,107,8,107,8,107,8,8,107,107,107,8,8,107,107,8,107,107,8,8,8,107,8,8,8,8,8,8,8,]),'multiplicative_expr':([26,83,88,89,92,120,121,122,123,124,146,149,155,158,160,168,170,175,182,183,184,185,186,187,188,189,190,191,207,210,226,238,243,254,255,256,257,258,259,260,261,262,263,264,265,266,267,268,269,272,273,274,275,276,277,278,342,343,352,389,393,394,395,396,397,398,399,400,401,402,403,404,405,406,407,408,409,410,412,420,434,443,446,447,488,491,493,495,509,],[128,128,128,128,128,128,128,128,248,249,128,128,128,128,128,128,128,128,128,128,128,128,128,128,128,128,128,128,128,128,128,128,128,128,128,128,128,128,128,128,128,128,128,128,128,128,128,128,128,128,377,378,128,128,128,128,128,128,128,128,128,128,128,128,128,128,128,128,128,128,128,128,128,128,128,128,128,128,128,128,128,128,128,128,128,128,128,128,128,]),'finally':([161,311,],[310,416,]),'program':([0,],[9,]),'case_block':([419,],[472,]),'formal_parameter_list':([153,287,344,431,],[284,387,425,479,]),'new_expr_nobf':([0,7,44,68,90,126,171,174,181,192,383,386,418,421,436,441,477,483,492,501,502,504,512,528,531,533,534,536,540,],[12,12,12,12,12,12,12,12,12,12,12,12,
12,12,12,12,12,12,12,12,12,12,12,12,12,12,12,12,12,]),'try_statement':([0,7,44,68,126,383,386,418,421,436,441,477,483,492,501,502,504,512,528,531,533,534,536,540,],[13,13,13,13,13,13,13,13,13,13,13,13,13,13,13,13,13,13,13,13,13,13,13,13,]),'element_list':([4,],[91,]),'relational_expr':([26,83,88,89,92,146,149,158,160,168,170,175,182,183,184,185,207,210,226,238,243,260,261,265,266,267,268,269,272,275,276,277,278,342,343,352,400,401,402,403,404,407,412,420,434,443,446,488,491,495,509,],[129,129,129,129,129,129,129,129,129,129,129,129,326,327,328,329,129,129,129,129,129,129,129,129,372,373,374,375,129,129,129,129,129,129,129,129,455,456,457,458,129,129,129,129,129,129,129,129,129,129,129,]),'primary_expr_no_brace':([0,6,7,10,11,23,25,26,30,40,44,48,54,68,83,84,88,89,90,92,98,120,121,122,123,124,126,146,149,150,151,152,155,158,160,168,170,171,174,175,181,182,183,184,185,186,187,188,189,190,191,192,207,210,226,238,243,251,252,253,254,255,256,257,258,259,260,261,262,263,264,265,266,267,268,269,272,273,274,275,276,277,278,342,343,352,383,386,389,393,394,395,396,397,398,399,400,401,402,403,404,405,406,407,408,409,410,412,418,420,421,434,436,441,443,446,447,477,483,488,491,492,493,495,501,502,504,509,512,528,531,533,534,536,540,],[15,102,15,102,102,102,102,102,102,102,15,102,102,15,102,102,102,102,15,102,102,102,102,102,102,102,15,102,102,102,102,102,102,102,102,102,102,15,15,102,15,102,102,102,102,102,102,102,102,102,102,15,102,102,102,102,102,102,102,102,102,102,102,102,102,102,102,102,102,102,102,102,102,102,102,102,102,102,102,102,102,102,102,102,102,102,15,15,102,102,102,102,102,102,102,102,102,102,102,102,102,102,102,102,102,102,102,102,15,102,15,102,15,15,102,102,102,15,15,102,102,15,102,102,15,15,15,102,15,15,15,15,15,15,15,]),'variable_declaration_list_noin':([289,],[391,]),'null_literal':([0,6,7,10,11,23,25,26,30,40,44,48,54,68,83,84,88,89,90,92,98,120,121,122,123,124,126,146,149,150,151,152,155,158,160,168,170,171,174,175,181,182,183,184,185,186,187,188,189,190,19
1,192,207,210,226,238,243,251,252,253,254,255,256,257,258,259,260,261,262,263,264,265,266,267,268,269,272,273,274,275,276,277,278,342,343,352,383,386,389,393,394,395,396,397,398,399,400,401,402,403,404,405,406,407,408,409,410,412,418,420,421,434,436,441,443,446,447,477,483,488,491,492,493,495,501,502,504,509,512,528,531,533,534,536,540,],[16,16,16,16,16,16,16,16,16,16,16,16,16,16,16,16,16,16,16,16,16,16,16,16,16,16,16,16,16,16,16,16,16,16,16,16,16,16,16,16,16,16,16,16,16,16,16,16,16,16,16,16,16,16,16,16,16,16,16,16,16,16,16,16,16,16,16,16,16,16,16,16,16,16,16,16,16,16,16,16,16,16,16,16,16,16,16,16,16,16,16,16,16,16,16,16,16,16,16,16,16,16,16,16,16,16,16,16,16,16,16,16,16,16,16,16,16,16,16,16,16,16,16,16,16,16,16,16,16,16,16,16,16,16,16,]),'labelled_statement':([0,7,44,68,126,383,386,418,421,436,441,477,483,492,501,502,504,512,528,531,533,534,536,540,],[19,19,19,19,19,19,19,19,19,19,19,19,19,19,19,19,19,19,19,19,19,19,19,19,]),'expr_statement':([0,7,44,68,126,383,386,418,421,436,441,477,483,492,501,502,504,512,528,531,533,534,536,540,],[21,21,21,21,21,21,21,21,21,21,21,21,21,21,21,21,21,21,21,21,21,21,21,21,]),'logical_and_expr':([26,83,88,89,92,146,149,158,160,168,170,175,207,210,226,238,243,272,275,276,277,342,343,352,404,407,412,420,434,443,446,488,491,495,509,],[131,131,131,131,131,131,131,131,131,131,131,131,131,131,131,131,131,131,131,380,131,131,131,131,131,131,131,131,131,131,131,131,131,131,131,]),'additive_expr_nobf':([0,7,44,68,90,126,171,174,181,192,383,386,418,421,436,441,477,483,492,501,502,504,512,528,531,533,534,536,540,],[22,22,22,22,22,22,22,22,22,22,22,22,22,22,22,22,22,22,22,22,22,22,22,22,22,22,22,22,22,]),'primary_expr':([6,10,11,23,25,26,30,40,48,54,83,84,88,89,92,98,120,121,122,123,124,146,149,150,151,152,155,158,160,168,170,175,182,183,184,185,186,187,188,189,190,191,207,210,226,238,243,251,252,253,254,255,256,257,258,259,260,261,262,263,264,265,266,267,268,269,272,273,274,275,276,277,278,342,343,352,389,393,394,395,396,397,398,399,400,401,40
2,403,404,405,406,407,408,409,410,412,420,434,443,446,447,488,491,493,495,509,],[108,108,108,108,108,108,108,108,108,108,108,108,108,108,108,108,108,108,108,108,108,108,108,108,108,108,108,108,108,108,108,108,108,108,108,108,108,108,108,108,108,108,108,108,108,108,108,108,108,108,108,108,108,108,108,108,108,108,108,108,108,108,108,108,108,108,108,108,108,108,108,108,108,108,108,108,108,108,108,108,108,108,108,108,108,108,108,108,108,108,108,108,108,108,108,108,108,108,108,108,108,108,108,108,108,108,]),'identifier':([0,6,7,10,11,17,23,25,26,30,37,40,44,47,48,54,66,68,83,84,88,89,90,92,96,98,104,120,121,122,123,124,126,146,149,150,151,152,153,155,157,158,160,167,168,170,171,174,175,181,182,183,184,185,186,187,188,189,190,191,192,207,210,225,226,228,231,237,238,242,243,251,252,253,254,255,256,257,258,259,260,261,262,263,264,265,266,267,268,269,272,273,274,275,276,277,278,287,289,342,343,344,349,352,383,385,386,389,393,394,395,396,397,398,399,400,401,402,403,404,405,406,407,408,409,410,412,415,418,420,421,431,434,436,441,443,444,446,447,477,483,488,491,492,493,495,501,502,504,509,512,528,531,533,534,536,540,],[24,110,24,110,110,118,110,110,110,110,154,110,24,165,110,110,178,24,110,110,110,110,110,110,220,110,234,110,110,110,110,110,24,110,110,110,110,110,286,110,303,110,110,315,110,110,110,110,110,110,110,110,110,110,110,110,110,110,110,110,110,110,110,346,110,234,234,353,110,118,110,110,110,110,110,110,110,110,110,110,110,110,110,110,110,110,110,110,110,110,110,110,110,110,110,110,110,286,392,110,110,286,234,110,24,437,24,110,110,110,110,110,110,110,110,110,110,110,110,110,110,110,110,110,110,110,110,470,24,110,24,286,110,24,24,110,487,110,110,24,24,110,110,24,110,110,24,24,24,110,24,24,24,24,24,24,24,]),'bitwise_xor_expr_nobf':([0,7,44,68,90,126,174,181,383,386,418,421,436,441,477,483,492,501,502,504,512,528,531,533,534,536,540,],[53,53,53,53,53,53,53,325,53,53,53,53,53,53,53,53,53,53,53,53,53,53,53,53,53,53,53,]),'relational_expr_noin':([155,389,398,399,405,406,408,
409,410,447,493,],[290,290,290,290,290,290,290,290,290,290,290,]),'with_statement':([0,7,44,68,126,383,386,418,421,436,441,477,483,492,501,502,504,512,528,531,533,534,536,540,],[28,28,28,28,28,28,28,28,28,28,28,28,28,28,28,28,28,28,28,28,28,28,28,28,]),'case_clauses_opt':([473,520,],[497,535,]),'initializer':([118,],[244,]),'break_statement':([0,7,44,68,126,383,386,418,421,436,441,477,483,492,501,502,504,512,528,531,533,534,536,540,],[29,29,29,29,29,29,29,29,29,29,29,29,29,29,29,29,29,29,29,29,29,29,29,29,]),'bitwise_and_expr_noin':([155,389,399,405,406,408,409,410,447,493,],[291,291,454,291,291,291,291,291,291,291,]),'switch_statement':([0,7,44,68,126,383,386,418,421,436,441,477,483,492,501,502,504,512,528,531,533,534,536,540,],[31,31,31,31,31,31,31,31,31,31,31,31,31,31,31,31,31,31,31,31,31,31,31,31,]),'property_list':([104,],[230,]),'postfix_expr':([6,10,11,23,25,26,30,40,48,83,84,88,89,92,120,121,122,123,124,146,149,150,151,152,155,158,160,168,170,175,182,183,184,185,186,187,188,189,190,191,207,210,226,238,243,251,252,253,254,255,256,257,258,259,260,261,262,263,264,265,266,267,268,269,272,273,274,275,276,277,278,342,343,352,389,393,394,395,396,397,398,399,400,401,402,403,404,405,406,407,408,409,410,412,420,434,443,446,447,488,491,493,495,509,],[100,100,100,100,100,100,100,100,100,100,100,100,100,100,100,100,100,100,100,100,100,100,100,100,100,100,100,100,100,100,100,100,100,100,100,100,100,100,100,100,100,100,100,100,100,100,100,100,100,100,100,100,100,100,100,100,100,100,100,100,100,100,100,100,100,100,100,100,100,100,100,100,100,100,100,100,100,100,100,100,100,100,100,100,100,100,100,100,100,100,100,100,100,100,100,100,100,100,100,100,100,100,100,100,]),'source_elements':([0,44,386,436,441,477,483,502,504,528,534,536,],[33,162,438,438,438,438,438,438,438,438,543,545,]),'shift_expr':([26,83,88,89,92,146,149,155,158,160,168,170,175,182,183,184,185,186,187,188,189,190,191,207,210,226,238,243,254,255,256,257,258,259,260,261,265,266,267,268,269,272,275,276,277,278,3
42,343,352,389,393,394,395,396,397,398,399,400,401,402,403,404,405,406,407,408,409,410,412,420,434,443,446,447,488,491,493,495,509,],[132,132,132,132,132,132,132,292,132,132,132,132,132,132,132,132,132,330,331,332,333,334,335,132,132,132,132,132,360,361,362,363,364,365,132,132,132,132,132,132,132,132,132,132,132,132,132,132,132,292,448,449,450,451,452,292,292,132,132,132,132,132,292,292,132,292,292,292,132,132,132,132,132,292,132,132,292,132,132,]),'expr_nobf':([0,7,44,68,126,383,386,418,421,436,441,477,483,492,501,502,504,512,528,531,533,534,536,540,],[34,34,34,34,34,34,34,34,34,34,34,34,34,34,34,34,34,34,34,34,34,34,34,34,]),'expr_opt':([404,443,491,509,],[459,485,513,530,]),'multiplicative_expr_nobf':([0,7,44,68,90,126,171,174,181,192,383,386,418,421,436,441,477,483,492,501,502,504,512,528,531,533,534,536,540,],[35,35,35,35,35,35,35,35,35,35,35,35,35,35,35,35,35,35,35,35,35,35,35,35,35,35,35,35,35,]),'continue_statement':([0,7,44,68,126,383,386,418,421,436,441,477,483,492,501,502,504,512,528,531,533,534,536,540,],[36,36,36,36,36,36,36,36,36,36,36,36,36,36,36,36,36,36,36,36,36,36,36,36,]),'argument_list':([160,],[306,]),'expr_noin_opt':([155,],[296,]),'string_literal':([0,6,7,10,11,23,25,26,30,40,44,48,54,68,83,84,88,89,90,92,98,104,120,121,122,123,124,126,146,149,150,151,152,155,158,160,168,170,171,174,175,181,182,183,184,185,186,187,188,189,190,191,192,207,210,226,228,231,238,243,251,252,253,254,255,256,257,258,259,260,261,262,263,264,265,266,267,268,269,272,273,274,275,276,277,278,342,343,349,352,383,386,389,393,394,395,396,397,398,399,400,401,402,403,404,405,406,407,408,409,410,412,418,420,421,434,436,441,443,446,447,477,483,488,491,492,493,495,501,502,504,509,512,528,531,533,534,536,540,],[38,38,38,38,38,38,38,38,38,38,38,38,38,38,38,38,38,38,38,38,38,236,38,38,38,38,38,38,38,38,38,38,38,38,38,38,38,38,38,38,38,38,38,38,38,38,38,38,38,38,38,38,38,38,38,38,236,236,38,38,38,38,38,38,38,38,38,38,38,38,38,38,38,38,38,38,38,38,38,38,38,38,38,38,38,38,38,38,236,38,
38,38,38,38,38,38,38,38,38,38,38,38,38,38,38,38,38,38,38,38,38,38,38,38,38,38,38,38,38,38,38,38,38,38,38,38,38,38,38,38,38,38,38,38,38,38,38,38,38,]),'call_expr_nobf':([0,7,44,68,90,126,171,174,181,192,383,386,418,421,436,441,477,483,492,501,502,504,512,528,531,533,534,536,540,],[41,41,41,41,41,41,41,41,41,41,41,41,41,41,41,41,41,41,41,41,41,41,41,41,41,41,41,41,41,]),'bitwise_xor_expr_noin':([155,389,405,406,408,409,410,447,493,],[293,293,462,293,293,293,293,293,293,]),'variable_statement':([0,7,44,68,126,383,386,418,421,436,441,477,483,492,501,502,504,512,528,531,533,534,536,540,],[43,43,43,43,43,43,43,43,43,43,43,43,43,43,43,43,43,43,43,43,43,43,43,43,]),'object_literal':([6,10,11,23,25,26,30,40,48,54,83,84,88,89,92,98,120,121,122,123,124,146,149,150,151,152,155,158,160,168,170,175,182,183,184,185,186,187,188,189,190,191,207,210,226,238,243,251,252,253,254,255,256,257,258,259,260,261,262,263,264,265,266,267,268,269,272,273,274,275,276,277,278,342,343,352,389,393,394,395,396,397,398,399,400,401,402,403,404,405,406,407,408,409,410,412,420,434,443,446,447,488,491,493,495,509,],[105,105,105,105,105,105,105,105,105,105,105,105,105,105,105,105,105,105,105,105,105,105,105,105,105,105,105,105,105,105,105,105,105,105,105,105,105,105,105,105,105,105,105,105,105,105,105,105,105,105,105,105,105,105,105,105,105,105,105,105,105,105,105,105,105,105,105,105,105,105,105,105,105,105,105,105,105,105,105,105,105,105,105,105,105,105,105,105,105,105,105,105,105,105,105,105,105,105,105,105,105,105,105,105,105,105,]),'function_declaration':([0,7,44,68,126,383,386,418,421,436,441,477,483,492,501,502,504,512,528,531,533,534,536,540,],[45,45,45,179,179,179,45,179,179,45,45,45,45,179,179,45,45,179,45,179,179,45,45,179,]),'unary_expr_common':([0,6,7,10,11,23,25,26,30,40,44,48,68,83,84,88,89,90,92,120,121,122,123,124,126,146,149,150,151,152,155,158,160,168,170,171,174,175,181,182,183,184,185,186,187,188,189,190,191,192,207,210,226,238,243,251,252,253,254,255,256,257,258,259,260,261,262,263,26
4,265,266,267,268,269,272,273,274,275,276,277,278,342,343,352,383,386,389,393,394,395,396,397,398,399,400,401,402,403,404,405,406,407,408,409,410,412,418,420,421,434,436,441,443,446,447,477,483,488,491,492,493,495,501,502,504,509,512,528,531,533,534,536,540,],[46,106,46,106,106,106,106,106,106,106,46,106,46,106,106,106,106,46,106,106,106,106,106,106,46,106,106,106,106,106,106,106,106,106,106,46,46,106,46,106,106,106,106,106,106,106,106,106,106,46,106,106,106,106,106,106,106,106,106,106,106,106,106,106,106,106,106,106,106,106,106,106,106,106,106,106,106,106,106,106,106,106,106,106,46,46,106,106,106,106,106,106,106,106,106,106,106,106,106,106,106,106,106,106,106,106,46,106,46,106,46,46,106,106,106,46,46,106,106,46,106,106,46,46,46,106,46,46,46,46,46,46,46,]),'additive_expr':([26,83,88,89,92,120,121,122,146,149,155,158,160,168,170,175,182,183,184,185,186,187,188,189,190,191,207,210,226,238,243,254,255,256,257,258,259,260,261,262,263,264,265,266,267,268,269,272,275,276,277,278,342,343,352,389,393,394,395,396,397,398,399,400,401,402,403,404,405,406,407,408,409,410,412,420,434,443,446,447,488,491,493,495,509,],[140,140,140,140,140,245,246,247,140,140,140,140,140,140,140,140,140,140,140,140,140,140,140,140,140,140,140,140,140,140,140,140,140,140,140,140,140,140,140,368,369,370,140,140,140,140,140,140,140,140,140,140,140,140,140,140,140,140,140,140,140,140,140,140,140,140,140,140,140,140,140,140,140,140,140,140,140,140,140,140,140,140,140,140,140,]),'assignment_operator':([82,142,301,464,],[207,277,408,408,]),'case_clause':([473,496,520,],[498,518,498,]),'member_expr_nobf':([0,7,44,68,90,126,171,174,181,192,383,386,418,421,436,441,477,483,492,501,502,504,512,528,531,533,534,536,540,],[49,49,49,49,49,49,49,49,49,49,49,49,49,49,49,49,49,49,49,49,49,49,49,49,49,49,49,49,49,]),'numeric_literal':([0,6,7,10,11,23,25,26,30,40,44,48,54,68,83,84,88,89,90,92,98,104,120,121,122,123,124,126,146,149,150,151,152,155,158,160,168,170,171,174,175,181,182,183,184,185,186,187,188,189,190,191,
192,207,210,226,228,231,238,243,251,252,253,254,255,256,257,258,259,260,261,262,263,264,265,266,267,268,269,272,273,274,275,276,277,278,342,343,349,352,383,386,389,393,394,395,396,397,398,399,400,401,402,403,404,405,406,407,408,409,410,412,418,420,421,434,436,441,443,446,447,477,483,488,491,492,493,495,501,502,504,509,512,528,531,533,534,536,540,],[87,87,87,87,87,87,87,87,87,87,87,87,87,87,87,87,87,87,87,87,87,232,87,87,87,87,87,87,87,87,87,87,87,87,87,87,87,87,87,87,87,87,87,87,87,87,87,87,87,87,87,87,87,87,87,87,232,232,87,87,87,87,87,87,87,87,87,87,87,87,87,87,87,87,87,87,87,87,87,87,87,87,87,87,87,87,87,87,232,87,87,87,87,87,87,87,87,87,87,87,87,87,87,87,87,87,87,87,87,87,87,87,87,87,87,87,87,87,87,87,87,87,87,87,87,87,87,87,87,87,87,87,87,87,87,87,87,87,87,]),'assignment_expr_nobf':([0,7,44,68,126,383,386,418,421,436,441,477,483,492,501,502,504,512,528,531,533,534,536,540,],[51,51,51,51,51,51,51,51,51,51,51,51,51,51,51,51,51,51,51,51,51,51,51,51,]),'equality_expr_noin':([155,389,398,399,405,406,408,409,410,447,493,],[294,294,453,294,294,294,294,294,294,294,294,]),'unary_expr':([6,10,11,23,25,26,30,40,48,83,84,88,89,92,120,121,122,123,124,146,149,150,151,152,155,158,160,168,170,175,182,183,184,185,186,187,188,189,190,191,207,210,226,238,243,251,252,253,254,255,256,257,258,259,260,261,262,263,264,265,266,267,268,269,272,273,274,275,276,277,278,342,343,352,389,393,394,395,396,397,398,399,400,401,402,403,404,405,406,407,408,409,410,412,420,434,443,446,447,488,491,493,495,509,],[97,112,113,125,127,134,145,156,166,134,209,134,134,134,134,134,134,134,134,134,134,281,282,283,134,134,134,134,134,134,134,134,134,134,134,134,134,134,134,134,134,134,134,134,134,357,358,359,134,134,134,134,134,134,134,134,134,134,134,134,134,134,134,134,134,134,134,134,134,134,134,134,134,134,134,134,134,134,134,134,134,134,134,134,134,134,134,134,134,134,134,134,134,134,134,134,134,134,134,134,134,134,134,134,]),'unary_expr_nobf':([0,7,44,68,90,126,171,174,181,192,383,386,418,421,436,441,4
77,483,492,501,502,504,512,528,531,533,534,536,540,],[27,27,27,27,27,27,27,27,27,27,27,27,27,27,27,27,27,27,27,27,27,27,27,27,27,27,27,27,27,]),'function_body':([386,436,441,477,483,502,504,528,],[439,481,484,503,507,525,527,539,]),'variable_declaration':([17,242,],[119,355,]),'bitwise_xor_expr':([26,83,88,89,92,146,149,158,160,168,170,175,207,210,226,238,243,260,261,272,275,276,277,342,343,352,404,407,412,420,434,443,446,488,491,495,509,],[135,135,135,135,135,135,135,135,135,135,135,135,135,135,135,135,135,366,135,135,135,135,135,135,135,135,135,135,135,135,135,135,135,135,135,135,135,]),'conditional_expr_nobf':([0,7,44,68,126,383,386,418,421,436,441,477,483,492,501,502,504,512,528,531,533,534,536,540,],[55,55,55,55,55,55,55,55,55,55,55,55,55,55,55,55,55,55,55,55,55,55,55,55,]),'equality_expr':([26,83,88,89,92,146,149,158,160,168,170,175,207,210,226,238,243,260,261,265,272,275,276,277,278,342,343,352,404,407,412,420,434,443,446,488,491,495,509,],[136,136,136,136,136,136,136,136,136,136,136,136,136,136,136,136,136,136,136,136,136,136,136,136,382,136,136,136,136,136,136,136,136,136,136,136,136,136,136,]),'literal':([0,6,7,10,11,23,25,26,30,40,44,48,54,68,83,84,88,89,90,92,98,120,121,122,123,124,126,146,149,150,151,152,155,158,160,168,170,171,174,175,181,182,183,184,185,186,187,188,189,190,191,192,207,210,226,238,243,251,252,253,254,255,256,257,258,259,260,261,262,263,264,265,266,267,268,269,272,273,274,275,276,277,278,342,343,352,383,386,389,393,394,395,396,397,398,399,400,401,402,403,404,405,406,407,408,409,410,412,418,420,421,434,436,441,443,446,447,477,483,488,491,492,493,495,501,502,504,509,512,528,531,533,534,536,540,],[80,80,80,80,80,80,80,80,80,80,80,80,80,80,80,80,80,80,80,80,80,80,80,80,80,80,80,80,80,80,80,80,80,80,80,80,80,80,80,80,80,80,80,80,80,80,80,80,80,80,80,80,80,80,80,80,80,80,80,80,80,80,80,80,80,80,80,80,80,80,80,80,80,80,80,80,80,80,80,80,80,80,80,80,80,80,80,80,80,80,80,80,80,80,80,80,80,80,80,80,80,80,80,80,80,80,80,80,80,80,80,80,80,80,80,80,
80,80,80,80,80,80,80,80,80,80,80,80,80,80,80,80,80,80,80,]),'logical_and_expr_nobf':([0,7,44,68,90,126,383,386,418,421,436,441,477,483,492,501,502,504,512,528,531,533,534,536,540,],[56,56,56,56,214,56,56,56,56,56,56,56,56,56,56,56,56,56,56,56,56,56,56,56,56,]),'shift_expr_nobf':([0,7,44,68,90,126,171,174,181,192,383,386,418,421,436,441,477,483,492,501,502,504,512,528,531,533,534,536,540,],[18,18,18,18,18,18,18,18,18,18,18,18,18,18,18,18,18,18,18,18,18,18,18,18,18,18,18,18,18,]),'elision':([4,215,],[94,94,]),'statement':([0,7,44,68,126,383,386,418,421,436,441,477,483,492,501,502,504,512,528,531,533,534,536,540,],[58,58,58,180,250,435,58,471,475,58,58,58,58,514,524,58,58,532,58,541,542,58,58,547,]),'empty':([0,4,44,155,215,386,404,436,441,443,473,477,483,491,502,504,509,520,528,534,536,],[59,95,59,300,95,59,460,59,59,460,499,59,59,460,59,59,460,499,59,59,59,]),'new_expr':([6,10,11,23,25,26,30,40,48,54,83,84,88,89,92,98,120,121,122,123,124,146,149,150,151,152,155,158,160,168,170,175,182,183,184,185,186,187,188,189,190,191,207,210,226,238,243,251,252,253,254,255,256,257,258,259,260,261,262,263,264,265,266,267,268,269,272,273,274,275,276,277,278,342,343,352,389,393,394,395,396,397,398,399,400,401,402,403,404,405,406,407,408,409,410,412,420,434,443,446,447,488,491,493,495,509,],[101,101,101,101,101,101,101,101,101,172,101,101,101,101,101,221,101,101,101,101,101,101,101,101,101,101,101,101,101,101,101,101,101,101,101,101,101,101,101,101,101,101,101,101,101,101,101,101,101,101,101,101,101,101,101,101,101,101,101,101,101,101,101,101,101,101,101,101,101,101,101,101,101,101,101,101,101,101,101,101,101,101,101,101,101,101,101,101,101,101,101,101,101,101,101,101,101,101,101,101,101,101,101,101,101,101,]),'postfix_expr_nobf':([0,7,44,68,90,126,171,174,181,192,383,386,418,421,436,441,477,483,492,501,502,504,512,528,531,533,534,536,540,],[60,60,60,60,60,60,60,60,60,60,60,60,60,60,60,60,60,60,60,60,60,60,60,60,60,60,60,60,60,]),'regex_literal':([0,6,7,10,11,23,25,26,30,40,44,48,54,6
8,83,84,88,89,90,92,98,120,121,122,123,124,126,146,149,150,151,152,155,158,160,168,170,171,174,175,181,182,183,184,185,186,187,188,189,190,191,192,207,210,226,238,243,251,252,253,254,255,256,257,258,259,260,261,262,263,264,265,266,267,268,269,272,273,274,275,276,277,278,342,343,352,383,386,389,393,394,395,396,397,398,399,400,401,402,403,404,405,406,407,408,409,410,412,418,420,421,434,436,441,443,446,447,477,483,488,491,492,493,495,501,502,504,509,512,528,531,533,534,536,540,],[61,61,61,61,61,61,61,61,61,61,61,61,61,61,61,61,61,61,61,61,61,61,61,61,61,61,61,61,61,61,61,61,61,61,61,61,61,61,61,61,61,61,61,61,61,61,61,61,61,61,61,61,61,61,61,61,61,61,61,61,61,61,61,61,61,61,61,61,61,61,61,61,61,61,61,61,61,61,61,61,61,61,61,61,61,61,61,61,61,61,61,61,61,61,61,61,61,61,61,61,61,61,61,61,61,61,61,61,61,61,61,61,61,61,61,61,61,61,61,61,61,61,61,61,61,61,61,61,61,61,61,61,61,61,61,]),'conditional_expr_noin':([155,406,408,409,447,493,],[298,298,298,298,298,298,]),'variable_declaration_list':([17,],[117,]),'catch':([161,],[311,]),'expr_noin':([155,],[299,]),'conditional_expr':([26,83,88,89,92,146,149,158,160,168,170,175,207,210,226,238,243,272,275,277,342,343,352,404,407,412,420,434,443,446,488,491,495,509,],[137,137,137,137,137,137,137,137,137,137,137,137,137,137,137,137,137,137,137,137,137,137,137,137,137,137,137,137,137,137,137,137,137,137,]),'default_clause':([497,],[520,]),'expr':([26,83,88,146,158,168,170,175,210,226,238,404,407,420,443,446,488,491,495,509,],[138,208,211,279,304,316,317,321,339,347,354,461,465,474,461,489,511,461,517,461,]),'empty_statement':([0,7,44,68,126,383,386,418,421,436,441,477,483,492,501,502,504,512,528,531,533,534,536,540,],[65,65,65,65,65,65,65,65,65,65,65,65,65,65,65,65,65,65,65,65,65,65,65,65,]),'bitwise_or_expr_noin':([155,389,406,408,409,410,447,493,],[297,442,297,297,297,297,297,297,]),'member_expr':([6,10,11,23,25,26,30,40,48,54,83,84,88,89,92,98,120,121,122,123,124,146,149,150,151,152,155,158,160,168,170,175,182,183,184,185,186,187,18
8,189,190,191,207,210,226,238,243,251,252,253,254,255,256,257,258,259,260,261,262,263,264,265,266,267,268,269,272,273,274,275,276,277,278,342,343,352,389,393,394,395,396,397,398,399,400,401,402,403,404,405,406,407,408,409,410,412,420,434,443,446,447,488,491,493,495,509,],[109,109,109,109,109,109,109,109,109,173,109,109,109,109,109,222,109,109,109,109,109,109,109,109,109,109,109,109,109,109,109,109,109,109,109,109,109,109,109,109,109,109,109,109,109,109,109,109,109,109,109,109,109,109,109,109,109,109,109,109,109,109,109,109,109,109,109,109,109,109,109,109,109,109,109,109,109,109,109,109,109,109,109,109,109,109,109,109,109,109,109,109,109,109,109,109,109,109,109,109,109,109,109,109,109,109,]),'assignment_expr':([26,83,88,89,92,146,149,158,160,168,170,175,207,210,226,238,243,272,275,277,342,343,352,404,407,412,420,434,443,446,488,491,495,509,],[139,139,139,212,217,139,280,139,307,139,139,139,337,139,139,139,356,376,379,381,422,423,432,139,139,469,139,480,139,139,139,139,139,139,]),'initializer_noin':([392,487,],[445,510,]),'source_element':([0,7,44,386,436,441,477,483,502,504,528,534,536,],[67,111,67,67,67,67,67,67,67,67,67,67,67,]),'bitwise_or_expr_nobf':([0,7,44,68,90,126,174,383,386,418,421,436,441,477,483,492,501,502,504,512,528,531,533,534,536,540,],[69,69,69,69,69,69,320,69,69,69,69,69,69,69,69,69,69,69,69,69,69,69,69,69,69,69,]),'case_clauses':([473,520,],[496,496,]),'logical_or_expr':([26,83,88,89,92,146,149,158,160,168,170,175,207,210,226,238,243,272,275,277,342,343,352,404,407,412,420,434,443,446,488,491,495,509,],[141,141,141,141,141,141,141,141,141,141,141,141,141,141,141,141,141,141,141,141,141,141,141,141,141,141,141,141,141,141,141,141,141,141,]),'left_hand_side_expr':([6,10,11,23,25,26,30,40,48,83,84,88,89,92,120,121,122,123,124,146,149,150,151,152,155,158,160,168,170,175,182,183,184,185,186,187,188,189,190,191,207,210,226,238,243,251,252,253,254,255,256,257,258,259,260,261,262,263,264,265,266,267,268,269,272,273,274,275,276,277,278,342,343,352,389,393,
394,395,396,397,398,399,400,401,402,403,404,405,406,407,408,409,410,412,420,434,443,446,447,488,491,493,495,509,],[99,99,99,99,99,142,99,99,99,142,99,142,142,142,99,99,99,99,99,142,142,99,99,99,301,142,142,142,142,142,99,99,99,99,99,99,99,99,99,99,142,142,142,142,142,99,99,99,99,99,99,99,99,99,99,99,99,99,99,99,99,99,99,99,142,99,99,142,99,142,99,142,142,142,99,99,99,99,99,99,99,99,99,99,99,99,142,99,464,142,464,464,99,142,142,142,142,142,464,142,142,464,142,142,]),'property_name':([104,228,231,349,],[235,348,351,235,]),'equality_expr_nobf':([0,7,44,68,90,126,171,174,181,192,383,386,418,421,436,441,477,483,492,501,502,504,512,528,531,533,534,536,540,],[73,73,73,73,73,73,73,73,73,336,73,73,73,73,73,73,73,73,73,73,73,73,73,73,73,73,73,73,73,]),'relational_expr_nobf':([0,7,44,68,90,126,171,174,181,192,383,386,418,421,436,441,477,483,492,501,502,504,512,528,531,533,534,536,540,],[74,74,74,74,74,74,74,74,74,74,74,74,74,74,74,74,74,74,74,74,74,74,74,74,74,74,74,74,74,]),'return_statement':([0,7,44,68,126,383,386,418,421,436,441,477,483,492,501,502,504,512,528,531,533,534,536,540,],[75,75,75,75,75,75,75,75,75,75,75,75,75,75,75,75,75,75,75,75,75,75,75,75,]),'bitwise_and_expr_nobf':([0,7,44,68,90,126,171,174,181,383,386,418,421,436,441,477,483,492,501,502,504,512,528,531,533,534,536,540,],[76,76,76,76,76,76,318,76,76,76,76,76,76,76,76,76,76,76,76,76,76,76,76,76,76,76,76,76,]),'arguments':([41,49,103,109,173,222,],[159,169,227,239,319,345,]),'if_statement':([0,7,44,68,126,383,386,418,421,436,441,477,483,492,501,502,504,512,528,531,533,534,536,540,],[77,77,77,77,77,77,77,77,77,77,77,77,77,77,77,77,77,77,77,77,77,77,77,77,]),'logical_or_expr_noin':([155,406,408,409,447,493,],[302,302,302,302,302,302,]),'auto_semi':([14,26,34,47,66,117,138,165,178,211,500,],[114,133,148,164,177,241,271,314,322,341,523,]),'call_expr':([6,10,11,23,25,26,30,40,48,83,84,88,89,92,120,121,122,123,124,146,149,150,151,152,155,158,160,168,170,175,182,183,184,185,186,187,188,189,190,191,207,210,226,238,24
3,251,252,253,254,255,256,257,258,259,260,261,262,263,264,265,266,267,268,269,272,273,274,275,276,277,278,342,343,352,389,393,394,395,396,397,398,399,400,401,402,403,404,405,406,407,408,409,410,412,420,434,443,446,447,488,491,493,495,509,],[103,103,103,103,103,103,103,103,103,103,103,103,103,103,103,103,103,103,103,103,103,103,103,103,103,103,103,103,103,103,103,103,103,103,103,103,103,103,103,103,103,103,103,103,103,103,103,103,103,103,103,103,103,103,103,103,103,103,103,103,103,103,103,103,103,103,103,103,103,103,103,103,103,103,103,103,103,103,103,103,103,103,103,103,103,103,103,103,103,103,103,103,103,103,103,103,103,103,103,103,103,103,103,103,]),'array_literal':([0,6,7,10,11,23,25,26,30,40,44,48,54,68,83,84,88,89,90,92,98,120,121,122,123,124,126,146,149,150,151,152,155,158,160,168,170,171,174,175,181,182,183,184,185,186,187,188,189,190,191,192,207,210,226,238,243,251,252,253,254,255,256,257,258,259,260,261,262,263,264,265,266,267,268,269,272,273,274,275,276,277,278,342,343,352,383,386,389,393,394,395,396,397,398,399,400,401,402,403,404,405,406,407,408,409,410,412,418,420,421,434,436,441,443,446,447,477,483,488,491,492,493,495,501,502,504,509,512,528,531,533,534,536,540,],[81,81,81,81,81,81,81,81,81,81,81,81,81,81,81,81,81,81,81,81,81,81,81,81,81,81,81,81,81,81,81,81,81,81,81,81,81,81,81,81,81,81,81,81,81,81,81,81,81,81,81,81,81,81,81,81,81,81,81,81,81,81,81,81,81,81,81,81,81,81,81,81,81,81,81,81,81,81,81,81,81,81,81,81,81,81,81,81,81,81,81,81,81,81,81,81,81,81,81,81,81,81,81,81,81,81,81,81,81,81,81,81,81,81,81,81,81,81,81,81,81,81,81,81,81,81,81,81,81,81,81,81,81,81,81,]),'left_hand_side_expr_nobf':([0,7,44,68,90,126,171,174,181,192,383,386,418,421,436,441,477,483,492,501,502,504,512,528,531,533,534,536,540,],[82,82,82,82,213,82,213,213,213,213,82,82,82,82,82,82,82,82,82,82,82,82,82,82,82,82,82,82,82,]),'assignment_expr_noin':([155,406,408,409,447,493,],[295,463,466,467,490,515,]),'elision_opt':([4,215,],[92,343,]),'bitwise_and_expr':([26,83,88,89,92,146,149,1
58,160,168,170,175,207,210,226,238,243,260,261,265,272,275,276,277,342,343,352,404,407,412,420,434,443,446,488,491,495,509,],[144,144,144,144,144,144,144,144,144,144,144,144,144,144,144,144,144,144,144,371,144,144,144,144,144,144,144,144,144,144,144,144,144,144,144,144,144,144,]),'block':([0,7,42,44,68,126,308,383,386,418,421,436,441,477,483,492,494,501,502,504,512,528,531,533,534,536,540,],[50,50,161,50,50,50,414,50,50,50,50,50,50,50,50,50,516,50,50,50,50,50,50,50,50,50,50,]),'debugger_statement':([0,7,44,68,126,383,386,418,421,436,441,477,483,492,501,502,504,512,528,531,533,534,536,540,],[62,62,62,62,62,62,62,62,62,62,62,62,62,62,62,62,62,62,62,62,62,62,62,62,]),}
+
+_lr_goto = { }
+for _k, _v in _lr_goto_items.items():
+   for _x,_y in zip(_v[0],_v[1]):
+       if not _x in _lr_goto: _lr_goto[_x] = { }
+       _lr_goto[_x][_k] = _y
+del _lr_goto_items
+_lr_productions = [
+  ("S' -> program","S'",1,None,None,None),
+  ('empty -> <empty>','empty',0,'p_empty','/home/alienoid/dev/python/slimit/src/slimit/parser.py',96),
+  ('auto_semi -> error','auto_semi',1,'p_auto_semi','/home/alienoid/dev/python/slimit/src/slimit/parser.py',100),
+  ('program -> source_elements','program',1,'p_program','/home/alienoid/dev/python/slimit/src/slimit/parser.py',130),
+  ('source_elements -> empty','source_elements',1,'p_source_elements','/home/alienoid/dev/python/slimit/src/slimit/parser.py',134),
+  ('source_elements -> source_element_list','source_elements',1,'p_source_elements','/home/alienoid/dev/python/slimit/src/slimit/parser.py',135),
+  ('source_element_list -> source_element','source_element_list',1,'p_source_element_list','/home/alienoid/dev/python/slimit/src/slimit/parser.py',140),
+  ('source_element_list -> source_element_list source_element','source_element_list',2,'p_source_element_list','/home/alienoid/dev/python/slimit/src/slimit/parser.py',141),
+  ('source_element -> statement','source_element',1,'p_source_element','/home/alienoid/dev/python/slimit/src/slimit/parser.py',150),
+  ('source_element -> function_declaration','source_element',1,'p_source_element','/home/alienoid/dev/python/slimit/src/slimit/parser.py',151),
+  ('statement -> block','statement',1,'p_statement','/home/alienoid/dev/python/slimit/src/slimit/parser.py',156),
+  ('statement -> variable_statement','statement',1,'p_statement','/home/alienoid/dev/python/slimit/src/slimit/parser.py',157),
+  ('statement -> empty_statement','statement',1,'p_statement','/home/alienoid/dev/python/slimit/src/slimit/parser.py',158),
+  ('statement -> expr_statement','statement',1,'p_statement','/home/alienoid/dev/python/slimit/src/slimit/parser.py',159),
+  ('statement -> if_statement','statement',1,'p_statement','/home/alienoid/dev/python/slimit/src/slimit/parser.py',160),
+  ('statement -> iteration_statement','statement',1,'p_statement','/home/alienoid/dev/python/slimit/src/slimit/parser.py',161),
+  ('statement -> continue_statement','statement',1,'p_statement','/home/alienoid/dev/python/slimit/src/slimit/parser.py',162),
+  ('statement -> break_statement','statement',1,'p_statement','/home/alienoid/dev/python/slimit/src/slimit/parser.py',163),
+  ('statement -> return_statement','statement',1,'p_statement','/home/alienoid/dev/python/slimit/src/slimit/parser.py',164),
+  ('statement -> with_statement','statement',1,'p_statement','/home/alienoid/dev/python/slimit/src/slimit/parser.py',165),
+  ('statement -> switch_statement','statement',1,'p_statement','/home/alienoid/dev/python/slimit/src/slimit/parser.py',166),
+  ('statement -> labelled_statement','statement',1,'p_statement','/home/alienoid/dev/python/slimit/src/slimit/parser.py',167),
+  ('statement -> throw_statement','statement',1,'p_statement','/home/alienoid/dev/python/slimit/src/slimit/parser.py',168),
+  ('statement -> try_statement','statement',1,'p_statement','/home/alienoid/dev/python/slimit/src/slimit/parser.py',169),
+  ('statement -> debugger_statement','statement',1,'p_statement','/home/alienoid/dev/python/slimit/src/slimit/parser.py',170),
+  ('statement -> function_declaration','statement',1,'p_statement','/home/alienoid/dev/python/slimit/src/slimit/parser.py',171),
+  ('block -> LBRACE source_elements RBRACE','block',3,'p_block','/home/alienoid/dev/python/slimit/src/slimit/parser.py',178),
+  ('literal -> null_literal','literal',1,'p_literal','/home/alienoid/dev/python/slimit/src/slimit/parser.py',182),
+  ('literal -> boolean_literal','literal',1,'p_literal','/home/alienoid/dev/python/slimit/src/slimit/parser.py',183),
+  ('literal -> numeric_literal','literal',1,'p_literal','/home/alienoid/dev/python/slimit/src/slimit/parser.py',184),
+  ('literal -> string_literal','literal',1,'p_literal','/home/alienoid/dev/python/slimit/src/slimit/parser.py',185),
+  ('literal -> regex_literal','literal',1,'p_literal','/home/alienoid/dev/python/slimit/src/slimit/parser.py',186),
+  ('boolean_literal -> TRUE','boolean_literal',1,'p_boolean_literal','/home/alienoid/dev/python/slimit/src/slimit/parser.py',191),
+  ('boolean_literal -> FALSE','boolean_literal',1,'p_boolean_literal','/home/alienoid/dev/python/slimit/src/slimit/parser.py',192),
+  ('null_literal -> NULL','null_literal',1,'p_null_literal','/home/alienoid/dev/python/slimit/src/slimit/parser.py',197),
+  ('numeric_literal -> NUMBER','numeric_literal',1,'p_numeric_literal','/home/alienoid/dev/python/slimit/src/slimit/parser.py',201),
+  ('string_literal -> STRING','string_literal',1,'p_string_literal','/home/alienoid/dev/python/slimit/src/slimit/parser.py',205),
+  ('regex_literal -> REGEX','regex_literal',1,'p_regex_literal','/home/alienoid/dev/python/slimit/src/slimit/parser.py',209),
+  ('identifier -> ID','identifier',1,'p_identifier','/home/alienoid/dev/python/slimit/src/slimit/parser.py',213),
+  ('primary_expr -> primary_expr_no_brace','primary_expr',1,'p_primary_expr','/home/alienoid/dev/python/slimit/src/slimit/parser.py',220),
+  ('primary_expr -> object_literal','primary_expr',1,'p_primary_expr','/home/alienoid/dev/python/slimit/src/slimit/parser.py',221),
+  ('primary_expr_no_brace -> identifier','primary_expr_no_brace',1,'p_primary_expr_no_brace_1','/home/alienoid/dev/python/slimit/src/slimit/parser.py',226),
+  ('primary_expr_no_brace -> THIS','primary_expr_no_brace',1,'p_primary_expr_no_brace_2','/home/alienoid/dev/python/slimit/src/slimit/parser.py',232),
+  ('primary_expr_no_brace -> literal','primary_expr_no_brace',1,'p_primary_expr_no_brace_3','/home/alienoid/dev/python/slimit/src/slimit/parser.py',236),
+  ('primary_expr_no_brace -> array_literal','primary_expr_no_brace',1,'p_primary_expr_no_brace_3','/home/alienoid/dev/python/slimit/src/slimit/parser.py',237),
+  ('primary_expr_no_brace -> LPAREN expr RPAREN','primary_expr_no_brace',3,'p_primary_expr_no_brace_4','/home/alienoid/dev/python/slimit/src/slimit/parser.py',242),
+  ('array_literal -> LBRACKET elision_opt RBRACKET','array_literal',3,'p_array_literal_1','/home/alienoid/dev/python/slimit/src/slimit/parser.py',247),
+  ('array_literal -> LBRACKET element_list RBRACKET','array_literal',3,'p_array_literal_2','/home/alienoid/dev/python/slimit/src/slimit/parser.py',251),
+  ('array_literal -> LBRACKET element_list COMMA elision_opt RBRACKET','array_literal',5,'p_array_literal_2','/home/alienoid/dev/python/slimit/src/slimit/parser.py',252),
+  ('element_list -> elision_opt assignment_expr','element_list',2,'p_element_list','/home/alienoid/dev/python/slimit/src/slimit/parser.py',261),
+  ('element_list -> element_list COMMA elision_opt assignment_expr','element_list',4,'p_element_list','/home/alienoid/dev/python/slimit/src/slimit/parser.py',262),
+  ('elision_opt -> empty','elision_opt',1,'p_elision_opt_1','/home/alienoid/dev/python/slimit/src/slimit/parser.py',272),
+  ('elision_opt -> elision','elision_opt',1,'p_elision_opt_2','/home/alienoid/dev/python/slimit/src/slimit/parser.py',276),
+  ('elision -> COMMA','elision',1,'p_elision','/home/alienoid/dev/python/slimit/src/slimit/parser.py',280),
+  ('elision -> elision COMMA','elision',2,'p_elision','/home/alienoid/dev/python/slimit/src/slimit/parser.py',281),
+  ('object_literal -> LBRACE RBRACE','object_literal',2,'p_object_literal','/home/alienoid/dev/python/slimit/src/slimit/parser.py',290),
+  ('object_literal -> LBRACE property_list RBRACE','object_literal',3,'p_object_literal','/home/alienoid/dev/python/slimit/src/slimit/parser.py',291),
+  ('object_literal -> LBRACE property_list COMMA RBRACE','object_literal',4,'p_object_literal','/home/alienoid/dev/python/slimit/src/slimit/parser.py',292),
+  ('property_list -> property_assignment','property_list',1,'p_property_list','/home/alienoid/dev/python/slimit/src/slimit/parser.py',300),
+  ('property_list -> property_list COMMA property_assignment','property_list',3,'p_property_list','/home/alienoid/dev/python/slimit/src/slimit/parser.py',301),
+  ('property_assignment -> property_name COLON assignment_expr','property_assignment',3,'p_property_assignment','/home/alienoid/dev/python/slimit/src/slimit/parser.py',311),
+  ('property_assignment -> GETPROP property_name LPAREN RPAREN LBRACE function_body RBRACE','property_assignment',7,'p_property_assignment','/home/alienoid/dev/python/slimit/src/slimit/parser.py',312),
+  ('property_assignment -> SETPROP property_name LPAREN formal_parameter_list RPAREN LBRACE function_body RBRACE','property_assignment',8,'p_property_assignment','/home/alienoid/dev/python/slimit/src/slimit/parser.py',313),
+  ('property_name -> identifier','property_name',1,'p_property_name','/home/alienoid/dev/python/slimit/src/slimit/parser.py',326),
+  ('property_name -> string_literal','property_name',1,'p_property_name','/home/alienoid/dev/python/slimit/src/slimit/parser.py',327),
+  ('property_name -> numeric_literal','property_name',1,'p_property_name','/home/alienoid/dev/python/slimit/src/slimit/parser.py',328),
+  ('member_expr -> primary_expr','member_expr',1,'p_member_expr','/home/alienoid/dev/python/slimit/src/slimit/parser.py',334),
+  ('member_expr -> function_expr','member_expr',1,'p_member_expr','/home/alienoid/dev/python/slimit/src/slimit/parser.py',335),
+  ('member_expr -> member_expr LBRACKET expr RBRACKET','member_expr',4,'p_member_expr','/home/alienoid/dev/python/slimit/src/slimit/parser.py',336),
+  ('member_expr -> member_expr PERIOD identifier','member_expr',3,'p_member_expr','/home/alienoid/dev/python/slimit/src/slimit/parser.py',337),
+  ('member_expr -> NEW member_expr arguments','member_expr',3,'p_member_expr','/home/alienoid/dev/python/slimit/src/slimit/parser.py',338),
+  ('member_expr_nobf -> primary_expr_no_brace','member_expr_nobf',1,'p_member_expr_nobf','/home/alienoid/dev/python/slimit/src/slimit/parser.py',350),
+  ('member_expr_nobf -> function_expr','member_expr_nobf',1,'p_member_expr_nobf','/home/alienoid/dev/python/slimit/src/slimit/parser.py',351),
+  ('member_expr_nobf -> member_expr_nobf LBRACKET expr RBRACKET','member_expr_nobf',4,'p_member_expr_nobf','/home/alienoid/dev/python/slimit/src/slimit/parser.py',352),
+  ('member_expr_nobf -> member_expr_nobf PERIOD identifier','member_expr_nobf',3,'p_member_expr_nobf','/home/alienoid/dev/python/slimit/src/slimit/parser.py',353),
+  ('member_expr_nobf -> NEW member_expr arguments','member_expr_nobf',3,'p_member_expr_nobf','/home/alienoid/dev/python/slimit/src/slimit/parser.py',354),
+  ('new_expr -> member_expr','new_expr',1,'p_new_expr','/home/alienoid/dev/python/slimit/src/slimit/parser.py',366),
+  ('new_expr -> NEW new_expr','new_expr',2,'p_new_expr','/home/alienoid/dev/python/slimit/src/slimit/parser.py',367),
+  ('new_expr_nobf -> member_expr_nobf','new_expr_nobf',1,'p_new_expr_nobf','/home/alienoid/dev/python/slimit/src/slimit/parser.py',375),
+  ('new_expr_nobf -> NEW new_expr','new_expr_nobf',2,'p_new_expr_nobf','/home/alienoid/dev/python/slimit/src/slimit/parser.py',376),
+  ('call_expr -> member_expr arguments','call_expr',2,'p_call_expr','/home/alienoid/dev/python/slimit/src/slimit/parser.py',384),
+  ('call_expr -> call_expr arguments','call_expr',2,'p_call_expr','/home/alienoid/dev/python/slimit/src/slimit/parser.py',385),
+  ('call_expr -> call_expr LBRACKET expr RBRACKET','call_expr',4,'p_call_expr','/home/alienoid/dev/python/slimit/src/slimit/parser.py',386),
+  ('call_expr -> call_expr PERIOD identifier','call_expr',3,'p_call_expr','/home/alienoid/dev/python/slimit/src/slimit/parser.py',387),
+  ('call_expr_nobf -> member_expr_nobf arguments','call_expr_nobf',2,'p_call_expr_nobf','/home/alienoid/dev/python/slimit/src/slimit/parser.py',397),
+  ('call_expr_nobf -> call_expr_nobf arguments','call_expr_nobf',2,'p_call_expr_nobf','/home/alienoid/dev/python/slimit/src/slimit/parser.py',398),
+  ('call_expr_nobf -> call_expr_nobf LBRACKET expr RBRACKET','call_expr_nobf',4,'p_call_expr_nobf','/home/alienoid/dev/python/slimit/src/slimit/parser.py',399),
+  ('call_expr_nobf -> call_expr_nobf PERIOD identifier','call_expr_nobf',3,'p_call_expr_nobf','/home/alienoid/dev/python/slimit/src/slimit/parser.py',400),
+  ('arguments -> LPAREN RPAREN','arguments',2,'p_arguments','/home/alienoid/dev/python/slimit/src/slimit/parser.py',410),
+  ('arguments -> LPAREN argument_list RPAREN','arguments',3,'p_arguments','/home/alienoid/dev/python/slimit/src/slimit/parser.py',411),
+  ('argument_list -> assignment_expr','argument_list',1,'p_argument_list','/home/alienoid/dev/python/slimit/src/slimit/parser.py',417),
+  ('argument_list -> argument_list COMMA assignment_expr','argument_list',3,'p_argument_list','/home/alienoid/dev/python/slimit/src/slimit/parser.py',418),
+  ('left_hand_side_expr -> new_expr','left_hand_side_expr',1,'p_lef_hand_side_expr','/home/alienoid/dev/python/slimit/src/slimit/parser.py',427),
+  ('left_hand_side_expr -> call_expr','left_hand_side_expr',1,'p_lef_hand_side_expr','/home/alienoid/dev/python/slimit/src/slimit/parser.py',428),
+  ('left_hand_side_expr_nobf -> new_expr_nobf','left_hand_side_expr_nobf',1,'p_lef_hand_side_expr_nobf','/home/alienoid/dev/python/slimit/src/slimit/parser.py',433),
+  ('left_hand_side_expr_nobf -> call_expr_nobf','left_hand_side_expr_nobf',1,'p_lef_hand_side_expr_nobf','/home/alienoid/dev/python/slimit/src/slimit/parser.py',434),
+  ('postfix_expr -> left_hand_side_expr','postfix_expr',1,'p_postfix_expr','/home/alienoid/dev/python/slimit/src/slimit/parser.py',440),
+  ('postfix_expr -> left_hand_side_expr PLUSPLUS','postfix_expr',2,'p_postfix_expr','/home/alienoid/dev/python/slimit/src/slimit/parser.py',441),
+  ('postfix_expr -> left_hand_side_expr MINUSMINUS','postfix_expr',2,'p_postfix_expr','/home/alienoid/dev/python/slimit/src/slimit/parser.py',442),
+  ('postfix_expr_nobf -> left_hand_side_expr_nobf','postfix_expr_nobf',1,'p_postfix_expr_nobf','/home/alienoid/dev/python/slimit/src/slimit/parser.py',450),
+  ('postfix_expr_nobf -> left_hand_side_expr_nobf PLUSPLUS','postfix_expr_nobf',2,'p_postfix_expr_nobf','/home/alienoid/dev/python/slimit/src/slimit/parser.py',451),
+  ('postfix_expr_nobf -> left_hand_side_expr_nobf MINUSMINUS','postfix_expr_nobf',2,'p_postfix_expr_nobf','/home/alienoid/dev/python/slimit/src/slimit/parser.py',452),
+  ('unary_expr -> postfix_expr','unary_expr',1,'p_unary_expr','/home/alienoid/dev/python/slimit/src/slimit/parser.py',461),
+  ('unary_expr -> unary_expr_common','unary_expr',1,'p_unary_expr','/home/alienoid/dev/python/slimit/src/slimit/parser.py',462),
+  ('unary_expr_nobf -> postfix_expr_nobf','unary_expr_nobf',1,'p_unary_expr_nobf','/home/alienoid/dev/python/slimit/src/slimit/parser.py',467),
+  ('unary_expr_nobf -> unary_expr_common','unary_expr_nobf',1,'p_unary_expr_nobf','/home/alienoid/dev/python/slimit/src/slimit/parser.py',468),
+  ('unary_expr_common -> DELETE unary_expr','unary_expr_common',2,'p_unary_expr_common','/home/alienoid/dev/python/slimit/src/slimit/parser.py',473),
+  ('unary_expr_common -> VOID unary_expr','unary_expr_common',2,'p_unary_expr_common','/home/alienoid/dev/python/slimit/src/slimit/parser.py',474),
+  ('unary_expr_common -> TYPEOF unary_expr','unary_expr_common',2,'p_unary_expr_common','/home/alienoid/dev/python/slimit/src/slimit/parser.py',475),
+  ('unary_expr_common -> PLUSPLUS unary_expr','unary_expr_common',2,'p_unary_expr_common','/home/alienoid/dev/python/slimit/src/slimit/parser.py',476),
+  ('unary_expr_common -> MINUSMINUS unary_expr','unary_expr_common',2,'p_unary_expr_common','/home/alienoid/dev/python/slimit/src/slimit/parser.py',477),
+  ('unary_expr_common -> PLUS unary_expr','unary_expr_common',2,'p_unary_expr_common','/home/alienoid/dev/python/slimit/src/slimit/parser.py',478),
+  ('unary_expr_common -> MINUS unary_expr','unary_expr_common',2,'p_unary_expr_common','/home/alienoid/dev/python/slimit/src/slimit/parser.py',479),
+  ('unary_expr_common -> BNOT unary_expr','unary_expr_common',2,'p_unary_expr_common','/home/alienoid/dev/python/slimit/src/slimit/parser.py',480),
+  ('unary_expr_common -> NOT unary_expr','unary_expr_common',2,'p_unary_expr_common','/home/alienoid/dev/python/slimit/src/slimit/parser.py',481),
+  ('multiplicative_expr -> unary_expr','multiplicative_expr',1,'p_multiplicative_expr','/home/alienoid/dev/python/slimit/src/slimit/parser.py',487),
+  ('multiplicative_expr -> multiplicative_expr MULT unary_expr','multiplicative_expr',3,'p_multiplicative_expr','/home/alienoid/dev/python/slimit/src/slimit/parser.py',488),
+  ('multiplicative_expr -> multiplicative_expr DIV unary_expr','multiplicative_expr',3,'p_multiplicative_expr','/home/alienoid/dev/python/slimit/src/slimit/parser.py',489),
+  ('multiplicative_expr -> multiplicative_expr MOD unary_expr','multiplicative_expr',3,'p_multiplicative_expr','/home/alienoid/dev/python/slimit/src/slimit/parser.py',490),
+  ('multiplicative_expr_nobf -> unary_expr_nobf','multiplicative_expr_nobf',1,'p_multiplicative_expr_nobf','/home/alienoid/dev/python/slimit/src/slimit/parser.py',498),
+  ('multiplicative_expr_nobf -> multiplicative_expr_nobf MULT unary_expr','multiplicative_expr_nobf',3,'p_multiplicative_expr_nobf','/home/alienoid/dev/python/slimit/src/slimit/parser.py',499),
+  ('multiplicative_expr_nobf -> multiplicative_expr_nobf DIV unary_expr','multiplicative_expr_nobf',3,'p_multiplicative_expr_nobf','/home/alienoid/dev/python/slimit/src/slimit/parser.py',500),
+  ('multiplicative_expr_nobf -> multiplicative_expr_nobf MOD unary_expr','multiplicative_expr_nobf',3,'p_multiplicative_expr_nobf','/home/alienoid/dev/python/slimit/src/slimit/parser.py',501),
+  ('additive_expr -> multiplicative_expr','additive_expr',1,'p_additive_expr','/home/alienoid/dev/python/slimit/src/slimit/parser.py',510),
+  ('additive_expr -> additive_expr PLUS multiplicative_expr','additive_expr',3,'p_additive_expr','/home/alienoid/dev/python/slimit/src/slimit/parser.py',511),
+  ('additive_expr -> additive_expr MINUS multiplicative_expr','additive_expr',3,'p_additive_expr','/home/alienoid/dev/python/slimit/src/slimit/parser.py',512),
+  ('additive_expr_nobf -> multiplicative_expr_nobf','additive_expr_nobf',1,'p_additive_expr_nobf','/home/alienoid/dev/python/slimit/src/slimit/parser.py',520),
+  ('additive_expr_nobf -> additive_expr_nobf PLUS multiplicative_expr','additive_expr_nobf',3,'p_additive_expr_nobf','/home/alienoid/dev/python/slimit/src/slimit/parser.py',521),
+  ('additive_expr_nobf -> additive_expr_nobf MINUS multiplicative_expr','additive_expr_nobf',3,'p_additive_expr_nobf','/home/alienoid/dev/python/slimit/src/slimit/parser.py',522),
+  ('shift_expr -> additive_expr','shift_expr',1,'p_shift_expr','/home/alienoid/dev/python/slimit/src/slimit/parser.py',531),
+  ('shift_expr -> shift_expr LSHIFT additive_expr','shift_expr',3,'p_shift_expr','/home/alienoid/dev/python/slimit/src/slimit/parser.py',532),
+  ('shift_expr -> shift_expr RSHIFT additive_expr','shift_expr',3,'p_shift_expr','/home/alienoid/dev/python/slimit/src/slimit/parser.py',533),
+  ('shift_expr -> shift_expr URSHIFT additive_expr','shift_expr',3,'p_shift_expr','/home/alienoid/dev/python/slimit/src/slimit/parser.py',534),
+  ('shift_expr_nobf -> additive_expr_nobf','shift_expr_nobf',1,'p_shift_expr_nobf','/home/alienoid/dev/python/slimit/src/slimit/parser.py',542),
+  ('shift_expr_nobf -> shift_expr_nobf LSHIFT additive_expr','shift_expr_nobf',3,'p_shift_expr_nobf','/home/alienoid/dev/python/slimit/src/slimit/parser.py',543),
+  ('shift_expr_nobf -> shift_expr_nobf RSHIFT additive_expr','shift_expr_nobf',3,'p_shift_expr_nobf','/home/alienoid/dev/python/slimit/src/slimit/parser.py',544),
+  ('shift_expr_nobf -> shift_expr_nobf URSHIFT additive_expr','shift_expr_nobf',3,'p_shift_expr_nobf','/home/alienoid/dev/python/slimit/src/slimit/parser.py',545),
+  ('relational_expr -> shift_expr','relational_expr',1,'p_relational_expr','/home/alienoid/dev/python/slimit/src/slimit/parser.py',555),
+  ('relational_expr -> relational_expr LT shift_expr','relational_expr',3,'p_relational_expr','/home/alienoid/dev/python/slimit/src/slimit/parser.py',556),
+  ('relational_expr -> relational_expr GT shift_expr','relational_expr',3,'p_relational_expr','/home/alienoid/dev/python/slimit/src/slimit/parser.py',557),
+  ('relational_expr -> relational_expr LE shift_expr','relational_expr',3,'p_relational_expr','/home/alienoid/dev/python/slimit/src/slimit/parser.py',558),
+  ('relational_expr -> relational_expr GE shift_expr','relational_expr',3,'p_relational_expr','/home/alienoid/dev/python/slimit/src/slimit/parser.py',559),
+  ('relational_expr -> relational_expr INSTANCEOF shift_expr','relational_expr',3,'p_relational_expr','/home/alienoid/dev/python/slimit/src/slimit/parser.py',560),
+  ('relational_expr -> relational_expr IN shift_expr','relational_expr',3,'p_relational_expr','/home/alienoid/dev/python/slimit/src/slimit/parser.py',561),
+  ('relational_expr_noin -> shift_expr','relational_expr_noin',1,'p_relational_expr_noin','/home/alienoid/dev/python/slimit/src/slimit/parser.py',569),
+  ('relational_expr_noin -> relational_expr_noin LT shift_expr','relational_expr_noin',3,'p_relational_expr_noin','/home/alienoid/dev/python/slimit/src/slimit/parser.py',570),
+  ('relational_expr_noin -> relational_expr_noin GT shift_expr','relational_expr_noin',3,'p_relational_expr_noin','/home/alienoid/dev/python/slimit/src/slimit/parser.py',571),
+  ('relational_expr_noin -> relational_expr_noin LE shift_expr','relational_expr_noin',3,'p_relational_expr_noin','/home/alienoid/dev/python/slimit/src/slimit/parser.py',572),
+  ('relational_expr_noin -> relational_expr_noin GE shift_expr','relational_expr_noin',3,'p_relational_expr_noin','/home/alienoid/dev/python/slimit/src/slimit/parser.py',573),
+  ('relational_expr_noin -> relational_expr_noin INSTANCEOF shift_expr','relational_expr_noin',3,'p_relational_expr_noin','/home/alienoid/dev/python/slimit/src/slimit/parser.py',574),
+  ('relational_expr_nobf -> shift_expr_nobf','relational_expr_nobf',1,'p_relational_expr_nobf','/home/alienoid/dev/python/slimit/src/slimit/parser.py',582),
+  ('relational_expr_nobf -> relational_expr_nobf LT shift_expr','relational_expr_nobf',3,'p_relational_expr_nobf','/home/alienoid/dev/python/slimit/src/slimit/parser.py',583),
+  ('relational_expr_nobf -> relational_expr_nobf GT shift_expr','relational_expr_nobf',3,'p_relational_expr_nobf','/home/alienoid/dev/python/slimit/src/slimit/parser.py',584),
+  ('relational_expr_nobf -> relational_expr_nobf LE shift_expr','relational_expr_nobf',3,'p_relational_expr_nobf','/home/alienoid/dev/python/slimit/src/slimit/parser.py',585),
+  ('relational_expr_nobf -> relational_expr_nobf GE shift_expr','relational_expr_nobf',3,'p_relational_expr_nobf','/home/alienoid/dev/python/slimit/src/slimit/parser.py',586),
+  ('relational_expr_nobf -> relational_expr_nobf INSTANCEOF shift_expr','relational_expr_nobf',3,'p_relational_expr_nobf','/home/alienoid/dev/python/slimit/src/slimit/parser.py',587),
+  ('relational_expr_nobf -> relational_expr_nobf IN shift_expr','relational_expr_nobf',3,'p_relational_expr_nobf','/home/alienoid/dev/python/slimit/src/slimit/parser.py',588),
+  ('equality_expr -> relational_expr','equality_expr',1,'p_equality_expr','/home/alienoid/dev/python/slimit/src/slimit/parser.py',597),
+  ('equality_expr -> equality_expr EQEQ relational_expr','equality_expr',3,'p_equality_expr','/home/alienoid/dev/python/slimit/src/slimit/parser.py',598),
+  ('equality_expr -> equality_expr NE relational_expr','equality_expr',3,'p_equality_expr','/home/alienoid/dev/python/slimit/src/slimit/parser.py',599),
+  ('equality_expr -> equality_expr STREQ relational_expr','equality_expr',3,'p_equality_expr','/home/alienoid/dev/python/slimit/src/slimit/parser.py',600),
+  ('equality_expr -> equality_expr STRNEQ relational_expr','equality_expr',3,'p_equality_expr','/home/alienoid/dev/python/slimit/src/slimit/parser.py',601),
+  ('equality_expr_noin -> relational_expr_noin','equality_expr_noin',1,'p_equality_expr_noin','/home/alienoid/dev/python/slimit/src/slimit/parser.py',609),
+  ('equality_expr_noin -> equality_expr_noin EQEQ relational_expr','equality_expr_noin',3,'p_equality_expr_noin','/home/alienoid/dev/python/slimit/src/slimit/parser.py',610),
+  ('equality_expr_noin -> equality_expr_noin NE relational_expr','equality_expr_noin',3,'p_equality_expr_noin','/home/alienoid/dev/python/slimit/src/slimit/parser.py',611),
+  ('equality_expr_noin -> equality_expr_noin STREQ relational_expr','equality_expr_noin',3,'p_equality_expr_noin','/home/alienoid/dev/python/slimit/src/slimit/parser.py',612),
+  ('equality_expr_noin -> equality_expr_noin STRNEQ relational_expr','equality_expr_noin',3,'p_equality_expr_noin','/home/alienoid/dev/python/slimit/src/slimit/parser.py',613),
+  ('equality_expr_nobf -> relational_expr_nobf','equality_expr_nobf',1,'p_equality_expr_nobf','/home/alienoid/dev/python/slimit/src/slimit/parser.py',621),
+  ('equality_expr_nobf -> equality_expr_nobf EQEQ relational_expr','equality_expr_nobf',3,'p_equality_expr_nobf','/home/alienoid/dev/python/slimit/src/slimit/parser.py',622),
+  ('equality_expr_nobf -> equality_expr_nobf NE relational_expr','equality_expr_nobf',3,'p_equality_expr_nobf','/home/alienoid/dev/python/slimit/src/slimit/parser.py',623),
+  ('equality_expr_nobf -> equality_expr_nobf STREQ relational_expr','equality_expr_nobf',3,'p_equality_expr_nobf','/home/alienoid/dev/python/slimit/src/slimit/parser.py',624),
+  ('equality_expr_nobf -> equality_expr_nobf STRNEQ relational_expr','equality_expr_nobf',3,'p_equality_expr_nobf','/home/alienoid/dev/python/slimit/src/slimit/parser.py',625),
+  ('bitwise_and_expr -> equality_expr','bitwise_and_expr',1,'p_bitwise_and_expr','/home/alienoid/dev/python/slimit/src/slimit/parser.py',634),
+  ('bitwise_and_expr -> bitwise_and_expr BAND equality_expr','bitwise_and_expr',3,'p_bitwise_and_expr','/home/alienoid/dev/python/slimit/src/slimit/parser.py',635),
+  ('bitwise_and_expr_noin -> equality_expr_noin','bitwise_and_expr_noin',1,'p_bitwise_and_expr_noin','/home/alienoid/dev/python/slimit/src/slimit/parser.py',643),
+  ('bitwise_and_expr_noin -> bitwise_and_expr_noin BAND equality_expr_noin','bitwise_and_expr_noin',3,'p_bitwise_and_expr_noin','/home/alienoid/dev/python/slimit/src/slimit/parser.py',644),
+  ('bitwise_and_expr_nobf -> equality_expr_nobf','bitwise_and_expr_nobf',1,'p_bitwise_and_expr_nobf','/home/alienoid/dev/python/slimit/src/slimit/parser.py',653),
+  ('bitwise_and_expr_nobf -> bitwise_and_expr_nobf BAND equality_expr_nobf','bitwise_and_expr_nobf',3,'p_bitwise_and_expr_nobf','/home/alienoid/dev/python/slimit/src/slimit/parser.py',654),
+  ('bitwise_xor_expr -> bitwise_and_expr','bitwise_xor_expr',1,'p_bitwise_xor_expr','/home/alienoid/dev/python/slimit/src/slimit/parser.py',663),
+  ('bitwise_xor_expr -> bitwise_xor_expr BXOR bitwise_and_expr','bitwise_xor_expr',3,'p_bitwise_xor_expr','/home/alienoid/dev/python/slimit/src/slimit/parser.py',664),
+  ('bitwise_xor_expr_noin -> bitwise_and_expr_noin','bitwise_xor_expr_noin',1,'p_bitwise_xor_expr_noin','/home/alienoid/dev/python/slimit/src/slimit/parser.py',673),
+  ('bitwise_xor_expr_noin -> bitwise_xor_expr_noin BXOR bitwise_and_expr_noin','bitwise_xor_expr_noin',3,'p_bitwise_xor_expr_noin','/home/alienoid/dev/python/slimit/src/slimit/parser.py',674),
+  ('bitwise_xor_expr_nobf -> bitwise_and_expr_nobf','bitwise_xor_expr_nobf',1,'p_bitwise_xor_expr_nobf','/home/alienoid/dev/python/slimit/src/slimit/parser.py',684),
+  ('bitwise_xor_expr_nobf -> bitwise_xor_expr_nobf BXOR bitwise_and_expr_nobf','bitwise_xor_expr_nobf',3,'p_bitwise_xor_expr_nobf','/home/alienoid/dev/python/slimit/src/slimit/parser.py',685),
+  ('bitwise_or_expr -> bitwise_xor_expr','bitwise_or_expr',1,'p_bitwise_or_expr','/home/alienoid/dev/python/slimit/src/slimit/parser.py',694),
+  ('bitwise_or_expr -> bitwise_or_expr BOR bitwise_xor_expr','bitwise_or_expr',3,'p_bitwise_or_expr','/home/alienoid/dev/python/slimit/src/slimit/parser.py',695),
+  ('bitwise_or_expr_noin -> bitwise_xor_expr_noin','bitwise_or_expr_noin',1,'p_bitwise_or_expr_noin','/home/alienoid/dev/python/slimit/src/slimit/parser.py',704),
+  ('bitwise_or_expr_noin -> bitwise_or_expr_noin BOR bitwise_xor_expr_noin','bitwise_or_expr_noin',3,'p_bitwise_or_expr_noin','/home/alienoid/dev/python/slimit/src/slimit/parser.py',705),
+  ('bitwise_or_expr_nobf -> bitwise_xor_expr_nobf','bitwise_or_expr_nobf',1,'p_bitwise_or_expr_nobf','/home/alienoid/dev/python/slimit/src/slimit/parser.py',715),
+  ('bitwise_or_expr_nobf -> bitwise_or_expr_nobf BOR bitwise_xor_expr_nobf','bitwise_or_expr_nobf',3,'p_bitwise_or_expr_nobf','/home/alienoid/dev/python/slimit/src/slimit/parser.py',716),
+  ('logical_and_expr -> bitwise_or_expr','logical_and_expr',1,'p_logical_and_expr','/home/alienoid/dev/python/slimit/src/slimit/parser.py',726),
+  ('logical_and_expr -> logical_and_expr AND bitwise_or_expr','logical_and_expr',3,'p_logical_and_expr','/home/alienoid/dev/python/slimit/src/slimit/parser.py',727),
+  ('logical_and_expr_noin -> bitwise_or_expr_noin','logical_and_expr_noin',1,'p_logical_and_expr_noin','/home/alienoid/dev/python/slimit/src/slimit/parser.py',736),
+  ('logical_and_expr_noin -> logical_and_expr_noin AND bitwise_or_expr_noin','logical_and_expr_noin',3,'p_logical_and_expr_noin','/home/alienoid/dev/python/slimit/src/slimit/parser.py',737),
+  ('logical_and_expr_nobf -> bitwise_or_expr_nobf','logical_and_expr_nobf',1,'p_logical_and_expr_nobf','/home/alienoid/dev/python/slimit/src/slimit/parser.py',746),
+  ('logical_and_expr_nobf -> logical_and_expr_nobf AND bitwise_or_expr_nobf','logical_and_expr_nobf',3,'p_logical_and_expr_nobf','/home/alienoid/dev/python/slimit/src/slimit/parser.py',747),
+  ('logical_or_expr -> logical_and_expr','logical_or_expr',1,'p_logical_or_expr','/home/alienoid/dev/python/slimit/src/slimit/parser.py',755),
+  ('logical_or_expr -> logical_or_expr OR logical_and_expr','logical_or_expr',3,'p_logical_or_expr','/home/alienoid/dev/python/slimit/src/slimit/parser.py',756),
+  ('logical_or_expr_noin -> logical_and_expr_noin','logical_or_expr_noin',1,'p_logical_or_expr_noin','/home/alienoid/dev/python/slimit/src/slimit/parser.py',764),
+  ('logical_or_expr_noin -> logical_or_expr_noin OR logical_and_expr_noin','logical_or_expr_noin',3,'p_logical_or_expr_noin','/home/alienoid/dev/python/slimit/src/slimit/parser.py',765),
+  ('logical_or_expr_nobf -> logical_and_expr_nobf','logical_or_expr_nobf',1,'p_logical_or_expr_nobf','/home/alienoid/dev/python/slimit/src/slimit/parser.py',773),
+  ('logical_or_expr_nobf -> logical_or_expr_nobf OR logical_and_expr_nobf','logical_or_expr_nobf',3,'p_logical_or_expr_nobf','/home/alienoid/dev/python/slimit/src/slimit/parser.py',774),
+  ('conditional_expr -> logical_or_expr','conditional_expr',1,'p_conditional_expr','/home/alienoid/dev/python/slimit/src/slimit/parser.py',784),
+  ('conditional_expr -> logical_or_expr CONDOP assignment_expr COLON assignment_expr','conditional_expr',5,'p_conditional_expr','/home/alienoid/dev/python/slimit/src/slimit/parser.py',785),
+  ('conditional_expr_noin -> logical_or_expr_noin','conditional_expr_noin',1,'p_conditional_expr_noin','/home/alienoid/dev/python/slimit/src/slimit/parser.py',796),
+  ('conditional_expr_noin -> logical_or_expr_noin CONDOP assignment_expr_noin COLON assignment_expr_noin','conditional_expr_noin',5,'p_conditional_expr_noin','/home/alienoid/dev/python/slimit/src/slimit/parser.py',797),
+  ('conditional_expr_nobf -> logical_or_expr_nobf','conditional_expr_nobf',1,'p_conditional_expr_nobf','/home/alienoid/dev/python/slimit/src/slimit/parser.py',809),
+  ('conditional_expr_nobf -> logical_or_expr_nobf CONDOP assignment_expr COLON assignment_expr','conditional_expr_nobf',5,'p_conditional_expr_nobf','/home/alienoid/dev/python/slimit/src/slimit/parser.py',810),
+  ('assignment_expr -> conditional_expr','assignment_expr',1,'p_assignment_expr','/home/alienoid/dev/python/slimit/src/slimit/parser.py',822),
+  ('assignment_expr -> left_hand_side_expr assignment_operator assignment_expr','assignment_expr',3,'p_assignment_expr','/home/alienoid/dev/python/slimit/src/slimit/parser.py',823),
+  ('assignment_expr_noin -> conditional_expr_noin','assignment_expr_noin',1,'p_assignment_expr_noin','/home/alienoid/dev/python/slimit/src/slimit/parser.py',833),
+  ('assignment_expr_noin -> left_hand_side_expr assignment_operator assignment_expr_noin','assignment_expr_noin',3,'p_assignment_expr_noin','/home/alienoid/dev/python/slimit/src/slimit/parser.py',834),
+  ('assignment_expr_nobf -> conditional_expr_nobf','assignment_expr_nobf',1,'p_assignment_expr_nobf','/home/alienoid/dev/python/slimit/src/slimit/parser.py',844),
+  ('assignment_expr_nobf -> left_hand_side_expr_nobf assignment_operator assignment_expr','assignment_expr_nobf',3,'p_assignment_expr_nobf','/home/alienoid/dev/python/slimit/src/slimit/parser.py',845),
+  ('assignment_operator -> EQ','assignment_operator',1,'p_assignment_operator','/home/alienoid/dev/python/slimit/src/slimit/parser.py',854),
+  ('assignment_operator -> MULTEQUAL','assignment_operator',1,'p_assignment_operator','/home/alienoid/dev/python/slimit/src/slimit/parser.py',855),
+  ('assignment_operator -> DIVEQUAL','assignment_operator',1,'p_assignment_operator','/home/alienoid/dev/python/slimit/src/slimit/parser.py',856),
+  ('assignment_operator -> MODEQUAL','assignment_operator',1,'p_assignment_operator','/home/alienoid/dev/python/slimit/src/slimit/parser.py',857),
+  ('assignment_operator -> PLUSEQUAL','assignment_operator',1,'p_assignment_operator','/home/alienoid/dev/python/slimit/src/slimit/parser.py',858),
+  ('assignment_operator -> MINUSEQUAL','assignment_operator',1,'p_assignment_operator','/home/alienoid/dev/python/slimit/src/slimit/parser.py',859),
+  ('assignment_operator -> LSHIFTEQUAL','assignment_operator',1,'p_assignment_operator','/home/alienoid/dev/python/slimit/src/slimit/parser.py',860),
+  ('assignment_operator -> RSHIFTEQUAL','assignment_operator',1,'p_assignment_operator','/home/alienoid/dev/python/slimit/src/slimit/parser.py',861),
+  ('assignment_operator -> URSHIFTEQUAL','assignment_operator',1,'p_assignment_operator','/home/alienoid/dev/python/slimit/src/slimit/parser.py',862),
+  ('assignment_operator -> ANDEQUAL','assignment_operator',1,'p_assignment_operator','/home/alienoid/dev/python/slimit/src/slimit/parser.py',863),
+  ('assignment_operator -> XOREQUAL','assignment_operator',1,'p_assignment_operator','/home/alienoid/dev/python/slimit/src/slimit/parser.py',864),
+  ('assignment_operator -> OREQUAL','assignment_operator',1,'p_assignment_operator','/home/alienoid/dev/python/slimit/src/slimit/parser.py',865),
+  ('expr -> assignment_expr','expr',1,'p_expr','/home/alienoid/dev/python/slimit/src/slimit/parser.py',871),
+  ('expr -> expr COMMA assignment_expr','expr',3,'p_expr','/home/alienoid/dev/python/slimit/src/slimit/parser.py',872),
+  ('expr_noin -> assignment_expr_noin','expr_noin',1,'p_expr_noin','/home/alienoid/dev/python/slimit/src/slimit/parser.py',880),
+  ('expr_noin -> expr_noin COMMA assignment_expr_noin','expr_noin',3,'p_expr_noin','/home/alienoid/dev/python/slimit/src/slimit/parser.py',881),
+  ('expr_nobf -> assignment_expr_nobf','expr_nobf',1,'p_expr_nobf','/home/alienoid/dev/python/slimit/src/slimit/parser.py',889),
+  ('expr_nobf -> expr_nobf COMMA assignment_expr','expr_nobf',3,'p_expr_nobf','/home/alienoid/dev/python/slimit/src/slimit/parser.py',890),
+  ('variable_statement -> VAR variable_declaration_list SEMI','variable_statement',3,'p_variable_statement','/home/alienoid/dev/python/slimit/src/slimit/parser.py',899),
+  ('variable_statement -> VAR variable_declaration_list auto_semi','variable_statement',3,'p_variable_statement','/home/alienoid/dev/python/slimit/src/slimit/parser.py',900),
+  ('variable_declaration_list -> variable_declaration','variable_declaration_list',1,'p_variable_declaration_list','/home/alienoid/dev/python/slimit/src/slimit/parser.py',906),
+  ('variable_declaration_list -> variable_declaration_list COMMA variable_declaration','variable_declaration_list',3,'p_variable_declaration_list','/home/alienoid/dev/python/slimit/src/slimit/parser.py',907),
+  ('variable_declaration_list_noin -> variable_declaration_noin','variable_declaration_list_noin',1,'p_variable_declaration_list_noin','/home/alienoid/dev/python/slimit/src/slimit/parser.py',918),
+  ('variable_declaration_list_noin -> variable_declaration_list_noin COMMA variable_declaration_noin','variable_declaration_list_noin',3,'p_variable_declaration_list_noin','/home/alienoid/dev/python/slimit/src/slimit/parser.py',919),
+  ('variable_declaration -> identifier','variable_declaration',1,'p_variable_declaration','/home/alienoid/dev/python/slimit/src/slimit/parser.py',929),
+  ('variable_declaration -> identifier initializer','variable_declaration',2,'p_variable_declaration','/home/alienoid/dev/python/slimit/src/slimit/parser.py',930),
+  ('variable_declaration_noin -> identifier','variable_declaration_noin',1,'p_variable_declaration_noin','/home/alienoid/dev/python/slimit/src/slimit/parser.py',938),
+  ('variable_declaration_noin -> identifier initializer_noin','variable_declaration_noin',2,'p_variable_declaration_noin','/home/alienoid/dev/python/slimit/src/slimit/parser.py',939),
+  ('initializer -> EQ assignment_expr','initializer',2,'p_initializer','/home/alienoid/dev/python/slimit/src/slimit/parser.py',947),
+  ('initializer_noin -> EQ assignment_expr_noin','initializer_noin',2,'p_initializer_noin','/home/alienoid/dev/python/slimit/src/slimit/parser.py',951),
+  ('empty_statement -> SEMI','empty_statement',1,'p_empty_statement','/home/alienoid/dev/python/slimit/src/slimit/parser.py',956),
+  ('expr_statement -> expr_nobf SEMI','expr_statement',2,'p_expr_statement','/home/alienoid/dev/python/slimit/src/slimit/parser.py',961),
+  ('expr_statement -> expr_nobf auto_semi','expr_statement',2,'p_expr_statement','/home/alienoid/dev/python/slimit/src/slimit/parser.py',962),
+  ('if_statement -> IF LPAREN expr RPAREN statement','if_statement',5,'p_if_statement_1','/home/alienoid/dev/python/slimit/src/slimit/parser.py',968),
+  ('if_statement -> IF LPAREN expr RPAREN statement ELSE statement','if_statement',7,'p_if_statement_2','/home/alienoid/dev/python/slimit/src/slimit/parser.py',972),
+  ('iteration_statement -> DO statement WHILE LPAREN expr RPAREN SEMI','iteration_statement',7,'p_iteration_statement_1','/home/alienoid/dev/python/slimit/src/slimit/parser.py',978),
+  ('iteration_statement -> DO statement WHILE LPAREN expr RPAREN auto_semi','iteration_statement',7,'p_iteration_statement_1','/home/alienoid/dev/python/slimit/src/slimit/parser.py',979),
+  ('iteration_statement -> WHILE LPAREN expr RPAREN statement','iteration_statement',5,'p_iteration_statement_2','/home/alienoid/dev/python/slimit/src/slimit/parser.py',985),
+  ('iteration_statement -> FOR LPAREN expr_noin_opt SEMI expr_opt SEMI expr_opt RPAREN statement','iteration_statement',9,'p_iteration_statement_3','/home/alienoid/dev/python/slimit/src/slimit/parser.py',990),
+  ('iteration_statement -> FOR LPAREN VAR variable_declaration_list_noin SEMI expr_opt SEMI expr_opt RPAREN statement','iteration_statement',10,'p_iteration_statement_3','/home/alienoid/dev/python/slimit/src/slimit/parser.py',991),
+  ('iteration_statement -> FOR LPAREN left_hand_side_expr IN expr RPAREN statement','iteration_statement',7,'p_iteration_statement_4','/home/alienoid/dev/python/slimit/src/slimit/parser.py',1004),
+  ('iteration_statement -> FOR LPAREN VAR identifier IN expr RPAREN statement','iteration_statement',8,'p_iteration_statement_5','/home/alienoid/dev/python/slimit/src/slimit/parser.py',1011),
+  ('iteration_statement -> FOR LPAREN VAR identifier initializer_noin IN expr RPAREN statement','iteration_statement',9,'p_iteration_statement_6','/home/alienoid/dev/python/slimit/src/slimit/parser.py',1018),
+  ('expr_opt -> empty','expr_opt',1,'p_expr_opt','/home/alienoid/dev/python/slimit/src/slimit/parser.py',1025),
+  ('expr_opt -> expr','expr_opt',1,'p_expr_opt','/home/alienoid/dev/python/slimit/src/slimit/parser.py',1026),
+  ('expr_noin_opt -> empty','expr_noin_opt',1,'p_expr_noin_opt','/home/alienoid/dev/python/slimit/src/slimit/parser.py',1031),
+  ('expr_noin_opt -> expr_noin','expr_noin_opt',1,'p_expr_noin_opt','/home/alienoid/dev/python/slimit/src/slimit/parser.py',1032),
+  ('continue_statement -> CONTINUE SEMI','continue_statement',2,'p_continue_statement_1','/home/alienoid/dev/python/slimit/src/slimit/parser.py',1038),
+  ('continue_statement -> CONTINUE auto_semi','continue_statement',2,'p_continue_statement_1','/home/alienoid/dev/python/slimit/src/slimit/parser.py',1039),
+  ('continue_statement -> CONTINUE identifier SEMI','continue_statement',3,'p_continue_statement_2','/home/alienoid/dev/python/slimit/src/slimit/parser.py',1044),
+  ('continue_statement -> CONTINUE identifier auto_semi','continue_statement',3,'p_continue_statement_2','/home/alienoid/dev/python/slimit/src/slimit/parser.py',1045),
+  ('break_statement -> BREAK SEMI','break_statement',2,'p_break_statement_1','/home/alienoid/dev/python/slimit/src/slimit/parser.py',1051),
+  ('break_statement -> BREAK auto_semi','break_statement',2,'p_break_statement_1','/home/alienoid/dev/python/slimit/src/slimit/parser.py',1052),
+  ('break_statement -> BREAK identifier SEMI','break_statement',3,'p_break_statement_2','/home/alienoid/dev/python/slimit/src/slimit/parser.py',1057),
+  ('break_statement -> BREAK identifier auto_semi','break_statement',3,'p_break_statement_2','/home/alienoid/dev/python/slimit/src/slimit/parser.py',1058),
+  ('return_statement -> RETURN SEMI','return_statement',2,'p_return_statement_1','/home/alienoid/dev/python/slimit/src/slimit/parser.py',1065),
+  ('return_statement -> RETURN auto_semi','return_statement',2,'p_return_statement_1','/home/alienoid/dev/python/slimit/src/slimit/parser.py',1066),
+  ('return_statement -> RETURN expr SEMI','return_statement',3,'p_return_statement_2','/home/alienoid/dev/python/slimit/src/slimit/parser.py',1071),
+  ('return_statement -> RETURN expr auto_semi','return_statement',3,'p_return_statement_2','/home/alienoid/dev/python/slimit/src/slimit/parser.py',1072),
+  ('with_statement -> WITH LPAREN expr RPAREN statement','with_statement',5,'p_with_statement','/home/alienoid/dev/python/slimit/src/slimit/parser.py',1078),
+  ('switch_statement -> SWITCH LPAREN expr RPAREN case_block','switch_statement',5,'p_switch_statement','/home/alienoid/dev/python/slimit/src/slimit/parser.py',1083),
+  ('case_block -> LBRACE case_clauses_opt RBRACE','case_block',3,'p_case_block','/home/alienoid/dev/python/slimit/src/slimit/parser.py',1097),
+  ('case_block -> LBRACE case_clauses_opt default_clause case_clauses_opt RBRACE','case_block',5,'p_case_block','/home/alienoid/dev/python/slimit/src/slimit/parser.py',1098),
+  ('case_clauses_opt -> empty','case_clauses_opt',1,'p_case_clauses_opt','/home/alienoid/dev/python/slimit/src/slimit/parser.py',1104),
+  ('case_clauses_opt -> case_clauses','case_clauses_opt',1,'p_case_clauses_opt','/home/alienoid/dev/python/slimit/src/slimit/parser.py',1105),
+  ('case_clauses -> case_clause','case_clauses',1,'p_case_clauses','/home/alienoid/dev/python/slimit/src/slimit/parser.py',1110),
+  ('case_clauses -> case_clauses case_clause','case_clauses',2,'p_case_clauses','/home/alienoid/dev/python/slimit/src/slimit/parser.py',1111),
+  ('case_clause -> CASE expr COLON source_elements','case_clause',4,'p_case_clause','/home/alienoid/dev/python/slimit/src/slimit/parser.py',1120),
+  ('default_clause -> DEFAULT COLON source_elements','default_clause',3,'p_default_clause','/home/alienoid/dev/python/slimit/src/slimit/parser.py',1124),
+  ('labelled_statement -> identifier COLON statement','labelled_statement',3,'p_labelled_statement','/home/alienoid/dev/python/slimit/src/slimit/parser.py',1129),
+  ('throw_statement -> THROW expr SEMI','throw_statement',3,'p_throw_statement','/home/alienoid/dev/python/slimit/src/slimit/parser.py',1134),
+  ('throw_statement -> THROW expr auto_semi','throw_statement',3,'p_throw_statement','/home/alienoid/dev/python/slimit/src/slimit/parser.py',1135),
+  ('try_statement -> TRY block catch','try_statement',3,'p_try_statement_1','/home/alienoid/dev/python/slimit/src/slimit/parser.py',1141),
+  ('try_statement -> TRY block finally','try_statement',3,'p_try_statement_2','/home/alienoid/dev/python/slimit/src/slimit/parser.py',1145),
+  ('try_statement -> TRY block catch finally','try_statement',4,'p_try_statement_3','/home/alienoid/dev/python/slimit/src/slimit/parser.py',1149),
+  ('catch -> CATCH LPAREN identifier RPAREN block','catch',5,'p_catch','/home/alienoid/dev/python/slimit/src/slimit/parser.py',1153),
+  ('finally -> FINALLY block','finally',2,'p_finally','/home/alienoid/dev/python/slimit/src/slimit/parser.py',1157),
+  ('debugger_statement -> DEBUGGER SEMI','debugger_statement',2,'p_debugger_statement','/home/alienoid/dev/python/slimit/src/slimit/parser.py',1162),
+  ('debugger_statement -> DEBUGGER auto_semi','debugger_statement',2,'p_debugger_statement','/home/alienoid/dev/python/slimit/src/slimit/parser.py',1163),
+  ('function_declaration -> FUNCTION identifier LPAREN RPAREN LBRACE function_body RBRACE','function_declaration',7,'p_function_declaration','/home/alienoid/dev/python/slimit/src/slimit/parser.py',1170),
+  ('function_declaration -> FUNCTION identifier LPAREN formal_parameter_list RPAREN LBRACE function_body RBRACE','function_declaration',8,'p_function_declaration','/home/alienoid/dev/python/slimit/src/slimit/parser.py',1171),
+  ('function_expr -> FUNCTION LPAREN RPAREN LBRACE function_body RBRACE','function_expr',6,'p_function_expr_1','/home/alienoid/dev/python/slimit/src/slimit/parser.py',1184),
+  ('function_expr -> FUNCTION LPAREN formal_parameter_list RPAREN LBRACE function_body RBRACE','function_expr',7,'p_function_expr_1','/home/alienoid/dev/python/slimit/src/slimit/parser.py',1185),
+  ('function_expr -> FUNCTION identifier LPAREN RPAREN LBRACE function_body RBRACE','function_expr',7,'p_function_expr_2','/home/alienoid/dev/python/slimit/src/slimit/parser.py',1198),
+  ('function_expr -> FUNCTION identifier LPAREN formal_parameter_list RPAREN LBRACE function_body RBRACE','function_expr',8,'p_function_expr_2','/home/alienoid/dev/python/slimit/src/slimit/parser.py',1199),
+  ('formal_parameter_list -> identifier','formal_parameter_list',1,'p_formal_parameter_list','/home/alienoid/dev/python/slimit/src/slimit/parser.py',1212),
+  ('formal_parameter_list -> formal_parameter_list COMMA identifier','formal_parameter_list',3,'p_formal_parameter_list','/home/alienoid/dev/python/slimit/src/slimit/parser.py',1213),
+  ('function_body -> source_elements','function_body',1,'p_function_body','/home/alienoid/dev/python/slimit/src/slimit/parser.py',1222),
+]